code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
module Galua.Spec.Parser.Monad
( Parser
, onMissingClose
, lexerM
, runParser
, parseError
, ParseError
) where
import Control.Monad
import qualified Data.Text as Text
import Control.Exception
import Galua.Spec.Parser.Lexer
import Debug.Trace
newtype Parser a = Parser (RW -> Either ParseError (a,RW))
newtype ParseError = ParseError String
deriving Show
instance Exception ParseError
data RW = RW { next :: [Lexeme Token]
, lastPos :: SourcePos
, tokens :: [Lexeme Token]
, layoutContext :: [Int]
} deriving Show
instance Functor Parser where
fmap = liftM
instance Applicative Parser where
pure a = Parser (\s -> Right (a,s))
(<*>) = ap
instance Monad Parser where
Parser m >>= f = Parser (\s -> case m s of
Left err -> Left err
Right (a,s1) ->
let Parser m1 = f a
in m1 s1)
runParser :: Parser a -> [Lexeme Token] -> Either ParseError a
runParser (Parser m) ts = case m rw of
Left err -> Left err
Right (a,_) -> Right a
where
rw = RW { next = []
, lastPos = SourcePos { sourceLine = 0
, sourceColumn = 0
, sourceIndex = -1
, sourceFile = Text.pack "" }
, tokens = ts
, layoutContext = []
}
parseError :: Lexeme Token -> Parser a
parseError p = Parser (\_ ->
let q = sourceFrom (range p)
in Left $ ParseError $ "Parser error on line " ++ ppPos q)
ppPos :: SourcePos -> String
ppPos p = show (sourceLine p) ++ ":" ++ show (sourceColumn p)
onMissingClose :: Parser SourceRange
onMissingClose = Parser $ \rw ->
case layoutContext rw of
m : ms
| m /= 0 -> Right ( range (lastPos rw)
, rw { next = next rw
, layoutContext = ms
, tokens = tokens rw })
_ -> Left $ ParseError $ "Unterminated block at " ++ ppPos (lastPos rw)
lexerM :: (Lexeme Token -> Parser a) -> Parser a
lexerM k = k =<< nextToken
lx :: Char -> Token -> SourcePos -> Lexeme Token
lx c tok p = Lexeme { lexemeText = Text.singleton c
, lexemeToken = tok
, lexemeRange = range p -- virtual do not take space
}
semi,open,close :: SourcePos -> Lexeme Token
semi = lx ';' KW_semi
open = lx '{' KW_open_brace
close = lx '}' KW_close_brace
nextToken :: Parser (Lexeme Token)
nextToken = Parser (\rw -> case next rw of
[] -> pick (lastPos rw)
(tokens rw)
(layoutContext rw)
p : ps -> Right (p, rw { next = ps
, lastPos = sourceTo (range p) }))
where
ok a ts ms = Right (a, RW { next = []
, tokens = ts
, layoutContext = ms
, lastPos = sourceTo (range a)
})
-- This is the same as in Haskell
pick _ toks@(Lexeme { lexemeToken = Indent n, lexemeRange = p } : ts)
ctxt@(m : ms)
| m == n = ok (semi (sourceTo p)) ts ctxt
| n < m = ok (close (sourceTo p)) toks ms
pick _ (Lexeme { lexemeToken = Indent _, lexemeRange = p } : ts) ms =
pick (sourceTo p) ts ms
pick _ (Lexeme { lexemeToken = Open n, lexemeRange = p } : ts) (m : ms)
| n > m = ok (open (sourceTo p)) ts (n : m : ms)
pick _ (Lexeme { lexemeToken = Open n, lexemeRange = p } : ts) [] =
ok (open (sourceTo p)) ts [n]
pick _ (Lexeme { lexemeToken = Open n, lexemeRange = p } : ts) ms =
let loc = sourceTo p
in Right ( open loc
, RW { next = [close loc]
, lastPos = loc
, tokens = Lexeme
{ lexemeText = Text.pack ""
, lexemeRange = range loc
, lexemeToken = Indent n
}
: ts
, layoutContext = ms })
pick _ (t@Lexeme { lexemeToken = KW_close_brace } : ts) (0 : ms) = ok t ts ms
pick _ (Lexeme { lexemeToken = KW_close_brace, lexemeRange = r } : _) _ =
Left $ ParseError $ "Unexpected end of block at " ++ show r
pick _ (t@Lexeme { lexemeToken = KW_open_brace } : ts) ms = ok t ts (0 : ms)
-- error case
pick _ (t : ts) ms = ok t ts ms
pick p [] [] = ok Lexeme { lexemeToken = EOF
, lexemeText = Text.pack ""
, lexemeRange = range p
} [] []
pick p [] (m : ms)
| m > 0 = ok (close p) [] ms
| otherwise = Left $ ParseError "Unterminated block at the end of file."
| GaloisInc/galua | galua-spec/src/Galua/Spec/Parser/Monad.hs | mit | 5,150 | 0 | 17 | 2,248 | 1,716 | 906 | 810 | 112 | 12 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
module Odin.Scripts.ArrowControl (arrowControl) where
import Gelatin.SDL2
import SDL
import Data.Monoid ((<>))
import Data.Maybe (mapMaybe)
import Control.Lens
import Odin.Core
data Direction = North | East | South | West deriving (Show, Eq, Bounded)
codeToDirection :: Scancode -> Maybe Direction
codeToDirection ScancodeUp = Just North
codeToDirection ScancodeRight = Just East
codeToDirection ScancodeDown = Just South
codeToDirection ScancodeLeft = Just West
codeToDirection _ = Nothing
directionToCode :: Direction -> Scancode
directionToCode North = ScancodeUp
directionToCode East = ScancodeRight
directionToCode South = ScancodeDown
directionToCode West = ScancodeLeft
directionToV2 :: Direction -> V2 Float
directionToV2 North = V2 0 (-1)
directionToV2 East = V2 1 0
directionToV2 South = V2 0 1
directionToV2 West = V2 (-1) 0
arrowCodes :: [Scancode]
arrowCodes = [ScancodeUp, ScancodeLeft, ScancodeDown, ScancodeRight]
onTrue :: (a -> Bool) -> a -> Maybe a
onTrue f x = if f x then Just x else Nothing
arrowControl :: (Events s m
,Scripts s m
,Tfrms s m
,Time s m
) => Entity -> m Script
arrowControl actor = do
-- First wait until the user presses an arrow key
  -- For that we'll need some scaffolding so we can test and extract the
-- arrow direction
let isArrowPressed (KeyboardEvent (KeyboardEventData _ Pressed False Keysym{..})) =
msum $ map (onTrue (== keysymScancode)) arrowCodes
isArrowPressed _ = Nothing
evs <- use events
let codes = mapMaybe isArrowPressed evs
unless (null codes) $ do
let dirs = mapMaybe codeToDirection codes
-- If we got a direction then apply the arrow move script to each
-- direction.
unless (null dirs) $ do
ss <- mapM (arrowControlMove actor) dirs
scripts.at actor %= Just . (maybe ss (++ ss))
nextScript $ arrowControl actor
arrowControlMove :: (Events s m
,Tfrms s m
,Time s m
) => Entity -> Direction -> m Script
arrowControlMove actor dir = do
  -- Update the transform of the actor
dt <- readTimeDeltaSeconds
let t = PictureTransform (mat4Translate $ promoteV2 $ dt * 100 *^ directionToV2 dir) 1 1 Nothing
tfrms.at actor %= Just . (maybe t (t <>))
-- Find if the arrow key was released
let isArrowReleased (KeyboardEvent (KeyboardEventData _ Released False Keysym{..})) =
keysymScancode == directionToCode dir
isArrowReleased _ = False
released <- any isArrowReleased <$> (use events)
-- Restart the process all over again, from the top
if released
then endScript
else nextScript $ arrowControlMove actor dir
| schell/odin | src/Odin/Scripts/ArrowControl.hs | mit | 2,768 | 0 | 16 | 639 | 807 | 409 | 398 | -1 | -1 |
-- Copyright (c) Microsoft. All rights reserved.
-- Licensed under the MIT license. See LICENSE file in the project root for full license information.
{-# LANGUAGE QuasiQuotes, OverloadedStrings, RecordWildCards #-}
module Bond.Template.Cpp.Enum_h (enum_h) where
import Data.Monoid
import Prelude
import Data.Text.Lazy (Text)
import Text.Shakespeare.Text
import Bond.Schema.Types
import Bond.Template.TypeMapping
import Bond.Template.Util
import qualified Bond.Template.Cpp.Util as CPP
-- generate the *_enum.h file from parsed .bond file
enum_h :: MappingContext -> String -> [Import] -> [Declaration] -> (String, Text)
enum_h cpp _file _imports declarations = ("_enum.h", [lt|
#pragma once
#{CPP.openNamespace cpp}
namespace _bond_enumerators
{
#{newlineSep 1 typeDeclaration declarations}
} // namespace _bond_enumerators
#{newlineSep 0 usingDeclaration declarations}
#{CPP.closeNamespace cpp}
|])
where
-- enum definition
typeDeclaration e@Enum {..} = [lt|
namespace #{declName}
{
#{CPP.enumDefinition e}
} // namespace #{declName}
|]
typeDeclaration _ = mempty
usingDeclaration Enum {..} = [lt|using namespace _bond_enumerators::#{declName};|]
usingDeclaration _ = mempty
| innovimax/bond | compiler/Bond/Template/Cpp/Enum_h.hs | mit | 1,231 | 0 | 10 | 192 | 185 | 117 | 68 | 16 | 3 |
module Main where
import Evaluator
import Parser
import System.Environment (getArgs)
parse :: String -> BrainfuckSource
parse x = case parseBrainfuck x of
(Left s) -> error s
(Right bs) -> bs
main :: IO ()
main = do
argv <- getArgs
case argv of
(file:_) -> readFile file >>= runBrainfuck . parse
_ -> error "Bad input, it should be: brainfokt <filename.bf>"
| Rydgel/Brainfokt | src/Main.hs | mit | 447 | 0 | 12 | 151 | 133 | 68 | 65 | 14 | 2 |
{-# OPTIONS_GHC -Wall #-}
import Data.List
-- | @xor@ performs XOR logic operation on a list of booleans
xor :: [Bool] -> Bool
xor = foldr (\ x y -> not (x==y)) False
-- | @map'@ is an alternate implementation of map using foldr
map' :: (a->b) -> [a] -> [b]
map' f = foldr (\ x y -> (f x) : y) []
-- | @sieveSundaram@ produces a list of odd primes up to 2n + 1
-- Refer https://en.wikipedia.org/wiki/Sieve_of_Sundaram
sieveSundaram :: Integer -> [Integer]
sieveSundaram = (map multiplier).filterer
-- | @filterer@ is a helper for sieveSundaram
filterer :: Integer -> [Integer]
filterer n = [1..n] \\
[(i+j+2*i*j) | i<-[1..n], j<-[1..n], i<=j, j<=n, i+j+2*i*j <= n]
-- | @multiplier@ is a helper for sieveSundaram
multiplier :: Integer -> Integer
multiplier x = 2*x + 1
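-- A small usage sketch (not from the original assignment); @firstOddPrimes@
-- is a hypothetical binding added only to show the sieve in action:
firstOddPrimes :: [Integer]
firstOddPrimes = sieveSundaram 10   -- == [3,5,7,11,13,17,19], the odd primes up to 21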
| vaibhav276/haskell_cs194_assignments | higher_order/MoreFolds.hs | mit | 790 | 0 | 12 | 157 | 283 | 155 | 128 | 13 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
module Eurocode5.Fasteners.Bulldogs where
import qualified Eurocode5.Wood.WoodCommon as WC
data BDCat = C1 | C2 | C3 | C4 | C5 | C6 | C7 | C8 | C9 | C10 | C11 deriving (Eq,Show)
-- ^ Single-sided (ensidig): C2,C4,C7,C9,C11
-- ^ Double-sided (tosidig): C1,C1,C3,C5,C6,C8,C10
--
data Bulldog =
  -- | Types C1,C2,C6,C7,C10,C11
Bulldog {
bcat :: BDCat, -- ^ Bulldog type
    dc :: Double, -- ^ Bulldog (connector) diameter [mm]
    d :: Double, -- ^ Bolt diameter [mm]
    he :: Double -- ^ Penetration depth of the teeth [mm]
    -- t1 :: WC.Wood, -- ^ (Thickness of) outer timber [mm]
    -- t2 :: Maybe WC.Wood -- ^ (Thickness of) inner/middle timber [mm]
} deriving Show
k1 :: Bulldog
   -> WC.Wood -- ^ (Thickness of) outer timber [mm]
   -> Maybe WC.Wood -- ^ (Thickness of) inner/middle timber [mm]
-> Double
k1 Bulldog { he } t1 t2 =
case t2 of Just t2' -> minimum [1.0, (WC.t t1)/(3*he), (WC.t t2')/(5*he)]
Nothing -> minimum [1.0, (WC.t t1)/(3*he)]
a3t :: Bulldog -> Double
a3t Bulldog { bcat,dc,d } = maximum [80, dcf*dc, 7*d ]
where dcf | bcat == C10 = 1.5
| bcat == C11 = 1.5
| otherwise = 1.1
k2 :: Bulldog -> Double
k2 b = minimum [1.0, (a3t b)/(dcf*(dc b))]
where bt' = bcat b
dcf | bt' == C10 = 2.0
| bt' == C11 = 2.0
| otherwise = 1.5
k3 :: WC.Wood -> Double
k3 w = min 1.5 ((WC.rho w)/350.0)
checkT :: Bulldog
-> WC.Wood
-> Bool
checkT b w = (WC.t w) > 2.25 * (he b)
fvrk :: Bulldog
     -> WC.Wood -- ^ Outer timber
     -> Maybe WC.Wood -- ^ Inner/middle timber
-> Double -- ^ Capacity bulldog [kN]
fvrk b w1 w2 = fk*k1'*(k2 b)*k3'*(dc'**1.5)/1000.0 -- fk*(k11 b w)*(k2 b)*(k3 w)*(dc'**1.5)/1000.0
where bt' = bcat b
dc' = dc b
fk | bt' == C10 = 25
| bt' == C11 = 25
| otherwise = 18
k1' = k1 b w1 w2
k3' = case w2 of Just w2' -> min (k3 w1) (k3 w2')
Nothing -> k3 w1
{-
fvrk21 :: Bulldog
-> WC.Wood
-> Double -- ^ Capacity bulldog [kN]
fvrk21 b w = fk*(k11 b w)*(k2 b)*(k3 w)*(dc'**1.5)/1000.0
where bt' = bcat b
dc' = dc b
fk | bt' == C10 = 25
| bt' == C11 = 25
| otherwise = 18
fvrk22 :: Bulldog
-> WC.Wood
-> WC.Wood
-> Double -- ^ Capacity bulldog [kN]
fvrk22 b w1 w2 = fk*(k12 b w1 w2)*(k2 b)*(k3 w)*(dc'**1.5)/1000.0
where bt' = bcat b
dc' = dc b
fk | bt' == C10 = 25
| bt' == C11 = 25
| otherwise = 18
fvrk :: Bulldog
-> WC.Wood
-> Maybe WC.Wood
-> Double -- ^ Capacity bulldog [kN]
fvrk b t1 t2 =
case t2 of Just t2' -> min (fvrk2 b t1) (fvrk2 b t2')
Nothing -> fvrk2 b t1
-}
| baalbek/eurocode5 | src/Eurocode5/Fasteners/Bulldogs.hs | gpl-2.0 | 3,017 | 0 | 13 | 1,114 | 708 | 381 | 327 | 49 | 2 |
module Format where
import Format.DataFormat
| marco-vassena/svc | src/Format.hs | gpl-3.0 | 46 | 0 | 4 | 6 | 9 | 6 | 3 | 2 | 0 |
{-# LANGUAGE FlexibleContexts #-}
module Parsing (parse, validCharacters, balance) where
import Text.Parsec ((<|>))
import qualified Text.Parsec as Parsec
-- import Text.Parsec.Error (newErrorMessage, Message(..))
-- import Text.Parsec.Pos (newPos)
import qualified Data.Set as Set
import qualified Data.Map as Map
import Obj
import Types
import Util
import Control.Monad.Error.Class (throwError)
import Debug.Trace
newtype ParseState = ParseState { parseInfo :: Info }
createInfo :: Parsec.Parsec String ParseState (Maybe Info)
createInfo = do i <- fmap parseInfo Parsec.getState
return (Just i)
firstDigit :: Parsec.Parsec String ParseState Char
firstDigit = Parsec.choice [Parsec.digit, Parsec.char '-']
maybeSigned :: Parsec.Parsec String ParseState (Maybe Info, String)
maybeSigned = do i <- createInfo
num0 <- firstDigit
num1 <- Parsec.many Parsec.digit
let num = num0 : num1
incColumn (length num)
return (i, num)
double :: Parsec.Parsec String ParseState XObj
double = do (i, num) <- maybeSigned
_ <- Parsec.char '.'
incColumn 1
decimals <- Parsec.many1 Parsec.digit
incColumn (length decimals)
if num == "-"
then return (XObj (Sym (SymPath [] "-") Symbol) i Nothing)
else return (XObj (Num DoubleTy (read (num ++ "." ++ decimals))) i Nothing)
float :: Parsec.Parsec String ParseState XObj
float = do (i, num) <- maybeSigned
_ <- Parsec.char '.'
incColumn 1
decimals <- Parsec.many1 Parsec.digit
incColumn (length decimals)
_ <- Parsec.char 'f'
incColumn 1
if num == "-"
then return (XObj (Sym (SymPath [] "-") Symbol) i Nothing)
else return (XObj (Num FloatTy (read (num ++ "." ++ decimals))) i Nothing)
floatNoPeriod :: Parsec.Parsec String ParseState XObj
floatNoPeriod =
do (i, num) <- maybeSigned
_ <- Parsec.char 'f'
incColumn 1
if num == "-"
then return (XObj (Sym (SymPath [] "-") Symbol) i Nothing)
else return (XObj (Num FloatTy (read num)) i Nothing)
integer :: Parsec.Parsec String ParseState XObj
integer = do (i, num) <- maybeSigned
if num == "-"
then return (XObj (Sym (SymPath [] "-") Symbol) i Nothing)
else return (XObj (Num IntTy (read num)) i Nothing)
long :: Parsec.Parsec String ParseState XObj
long = do (i, num) <- maybeSigned
_ <- Parsec.char 'l'
incColumn 1
if num == "-"
then return (XObj (Sym (SymPath [] "-") Symbol) i Nothing)
else return (XObj (Num LongTy (read num)) i Nothing)
number :: Parsec.Parsec String ParseState XObj
number = Parsec.try float <|>
Parsec.try floatNoPeriod <|>
Parsec.try double <|>
Parsec.try long <|>
Parsec.try integer
string :: Parsec.Parsec String ParseState XObj
string = do i <- createInfo
_ <- Parsec.char '"'
strL <- Parsec.many (Parsec.try escaped <|> simple)
let str = concat strL
_ <- Parsec.char '"'
incColumn (length str + 2)
return (XObj (Str str) i Nothing)
where simple = do c <- Parsec.noneOf ['"']
return [c]
parseInternalPattern :: Parsec.Parsec String ParseState String
parseInternalPattern = do maybeAnchor <- Parsec.optionMaybe (Parsec.char '^')
str <- Parsec.many (Parsec.try patternEscaped <|>
Parsec.try bracketClass <|>
Parsec.try capture <|>
simple)
maybeEnd <- Parsec.optionMaybe (Parsec.char '$')
return $ unwrapMaybe maybeAnchor ++ concat str ++
unwrapMaybe maybeEnd
where unwrapMaybe (Just c) = [c]
unwrapMaybe (Nothing) = []
simple :: Parsec.Parsec String ParseState String
simple = do char <- Parsec.noneOf "^$()[]\\\""
return [char]
patternEscaped :: Parsec.Parsec String ParseState String
patternEscaped = do
_ <- Parsec.char '\\'
c <- Parsec.oneOf ['1', '2', '3', '4', '5', '6', '7', '8', '9',
'a', 'c', 'd', 'g', 'l', 'p', 's', 'u', 'w',
'x', 'n', 't', 'b', 'f', '[', ']', '\\', '$',
'(', ')', '^', '"']
case c of
'b' -> do c1 <- Parsec.noneOf ['"']
c2 <- Parsec.noneOf ['"']
return ['\\', c, c1, c2]
'f' -> do str <- bracketClass
return $ '\\' : c : str
_ -> return ['\\', c]
capture :: Parsec.Parsec String ParseState String
capture = do
opening <- Parsec.char '('
str <- Parsec.many (Parsec.try patternEscaped <|>
Parsec.try bracketClass <|>
simple)
closing <- Parsec.char ')'
return $ "(" ++ concat str ++ ")"
range :: Parsec.Parsec String ParseState String
range = do
begin <- Parsec.alphaNum
_ <- Parsec.char '-'
end <- Parsec.alphaNum
return [begin, '-', end]
bracketClass :: Parsec.Parsec String ParseState String
bracketClass = do
opening <- Parsec.char '['
maybeAnchor <- Parsec.optionMaybe (Parsec.char '^')
str <- Parsec.many (Parsec.try range <|>
Parsec.try patternEscaped <|>
Parsec.many1 (Parsec.noneOf "-^$()[]\\\""))
closing <- Parsec.char ']'
return $ "[" ++ unwrapMaybe maybeAnchor ++ concat str ++ "]"
pattern :: Parsec.Parsec String ParseState XObj
pattern = do i <- createInfo
_ <- Parsec.char '#'
_ <- Parsec.char '"'
str <- parseInternalPattern
_ <- Parsec.char '"'
incColumn (length str + 2)
return (XObj (Pattern $ treat str) i Nothing)
-- auto-escaping backslashes
where treat :: String -> String
treat [] = []
treat ('\\':r) = "\\\\" ++ treat r
treat (x:r) = x : treat r
escaped :: Parsec.Parsec String ParseState [Char]
escaped = do
_ <- Parsec.char '\\'
c <- Parsec.oneOf ['\\', '\"']
case c of
'\\' -> return "\\\\"
'\"' -> return "\""
escapedQuoteChar :: Parsec.Parsec String ParseState Char
escapedQuoteChar = do c <- Parsec.string "\""
incColumn 2
return '\"'
escapedSpaceChar :: Parsec.Parsec String ParseState Char
escapedSpaceChar = do c <- Parsec.string "space"
incColumn 5
return ' '
escapedNewlineChar :: Parsec.Parsec String ParseState Char
escapedNewlineChar = do c <- Parsec.string "newline"
incColumn 7
return '\n'
escapedTabChar :: Parsec.Parsec String ParseState Char
escapedTabChar = do c <- Parsec.string "tab"
incColumn 3
return '\t'
aChar :: Parsec.Parsec String ParseState XObj
aChar = do i <- createInfo
_ <- Parsec.char '\\'
c <- Parsec.try escapedQuoteChar <|>
Parsec.try escapedNewlineChar <|>
Parsec.try escapedTabChar <|>
Parsec.try escapedSpaceChar <|>
Parsec.anyChar
incColumn 2
return (XObj (Chr c) i Nothing)
{-# ANN validCharacters "HLint: ignore Use String" #-}
validCharacters :: [Char]
validCharacters = "+-*/?!><=_:\9580\9559"
symbolSegment :: Parsec.Parsec String ParseState String
symbolSegment = do sym <- Parsec.many1 validInSymbol
incColumn (length sym)
return sym
where validInSymbol = Parsec.choice [Parsec.letter, Parsec.digit, Parsec.oneOf validCharacters]
period :: Parsec.Parsec String ParseState ()
period = do Parsec.char '.'
incColumn 1
return ()
symbol :: Parsec.Parsec String ParseState XObj
symbol = do i <- createInfo
segments <- Parsec.sepBy1 symbolSegment period
case last segments of
"defn" -> return (XObj Defn i Nothing)
"def" -> return (XObj Def i Nothing)
-- TODO: What about the other def- forms?
"do" -> return (XObj Do i Nothing)
"while" -> return (XObj While i Nothing)
"fn" -> return (XObj (Fn Nothing Set.empty) i Nothing)
"let" -> return (XObj Let i Nothing)
"break" -> return (XObj Break i Nothing)
"if" -> return (XObj If i Nothing)
"true" -> return (XObj (Bol True) i Nothing)
"false" -> return (XObj (Bol False) i Nothing)
"address" -> return (XObj Address i Nothing)
"set!" -> return (XObj SetBang i Nothing)
"the" -> return (XObj The i Nothing)
"ref" -> return (XObj Ref i Nothing)
"deref" -> return (XObj Deref i Nothing)
"with" -> return (XObj With i Nothing)
name -> return (XObj (Sym (SymPath (init segments) name) Symbol) i Nothing)
atom :: Parsec.Parsec String ParseState XObj
atom = Parsec.choice [number, pattern, string, aChar, symbol]
incColumn :: Int -> Parsec.Parsec String ParseState ()
incColumn x = do s <- Parsec.getState
let i = parseInfo s
line = infoLine i
column = infoColumn i
identifier = infoIdentifier i
file = infoFile i
newInfo = Info line (column + x) file (Set.fromList []) identifier
Parsec.putState (s { parseInfo = newInfo })
return ()
comment :: Parsec.Parsec String ParseState ()
comment = do _ <- Parsec.char ';'
_ <- Parsec.many (Parsec.noneOf ['\n'])
return ()
linebreak :: Parsec.Parsec String ParseState ()
linebreak = do s <- Parsec.getState
let i = parseInfo s
line = infoLine i
identifier = infoIdentifier i
file = infoFile i
newInfo = Info (line + 1) 1 file (Set.fromList []) identifier
Parsec.putState (s { parseInfo = newInfo })
_ <- Parsec.char '\n'
return ()
space :: Parsec.Parsec String ParseState ()
space = do incColumn 1
_ <- Parsec.char ' '
return ()
comma :: Parsec.Parsec String ParseState ()
comma = do incColumn 1
_ <- Parsec.char ','
return ()
tab :: Parsec.Parsec String ParseState ()
tab = do incColumn 1
_ <- Parsec.char '\t'
return ()
eof :: Parsec.Parsec String ParseState ()
eof = do _ <- Parsec.char '\0'
return ()
emptyCharacters :: [Parsec.Parsec String ParseState ()]
emptyCharacters = [space, tab, comma, linebreak, eof, comment]
whitespace :: Parsec.Parsec String ParseState ()
whitespace = do _ <- Parsec.many1 (Parsec.choice emptyCharacters)
return ()
whitespaceOrNothing :: Parsec.Parsec String ParseState ()
whitespaceOrNothing = do _ <- Parsec.many (Parsec.choice emptyCharacters)
return ()
readObjs :: Parsec.Parsec String ParseState [XObj]
readObjs = do padding <- Parsec.many whitespace
incColumn (length padding)
Parsec.many sexpr
array :: Parsec.Parsec String ParseState XObj
array = do i <- createInfo
_ <- Parsec.char '['
incColumn 1
objs <- readObjs
_ <- Parsec.char ']'
incColumn 1
return (XObj (Arr objs) i Nothing)
list :: Parsec.Parsec String ParseState XObj
list = do i <- createInfo
_ <- Parsec.char '('
incColumn 1
objs <- readObjs
_ <- Parsec.char ')'
incColumn 1
return (XObj (Lst objs) i Nothing)
dictionary :: Parsec.Parsec String ParseState XObj
dictionary = do i <- createInfo
_ <- Parsec.char '{'
incColumn 1
objs <- readObjs
_ <- Parsec.char '}'
incColumn 1
let objs' = if even (length objs) then objs else init objs -- Drop last if uneven nr of forms.
-- TODO! Signal error here!
--return (XObj (Dict (Map.fromList (pairwise objs'))) i Nothing)
pairInit = XObj (Sym (SymPath ["Pair"] "init") (LookupGlobal CarpLand AFunction)) i Nothing
pairs = map (\(k,v) -> XObj (Lst [pairInit, k, v]) i Nothing) (pairwise objs')
arrayLiteral = XObj (Arr pairs) i Nothing
reffedArrayLiteral = XObj (Lst [(XObj Ref i Nothing), arrayLiteral]) i Nothing
fromArraySymbol = XObj (Sym (SymPath ["Map"] "from-array") (LookupGlobal CarpLand AFunction)) i Nothing
fromArraySexp = XObj (Lst [fromArraySymbol, reffedArrayLiteral]) i Nothing
return fromArraySexp
ref :: Parsec.Parsec String ParseState XObj
ref = do i <- createInfo
_ <- Parsec.char '&'
incColumn 1
expr <- sexpr
return (XObj (Lst [XObj Ref Nothing Nothing, expr]) i Nothing)
deref :: Parsec.Parsec String ParseState XObj
deref = do i <- createInfo
_ <- Parsec.char '~'
incColumn 1
expr <- sexpr
return (XObj (Lst [XObj Deref Nothing Nothing, expr]) i Nothing)
copy :: Parsec.Parsec String ParseState XObj
copy = do i1 <- createInfo
i2 <- createInfo
_ <- Parsec.char '@'
incColumn 1
expr <- sexpr
return (XObj (Lst [XObj (Sym (SymPath [] "copy") Symbol) i1 Nothing, expr]) i2 Nothing)
quote :: Parsec.Parsec String ParseState XObj
quote = do i1 <- createInfo
i2 <- createInfo
_ <- Parsec.char '\''
incColumn 1
expr <- sexpr
return (XObj (Lst [XObj (Sym (SymPath [] "quote") Symbol) i1 Nothing, expr]) i2 Nothing)
sexpr :: Parsec.Parsec String ParseState XObj
sexpr = do x <- Parsec.choice [ref, deref, copy, quote, list, array, dictionary, atom]
_ <- whitespaceOrNothing
return x
lispSyntax :: Parsec.Parsec String ParseState [XObj]
lispSyntax = do padding <- Parsec.many whitespace
incColumn (length padding)
result <- Parsec.sepBy sexpr whitespaceOrNothing
Parsec.eof
return result
parse :: String -> String -> Either Parsec.ParseError [XObj]
parse text fileName = let initState = ParseState (Info 1 1 fileName (Set.fromList []) 0)
in Parsec.runParser lispSyntax initState fileName text
{-# ANN balance "HLint: ignore Use String" #-}
-- | For detecting the parenthesis balance in a string, i.e. "((( ))" = 1
balance :: String -> Int
balance text =
case Parsec.runParser parenSyntax [] "(parens)" text of
Left err -> error (show err)
Right ok -> ok
where
parenSyntax :: Parsec.Parsec String [Char] Int
parenSyntax = do _ <- Parsec.many character
parens <- Parsec.getState
return (length parens)
character :: Parsec.Parsec String [Char] ()
character = do c <- Parsec.anyChar
parens <- Parsec.getState
case parens of
[] -> push c
'"':xs -> case c of
'\\' -> do c <- Parsec.anyChar -- consume next
return ()
'"' -> Parsec.putState xs -- close string
_ -> return () -- inside string
(x:xs) -> case (x, c) of
('(', ')') -> Parsec.putState xs
('[', ']') -> Parsec.putState xs
('"', '"') -> Parsec.putState xs
--('\\', _) -> Parsec.putState xs -- ignore char after '\'
_ -> push c
push :: Char -> Parsec.Parsec String String ()
push c =
do parens <- Parsec.getState
case c of
'(' -> Parsec.putState (c : parens)
'[' -> Parsec.putState (c : parens)
'"' -> Parsec.putState (c : parens)
_ -> return ()
| eriksvedang/Carp | src/Parsing.hs | mpl-2.0 | 16,816 | 12 | 18 | 6,055 | 5,225 | 2,552 | 2,673 | 365 | 17 |
module ISO where
import Data.Maybe
import Data.Void
-- A value of type `Void` does not exist: the type has no values at all.
-- So it is impossible to construct a `Void`,
-- unless using undefined, error, unsafeCoerce, infinite recursion, etc.
-- And there is a function
-- absurd :: Void -> a
-- that gets any value out of `Void`.
-- We can do this because we can never have a `Void` in the first place.
-- Please copy your code of Isomorphism to here.
-- Sometimes, we can treat a Type as a Number:
-- if a Type t has n distinct value, it's Number is n.
-- This is formally called cardinality.
-- See https://en.wikipedia.org/wiki/Cardinality
-- Void has a cardinality of 0 (we will abbreviate this as: Void is 0).
-- () is 1.
-- Bool is 2.
-- Maybe a is 1 + a.
-- We will be using peano arithmetic so we will write it as S a.
-- https://en.wikipedia.org/wiki/Peano_axioms
-- Either a b is a + b.
-- (a, b) is a * b.
-- a -> b is b ^ a. Try counting (() -> Bool) and (Bool -> ())
-- Algebraic data type got the name because
-- it satisfies a lot of algebraic rules under isomorphism
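-- A small sketch of the counting rule above (hypothetical names, not part
-- of the kata): (() -> Bool) has exactly 2^1 = 2 inhabitants, while
-- (Bool -> ()) has exactly 1^2 = 1 inhabitant.
unitToBoolTrue, unitToBoolFalse :: () -> Bool
unitToBoolTrue () = True
unitToBoolFalse () = False
boolToUnit :: Bool -> ()
boolToUnit _ = ()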
type ISO a b = (a -> b, b -> a)
-- given ISO a b, we can go from a to b
substL :: ISO a b -> (a -> b)
substL = fst
-- and vice versa
substR :: ISO a b -> (b -> a)
substR = snd
-- There can be more than one ISO a b
isoBool :: ISO Bool Bool
isoBool = (id, id)
isoBoolNot :: ISO Bool Bool
isoBoolNot = (not, not)
-- isomorphism is reflexive
refl :: ISO a a
refl = (id, id)
-- isomorphism is symmetric
symm :: ISO a b -> ISO b a
symm (a, b) = (b, a)
-- isomorphism is transitive
trans :: ISO a b -> ISO b c -> ISO a c
trans (ab, ba) (bc, cb) = (bc . ab, ba . cb)
-- We can combine isomorphism:
isoTuple :: ISO a b -> ISO c d -> ISO (a, c) (b, d)
isoTuple (ab, ba) (cd, dc) = (\(a, c) -> (ab a, cd c), \(b, d) -> (ba b, dc d))
--
isoList :: ISO a b -> ISO [a] [b]
isoList (a, b) = ((a <$>), (b <$>))
isoMaybe :: ISO a b -> ISO (Maybe a) (Maybe b)
isoMaybe (a, b) = ((a <$>), (b <$>))
isoEither :: ISO a b -> ISO c d -> ISO (Either a c) (Either b d)
isoEither (ab, ba) (cd, dc) = (fun ab cd, fun ba dc)
where fun f g o = case o of
(Left obj) -> Left $ f obj
(Right obj) -> Right $ g obj
--
isoFunc :: ISO a b -> ISO c d -> ISO (a -> c) (b -> d)
isoFunc (ab, ba) (cd, dc) = ((\ac -> cd . ac . ba), (\bd -> dc . bd . ab))
-- Going another way is hard (and is generally impossible)
isoUnMaybe :: ISO (Maybe a) (Maybe b) -> ISO a b
isoUnMaybe m@(mamb, mbma) =
(\a -> get $ mamb $ Just a, substL $ isoUnMaybe $ symm m)
where
get (Just b) = b
get Nothing = getJust (mamb Nothing)
getJust (Just b) = b
getJust Nothing = undefined
-- The implementation is given fully.
-- But why can we use undefined in our implementation?
-- Because it is impossible for getJust Nothing to be invoked.
-- We won't test for isoUnMaybe,
-- but try to state why it is impossible in the comment here.
-- We cannot have
-- isoUnEither :: ISO (Either a b) (Either c d) -> ISO a c -> ISO b d.
-- Try to state why in the comment here.
-- Suppose we have isoUnEither,
-- ...,
-- Impossible,
-- We cannot have isoUnEither
-- Again, this won't be tested.
-- And we have isomorphism on isomorphism!
isoSymm :: ISO (ISO a b) (ISO b a)
isoSymm = (symm, symm)
-- a = b -> c = d -> a * c = b * d
isoProd :: ISO a b -> ISO c d -> ISO (a, c) (b, d)
isoProd = isoTuple
-- a = b -> c = d -> a + c = b + d
isoPlus :: ISO a b -> ISO c d -> ISO (Either a c) (Either b d)
isoPlus = isoEither
-- a = b -> S a = S b
isoS :: ISO a b -> ISO (Maybe a) (Maybe b)
isoS = isoMaybe
-- a = b -> c = d -> c ^ a = d ^ b
isoPow :: ISO a b -> ISO c d -> ISO (a -> c) (b -> d)
isoPow = isoFunc
mirror :: (Either a b) -> (Either b a)
mirror (Left l) = Right l
mirror (Right r) = Left r
-- a + b = b + a
plusComm :: ISO (Either a b) (Either b a)
plusComm = (mirror, mirror)
mirror2L :: (Either (Either a b) c) -> (Either a (Either b c))
mirror2L (Left (Left a)) = Left a
mirror2L (Left (Right a)) = Right $ Left a
mirror2L (Right a) = Right $ Right a
mirror2R :: (Either a (Either b c)) -> (Either (Either a b) c)
mirror2R (Right (Right a)) = Right a
mirror2R (Right (Left a)) = Left $ Right a
mirror2R (Left a) = Left $ Left a
-- a + b + c = a + (b + c)
plusAssoc :: ISO (Either (Either a b) c) (Either a (Either b c))
plusAssoc = (mirror2L, mirror2R)
-- a * b = b * a
multComm :: ISO (a, b) (b, a)
multComm = (f, f)
where f (x, y) = (y, x)
--
-- a * b * c = a * (b * c)
multAssoc :: ISO ((a, b), c) (a, (b, c))
multAssoc = (\((a, b), c) -> (a, (b, c)), \(a, (b, c)) -> ((a, b), c))
distL :: (a, (Either b c)) -> (Either (a, b) (a, c))
distL (a, Left b) = Left (a, b)
distL (a, Right c) = Right (a, c)
distR :: (Either (a, b) (a, c)) -> (a, (Either b c))
distR (Left (a, b)) = (a, Left b)
distR (Right (a, c)) = (a, Right c)
-- dist :: a * (b + c) = a * b + a * c
dist :: ISO (a, (Either b c)) (Either (a, b) (a, c))
dist = (distL, distR)
-- (c ^ b) ^ a = c ^ (a * b)
curryISO :: ISO (a -> b -> c) ((a, b) -> c)
curryISO = (\f -> (\(a, b) -> f a b), \f -> (\a b -> f (a, b)))
-- 1 = S O (we are using peano arithmetic)
-- https://en.wikipedia.org/wiki/Peano_axioms
one :: ISO () (Maybe Void)
one = (const Nothing, const ())
twoL :: Bool -> (Maybe (Maybe Void))
twoL True = Just Nothing
twoL False = Nothing
-- 2 = S (S O)
two :: ISO Bool (Maybe (Maybe Void))
two = (twoL, isJust)
-- O + b = b
plusO :: ISO (Either Void b) b
plusO = (left, Right)
where
left (Left x) = absurd x -- absurd :: Void -> a
left (Right x) = x
--
plusSL :: (Either (Maybe a) b) -> (Maybe (Either a b))
plusSL (Left (Just a)) = Just $ Left a
plusSL (Right b) = Just $ Right b
plusSL _ = Nothing
plusSR :: (Maybe (Either a b)) -> (Either (Maybe a) b)
plusSR (Just (Left a)) = Left $ Just a
plusSR (Just (Right b)) = Right b
plusSR _ = Left Nothing
-- S a + b = S (a + b)
plusS :: ISO (Either (Maybe a) b) (Maybe (Either a b))
plusS = (plusSL, plusSR)
-- 1 + b = S b
plusSO :: ISO (Either () b) (Maybe b)
plusSO = isoPlus one refl `trans` plusS `trans` isoS plusO
-- O * a = O
multO :: ISO (Void, a) Void
multO = (fst, absurd)
multSL :: (Maybe a, b) -> (Either b (a, b))
multSL ((Just a), b) = Right (a, b)
multSL (_ , b) = Left b
multSR :: (Either b (a, b)) -> (Maybe a, b)
multSR (Left b) = (Nothing, b)
multSR (Right (a, b)) = (Just a, b)
-- S a * b = b + a * b
multS :: ISO (Maybe a, b) (Either b (a, b))
multS = (multSL, multSR)
-- 1 * b = b
multSO :: ISO ((), b) b
multSO =
isoProd one refl `trans`
multS `trans`
isoPlus refl multO `trans`
plusComm `trans`
plusO
--
-- a ^ O = 1
powO :: ISO (Void -> a) ()
powO = (const (), \_ -> absurd)
-- a ^ (S b) = a * (a ^ b)
powS :: ISO (Maybe b -> a) (a, b -> a)
powS = (\f -> (f Nothing, f . Just),
\(a, f) -> (\b -> fromMaybe a $ f <$> b))
--
-- a ^ 1 = a
-- Go the hard way to prove that you really get what is going on!
powSO :: ISO (() -> a) a
powSO = (\f -> f (), const)
--_ `trans` powS `trans` _
-- Here's a trick:
-- replace undefined with the rhs of the comment on the previous line.
-- When you're not sure what to fill in for a value,
-- have it as a _
-- GHC will then say something like
-- "Found hole `_' with type: (Num (ISO (() -> a) (Maybe b0 -> a0)))"
-- so you can immediately see what type of value is needed.
-- This process can be repeated indefinitely -
-- for example you might replace `_` with `isoFunc _ _`
-- so GHC hints you towards a more specific type.
-- This is especially useful if you have a complex type.
-- See https://wiki.haskell.org/GHC/Typed_holes
-- And "stepwise refinement" for more details.
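-- A worked sketch of the refinement described above (the name powSO' is
-- hypothetical and not part of the kata): filling the two holes of
-- _ `trans` powS `trans` _ gives an alternative proof of a ^ 1 = a.
powSO' :: ISO (() -> a) a
powSO' =
      isoPow one refl        -- ISO (() -> a) (Maybe Void -> a)
  `trans` powS               -- ... (a, Void -> a)
  `trans` isoProd refl powO  -- ... (a, ())
  `trans` multComm           -- ... ((), a)
  `trans` multSO             -- ... a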
| ice1000/OI-codes | codewars/101-200/algebraic-isomorphism.hs | agpl-3.0 | 7,690 | 0 | 11 | 2,004 | 2,948 | 1,635 | 1,313 | 125 | 3 |
module RemoteCache(
-- * Types
RemoteCache,
-- * Functions
newRemoteCache
) where
import Control.Concurrent(
Chan,MVar,forkIO,newEmptyMVar,putMVar,takeMVar,newChan,readChan,writeChan)
import Data.ByteString(ByteString)
import qualified Data.ByteString as B
import Data.Time(NominalDiffTime)
import Data.Word(Word32,Word64)
import Network.Socket(
Socket,SocketType(Stream),HostName,ServiceName,
connect,socket,getAddrInfo,addrFamily,addrAddress,addrProtocol,defaultProtocol)
import Network.Socket.ByteString(recv,sendMany)
import Cache(
Cache(get,set,add,replace,delete,expire,touch),
CacheError(EntryExists,NotFound,VersionMismatch,UnknownError),
Expires(Expires),Timestamp(Timestamp),Version(Version))
import Logger(Logger)
import Protocol(
Packet, readPacket, writeRequest,
packetStatus, packetVersion, packetExtras, packetValue,
makeExpiry, getFlags, makeFlags,
magicResponse,
Cmd, cmdGet, cmdSet, cmdAdd, cmdReplace, cmdDelete,
statusNoError, statusKeyNotFound, statusKeyExists,
statusItemNotStored, statusNotSupported, statusInternalError)
-- | RemoteCache accesses a remote cache, using a memcache protocol subset.
data RemoteCache t e = RemoteCache {
rcSerialize :: e -> (Word32,ByteString),
rcDeserialize :: (Word32,ByteString) -> e,
rcConvertExpiry :: t -> t -> NominalDiffTime,
rcChan :: Chan Req
}
data Req = Req {
reqCmd :: Cmd,
reqVersion :: Word64,
reqExtras :: [ByteString],
reqKey :: ByteString,
reqValue :: Maybe ByteString,
reqResult :: MVar (Maybe Packet)
}
-- | Create a new RemoteCache that connects to the given host and port.
-- The Logger logs the requests and responses. serializeValue and
-- deserializeValue convert the entry values to and from the bytes
-- used with the remote cache. convertExpiry converts timestamps to
-- NominalDiffTime.
newRemoteCache :: Ord t => Logger -> HostName -> ServiceName -> (e -> (Word32,ByteString)) -> ((Word32,ByteString) -> e) -> (t -> t -> NominalDiffTime) -> IO (RemoteCache t e)
newRemoteCache logger hostname service serializeValue deserializeValue convertExpiry = do
(addrInfo:_) <- getAddrInfo Nothing (Just hostname) (Just service)
sock <- socket (addrFamily addrInfo) Stream defaultProtocol
connect sock (addrAddress addrInfo)
chan <- newChan
forkIO (process sock chan)
return RemoteCache {
rcSerialize = serializeValue,
rcDeserialize = deserializeValue,
rcConvertExpiry = convertExpiry,
rcChan = chan
}
where
process sock chan = do
req <- readChan chan
writeRequest logger sock (reqCmd req) 0 (reqVersion req) (reqExtras req) (Just $ reqKey req) (reqValue req)
packet <- readPacket logger sock magicResponse
putMVar (reqResult req) packet
process sock chan
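-- A hypothetical construction sketch (not from the original source): a cache
-- whose values are raw ByteStrings and whose timestamps are plain
-- NominalDiffTime offsets. The host, port and the name exampleCache are
-- placeholders for illustration only.
exampleCache :: Logger -> IO (RemoteCache NominalDiffTime ByteString)
exampleCache logger =
  newRemoteCache logger "localhost" "11211"
                 (\bs -> (0, bs))                    -- serialize: flags 0, raw bytes
                 (\(_flags, bs) -> bs)               -- deserialize: drop the flags
                 (\expiresAt now -> expiresAt - now) -- expiry as a time delta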
instance Cache RemoteCache where
get RemoteCache { rcDeserialize = deser, rcChan = chan } key _ = do
mVar <- newEmptyMVar
writeChan chan Req {
reqCmd = cmdGet,
reqVersion = 0,
reqExtras = [],
reqKey = key,
reqValue = Nothing,
reqResult = mVar
}
maybePacket <- takeMVar mVar
return $ makeResult (fmap packetStatus maybePacket) (fmap packetVersion maybePacket) (fmap packetExtras maybePacket) (fmap packetValue maybePacket)
where
makeResult (Just status) (Just version) (Just extras) (Just (Just value))
| status == statusNoError =
Right (deser (getFlags (B.concat extras) 0, value), Version version)
| status == statusKeyNotFound = Left NotFound
| otherwise = Left UnknownError
makeResult (Just status) _ _ _
| status == statusKeyNotFound = Left NotFound
| otherwise = Left UnknownError
makeResult _ _ _ _ = Left UnknownError
set RemoteCache { rcSerialize = ser, rcConvertExpiry = expiry, rcChan = chan } key e (Expires exp) (Timestamp t) = do
let (flags,value) = ser e
mVar <- newEmptyMVar
writeChan chan Req {
reqCmd = cmdSet,
reqVersion = 0,
reqExtras = [makeFlags flags, makeExpiry $ expiry exp t],
reqKey = key,
reqValue = Just value,
reqResult = mVar
}
maybePacket <- takeMVar mVar
return $ makeResult (fmap packetStatus maybePacket) (fmap packetVersion maybePacket)
where
makeResult (Just status) (Just version)
| status == statusNoError = Right (Version version)
| otherwise = Left UnknownError
makeResult _ _ = Left UnknownError
add RemoteCache { rcSerialize = ser, rcConvertExpiry = expiry, rcChan = chan } key e (Expires exp) (Timestamp t) = do
let (flags,value) = ser e
mVar <- newEmptyMVar
writeChan chan Req {
reqCmd = cmdAdd,
reqVersion = 0,
reqExtras = [makeFlags flags, makeExpiry $ expiry exp t],
reqKey = key,
reqValue = Just value,
reqResult = mVar
}
maybePacket <- takeMVar mVar
return $ makeResult (fmap packetStatus maybePacket) (fmap packetVersion maybePacket)
where
makeResult (Just status) (Just version)
| status == statusNoError = Right (Version version)
| status == statusKeyExists = Left EntryExists
| otherwise = Left UnknownError
makeResult _ _ = Left UnknownError
replace RemoteCache { rcSerialize = ser, rcConvertExpiry = expiry, rcChan = chan } key (Version version) e (Expires exp) (Timestamp t) = do
let (flags,value) = ser e
mVar <- newEmptyMVar
writeChan chan Req {
reqCmd = cmdReplace,
reqVersion = version,
reqExtras = [makeFlags flags, makeExpiry $ expiry exp t],
reqKey = key,
reqValue = Just value,
reqResult = mVar
}
maybePacket <- takeMVar mVar
return $ makeResult (fmap packetStatus maybePacket) (fmap packetVersion maybePacket)
where
makeResult (Just status) (Just version)
| status == statusNoError = Right (Version version)
| status == statusKeyNotFound = Left NotFound
| status == statusItemNotStored = Left VersionMismatch
| otherwise = Left UnknownError
makeResult _ _ = Left UnknownError
delete RemoteCache { rcChan = chan } key (Version version) _ = do
mVar <- newEmptyMVar
writeChan chan Req {
reqCmd = cmdDelete,
reqVersion = version,
reqExtras = [],
reqKey = key,
reqValue = Nothing,
reqResult = mVar
}
maybePacket <- takeMVar mVar
return $ makeResult (fmap packetStatus maybePacket)
where
makeResult (Just status)
| status == statusNoError = Nothing
| status == statusKeyNotFound = Just NotFound
| status == statusItemNotStored = Just VersionMismatch
| otherwise = Just UnknownError
makeResult _ = Just UnknownError
touch _ _ _ = return (Left UnknownError)
expire _ _ = return ()
| qpliu/lecache | hs/RemoteCache.hs | lgpl-3.0 | 7,677 | 0 | 16 | 2,444 | 2,110 | 1,116 | 994 | 147 | 1 |
module Main where
import Control.Error
import C18.Actions
import Opts
main :: IO ()
main = runScript . action =<< parseActions
| erochest/c18sgml | app/Main.hs | apache-2.0 | 163 | 0 | 6 | 56 | 40 | 23 | 17 | 6 | 1 |
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ < 710
{-# LANGUAGE OverlappingInstances #-}
#endif
module Data.Convert.Base where
import Prelude
import Control.Lens
----------------------------------------------------------------------
-- Conversions
----------------------------------------------------------------------
class MaybeConvertible a e b | a b -> e where
tryConvert :: a -> Either e b
class MaybeConvertible' a e b | b -> a e where
tryConvert' :: a -> Either e b
class Convertible a b where
convert :: a -> b
-- Maybe we should change the name to Specializable and specialize?
class Convertible' a b | b -> a where
convert' :: a -> b
type BiCastable a b = (Castable a b, Castable b a)
class Castable a b where
cast :: a -> b
default cast :: Convertible a b => a -> b
cast = convert
type IsoMaybeConvertible a e b = (MaybeConvertible a e b, MaybeConvertible b e a)
type IsoMaybeConvertible' a e b = (MaybeConvertible' a e b, MaybeConvertible' b e a)
type IsoConvertible a b = (Convertible a b , Convertible b a )
type IsoConvertible' a b = (Convertible' a b , Convertible' b a )
type IsoCastable a b = (Castable a b , Castable b a )
class ConvertibleM m n where convertM :: m t1 -> n t1
class ConvertibleM2 m n where convertM2 :: m t1 t2 -> n t1 t2
class ConvertibleM3 m n where convertM3 :: m t1 t2 t3 -> n t1 t2 t3
class ConvertibleM4 m n where convertM4 :: m t1 t2 t3 t4 -> n t1 t2 t3 t4
class ConvertibleM5 m n where convertM5 :: m t1 t2 t3 t4 t5 -> n t1 t2 t3 t4 t5
-- utils
unsafeConvert :: Show e => MaybeConvertible a e b => a -> b
unsafeConvert a =
case tryConvert a of
Left e -> error $ show e
Right r -> r
casted :: IsoCastable a b => Iso' a b
casted = iso cast cast
converted :: IsoConvertible a b => Iso' a b
converted = iso convert convert
-- instances
-- The following instances are commented out because they are pure EVIL.
-- Let's consider the following situation - we've got an instance:
-- instance Castable (Edge src tgt) (Edge src' tgt') where ...
-- if we use it passing the same arguments it will overlap with the
-- instance Castable a a
-- in such a way that it would not be possible for GHC to resolve!
--
instance {-# OVERLAPPABLE #-} Castable a a where cast = id ; {-# INLINE cast #-}
-- instance {-# OVERLAPPABLE #-} Convertible a a where convert = id ; {-# INLINE convert #-}
instance {-# OVERLAPPABLE #-} Convertible a b => Convertible (Maybe a) (Maybe b) where convert = fmap convert ; {-# INLINE convert #-}
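-- A hypothetical usage sketch (these instances and names are not part of the
-- original module): with a Convertible instance in each direction,
-- `converted` gives a lens-style Iso' between the two types.
instance Convertible Int Integer where convert = fromIntegral
instance Convertible Integer Int where convert = fromInteger
intInteger :: Iso' Int Integer
intInteger = converted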
| wdanilo/convert | src/Data/Convert/Base.hs | apache-2.0 | 2,761 | 0 | 9 | 639 | 706 | 378 | 328 | -1 | -1 |
-- 232792560
nn = 20
tryReduce ds n d = if m == 0 && isDivisible then tryReduce ds q d else n
where (q,m) = n `divMod` d
isDivisible = all (\x -> q `mod` x == 0) ds
smallestMult n = foldl (tryReduce ds) p ds
where p = product ds
ds = [2..n]
main = putStrLn $ show $ smallestMult nn
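-- A small sanity check (hypothetical binding, not in the original file):
-- the same fold over [2..10] gives the least common multiple of 1..10.
smallCase :: Integer
smallCase = smallestMult 10   -- == 2520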
| higgsd/euler | hs/5.hs | bsd-2-clause | 313 | 0 | 11 | 95 | 147 | 79 | 68 | 8 | 2 |
-- http://www.codewars.com/kata/5512ec4bbe2074421d00028c
module Stream where
import Control.Arrow
import Control.Applicative
import Stream.Internal
-- Defined in Stream.Internal:
-- data Stream a = a :> Stream a
-- infixr :>
-- | Get the first element of a stream.
headS :: Stream a -> a
headS (x :> xs) = x
-- | Drop the first element of a stream.
tailS :: Stream a -> Stream a
tailS (x :> xs) = xs
-- {{{ Stream constructors
-- | Construct a stream by repeating a value.
repeatS :: a -> Stream a
repeatS x = x :> repeatS x
-- | Construct a stream by repeatedly applying a function.
iterateS :: (a -> a) -> a -> Stream a
iterateS f x = x :> iterateS f (f x)
-- | Construct a stream by repeating a list forever.
cycleS :: [a] -> Stream a
cycleS xs = foldr (:>) (cycleS xs) xs where
-- | Construct a stream by counting numbers starting from a given one.
fromS :: Num a => a -> Stream a
fromS = iterateS (+1)
-- | Same as 'fromS', but count with a given step width.
fromThenS :: Num a => a -> a -> Stream a
fromThenS x s = iterateS (+s) x
-- }}}
-- | Fold a stream from the right.
foldrS :: (a -> b -> b) -> Stream a -> b
foldrS f (x :> xs) = f x (foldrS f xs)
-- | Filter a stream with a predicate.
filterS :: (a -> Bool) -> Stream a -> Stream a
filterS p = foldrS (\a b -> if p a then a:>b else b)
-- | Take a given amount of elements from a stream.
takeS :: Int -> Stream a -> [a]
takeS i (x:>xs)
| i <= 0 = []
| otherwise = x : takeS (i-1) xs
-- | Drop a given amount of elements from a stream.
dropS :: Int -> Stream a -> Stream a
dropS i xs'@(x:>xs)
| i <= 0 = xs'
| otherwise = dropS (i-1) xs
-- | Do take and drop simultaneous.
splitAtS :: Int -> Stream a -> ([a], Stream a)
splitAtS i xs'@(x:>xs)
| i <= 0 = ([], xs')
| otherwise = first (x:) $ splitAtS (i-1) xs
-- | Combine two streams with a function.
zipWithS :: (a -> b -> c) -> Stream a -> Stream b -> Stream c
zipWithS f (x:>xs) (y:>ys) = f x y :> zipWithS f xs ys
zipS :: Stream a -> Stream b -> Stream (a, b)
zipS = zipWithS (,)
instance Functor Stream where
-- fmap :: (a -> b) -> Stream a -> Stream b
fmap f = foldrS (\x ys -> f x :> ys)
instance Applicative Stream where
-- pure :: a -> Stream a
pure = repeatS
-- (<*>) :: Stream (a -> b) -> Stream a -> Stream b
(<*>) = zipWithS ($)
-- | The stream of fibonacci numbers.
fibS :: Stream Integer
fibS = 0 :> 1 :> zipWithS (+) fibS (tailS fibS)
-- | The stream of prime numbers.
primeS :: Stream Integer
primeS = fmap fst $ iterateS (\(_, x:>xs) -> (x, filterS ((/= 0) . (`mod` x)) xs)) (2, fromThenS 3 2)
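-- A small usage sketch (hypothetical bindings, not part of the kata):
firstFibs :: [Integer]
firstFibs = takeS 10 fibS     -- [0,1,1,2,3,5,8,13,21,34]
firstPrimes :: [Integer]
firstPrimes = takeS 5 primeS  -- [2,3,5,7,11]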
| Bodigrim/katas | src/haskell/B-Functional-streams.hs | bsd-2-clause | 2,601 | 0 | 13 | 623 | 944 | 500 | 444 | 47 | 2 |
-- | C simulation of a RISK kernel.
module RISK.Sim
( generateSimulator
) where
import Language.GIGL
import Text.Printf
import RISK.Compile
import RISK.Config
import RISK.Kernel
import RISK.Spec
generateSimulator :: Spec -> IO ()
generateSimulator spec = writeFile "risk_sim.c" $ unlines
[ "// RISK Simulator"
, ""
, "#include <stdlib.h>"
, "#include <stdio.h>"
, ""
, "#include \"risk_lib.h\""
, ""
, "// Partition entry points."
, unlines [ printf "void %s_main (void);" name | name <- partitionNames config ]
, "// Partition memories (recv buffers + send buffers + data space)."
, unlines [ printf "static word %s_memory[%d];" name $ partitionMemorySize config name | name <- partitionNames config ]
, "// Variables from kernel model."
, unlines [ printf "word %s;" name | name <- variables program ]
, "// Set the partition memory pointers."
, "void risk_set_memory_ptrs(void)"
, block $ unlines [ printf "%s_memory_ptr = (word) %s_memory;" name name | name <- partitionNames config ]
, ""
, "// Functions from kernel model."
, compile spec
, ""
, "// RISK simulator main."
, "int main (int argc, char **argv)"
, block $ unlines
[ "// First argument to simulator is the number of scheduling steps to run."
, "risk_cycle_count = atoi(argv[1]);"
, "risk_init();"
, "return 0;"
]
]
where
config = configure spec
program = kernelProgram spec
| tomahawkins/risk | RISK/Sim.hs | bsd-2-clause | 1,424 | 0 | 13 | 299 | 286 | 155 | 131 | 39 | 1 |
module Main where
main :: IO ()
main = putStrLn "<<<projName>>> Sample application!"
| creswick/Newt | tests/testFiles/dirTemplates/cabalProject/appsrc/Main.hs | bsd-3-clause | 85 | 0 | 6 | 13 | 22 | 12 | 10 | 3 | 1 |
module Handler.Api.Resource where
import Import
import Data.Aeson
postApiResourceExists :: Handler TypedContent
postApiResourceExists = return $ toTypedContent $
object
[ "success" .= True
, "result" .= True
]
| duplode/dohaskell | src/Handler/Api/Resource.hs | bsd-3-clause | 242 | 0 | 8 | 59 | 53 | 30 | 23 | 8 | 1 |
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving, TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Distribution.Server.Users.AuthToken
( AuthToken
, parseAuthToken, parseAuthTokenM, renderAuthToken
, OriginalToken
, convertToken, viewOriginalToken, generateOriginalToken
, parseOriginalToken
)
where
import Distribution.Server.Framework.MemSize
import Distribution.Server.Util.Nonce
import Distribution.Text
( Text(..) )
import qualified Distribution.Compat.ReadP as Parse
import qualified Text.PrettyPrint as Disp
import qualified Data.Char as Char
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.ByteString.Short as BSS
import qualified Data.ByteString.Base16 as BS16
import qualified Crypto.Hash.SHA256 as SHA256
import Distribution.Pretty (Pretty(..))
import Distribution.Parsec.Class (Parsec(..))
import qualified Distribution.Compat.CharParsing as P
import Control.Applicative ((<$>))
import Data.SafeCopy
import Data.Typeable (Typeable)
-- | Contains the original token which will be shown to the user
-- once and is NOT stored on the server. The user is expected
-- to provide this token on each request that should be
-- authed by it
newtype OriginalToken = OriginalToken Nonce
deriving (Eq, Ord, Show, Typeable)
-- | Contains a hash of the original token
newtype AuthToken = AuthToken BSS.ShortByteString
deriving (Eq, Ord, Read, Show, Typeable, MemSize)
convertToken :: OriginalToken -> AuthToken
convertToken (OriginalToken bs) =
AuthToken $ BSS.toShort $ SHA256.hash $ getRawNonceBytes bs
viewOriginalToken :: OriginalToken -> T.Text
viewOriginalToken (OriginalToken ot) = T.pack $ renderNonce ot
-- | Generate a random 32 byte auth token. The token is represented as
-- in textual base16 way so it can easily be printed and parsed.
-- Note that this operation is not very efficient because it
-- calls 'withSystemRandom' for each token, but for the current
-- use case we only generate tokens infrequently so this should be fine.
generateOriginalToken :: IO OriginalToken
generateOriginalToken = OriginalToken <$> newRandomNonce 32
parseOriginalToken :: T.Text -> Either String OriginalToken
parseOriginalToken t = OriginalToken <$> parseNonce (T.unpack t)
parseAuthTokenM :: Monad m => T.Text -> m AuthToken
parseAuthTokenM t =
case parseAuthToken t of
Left err -> fail err
Right ok -> return ok
parseAuthToken :: T.Text -> Either String AuthToken
parseAuthToken t
  | T.length t /= 64 = Left "auth token must be 64 characters long"
| not (T.all Char.isHexDigit t) = Left "only hex digits are allowed in tokens"
| otherwise =
Right $ AuthToken $ BSS.toShort $ fst $ BS16.decode $ T.encodeUtf8 t
renderAuthToken :: AuthToken -> T.Text
renderAuthToken (AuthToken bss) = T.decodeUtf8 $ BS16.encode $ BSS.fromShort bss
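-- A hypothetical round-trip sketch (not part of the original module): the
-- rendered hash of a freshly generated token parses back to the same value.
roundTripsOk :: IO Bool
roundTripsOk = do
  original <- generateOriginalToken
  let tok = convertToken original
  return (parseAuthToken (renderAuthToken tok) == Right tok)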
-- TODO: remove this instance for Cabal 3.0
instance Text AuthToken where
disp tok = Disp.text . T.unpack . renderAuthToken $ tok
parse =
Parse.munch1 Char.isHexDigit >>= \x ->
case parseAuthToken (T.pack x) of
Left err -> fail err
Right ok -> return ok
instance Parsec AuthToken where
parsec =
P.munch1 Char.isHexDigit >>= \x ->
case parseAuthToken (T.pack x) of
Left err -> fail err
Right ok -> return ok
instance Pretty AuthToken where
pretty = Disp.text . T.unpack . renderAuthToken
instance SafeCopy AuthToken where
putCopy (AuthToken bs) = contain $ safePut (BSS.fromShort bs)
getCopy =
contain $ AuthToken . BSS.toShort <$> safeGet
| edsko/hackage-server | Distribution/Server/Users/AuthToken.hs | bsd-3-clause | 3,644 | 0 | 12 | 692 | 861 | 469 | 392 | 71 | 2 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.AMD.GPUShaderInt64
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/AMD/gpu_shader_int64.txt AMD_gpu_shader_int64> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.AMD.GPUShaderInt64 (
-- * Enums
gl_FLOAT16_NV,
gl_FLOAT16_VEC2_NV,
gl_FLOAT16_VEC3_NV,
gl_FLOAT16_VEC4_NV,
gl_INT16_NV,
gl_INT16_VEC2_NV,
gl_INT16_VEC3_NV,
gl_INT16_VEC4_NV,
gl_INT64_NV,
gl_INT64_VEC2_NV,
gl_INT64_VEC3_NV,
gl_INT64_VEC4_NV,
gl_INT8_NV,
gl_INT8_VEC2_NV,
gl_INT8_VEC3_NV,
gl_INT8_VEC4_NV,
gl_UNSIGNED_INT16_NV,
gl_UNSIGNED_INT16_VEC2_NV,
gl_UNSIGNED_INT16_VEC3_NV,
gl_UNSIGNED_INT16_VEC4_NV,
gl_UNSIGNED_INT64_NV,
gl_UNSIGNED_INT64_VEC2_NV,
gl_UNSIGNED_INT64_VEC3_NV,
gl_UNSIGNED_INT64_VEC4_NV,
gl_UNSIGNED_INT8_NV,
gl_UNSIGNED_INT8_VEC2_NV,
gl_UNSIGNED_INT8_VEC3_NV,
gl_UNSIGNED_INT8_VEC4_NV,
-- * Functions
glGetUniformi64vNV,
glGetUniformui64vNV,
glProgramUniform1i64NV,
glProgramUniform1i64vNV,
glProgramUniform1ui64NV,
glProgramUniform1ui64vNV,
glProgramUniform2i64NV,
glProgramUniform2i64vNV,
glProgramUniform2ui64NV,
glProgramUniform2ui64vNV,
glProgramUniform3i64NV,
glProgramUniform3i64vNV,
glProgramUniform3ui64NV,
glProgramUniform3ui64vNV,
glProgramUniform4i64NV,
glProgramUniform4i64vNV,
glProgramUniform4ui64NV,
glProgramUniform4ui64vNV,
glUniform1i64NV,
glUniform1i64vNV,
glUniform1ui64NV,
glUniform1ui64vNV,
glUniform2i64NV,
glUniform2i64vNV,
glUniform2ui64NV,
glUniform2ui64vNV,
glUniform3i64NV,
glUniform3i64vNV,
glUniform3ui64NV,
glUniform3ui64vNV,
glUniform4i64NV,
glUniform4i64vNV,
glUniform4ui64NV,
glUniform4ui64vNV
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/AMD/GPUShaderInt64.hs | bsd-3-clause | 2,129 | 0 | 4 | 268 | 229 | 160 | 69 | 65 | 0 |
module RunModes
( chkKeyFileExists
, chkOverwrite
, chkTeXFilesExist
, readKeyFile
, readTeXFiles
, processFiles
, findAllFiles
, getAllKeys
, processKeys
) where
---------------------------------------------------------------------
-- This module contains implementations for the monad transformers
-- that are run in the Controller module. It interacts with the Model
-- and Viewer for processing and output.
---------------------------------------------------------------------
import Control.Monad ( forM_ )
import Control.Monad.Trans.Class ( lift )
import Control.Monad.Trans.Writer ( WriterT, execWriterT, tell )
import System.FilePath ( (</>) )
import qualified Data.List as DL ( sort )
import qualified System.IO as SIO ( readFile, hFlush, stdout )
import qualified System.FilePath as SFP ( takeExtension, FilePath )
import qualified System.Directory as SD ( doesFileExist
, doesDirectoryExist
, listDirectory)
import qualified Model.Types as T
import qualified Model.Model as M
import qualified Model.Help as H
import qualified Model.SimpleCLP as SC
import qualified Viewer as V
---------------------------------------------------------------------
-- Functions
---------------------------------------------------------------------
---------------------------------------
-- |General runtime user queries
confirmFileName :: SFP.FilePath -> IO Bool
-- ^Ask user to confirm the name of a file that will be overwritten.
confirmFileName n = do
putStr $ " File " ++ n ++ " already exists. Overwrite (y/n)? "
SIO.hFlush SIO.stdout
answer <- fmap ( unwords . words ) getLine
return $ elem answer ["y", "Y"]
confirmNotOnto :: IO Bool
-- ^Ask user to confirm overwrite when the key-value mapping is not
-- a bijection.
confirmNotOnto = do
putStr $ H.multValWarning
putStr $ " Continue anyways (y/n): "
SIO.hFlush $ SIO.stdout
answer <- fmap ( unwords . words ) getLine
return $ elem answer ["y", "Y"]
---------------------------------------
-- |Reformat Mode Functions
chkKeyFileExists :: T.ChemnoState -> T.CMTrans IO T.ChemnoState
-- ^Checks if the key file was supplied and exists. If no key file
-- was supplied or it does not exist, then the underlying mode state
-- is changed to the error mode with an appropriate message.
chkKeyFileExists c = do
case T.keyFile c of
Nothing -> fail "No key file specified."
Just x -> do exists <- lift . SD.doesFileExist $ x
if exists
then return c
else fail . M.fileDNE $ x
chkTeXFilesExist :: T.ChemnoState -> T.CMTrans IO T.ChemnoState
-- ^Checks if LaTeX files were supplied and exist. If no LaTeX files
-- were provided or they do not exist, then the underlying mode state
-- is changed to the error mode with an appropriate message.
chkTeXFilesExist c = do
case T.files c of
[] -> fail "No LaTeX files specified."
xs -> do
let fPaths = map T.texInPath xs
fsExist <- lift . mapM SD.doesFileExist $ fPaths
if all id fsExist
then return c
else let missing = [x | (b, x) <- zip fsExist fPaths, not b]
errMsgs = map M.fileDNE $ missing
in fail . unlines $ errMsgs
readKeyFile :: T.ChemnoState -> T.CMTrans IO T.ChemnoState
-- ^Parse the key file and check for errors. Any parsing errors lead
-- to changing the underlying mode state to the error mode with an
-- appropriate message describing the problem.
readKeyFile c = do
let Just keyFile = T.keyFile c
keyFile <- lift . SIO.readFile $ keyFile
let ethrDict = M.parseKeyFile ( T.kvHandling c ) keyFile
case ethrDict of
Right dict -> return $ c { T.dict = dict }
Left errMsgs -> fail errMsgs
readTeXFiles :: T.ChemnoState -> T.CMTrans IO T.ChemnoState
-- ^Parse the LaTeX files and check for errors. Any parsing errors
-- lead to changing the underlying mode state to the error mode with
-- an appropriate message identifying the problem and offending files.
readTeXFiles c = do
let fPaths = map T.texInPath $ T.files c
texContents <- lift . mapM readFile $ fPaths
let ethrParsedTeX = M.parseTeX ( T.texCmd c ) $ zip fPaths texContents
case ethrParsedTeX of
Right good -> let upDated = zipWith ( \ t g -> t { T.texParsed = g } )
( T.files c ) good
in return $ c { T.files = upDated }
Left bad -> let errMsgs = map M.cantParseMsg bad
in fail . unlines $ errMsgs
chkOverwrite :: T.ChemnoState -> T.CMTrans IO T.ChemnoState
-- ^Checks to see if LaTeX files will be overwritten. If so, it
-- checks to make sure the key-value mapping is onto. If the mapping
-- is not onto, then the user is given the option to abort.
chkOverwrite c = do
if all ( null . T.texOutPath ) ( T.files c )
then return c
else if not . M.hasRepeats $ [ v | ( _ , v ) <- T.dict c ]
then return c
else do proceed <- lift confirmNotOnto
if proceed
then return c
else fail "Overwrite aborted due to repeated values."
processFiles :: T.ChemnoState -> T.CMTrans IO T.ChemnoState
-- ^Formats the LaTeX files and saves them, returning the chemno state
-- with its file records updated to reflect the written output.
processFiles c = do
let formatted = M.reformat ( T.texCmd c ) ( T.dict c ) ( T.files c )
upDated <- lift . mapM V.writeOutput $ formatted
return $ c { T.files = upDated }
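-- The reformat-mode steps above are written as Kleisli arrows over
-- T.CMTrans IO, so a driver presumably chains them in order. A minimal
-- sketch (illustrative only, the actual driver may differ) using (>=>)
-- from Control.Monad:
--
--   runReformat :: T.ChemnoState -> T.CMTrans IO T.ChemnoState
--   runReformat = chkKeyFileExists >=> chkTeXFilesExist >=> readKeyFile
--             >=> readTeXFiles >=> chkOverwrite >=> processFiles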
---------------------------------------
-- |Directory Search Functions
findAllFiles :: SC.Options -> T.ChemnoState -> T.CMTrans IO T.ChemnoState
-- ^Extract all LaTeX files from the command line and by
-- recursively searching any directories for .tex files.
findAllFiles opts c = do
case map T.texInPath . T.files $ c of
[] -> fail "No files or directories specified."
fs -> do allFiles <- mapM getTeXFiles $ fs
return $ c { T.files = M.initTeX opts . concat $ allFiles }
searchDir :: SFP.FilePath -> WriterT [SFP.FilePath] IO ()
-- ^Returns a list of file paths to all TeX files found recursively
-- within a directory (identified by the .tex extension).
searchDir path = do
contents <- lift . SD.listDirectory $ path
forM_ contents $ \ fileName -> do
let subPath = path </> fileName
isDir <- lift . SD.doesDirectoryExist $ subPath
if isDir
then searchDir subPath
else if SFP.takeExtension subPath == ".tex"
then tell [subPath]
else tell []
getTeXFiles :: SFP.FilePath -> T.CMTrans IO [SFP.FilePath]
-- ^Check whether a path is a directory; if so, return all TeX files
-- found recursively within it (by .tex extension). If the path is a
-- file, just return it regardless of its extension.
getTeXFiles path = do
isDir <- lift . SD.doesDirectoryExist $ path
lift $ if isDir
then execWriterT ( searchDir path )
else return [path]
---------------------------------------
-- |Keys Mode Functions
getAllKeys :: T.ChemnoState -> T.CMTrans IO T.ChemnoState
-- ^Extract all the unique keys from all the parsed LaTeX files and
-- set them as the dictionary in the chemno state. Values for
-- each key in the association list are just empty strings.
getAllKeys c = do
let keys = M.extractKeys . T.files $ c
return $ c { T.dict = keys }
queryKeyFile :: IO SFP.FilePath
-- ^Query user for the name of the key file that will be generated.
queryKeyFile = do
putStr "Enter name for the key file to generate: "
SIO.hFlush SIO.stdout
fmap ( unwords . words ) getLine
getKeyFile :: Maybe SFP.FilePath -> IO SFP.FilePath
-- ^Obtain a valid path for the key file to be generated. Make sure
-- the file does not already exist and confirm overwrite otherwise.
getKeyFile Nothing = queryKeyFile >>= getKeyFile . Just
getKeyFile (Just path) = do
alreadyExists <- SD.doesFileExist path
proceed <- if alreadyExists
then confirmFileName path
else return True
if proceed && length path > 0
then return path
else getKeyFile Nothing
processKeys :: T.ChemnoState -> T.CMTrans IO T.ChemnoState
-- ^Extract unique keys from the parsed LaTeX files, and write a new
-- key file to a valid path.
processKeys c = do
let keys = [ x | (x,_) <- T.dict c ]
sortKeys = T.kvHandling c == T.SortKeys
path <- lift . getKeyFile $ T.keyFile c
lift . V.writeKeys path $ if sortKeys then DL.sort keys else keys
return $ c { T.keyFile = Just path }
|
MWRuszczycky/Chemno
|
src/RunModes.hs
|
bsd-3-clause
| 9,056 | 0 | 20 | 2,443 | 1,957 | 1,010 | 947 | 141 | 4 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
module SOPBench.Show where
import Data.List (intersperse)
import Generics.SOP
gshow ::
(Generic a, HasDatatypeInfo a, All2 Show (Code a)) => a -> String
gshow x =
gshowsPrec 0 x ""
gshowsPrec ::
(Generic a, HasDatatypeInfo a, All2 Show (Code a)) => Int -> a -> ShowS
gshowsPrec d x =
hcollapse
$ hczipWith pallshow (gshowsConstructor d)
(constructorInfo (datatypeInfo (I x)))
(unSOP (from x))
gshowsConstructor ::
forall xs . (All Show xs) => Int -> ConstructorInfo xs -> NP I xs -> K ShowS xs
gshowsConstructor d i =
case i of
Constructor n -> \ x -> K
$ showParen (d > app_prec)
$ showString n . showString " " . gshowsConstructorArgs (app_prec + 1) x
Infix n _ prec -> \ (I l :* I r :* Nil) -> K
$ showParen (d > prec)
$ showsPrec (prec + 1) l
. showString " " . showString n . showString " "
. showsPrec (prec + 1) r
Record n fi -> \ x -> K
$ showParen (d > app_prec) -- could be even higher, but seems to match GHC behaviour
$ showString n . showString " {" . gshowsRecordArgs fi x . showString "}"
gshowsConstructorArgs ::
(All Show xs) => Int -> NP I xs -> ShowS
gshowsConstructorArgs d x =
foldr (.) id $ hcollapse $ hcmap pshow (K . showsPrec d . unI) x
gshowsRecordArgs ::
(All Show xs) => NP FieldInfo xs -> NP I xs -> ShowS
gshowsRecordArgs fi x =
foldr (.) id
$ intersperse (showString ", ")
$ hcollapse
$ hczipWith pshow
(\ (FieldInfo l) (I y) -> K (showString l . showString " = " . showsPrec 0 y))
fi x
pallshow :: Proxy (All Show)
pallshow = Proxy
pshow :: Proxy Show
pshow = Proxy
app_prec :: Int
app_prec = 10
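-- Illustrative usage sketch (not part of the original module). Given a
-- hypothetical type with generics-sop instances derived via GHC generics
-- (DeriveGeneric enabled), e.g.
--
--   data T = MkT Int deriving (GHC.Generics.Generic)
--   instance Generic T
--   instance HasDatatypeInfo T
--
-- 'gshow (MkT 1)' is expected to yield "MkT 1", and 'gshowsPrec 11 (MkT 1) ""'
-- to yield "(MkT 1)", since a context above app_prec forces parentheses.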
|
well-typed/generics-sop
|
generics-sop/bench/SOPBench/Show.hs
|
bsd-3-clause
| 1,739 | 0 | 17 | 438 | 704 | 354 | 350 | 51 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module SignedAuth.Sign where
import qualified Crypto.Error as Crypto
import qualified Crypto.PubKey.Ed25519 as Ed25519
import qualified Data.ASN1.BinaryEncoding as ASN1
import qualified Data.ASN1.Encoding as ASN1
import Data.ASN1.Prim
import qualified Data.ByteArray as BA
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString.Char8 as BS8
type PrivateKey = (Ed25519.SecretKey, Ed25519.PublicKey)
type PublicKey = Ed25519.PublicKey
-- | Read .pem-wrapped DER octet sequence of a private key
decodePrivateKeyPem :: ByteString -> Either String ByteString
decodePrivateKeyPem input = case BS8.lines input of
( "-----BEGIN PRIVATE KEY-----"
: b64Key
: "-----END PRIVATE KEY-----"
: _
) -> Right b64Key
_ -> Left "Could not parse .pem data"
-- | Parse private key binary data from ASN1 abstract type
parsePrivateKeyAsn1 :: [ASN1] -> Either String ByteString
-- Ed25519 sequence deduced from looking at the key OpenSSL produces
parsePrivateKeyAsn1
[ Start Sequence
, IntVal 0
, Start Sequence
, OID [1,3,101,112]
, End Sequence
, OctetString payload
, End Sequence
] = Right (BS.drop 2 payload)
parsePrivateKeyAsn1 _ = Left "Could not parse ASN.1 key data"
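-- The matched shape appears to correspond to a PKCS#8 PrivateKeyInfo:
-- SEQUENCE { version 0, SEQUENCE { OID 1.3.101.112 (Ed25519) }, OCTET STRING }.
-- For Ed25519 the outer OCTET STRING wraps an inner OCTET STRING header
-- (2 bytes: tag and length) before the 32-byte seed, which is why the code
-- drops the first two bytes of the payload.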
-- | Read a Base64-encoded DER private key
readPrivateKeyDer :: ByteString -> Either String PrivateKey
readPrivateKeyDer b64 = do
binaryKey <- Base64.decode b64
der <- case ASN1.decodeASN1' ASN1.DER binaryKey of
Left e -> Left $ show e
Right r -> Right r
keyBytes <- parsePrivateKeyAsn1 der
case Ed25519.secretKey keyBytes of
Crypto.CryptoPassed r -> Right (r, Ed25519.toPublic r)
Crypto.CryptoFailed e -> Left $ "Could not read binary private key: " <> show e
-- | Read a private key from .pem
readPrivateKeyPem :: ByteString -> Either String PrivateKey
readPrivateKeyPem pem = do -- Either String
der <- decodePrivateKeyPem pem
readPrivateKeyDer der
-- | Read .pem-wrapped DER octet sequence of a public key
decodePublicKeyPem :: ByteString -> Either String ByteString
decodePublicKeyPem input = case BS8.lines input of
( "-----BEGIN PUBLIC KEY-----"
: b64Key
: "-----END PUBLIC KEY-----"
: _
) -> Right b64Key
_ -> Left "Could not parse .pem data"
-- | Parse public key binary data from ASN1 abstract type
parsePublicKeyAsn1 :: [ASN1] -> Either String ByteString
parsePublicKeyAsn1
[ Start Sequence
, Start Sequence
, OID [1,3,101,112]
, End Sequence
, BitString payload
, End Sequence
] = Right (BS.drop 1 $ putBitString payload)
parsePublicKeyAsn1 _ = Left "Could not parse ASN.1 key data"
readPublicKeyDer :: ByteString -> Either String PublicKey
readPublicKeyDer der = do
binaryKey <- Base64.decode der
asn1 <- case ASN1.decodeASN1' ASN1.DER binaryKey of
Left e -> Left $ show e
Right r -> Right r
keyBytes <- parsePublicKeyAsn1 asn1
case Ed25519.publicKey keyBytes of
Crypto.CryptoPassed r -> Right r
Crypto.CryptoFailed e -> Left $ "Could not read binary private key: " <> show e
-- | Read a public key from .pem
readPublicKeyPem :: ByteString -> Either String PublicKey
readPublicKeyPem pem = do
der <- decodePublicKeyPem pem
readPublicKeyDer der
newtype Signature = Signature ByteString
-- | Sign a ByteString
sign :: PrivateKey -> ByteString -> Signature
sign (secret, public) input =
Signature $ BA.convert $ Ed25519.sign secret public input
-- | Check the signature of a ByteString
verifySignature :: PublicKey -> ByteString -> Signature -> Bool
verifySignature pubkey bs (Signature sigbytes) =
case Ed25519.signature sigbytes of
Crypto.CryptoFailed e -> error $ show e
Crypto.CryptoPassed sig -> Ed25519.verify pubkey bs sig
mkKeys :: IO (PrivateKey, PublicKey)
mkKeys = do
secret <- Ed25519.generateSecretKey
let pub = Ed25519.toPublic secret
return ((secret, pub), pub)
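-- Illustrative only (not part of the original module): a sign/verify round
-- trip using the helpers above; it should evaluate to True.
signRoundTrip :: IO Bool
signRoundTrip = do
  (priv, pub) <- mkKeys
  let msg = BS8.pack "round-trip check"
  return $ verifySignature pub msg (sign priv msg)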
|
nejla/auth-service
|
auth-service-core/src/SignedAuth/Sign.hs
|
bsd-3-clause
| 4,022 | 0 | 13 | 808 | 1,025 | 525 | 500 | 93 | 3 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# OPTIONS_GHC -Wno-incomplete-record-updates #-}
module TcHoleErrors ( findValidHoleFits, tcFilterHoleFits
, tcCheckHoleFit, tcSubsumes
, withoutUnification
, fromPureHFPlugin
-- Re-exports for convenience
, hfIsLcl
, pprHoleFit, debugHoleFitDispConfig
-- Re-exported from TcHoleFitTypes
, TypedHole (..), HoleFit (..), HoleFitCandidate (..)
, CandPlugin, FitPlugin
, HoleFitPlugin (..), HoleFitPluginR (..)
) where
import GhcPrelude
import TcRnTypes
import TcRnMonad
import Constraint
import TcOrigin
import TcMType
import TcEvidence
import TcType
import Type
import DataCon
import Name
import RdrName ( pprNameProvenance , GlobalRdrElt (..), globalRdrEnvElts )
import PrelNames ( gHC_ERR )
import Id
import VarSet
import VarEnv
import Bag
import ConLike ( ConLike(..) )
import Util
import TcEnv (tcLookup)
import Outputable
import DynFlags
import Maybes
import FV ( fvVarList, fvVarSet, unionFV, mkFVs, FV )
import Control.Arrow ( (&&&) )
import Control.Monad ( filterM, replicateM, foldM )
import Data.List ( partition, sort, sortOn, nubBy )
import Data.Graph ( graphFromEdges, topSort )
import TcSimplify ( simpl_top, runTcSDeriveds )
import TcUnify ( tcSubType_NC )
import ExtractDocs ( extractDocs )
import qualified Data.Map as Map
import GHC.Hs.Doc ( unpackHDS, DeclDocMap(..) )
import HscTypes ( ModIface_(..) )
import GHC.Iface.Load ( loadInterfaceForNameMaybe )
import PrelInfo (knownKeyNames)
import TcHoleFitTypes
{-
Note [Valid hole fits include ...]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
`findValidHoleFits` returns the "Valid hole fits include ..." message.
For example, look at the following definitions in a file called test.hs:
import Data.List (inits)
f :: [String]
f = _ "hello, world"
The hole in `f` would generate the message:
• Found hole: _ :: [Char] -> [String]
• In the expression: _
In the expression: _ "hello, world"
In an equation for ‘f’: f = _ "hello, world"
• Relevant bindings include f :: [String] (bound at test.hs:6:1)
Valid hole fits include
lines :: String -> [String]
(imported from ‘Prelude’ at mpt.hs:3:8-9
(and originally defined in ‘base-4.11.0.0:Data.OldList’))
words :: String -> [String]
(imported from ‘Prelude’ at mpt.hs:3:8-9
(and originally defined in ‘base-4.11.0.0:Data.OldList’))
inits :: forall a. [a] -> [[a]]
with inits @Char
(imported from ‘Data.List’ at mpt.hs:4:19-23
(and originally defined in ‘base-4.11.0.0:Data.OldList’))
repeat :: forall a. a -> [a]
with repeat @String
(imported from ‘Prelude’ at mpt.hs:3:8-9
(and originally defined in ‘GHC.List’))
fail :: forall (m :: * -> *). Monad m => forall a. String -> m a
with fail @[] @String
(imported from ‘Prelude’ at mpt.hs:3:8-9
(and originally defined in ‘GHC.Base’))
return :: forall (m :: * -> *). Monad m => forall a. a -> m a
with return @[] @String
(imported from ‘Prelude’ at mpt.hs:3:8-9
(and originally defined in ‘GHC.Base’))
pure :: forall (f :: * -> *). Applicative f => forall a. a -> f a
with pure @[] @String
(imported from ‘Prelude’ at mpt.hs:3:8-9
(and originally defined in ‘GHC.Base’))
read :: forall a. Read a => String -> a
with read @[String]
(imported from ‘Prelude’ at mpt.hs:3:8-9
(and originally defined in ‘Text.Read’))
mempty :: forall a. Monoid a => a
with mempty @([Char] -> [String])
(imported from ‘Prelude’ at mpt.hs:3:8-9
(and originally defined in ‘GHC.Base’))
Valid hole fits are found by checking top level identifiers and local bindings
in scope for whether their type can be instantiated to the type of the hole.
Additionally, we also need to check whether all relevant constraints are solved
by choosing an identifier of that type as well, see Note [Relevant Constraints]
Since checking for subsumption results in the side-effect of type variables
being unified by the simplifier, we need to take care to restore them
to being flexible type variables after we've checked for subsumption.
This is to avoid affecting the hole and later checks by prematurely having
unified one of the free unification variables.
When outputting, we sort the hole fits by the size of the types we'd need to
apply by type application to the type of the fit to make it fit. This is done
in order to display "more relevant" suggestions first. Another option is to
sort by building a subsumption graph of fits, i.e. a graph of which fits subsume
what other fits, and then outputting those fits which are subsumed by other
fits (i.e. those more specific than other fits) first. This results in the ones
"closest" to the type of the hole to be displayed first.
To help users understand how the suggested fit works, we also display the values
that the quantified type variables would take if that fit is used, like
`mempty @([Char] -> [String])` and `pure @[] @String` in the example above.
If -XTypeApplications is enabled, this can even be copied verbatim as a
replacement for the hole.
Note [Nested implications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For the simplifier to be able to use any givens present in the enclosing
implications to solve relevant constraints, we nest the wanted subsumption
constraints and relevant constraints within the enclosing implications.
As an example, let's look at the following code:
f :: Show a => a -> String
f x = show _
The hole will result in the hole constraint:
[WD] __a1ph {0}:: a0_a1pd[tau:2] (CHoleCan: ExprHole(_))
Here the nested implications are just one level deep, namely:
[Implic {
TcLevel = 2
Skolems = a_a1pa[sk:2]
No-eqs = True
Status = Unsolved
Given = $dShow_a1pc :: Show a_a1pa[sk:2]
Wanted =
WC {wc_simple =
[WD] __a1ph {0}:: a_a1pd[tau:2] (CHoleCan: ExprHole(_))
[WD] $dShow_a1pe {0}:: Show a_a1pd[tau:2] (CDictCan(psc))}
Binds = EvBindsVar<a1pi>
Needed inner = []
Needed outer = []
the type signature for:
f :: forall a. Show a => a -> String }]
As we can see, the givens say that the information about the skolem
`a_a1pa[sk:2]` fulfills the Show constraint.
The simples are:
[[WD] __a1ph {0}:: a0_a1pd[tau:2] (CHoleCan: ExprHole(_)),
[WD] $dShow_a1pe {0}:: Show a0_a1pd[tau:2] (CNonCanonical)]
I.e. the hole `a0_a1pd[tau:2]` and the constraint that the type of the hole must
fulfill `Show a0_a1pd[tau:2])`.
So when we run the check, we need to make sure that the
[WD] $dShow_a1pe {0}:: Show a0_a1pd[tau:2] (CNonCanonical)
Constraint gets solved. When we now check for whether `x :: a0_a1pd[tau:2]` fits
the hole in `tcCheckHoleFit`, the call to `tcSubType` will end up writing the
meta type variable `a0_a1pd[tau:2] := a_a1pa[sk:2]`. By wrapping the wanted
constraints needed by tcSubType_NC and the relevant constraints (see
Note [Relevant Constraints] for more details) in the nested implications, we
can pass the information in the givens along to the simplifier. For our example,
we end up needing to check whether the following constraints are soluble.
WC {wc_impl =
Implic {
TcLevel = 2
Skolems = a_a1pa[sk:2]
No-eqs = True
Status = Unsolved
Given = $dShow_a1pc :: Show a_a1pa[sk:2]
Wanted =
WC {wc_simple =
[WD] $dShow_a1pe {0}:: Show a0_a1pd[tau:2] (CNonCanonical)}
Binds = EvBindsVar<a1pl>
Needed inner = []
Needed outer = []
the type signature for:
f :: forall a. Show a => a -> String }}
But since `a0_a1pd[tau:2] := a_a1pa[sk:2]` and we have from the nested
implications that Show a_a1pa[sk:2] is a given, this is trivial, and we end up
with a final WC of WC {}, confirming x :: a0_a1pd[tau:2] as a match.
To avoid side-effects on the nested implications, we create a new EvBindsVar so
that any changes to the ev binds during a check remains localised to that check.
Note [Valid refinement hole fits include ...]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the `-frefinement-level-hole-fits=N` flag is given, we additionally look
for "valid refinement hole fits"", i.e. valid hole fits with up to N
additional holes in them.
With `-frefinement-level-hole-fits=0` (the default), GHC will find all
identifiers 'f' (top-level or nested) that will fit in the hole.
With `-frefinement-level-hole-fits=1`, GHC will additionally find all
applications 'f _' that will fit in the hole, where 'f' is an in-scope
identifier, applied to a single argument. It will also report the type of the
needed argument (a new hole).
And similarly as the number of arguments increases.
As an example, let's look at the following code:
f :: [Integer] -> Integer
f = _
with `-frefinement-level-hole-fits=1`, we'd get:
Valid refinement hole fits include
foldl1 (_ :: Integer -> Integer -> Integer)
with foldl1 @[] @Integer
where foldl1 :: forall (t :: * -> *).
Foldable t =>
forall a. (a -> a -> a) -> t a -> a
foldr1 (_ :: Integer -> Integer -> Integer)
with foldr1 @[] @Integer
where foldr1 :: forall (t :: * -> *).
Foldable t =>
forall a. (a -> a -> a) -> t a -> a
const (_ :: Integer)
with const @Integer @[Integer]
where const :: forall a b. a -> b -> a
($) (_ :: [Integer] -> Integer)
with ($) @'GHC.Types.LiftedRep @[Integer] @Integer
where ($) :: forall a b. (a -> b) -> a -> b
fail (_ :: String)
with fail @((->) [Integer]) @Integer
where fail :: forall (m :: * -> *).
Monad m =>
forall a. String -> m a
return (_ :: Integer)
with return @((->) [Integer]) @Integer
where return :: forall (m :: * -> *). Monad m => forall a. a -> m a
(Some refinement hole fits suppressed;
use -fmax-refinement-hole-fits=N or -fno-max-refinement-hole-fits)
Which are hole fits with holes in them. This allows e.g. beginners to
discover the fold functions and similar, but also allows for advanced users
to figure out the valid functions in the Free monad, e.g.
instance Functor f => Monad (Free f) where
Pure a >>= f = f a
Free f >>= g = Free (fmap _a f)
Will output (with -frefinement-level-hole-fits=1):
Found hole: _a :: Free f a -> Free f b
Where: ‘a’, ‘b’ are rigid type variables bound by
the type signature for:
(>>=) :: forall a b. Free f a -> (a -> Free f b) -> Free f b
at fms.hs:25:12-14
‘f’ is a rigid type variable bound by
...
Relevant bindings include
g :: a -> Free f b (bound at fms.hs:27:16)
f :: f (Free f a) (bound at fms.hs:27:10)
(>>=) :: Free f a -> (a -> Free f b) -> Free f b
(bound at fms.hs:25:12)
...
Valid refinement hole fits include
...
(=<<) (_ :: a -> Free f b)
with (=<<) @(Free f) @a @b
where (=<<) :: forall (m :: * -> *) a b.
Monad m =>
(a -> m b) -> m a -> m b
(imported from ‘Prelude’ at fms.hs:5:18-22
(and originally defined in ‘GHC.Base’))
...
Where `(=<<) _` is precisely the function we want (we ultimately want `>>= g`).
We find these refinement suggestions by considering hole fits that don't
fit the type of the hole, but ones that would fit if given an additional
argument. We do this by creating a new type variable with `newOpenFlexiTyVar`
(e.g. `t_a1/m[tau:1]`), and then considering hole fits of the type
`t_a1/m[tau:1] -> v` where `v` is the type of the hole.
Since the simplifier is free to unify this new type variable with any type, we
can discover any identifiers that would fit if given another identifier of a
suitable type. This is then generalized so that we can consider any number of
additional arguments by setting the `-frefinement-level-hole-fits` flag to any
number, and then considering hole fits like e.g. `foldl _ _` with two additional
arguments.
To make sure that the refinement hole fits are useful, we check that the types
of the additional holes have a concrete value and not just an invented type
variable. This eliminates suggestions such as `head (_ :: [t0 -> a]) (_ :: t0)`,
and limits the number of less than useful refinement hole fits.
Additionally, to further aid the user in their implementation, we show the
types of the holes the binding would have to be applied to in order to work.
In the free monad example above, this is demonstrated with
`(=<<) (_ :: a -> Free f b)`, which tells the user that the `(=<<)` needs to
be applied to an expression of type `a -> Free f b` in order to match.
If -XScopedTypeVariables is enabled, this hole fit can even be copied verbatim.
Note [Relevant Constraints]
~~~~~~~~~~~~~~~~~~~
As highlighted by #14273, we need to check any relevant constraints as well
as checking for subsumption. Relevant constraints are the simple constraints
whose free unification variables are mentioned in the type of the hole.
In the simplest case, these are all non-hole constraints in the simples, such
as is the case in
f :: String
f = show _
Where the simples will be :
[[WD] __a1kz {0}:: a0_a1kv[tau:1] (CHoleCan: ExprHole(_)),
[WD] $dShow_a1kw {0}:: Show a0_a1kv[tau:1] (CNonCanonical)]
However, when there are multiple holes, we need to be more careful. As an
example, let's take a look at the following code:
f :: Show a => a -> String
f x = show (_b (show _a))
Here there are two holes, `_a` and `_b`, and the simple constraints passed to
findValidHoleFits are:
[[WD] _a_a1pi {0}:: String
-> a0_a1pd[tau:2] (CHoleCan: ExprHole(_b)),
[WD] _b_a1ps {0}:: a1_a1po[tau:2] (CHoleCan: ExprHole(_a)),
[WD] $dShow_a1pe {0}:: Show a0_a1pd[tau:2] (CNonCanonical),
[WD] $dShow_a1pp {0}:: Show a1_a1po[tau:2] (CNonCanonical)]
Here we have the two hole constraints for `_a` and `_b`, but also additional
constraints that these holes must fulfill. When we are looking for a match for
the hole `_a`, we filter the simple constraints to the "Relevant constraints",
by throwing out all hole constraints and any constraints which do not mention
a variable mentioned in the type of the hole. For hole `_a`, we will then
only require that the `$dShow_a1pp` constraint is solved, since that is
the only non-hole constraint that mentions any free type variables mentioned in
the hole constraint for `_a`, namely `a_a1pd[tau:2]` , and similarly for the
hole `_b` we only require that the `$dShow_a1pe` constraint is solved.
Note [Leaking errors]
~~~~~~~~~~~~~~~~~~~
When considering candidates, GHC believes that we're checking for validity in
actual source. However, as evidenced by #15321, #15007 and #15202, this can
cause bewildering error messages. The solution here is simple: if a candidate
would cause the type checker to error, it is not a valid hole fit, and thus it
is discarded.
-}
data HoleFitDispConfig = HFDC { showWrap :: Bool
, showWrapVars :: Bool
, showType :: Bool
, showProv :: Bool
, showMatches :: Bool }
debugHoleFitDispConfig :: HoleFitDispConfig
debugHoleFitDispConfig = HFDC True True True False False
-- We read the various -no-show-*-of-hole-fits flags
-- and set the display config accordingly.
getHoleFitDispConfig :: TcM HoleFitDispConfig
getHoleFitDispConfig
= do { sWrap <- goptM Opt_ShowTypeAppOfHoleFits
; sWrapVars <- goptM Opt_ShowTypeAppVarsOfHoleFits
; sType <- goptM Opt_ShowTypeOfHoleFits
; sProv <- goptM Opt_ShowProvOfHoleFits
; sMatc <- goptM Opt_ShowMatchesOfHoleFits
; return HFDC{ showWrap = sWrap, showWrapVars = sWrapVars
, showProv = sProv, showType = sType
, showMatches = sMatc } }
-- Which sorting algorithm to use
data SortingAlg = NoSorting -- Do not sort the fits at all
| BySize -- Sort them by the size of the match
| BySubsumption -- Sort by full subsumption
deriving (Eq, Ord)
getSortingAlg :: TcM SortingAlg
getSortingAlg =
do { shouldSort <- goptM Opt_SortValidHoleFits
; subsumSort <- goptM Opt_SortBySubsumHoleFits
; sizeSort <- goptM Opt_SortBySizeHoleFits
-- We default to sizeSort unless it has been explicitly turned off
-- or subsumption sorting has been turned on.
; return $ if not shouldSort
then NoSorting
else if subsumSort
then BySubsumption
else if sizeSort
then BySize
else NoSorting }
-- If enabled, we go through the fits and add any associated documentation,
-- by looking it up in the module or the environment (for local fits)
addDocs :: [HoleFit] -> TcM [HoleFit]
addDocs fits =
do { showDocs <- goptM Opt_ShowDocsOfHoleFits
; if showDocs
then do { (_, DeclDocMap lclDocs, _) <- extractDocs <$> getGblEnv
; mapM (upd lclDocs) fits }
else return fits }
where
msg = text "TcHoleErrors addDocs"
lookupInIface name (ModIface { mi_decl_docs = DeclDocMap dmap })
= Map.lookup name dmap
upd lclDocs fit@(HoleFit {hfCand = cand}) =
do { let name = getName cand
; doc <- if hfIsLcl fit
then pure (Map.lookup name lclDocs)
else do { mbIface <- loadInterfaceForNameMaybe msg name
; return $ mbIface >>= lookupInIface name }
; return $ fit {hfDoc = doc} }
upd _ fit = return fit
-- For pretty printing hole fits, we display the name and type of the fit,
-- with added '_' to represent any extra arguments in case of a non-zero
-- refinement level.
pprHoleFit :: HoleFitDispConfig -> HoleFit -> SDoc
pprHoleFit _ (RawHoleFit sd) = sd
pprHoleFit (HFDC sWrp sWrpVars sTy sProv sMs) (HoleFit {..}) =
hang display 2 provenance
where name = getName hfCand
tyApp = sep $ zipWithEqual "pprHoleFit" pprArg vars hfWrap
where pprArg b arg = case binderArgFlag b of
Specified -> text "@" <> pprParendType arg
-- Do not print type application for inferred
-- variables (#16456)
Inferred -> empty
Required -> pprPanic "pprHoleFit: bad Required"
(ppr b <+> ppr arg)
tyAppVars = sep $ punctuate comma $
zipWithEqual "pprHoleFit" (\v t -> ppr (binderVar v) <+>
text "~" <+> pprParendType t)
vars hfWrap
vars = unwrapTypeVars hfType
where
-- Attempts to get all the quantified type variables in a type,
-- e.g.
        -- return :: forall (m :: * -> *). Monad m => (forall a . a -> m a)
-- into [m, a]
unwrapTypeVars :: Type -> [TyCoVarBinder]
unwrapTypeVars t = vars ++ case splitFunTy_maybe unforalled of
Just (_, unfunned) -> unwrapTypeVars unfunned
_ -> []
where (vars, unforalled) = splitForAllVarBndrs t
holeVs = sep $ map (parens . (text "_" <+> dcolon <+>) . ppr) hfMatches
holeDisp = if sMs then holeVs
else sep $ replicate (length hfMatches) $ text "_"
occDisp = pprPrefixOcc name
tyDisp = ppWhen sTy $ dcolon <+> ppr hfType
has = not . null
wrapDisp = ppWhen (has hfWrap && (sWrp || sWrpVars))
$ text "with" <+> if sWrp || not sTy
then occDisp <+> tyApp
else tyAppVars
docs = case hfDoc of
Just d -> text "{-^" <>
(vcat . map text . lines . unpackHDS) d
<> text "-}"
_ -> empty
funcInfo = ppWhen (has hfMatches && sTy) $
text "where" <+> occDisp <+> tyDisp
subDisp = occDisp <+> if has hfMatches then holeDisp else tyDisp
display = subDisp $$ nest 2 (funcInfo $+$ docs $+$ wrapDisp)
provenance = ppWhen sProv $ parens $
case hfCand of
GreHFCand gre -> pprNameProvenance gre
_ -> text "bound at" <+> ppr (getSrcLoc name)
getLocalBindings :: TidyEnv -> Ct -> TcM [Id]
getLocalBindings tidy_orig ct
= do { (env1, _) <- zonkTidyOrigin tidy_orig (ctLocOrigin loc)
; go env1 [] (removeBindingShadowing $ tcl_bndrs lcl_env) }
where
loc = ctEvLoc (ctEvidence ct)
lcl_env = ctLocEnv loc
go :: TidyEnv -> [Id] -> [TcBinder] -> TcM [Id]
go _ sofar [] = return (reverse sofar)
go env sofar (tc_bndr : tc_bndrs) =
case tc_bndr of
TcIdBndr id _ -> keep_it id
_ -> discard_it
where
discard_it = go env sofar tc_bndrs
keep_it id = go env (id:sofar) tc_bndrs
-- See Note [Valid hole fits include ...]
findValidHoleFits :: TidyEnv -- ^ The tidy_env for zonking
-> [Implication] -- ^ Enclosing implications for givens
-> [Ct]
-- ^ The unsolved simple constraints in the implication for
-- the hole.
-> Ct -- ^ The hole constraint itself
-> TcM (TidyEnv, SDoc)
findValidHoleFits tidy_env implics simples ct | isExprHoleCt ct =
do { rdr_env <- getGlobalRdrEnv
; lclBinds <- getLocalBindings tidy_env ct
; maxVSubs <- maxValidHoleFits <$> getDynFlags
; hfdc <- getHoleFitDispConfig
; sortingAlg <- getSortingAlg
; dflags <- getDynFlags
; hfPlugs <- tcg_hf_plugins <$> getGblEnv
; let findVLimit = if sortingAlg > NoSorting then Nothing else maxVSubs
refLevel = refLevelHoleFits dflags
hole = TyH (listToBag relevantCts) implics (Just ct)
(candidatePlugins, fitPlugins) =
unzip $ map (\p-> ((candPlugin p) hole, (fitPlugin p) hole)) hfPlugs
; traceTc "findingValidHoleFitsFor { " $ ppr hole
; traceTc "hole_lvl is:" $ ppr hole_lvl
; traceTc "simples are: " $ ppr simples
; traceTc "locals are: " $ ppr lclBinds
; let (lcl, gbl) = partition gre_lcl (globalRdrEnvElts rdr_env)
-- We remove binding shadowings here, but only for the local level.
-- this is so we e.g. suggest the global fmap from the Functor class
-- even though there is a local definition as well, such as in the
-- Free monad example.
locals = removeBindingShadowing $
map IdHFCand lclBinds ++ map GreHFCand lcl
globals = map GreHFCand gbl
syntax = map NameHFCand builtIns
to_check = locals ++ syntax ++ globals
; cands <- foldM (flip ($)) to_check candidatePlugins
; traceTc "numPlugins are:" $ ppr (length candidatePlugins)
; (searchDiscards, subs) <-
tcFilterHoleFits findVLimit hole (hole_ty, []) cands
; (tidy_env, tidy_subs) <- zonkSubs tidy_env subs
; tidy_sorted_subs <- sortFits sortingAlg tidy_subs
; plugin_handled_subs <- foldM (flip ($)) tidy_sorted_subs fitPlugins
; let (pVDisc, limited_subs) = possiblyDiscard maxVSubs plugin_handled_subs
vDiscards = pVDisc || searchDiscards
; subs_with_docs <- addDocs limited_subs
; let vMsg = ppUnless (null subs_with_docs) $
hang (text "Valid hole fits include") 2 $
vcat (map (pprHoleFit hfdc) subs_with_docs)
$$ ppWhen vDiscards subsDiscardMsg
-- Refinement hole fits. See Note [Valid refinement hole fits include ...]
; (tidy_env, refMsg) <- if refLevel >= Just 0 then
do { maxRSubs <- maxRefHoleFits <$> getDynFlags
          -- We can use fromJust, since we know that Nothing >= _ is False.
; let refLvls = [1..(fromJust refLevel)]
-- We make a new refinement type for each level of refinement, where
-- the level of refinement indicates number of additional arguments
-- to allow.
; ref_tys <- mapM mkRefTy refLvls
; traceTc "ref_tys are" $ ppr ref_tys
; let findRLimit = if sortingAlg > NoSorting then Nothing
else maxRSubs
; refDs <- mapM (flip (tcFilterHoleFits findRLimit hole)
cands) ref_tys
; (tidy_env, tidy_rsubs) <- zonkSubs tidy_env $ concatMap snd refDs
; tidy_sorted_rsubs <- sortFits sortingAlg tidy_rsubs
-- For refinement substitutions we want matches
-- like id (_ :: t), head (_ :: [t]), asTypeOf (_ :: t),
-- and others in that vein to appear last, since these are
-- unlikely to be the most relevant fits.
; (tidy_env, tidy_hole_ty) <- zonkTidyTcType tidy_env hole_ty
; let hasExactApp = any (tcEqType tidy_hole_ty) . hfWrap
(exact, not_exact) = partition hasExactApp tidy_sorted_rsubs
; plugin_handled_rsubs <- foldM (flip ($))
(not_exact ++ exact) fitPlugins
; let (pRDisc, exact_last_rfits) =
possiblyDiscard maxRSubs $ plugin_handled_rsubs
rDiscards = pRDisc || any fst refDs
; rsubs_with_docs <- addDocs exact_last_rfits
; return (tidy_env,
ppUnless (null rsubs_with_docs) $
hang (text "Valid refinement hole fits include") 2 $
vcat (map (pprHoleFit hfdc) rsubs_with_docs)
$$ ppWhen rDiscards refSubsDiscardMsg) }
else return (tidy_env, empty)
; traceTc "findingValidHoleFitsFor }" empty
; return (tidy_env, vMsg $$ refMsg) }
where
  -- We extract the type, the tcLevel and the type's free variables
  -- from the constraint.
hole_ty :: TcPredType
hole_ty = ctPred ct
hole_fvs :: FV
hole_fvs = tyCoFVsOfType hole_ty
hole_lvl = ctLocLevel $ ctEvLoc $ ctEvidence ct
-- BuiltInSyntax names like (:) and []
builtIns :: [Name]
builtIns = filter isBuiltInSyntax knownKeyNames
-- We make a refinement type by adding a new type variable in front
  -- of the type of the hole, going from e.g. [Integer] -> Integer
-- to t_a1/m[tau:1] -> [Integer] -> Integer. This allows the simplifier
-- to unify the new type variable with any type, allowing us
-- to suggest a "refinement hole fit", like `(foldl1 _)` instead
-- of only concrete hole fits like `sum`.
mkRefTy :: Int -> TcM (TcType, [TcTyVar])
mkRefTy refLvl = (wrapWithVars &&& id) <$> newTyVars
where newTyVars = replicateM refLvl $ setLvl <$>
(newOpenTypeKind >>= newFlexiTyVar)
setLvl = flip setMetaTyVarTcLevel hole_lvl
wrapWithVars vars = mkVisFunTys (map mkTyVarTy vars) hole_ty
sortFits :: SortingAlg -- How we should sort the hole fits
-> [HoleFit] -- The subs to sort
-> TcM [HoleFit]
sortFits NoSorting subs = return subs
sortFits BySize subs
= (++) <$> sortBySize (sort lclFits)
<*> sortBySize (sort gblFits)
where (lclFits, gblFits) = span hfIsLcl subs
-- To sort by subsumption, we invoke the sortByGraph function, which
-- builds the subsumption graph for the fits and then sorts them using a
-- graph sort. Since we want locals to come first anyway, we can sort
-- them separately. The substitutions are already checked in local then
-- global order, so we can get away with using span here.
-- We use (<*>) to expose the parallelism, in case it becomes useful later.
sortFits BySubsumption subs
= (++) <$> sortByGraph (sort lclFits)
<*> sortByGraph (sort gblFits)
where (lclFits, gblFits) = span hfIsLcl subs
-- See Note [Relevant Constraints]
relevantCts :: [Ct]
relevantCts = if isEmptyVarSet (fvVarSet hole_fvs) then []
else filter isRelevant simples
where ctFreeVarSet :: Ct -> VarSet
ctFreeVarSet = fvVarSet . tyCoFVsOfType . ctPred
hole_fv_set = fvVarSet hole_fvs
anyFVMentioned :: Ct -> Bool
anyFVMentioned ct = not $ isEmptyVarSet $
ctFreeVarSet ct `intersectVarSet` hole_fv_set
-- We filter out those constraints that have no variables (since
-- they won't be solved by finding a type for the type variable
-- representing the hole) and also other holes, since we're not
-- trying to find hole fits for many holes at once.
isRelevant ct = not (isEmptyVarSet (ctFreeVarSet ct))
&& anyFVMentioned ct
&& not (isHoleCt ct)
-- We zonk the hole fits so that the output aligns with the rest
-- of the typed hole error message output.
zonkSubs :: TidyEnv -> [HoleFit] -> TcM (TidyEnv, [HoleFit])
zonkSubs = zonkSubs' []
where zonkSubs' zs env [] = return (env, reverse zs)
zonkSubs' zs env (hf:hfs) = do { (env', z) <- zonkSub env hf
; zonkSubs' (z:zs) env' hfs }
zonkSub :: TidyEnv -> HoleFit -> TcM (TidyEnv, HoleFit)
zonkSub env hf@RawHoleFit{} = return (env, hf)
zonkSub env hf@HoleFit{hfType = ty, hfMatches = m, hfWrap = wrp}
= do { (env, ty') <- zonkTidyTcType env ty
; (env, m') <- zonkTidyTcTypes env m
; (env, wrp') <- zonkTidyTcTypes env wrp
; let zFit = hf {hfType = ty', hfMatches = m', hfWrap = wrp'}
; return (env, zFit ) }
-- Based on the flags, we might possibly discard some or all the
-- fits we've found.
possiblyDiscard :: Maybe Int -> [HoleFit] -> (Bool, [HoleFit])
possiblyDiscard (Just max) fits = (fits `lengthExceeds` max, take max fits)
possiblyDiscard Nothing fits = (False, fits)
-- Sort by size uses as a measure for relevance the sizes of the
-- different types needed to instantiate the fit to the type of the hole.
-- This is much quicker than sorting by subsumption, and gives reasonable
-- results in most cases.
sortBySize :: [HoleFit] -> TcM [HoleFit]
sortBySize = return . sortOn sizeOfFit
where sizeOfFit :: HoleFit -> TypeSize
sizeOfFit = sizeTypes . nubBy tcEqType . hfWrap
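  -- (Illustrative: a fit whose wrapper only needs @Int applied thus sorts
  -- ahead of one needing @(Maybe [Int]), since the former's instantiating
  -- types are smaller.)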
-- Based on a suggestion by phadej on #ghc, we can sort the found fits
-- by constructing a subsumption graph, and then do a topological sort of
-- the graph. This makes the most specific types appear first, which are
-- probably those most relevant. This takes a lot of work (but results in
-- much more useful output), and can be disabled by
-- '-fno-sort-valid-hole-fits'.
sortByGraph :: [HoleFit] -> TcM [HoleFit]
sortByGraph fits = go [] fits
where tcSubsumesWCloning :: TcType -> TcType -> TcM Bool
tcSubsumesWCloning ht ty = withoutUnification fvs (tcSubsumes ht ty)
where fvs = tyCoFVsOfTypes [ht,ty]
go :: [(HoleFit, [HoleFit])] -> [HoleFit] -> TcM [HoleFit]
go sofar [] = do { traceTc "subsumptionGraph was" $ ppr sofar
; return $ uncurry (++)
$ partition hfIsLcl topSorted }
where toV (hf, adjs) = (hf, hfId hf, map hfId adjs)
(graph, fromV, _) = graphFromEdges $ map toV sofar
topSorted = map ((\(h,_,_) -> h) . fromV) $ topSort graph
go sofar (hf:hfs) =
do { adjs <-
filterM (tcSubsumesWCloning (hfType hf) . hfType) fits
; go ((hf, adjs):sofar) hfs }
-- We don't (as of yet) handle holes in types, only in expressions.
findValidHoleFits env _ _ _ = return (env, empty)
-- | tcFilterHoleFits filters the candidates by whether, given the implications
-- and the relevant constraints, they can be made to match the type by
-- running the type checker. Stops after finding limit matches.
tcFilterHoleFits :: Maybe Int
-- ^ How many we should output, if limited
-> TypedHole -- ^ The hole to filter against
-> (TcType, [TcTyVar])
-- ^ The type to check for fits and a list of refinement
-- variables (free type variables in the type) for emulating
-- additional holes.
-> [HoleFitCandidate]
-- ^ The candidates to check whether fit.
-> TcM (Bool, [HoleFit])
-- ^ We return whether or not we stopped due to hitting the limit
-- and the fits we found.
tcFilterHoleFits (Just 0) _ _ _ = return (False, []) -- Stop right away on 0
tcFilterHoleFits limit (TyH {..}) ht@(hole_ty, _) candidates =
do { traceTc "checkingFitsFor {" $ ppr hole_ty
; (discards, subs) <- go [] emptyVarSet limit ht candidates
; traceTc "checkingFitsFor }" empty
; return (discards, subs) }
where
hole_fvs :: FV
hole_fvs = tyCoFVsOfType hole_ty
    -- Kick off the checking of the elements.
-- We iterate over the elements, checking each one in turn for whether
-- it fits, and adding it to the results if it does.
go :: [HoleFit] -- What we've found so far.
-> VarSet -- Ids we've already checked
-> Maybe Int -- How many we're allowed to find, if limited
-> (TcType, [TcTyVar]) -- The type, and its refinement variables.
-> [HoleFitCandidate] -- The elements we've yet to check.
-> TcM (Bool, [HoleFit])
go subs _ _ _ [] = return (False, reverse subs)
go subs _ (Just 0) _ _ = return (True, reverse subs)
go subs seen maxleft ty (el:elts) =
-- See Note [Leaking errors]
tryTcDiscardingErrs discard_it $
do { traceTc "lookingUp" $ ppr el
; maybeThing <- lookup el
; case maybeThing of
Just id | not_trivial id ->
do { fits <- fitsHole ty (idType id)
; case fits of
Just (wrp, matches) -> keep_it id wrp matches
_ -> discard_it }
_ -> discard_it }
where
-- We want to filter out undefined and the likes from GHC.Err
not_trivial id = nameModule_maybe (idName id) /= Just gHC_ERR
lookup :: HoleFitCandidate -> TcM (Maybe Id)
lookup (IdHFCand id) = return (Just id)
lookup hfc = do { thing <- tcLookup name
; return $ case thing of
ATcId {tct_id = id} -> Just id
AGlobal (AnId id) -> Just id
AGlobal (AConLike (RealDataCon con)) ->
Just (dataConWrapId con)
_ -> Nothing }
where name = case hfc of
IdHFCand id -> idName id
GreHFCand gre -> gre_name gre
NameHFCand name -> name
discard_it = go subs seen maxleft ty elts
keep_it eid wrp ms = go (fit:subs) (extendVarSet seen eid)
((\n -> n - 1) <$> maxleft) ty elts
where
fit = HoleFit { hfId = eid, hfCand = el, hfType = (idType eid)
, hfRefLvl = length (snd ty)
, hfWrap = wrp, hfMatches = ms
, hfDoc = Nothing }
unfoldWrapper :: HsWrapper -> [Type]
unfoldWrapper = reverse . unfWrp'
where unfWrp' (WpTyApp ty) = [ty]
unfWrp' (WpCompose w1 w2) = unfWrp' w1 ++ unfWrp' w2
unfWrp' _ = []
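    -- For example (illustrative): unfWrp' flattens a composition left to
    -- right and the result is then reversed, so
    --   unfoldWrapper (WpTyApp t2 `WpCompose` WpTyApp t1) == [t1, t2]
    -- i.e. the type from the right-hand wrapper ends up first in the list.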
-- The real work happens here, where we invoke the type checker using
-- tcCheckHoleFit to see whether the given type fits the hole.
fitsHole :: (TcType, [TcTyVar]) -- The type of the hole wrapped with the
-- refinement variables created to simulate
-- additional holes (if any), and the list
-- of those variables (possibly empty).
-- As an example: If the actual type of the
-- hole (as specified by the hole
-- constraint CHoleExpr passed to
-- findValidHoleFits) is t and we want to
-- simulate N additional holes, h_ty will
-- be r_1 -> ... -> r_N -> t, and
-- ref_vars will be [r_1, ... , r_N].
-- In the base case with no additional
-- holes, h_ty will just be t and ref_vars
-- will be [].
-> TcType -- The type we're checking to whether it can be
-- instantiated to the type h_ty.
-> TcM (Maybe ([TcType], [TcType])) -- If it is not a match, we
-- return Nothing. Otherwise,
-- we Just return the list of
-- types that quantified type
-- variables in ty would take
-- if used in place of h_ty,
-- and the list types of any
-- additional holes simulated
-- with the refinement
-- variables in ref_vars.
fitsHole (h_ty, ref_vars) ty =
-- We wrap this with the withoutUnification to avoid having side-effects
-- beyond the check, but we rely on the side-effects when looking for
-- refinement hole fits, so we can't wrap the side-effects deeper than this.
withoutUnification fvs $
do { traceTc "checkingFitOf {" $ ppr ty
; (fits, wrp) <- tcCheckHoleFit hole h_ty ty
; traceTc "Did it fit?" $ ppr fits
; traceTc "wrap is: " $ ppr wrp
; traceTc "checkingFitOf }" empty
; z_wrp_tys <- zonkTcTypes (unfoldWrapper wrp)
-- We'd like to avoid refinement suggestions like `id _ _` or
          -- `head _ _`, and only suggest refinements where all our phantom
-- variables got unified during the checking. This can be disabled
-- with the `-fabstract-refinement-hole-fits` flag.
-- Here we do the additional handling when there are refinement
-- variables, i.e. zonk them to read their final value to check for
-- abstract refinements, and to report what the type of the simulated
-- holes must be for this to be a match.
; if fits
then if null ref_vars
then return (Just (z_wrp_tys, []))
else do { let -- To be concrete matches, matches have to
-- be more than just an invented type variable.
fvSet = fvVarSet fvs
notAbstract :: TcType -> Bool
notAbstract t = case getTyVar_maybe t of
Just tv -> tv `elemVarSet` fvSet
_ -> True
allConcrete = all notAbstract z_wrp_tys
; z_vars <- zonkTcTyVars ref_vars
; let z_mtvs = mapMaybe tcGetTyVar_maybe z_vars
; allFilled <- not <$> anyM isFlexiTyVar z_mtvs
; allowAbstract <- goptM Opt_AbstractRefHoleFits
; if allowAbstract || (allFilled && allConcrete )
then return $ Just (z_wrp_tys, z_vars)
else return Nothing }
else return Nothing }
where fvs = mkFVs ref_vars `unionFV` hole_fvs `unionFV` tyCoFVsOfType ty
hole = TyH tyHRelevantCts tyHImplics Nothing
subsDiscardMsg :: SDoc
subsDiscardMsg =
text "(Some hole fits suppressed;" <+>
text "use -fmax-valid-hole-fits=N" <+>
text "or -fno-max-valid-hole-fits)"
refSubsDiscardMsg :: SDoc
refSubsDiscardMsg =
text "(Some refinement hole fits suppressed;" <+>
text "use -fmax-refinement-hole-fits=N" <+>
text "or -fno-max-refinement-hole-fits)"
-- | Checks whether a MetaTyVar is flexible or not.
isFlexiTyVar :: TcTyVar -> TcM Bool
isFlexiTyVar tv | isMetaTyVar tv = isFlexi <$> readMetaTyVar tv
isFlexiTyVar _ = return False
-- | Takes a list of free variables and restores any Flexi type variables in
-- free_vars after the action is run.
withoutUnification :: FV -> TcM a -> TcM a
withoutUnification free_vars action =
do { flexis <- filterM isFlexiTyVar fuvs
; result <- action
-- Reset any mutated free variables
; mapM_ restore flexis
; return result }
where restore = flip writeTcRef Flexi . metaTyVarRef
fuvs = fvVarList free_vars
-- | Reports whether the first type (ty_a) subsumes the second type (ty_b),
-- discarding any errors. Subsumption here means that ty_b can fit into
-- ty_a, i.e. `tcSubsumes a b == True` if b is a subtype of a.
tcSubsumes :: TcSigmaType -> TcSigmaType -> TcM Bool
tcSubsumes ty_a ty_b = fst <$> tcCheckHoleFit dummyHole ty_a ty_b
where dummyHole = TyH emptyBag [] Nothing
-- | A tcSubsumes which takes into account relevant constraints, to fix trac
-- #14273. This makes sure that when checking whether a type fits the hole,
-- the type has to be subsumed by type of the hole as well as fulfill all
-- constraints on the type of the hole.
-- Note: The simplifier may perform unification, so make sure to restore any
-- free type variables to avoid side-effects.
tcCheckHoleFit :: TypedHole -- ^ The hole to check against
-> TcSigmaType
-- ^ The type to check against (possibly modified, e.g. refined)
-> TcSigmaType -- ^ The type to check whether fits.
-> TcM (Bool, HsWrapper)
-- ^ Whether it was a match, and the wrapper from hole_ty to ty.
tcCheckHoleFit _ hole_ty ty | hole_ty `eqType` ty
= return (True, idHsWrapper)
tcCheckHoleFit (TyH {..}) hole_ty ty = discardErrs $
do { -- We wrap the subtype constraint in the implications to pass along the
-- givens, and so we must ensure that any nested implications and skolems
-- end up with the correct level. The implications are ordered so that
-- the innermost (the one with the highest level) is first, so it
-- suffices to get the level of the first one (or the current level, if
-- there are no implications involved).
innermost_lvl <- case tyHImplics of
[] -> getTcLevel
-- imp is the innermost implication
(imp:_) -> return (ic_tclvl imp)
; (wrp, wanted) <- setTcLevel innermost_lvl $ captureConstraints $
tcSubType_NC ExprSigCtxt ty hole_ty
; traceTc "Checking hole fit {" empty
; traceTc "wanteds are: " $ ppr wanted
; if isEmptyWC wanted && isEmptyBag tyHRelevantCts
then traceTc "}" empty >> return (True, wrp)
else do { fresh_binds <- newTcEvBinds
-- The relevant constraints may contain HoleDests, so we must
-- take care to clone them as well (to avoid #15370).
; cloned_relevants <- mapBagM cloneWanted tyHRelevantCts
-- We wrap the WC in the nested implications, see
                 -- Note [Nested implications]
; let outermost_first = reverse tyHImplics
setWC = setWCAndBinds fresh_binds
-- We add the cloned relevants to the wanteds generated by
-- the call to tcSubType_NC, see Note [Relevant Constraints]
-- There's no need to clone the wanteds, because they are
                 -- freshly generated by `tcSubType_NC`.
w_rel_cts = addSimples wanted cloned_relevants
w_givens = foldr setWC w_rel_cts outermost_first
; traceTc "w_givens are: " $ ppr w_givens
; rem <- runTcSDeriveds $ simpl_top w_givens
-- We don't want any insoluble or simple constraints left, but
-- solved implications are ok (and necessary for e.g. undefined)
; traceTc "rems was:" $ ppr rem
; traceTc "}" empty
; return (isSolvedWC rem, wrp) } }
where
setWCAndBinds :: EvBindsVar -- Fresh ev binds var.
-> Implication -- The implication to put WC in.
-> WantedConstraints -- The WC constraints to put implic.
-> WantedConstraints -- The new constraints.
setWCAndBinds binds imp wc
= WC { wc_simple = emptyBag
, wc_impl = unitBag $ imp { ic_wanted = wc , ic_binds = binds } }
-- | Maps a plugin that needs no state to one with an empty one.
fromPureHFPlugin :: HoleFitPlugin -> HoleFitPluginR
fromPureHFPlugin plug =
HoleFitPluginR { hfPluginInit = newTcRef ()
, hfPluginRun = const plug
, hfPluginStop = const $ return () }
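-- Illustrative only (assuming HoleFitPlugin is a record with candPlugin and
-- fitPlugin fields, as their use in findValidHoleFits suggests): a plugin
-- that leaves both candidates and fits untouched could be lifted with
--
--   fromPureHFPlugin (HoleFitPlugin { candPlugin = const return
--                                   , fitPlugin  = const return })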
|
sdiehl/ghc
|
compiler/typecheck/TcHoleErrors.hs
|
bsd-3-clause
| 46,690 | 86 | 35 | 14,903 | 6,102 | 3,286 | 2,816 | 455 | 17 |
import Test.HUnit
import Core.Partition
import Data.Word (Word8)
main :: IO ()
main = do runTestTT suite
return ()
t_fromList_toList = "fromList, toList" ~: test
[
let xs = [(1, 'a'), (2, 'c'), (1, 'z'), (0, maxBound)]
in toList ((fromList xs) :: PartitionL Char) ~=? xs
, let xs = [(5, 32), (19, 65), (0, 69), (16, 92), (1, 255)]
in toList ((fromList xs) :: PartitionL Word8) ~=? xs
, let xs = [(0, 255)]
in toList ((fromList xs) :: PartitionL Word8) ~=? xs
]
t_getBlock = "getBlock" ~: test
[
let pa :: PartitionL Char
pa = fromList [(1, 'B'), (6, 'c'), (1, 'z'), (6, maxBound)]
in (gb 'A' pa, gb 'B' pa, gb 'C' pa) ~=? (1, 1, 6)
, let pa :: PartitionL Word8
pa = fromList [(1, 5), (2, 6), (3, 7), (4, maxBound)]
in (gb 4 pa, gb 5 pa, gb 6 pa, gb 7 pa, gb 8 pa) ~=? (1, 1, 2, 3, 4)
, let pa :: PartitionL Word8
pa = fromList [(1, 0), (2, 1), (3, 254), (4, maxBound)]
in (gb 0 pa, gb 1 pa, gb 2 pa, gb 253 pa, gb 254 pa, gb 255 pa) ~=?
(1, 2, 3, 3, 3, 4)
]
where
gb :: Pa p s => s -> p s -> BlockId
gb = getBlock
--t_mergeWith
t_representatives = "representatives" ~: test
[
let pa :: PartitionL Char
pa = fromList [(7, 'B'), (6, 'c'), (1, 'z'), (5, maxBound)]
in representatives pa ~=? [(1, 'z'), (5, maxBound), (6, 'c'), (7, 'B')]
, let pa :: PartitionL Word8
pa = fromList [(1, 5), (3, 6), (1, 8), (3, 9), (1, maxBound)]
in representatives pa ~=? [(1, 5), (3, 6)]
, let pa :: PartitionL Word8
pa = fromList [(15, maxBound)]
in representatives pa ~=? [(15, maxBound)]
]
t_pmap = "pmap" ~: test
[
let pa :: PartitionL Word8
pa = fromList [(6, 5), (7, 6), (1, 8), (5, 9), (4, maxBound)]
in (mapList f1 pa, mapList f2 pa, mapList f3 pa) ~=?
([(7, 5), (8, 6), (2, 8), (6, 9), (5, maxBound)]
,[(9, maxBound)]
,[(9, 6), (1, 8), (9, maxBound)])
, let pa :: PartitionL Word8
pa = fromList [(4, 5), (3, 6), (2, 8), (3, 9), (0, maxBound)]
in (mapList f1 pa, mapList f2 pa, mapList f3 pa) ~=?
([(5, 5), (4, 6), (3, 8), (4, 9), (1, maxBound)]
,[(9, maxBound)]
,[(9, 6), (4, 8), (9, 9), (0, maxBound)])
]
where
mapList f = toList . pmap f
f1 = succ
f2 = const 9
f3 x | x `elem` [0..3] = x^2
| otherwise = 9
t_toIntervals = "toIntervals" ~: test
[
let pa :: PartitionL Word8
pa = fromList [(6, 5), (7, 6), (1, 8), (5, 9), (4, maxBound)]
in toIntervals pa ~=?
[(6, 0, 5), (7, 6, 6), (1, 7, 8), (5, 9, 9), (4, 10, maxBound)]
, let pa :: PartitionL Word8
pa = fromList [(6, 0), (5, pred $ maxBound), (7, maxBound)]
in toIntervals pa ~=?
[(6, 0, 0), (5, 1, pred $ maxBound), (7, maxBound, maxBound)]
]
t_fromRanges = "fromRanges" ~:
[
fr [r 'a' 'a'] ~=? fr [r 'a' 'a', r 'a' 'a']
, fr [r 'a' 'b'] ~=? fr [r 'a' 'a', r 'b' 'b']
, fr [r 'a' 'd'] ~=? fr [r 'a' 'b', r 'c' 'd']
, fr [r 'a' 'e'] ~=? fr [r 'a' 'c', r 'b' 'e']
, fr [r 'a' 'e'] ~=? fr [r 'a' 'b', r 'b' 'b', r 'd' 'e', r 'c' 'c']
, fr [r 'c' 'd', r 'a' 'a'] ~=? fr [r 'a' 'a', r 'd' 'd', r 'c' 'c']
, fr [r 'a' 'g', r 'A' 'a', r 'c' 'd', r 'i' 'l', r 'm' 'z'] ~=?
fr [r 'A' 'g', r 'i' 'z']
, fr' [r 12 12] ~=? fr' [r 12 12, r 12 12]
, fr' [r 12 13] ~=? fr' [r 12 12, r 13 13]
, fr' [r 12 13] ~=? fr' [r 13 13, r 12 12]
, fr' [r 12 13] ~=? fr' [r 13 13, r 12 13]
, fr' [r 12 13] ~=? fr' [r 12 13, r 12 12]
, fr' [r 10 90] ~=? fr' [r 41 57, r 89 90, r 60 65, r 10 29, r 30 31
,r 80 88, r 66 79, r 32 40, r 58 59]
, fr' [r 12 16, r 95 95] ~=? fr' [r 14 15, r 95 95, r 12 13, r 16 16]
, fr' [r 12 16, r 95 95] ~=? fr' [r 12 15, r 95 95, r 12 16]
, alphabet' ~=? fr' [r 15 27, r 0 14, r 27 255]
, alphabet' ~=? fr' [r 15 27, r 0 15, r 28 maxBound]
, alphabet' ~=? fr' [r 15 27, r 0 14, r 28 30, r 31 43, r 42 255]
, fr' [r 0 10, r 35 45, r 243 255] ~=?
fr' [r 243 244, r 10 10, r 36 42, r 245 255, r 0 9, r 42 45, r 35 35]
]
where
alphabet' = toList (alphabet :: PartitionL Word8)
fr = toList . (fromRanges :: [Range Char] -> PartitionL Char)
fr' = toList . (fromRanges :: [Range Word8] -> PartitionL Word8)
r = Range
t_toRanges = "toRanges" ~: test
[
[r 'a' 'a'] ~=? to (fr [r 'a' 'a'])
, [r 'a' 'b'] ~=? to (fr [r 'a' 'b'])
, [r 'a' 'b'] ~=? to (fr [r 'a' 'a', r 'b' 'b'])
, [r 'a' 'e'] ~=?
to (fr [r 'a' 'a', r 'd' 'd', r 'e' 'e', r 'c' 'c', r 'b' 'b'])
, [r 'F' 'M', r 'c' 'd'] ~=? to (fr [r 'c' 'd', r 'F' 'M'])
, let x = fr [r 'g' 'j', r '0' '8', r 'a' 'b', r 'c' 'd', r 'X' 'X']
in toList x ~=? toList (fr $ to x)
, [r 0 5, r 12 32, r 83 108] ~=?
to' (fr' [r 12 25, r 26 32, r 0 5, r 5 5, r 83 106, r 106 108])
, [r 10 12, r 254 255] ~=?
to' (fr' [r 11 12, r 255 255, r 10 10, r 254 254])
]
where
to = toRanges :: PartitionL Char -> [Range Char]
fr = fromRanges :: [Range Char] -> PartitionL Char
to' = toRanges :: PartitionL Word8 -> [Range Word8]
fr' = fromRanges :: [Range Word8] -> PartitionL Word8
r = Range
t_alphabet_empty = "alphabet, empty" ~: test
[
all (not . member' (empty :: PartitionL Word8)) bytes ~=? True
, all ( member' (alphabet :: PartitionL Word8)) bytes ~=? True
]
where
bytes = [minBound..maxBound]
member' = flip member
t_member = "member" ~: test
[
True ~=? member 'a' (fr [r 'a' 'a'])
, True ~=? member 'c' (fr [r 'a' 'z'])
, True ~=? member 'c' (fr [r 'A' 'B', r 'C' 'E', r 'a' 'c'])
, True ~=? member 'c' (fr [r 'A' 'F', r 'c' 'e'])
, False ~=? member 'c' (fr [r 'A' 'F', r 'a' 'b'])
, False ~=? member 'c' (fr [r 'A' 'F', r 'a' 'b', r 'd' 'z'])
]
where
fr = fromRanges :: [Range Char] -> PartitionL Char
r = Range
t_complement = "complement" ~: test
[
(toList $ fr [r minBound 'a', r 'y' maxBound]) ~=?
(toList $ co (fr [r 'b' 'x']))
, (toList $ fr [r 'a' 'b', r 'i' 'i', r 'x' 'z']) ~=?
(toList $ co $ co (fr [r 'a' 'b', r 'i' 'i', r 'x' 'z']))
, toList alphabet' ~=? (toList $ co empty')
, toList empty' ~=? (toList $ co alphabet')
, toList empty' ~=? (toList $ co $ co empty')
, toList empty' ~=? (toList $ co $ co $ co alphabet')
, toList empty' ~=?
(toList $ co $ pmap succ $ fr [r minBound 'a', r 'y' maxBound])
]
where
fr = fromRanges :: [Range Char] -> PartitionL Char
r = Range
co = complement
alphabet' = alphabet :: PartitionL Char
empty' = empty :: PartitionL Char
t_union = "union" ~: test
[
frL [r 'a' 'a'] ~=? unionL (fr [r 'a' 'a']) empty
, frL [r 'a' 'a'] ~=? unionL (fr [r 'a' 'a']) (fr [r 'a' 'a'])
, frL [r 'a' 'b'] ~=? unionL (fr [r 'a' 'a']) (fr [r 'b' 'b'])
, frL [r 'a' 'd'] ~=? unionL (fr [r 'a' 'c']) (fr [r 'b' 'd'])
, frL [r 'a' 'd'] ~=? unionL (fr [r 'a' 'a']) (fr [r 'b' 'd'])
, frL [r 'a' 'd'] ~=? unionL (fr [r 'b' 'd']) (fr [r 'a' 'a'])
, frL [r 'a' 'd'] ~=? unionL (fr [r 'c' 'd']) (fr [r 'a' 'b'])
, frL [r 'a' 'e'] ~=? unionL (fr [r 'c' 'e']) (fr [r 'a' 'd'])
, frL [r 'a' 'c', r 'i' 'j'] ~=? unionL (fr [r 'a' 'c'])
(fr [r 'i' 'j'])
, frL [r 'a' 'c', r 'i' 'j'] ~=? unionL (fr [r 'a' 'c'])
(fr [r 'i' 'j', r 'b' 'c'])
, frL [r 'a' 'e', r 'i' 'j', r 'm' 'n'] ~=?
unionL (fr [r 'a' 'c', r 'm' 'n']) (fr [r 'i' 'j', r 'b' 'e'])
]
where
fr = fromRanges :: [Range Char] -> PartitionL Char
frL = toList . fr
unionL a b = toList $ union a b
r = Range
t_intersect = "intersect" ~: test
[
frL [r 'a' 'a'] ~=? intersectL (fr [r 'a' 'a']) (fr [r 'a' 'a'])
, toList empty' ~=? intersectL (fr [r 'a' 'a']) (fr [r 'b' 'b'])
, frL [r 'b' 'b'] ~=? intersectL (fr [r 'a' 'b']) (fr [r 'b' 'c'])
, frL [r 'b' 'b'] ~=? intersectL (fr [r 'b' 'c']) (fr [r 'a' 'b'])
, frL [r 'c' 'e'] ~=? intersectL (fr [r 'a' 'e']) (fr [r 'c' 'x'])
, frL [r 'c' 'c'] ~=? intersectL (fr [r 'a' 'a', r 'c' 'f'])
(fr [r 'b' 'c', r 'g' 'i'])
, frL [r 'e' 'f'] ~=? intersectL (fr [r 'a' 'a', r 'c' 'f'])
(fr [r 'b' 'b', r 'e' 'i'])
]
where
fr = fromRanges :: [Range Char] -> PartitionL Char
frL = toList . fr
intersectL a b = toList $ intersect a b
empty' = empty :: PartitionL Char
r = Range
suite = test
[
t_fromList_toList, t_getBlock, t_representatives, t_pmap, t_toIntervals
, t_fromRanges, t_toRanges, t_alphabet_empty, t_member, t_complement
, t_union, t_intersect
]
|
radekm/crep
|
tests/Partition.hs
|
bsd-3-clause
| 8,682 | 0 | 13 | 2,669 | 5,061 | 2,661 | 2,400 | 189 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Fragment.TmVar.Rules.Type.Infer.Offline (
TmVarInferTypeContext
, tmVarInferTypeRules
) where
import Control.Monad.Reader (MonadReader)
import Control.Monad.Except (MonadError)
import Rules.Type.Infer.Offline
import Context.Term
import Fragment.TmVar.Rules.Type.Infer.Common
type TmVarInferTypeContext e w s r m ki ty pt tm a = (InferTypeContext e w s r m ki ty pt tm a, Ord a, MonadReader r m, HasTermContext r ki ty a, MonadError e m, AsUnboundTermVariable e a)
tmVarInferTypeRules :: TmVarInferTypeContext e w s r m ki ty pt tm a
=> InferTypeInput e w s r m (UnifyT ki ty a m) ki ty pt tm a
tmVarInferTypeRules =
inferTypeInput
|
dalaing/type-systems
|
src/Fragment/TmVar/Rules/Type/Infer/Offline.hs
|
bsd-3-clause
| 919 | 0 | 8 | 165 | 217 | 128 | 89 | 16 | 1 |
module Language.LaTeX.Length
(
-- Units
inch, pt, em, cm, mm, ex, pc, sp, bp, dd, cc, mu
-- Dynamic Length
, stretch
-- Predefined lengths
, parindent, textwidth, linewidth, textheight, parsep, parskip, baselineskip
, baselinestrech, fill, columnsep, columnseprule, mathindent, oddsidemargin
, evensidemargin, marginparwidth, marginparsep, marginparpush, topmargin
, headheight, headsep, topskip, footheight, footskip, topsep, partopsep, itemsep
, itemindent, labelsep, labelwidth, leftmargin, rightmargin, listparindent, jot
, abovedisplayskip, belowdisplayskip, abovedisplayshortskip
, belowdisplayshortskip, floatsep, textfloatsep, intextsep, dblfloatsep
, dbltextfloatsep, textfraction, floatpagefraction, dbltopfaction
, dblfloatpagefraction, arraycolsep, tabcolsep, arrayrulewidth, doublerulesep
, arraystretch, bigskipamount, medskipamount, smallskipamount, fboxrule, fboxsep
)
where
import Language.LaTeX.Types
inch, pt, em, cm, mm, ex, pc, sp, bp, dd, cc, mu :: Rational -> LatexLength
pt = withUnit Pt
em = withUnit Em
cm = withUnit Cm
mm = withUnit Mm
ex = withUnit Ex
pc = withUnit Pc
-- | Since 'in' is a keyword in Haskell, this one is called 'inch'.
inch = withUnit In
sp = withUnit Sp
bp = withUnit Bp
dd = withUnit Dd
cc = withUnit Cc
mu = withUnit Mu
-- | Internal function to make LatexLength commands
lengthCmd :: String -> LatexLength
lengthCmd = LengthCmd
-- | Internal function to make unit-annotated LatexLength constants
withUnit :: TexUnit -> Rational -> LatexLength
withUnit unit = LengthCst (Just unit)
-- robust
stretch :: Rational -> LatexLength
stretch = LengthCmdRatArg "stretch"
parindent, textwidth, linewidth, textheight, parsep, parskip, baselineskip, baselinestrech,
fill, columnsep, columnseprule, mathindent, oddsidemargin, evensidemargin, marginparwidth,
marginparsep, marginparpush, topmargin, headheight, headsep, topskip, footheight, footskip,
topsep, partopsep, itemsep, itemindent, labelsep, labelwidth, leftmargin, rightmargin,
listparindent, jot, abovedisplayskip, belowdisplayskip, abovedisplayshortskip,
belowdisplayshortskip, floatsep, textfloatsep, intextsep, dblfloatsep, dbltextfloatsep,
textfraction, floatpagefraction, dbltopfaction, dblfloatpagefraction, arraycolsep,
tabcolsep, arrayrulewidth, doublerulesep, arraystretch, bigskipamount, medskipamount,
smallskipamount, fboxrule, fboxsep :: LatexLength
parindent = lengthCmd "parindent"
textwidth = lengthCmd "textwidth"
linewidth = lengthCmd "linewidth"
textheight = lengthCmd "textheight"
parsep = lengthCmd "parsep"
parskip = lengthCmd "parskip"
baselineskip = lengthCmd "baselineskip"
baselinestrech = lengthCmd "baselinestrech"
fill = lengthCmd "fill"
columnsep = lengthCmd "columnsep"
columnseprule = lengthCmd "columnseprule"
mathindent = lengthCmd "mathindent"
oddsidemargin = lengthCmd "oddsidemargin"
evensidemargin = lengthCmd "evensidemargin"
marginparwidth = lengthCmd "marginparwidth"
marginparsep = lengthCmd "marginparsep"
marginparpush = lengthCmd "marginparpush"
topmargin = lengthCmd "topmargin"
headheight = lengthCmd "headheight"
headsep = lengthCmd "headsep"
topskip = lengthCmd "topskip"
footheight = lengthCmd "footheight"
footskip = lengthCmd "footskip"
topsep = lengthCmd "topsep"
partopsep = lengthCmd "partopsep"
itemsep = lengthCmd "itemsep"
itemindent = lengthCmd "itemindent"
labelsep = lengthCmd "labelsep"
labelwidth = lengthCmd "labelwidth"
leftmargin = lengthCmd "leftmargin"
rightmargin = lengthCmd "rightmargin"
listparindent = lengthCmd "listparindent"
jot = lengthCmd "jot"
abovedisplayskip = lengthCmd "abovedisplayskip"
belowdisplayskip = lengthCmd "belowdisplayskip"
abovedisplayshortskip = lengthCmd "abovedisplayshortskip"
belowdisplayshortskip = lengthCmd "belowdisplayshortskip"
floatsep = lengthCmd "floatsep"
textfloatsep = lengthCmd "textfloatsep"
intextsep = lengthCmd "intextsep"
dblfloatsep = lengthCmd "dblfloatsep"
dbltextfloatsep = lengthCmd "dbltextfloatsep"
textfraction = lengthCmd "textfraction"
floatpagefraction = lengthCmd "floatpagefraction"
dbltopfaction = lengthCmd "dbltopfaction"
dblfloatpagefraction = lengthCmd "dblfloatpagefraction"
arraycolsep = lengthCmd "arraycolsep"
tabcolsep = lengthCmd "tabcolsep"
arrayrulewidth = lengthCmd "arrayrulewidth"
doublerulesep = lengthCmd "doublerulesep"
arraystretch = lengthCmd "arraystretch"
bigskipamount = lengthCmd "bigskipamount"
medskipamount = lengthCmd "medskipamount"
smallskipamount = lengthCmd "smallskipamount"
fboxrule = lengthCmd "fboxrule"
fboxsep = lengthCmd "fboxsep"
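-- A usage sketch (hypothetical, not part of this module): the unit
-- constructors take a 'Rational', and the predefined lengths are used as-is.
--
-- > myIndent :: LatexLength
-- > myIndent = cm 1.5
-- >
-- > spring :: LatexLength
-- > spring = stretch 1
-- >
-- > pageWidth :: LatexLength
-- > pageWidth = textwidth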
| np/hlatex | Language/LaTeX/Length.hs | bsd-3-clause | 4,547 | 0 | 7 | 566 | 980 | 599 | 381 | 99 | 1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-2006
\section[RnEnv]{Environment manipulation for the renamer monad}
-}
{-# LANGUAGE CPP, MultiWayIf #-}
module RnEnv (
newTopSrcBinder,
lookupLocatedTopBndrRn, lookupTopBndrRn,
lookupLocatedOccRn, lookupOccRn, lookupOccRn_maybe,
lookupLocalOccRn_maybe, lookupInfoOccRn,
lookupLocalOccThLvl_maybe,
lookupTypeOccRn, lookupKindOccRn,
lookupGlobalOccRn, lookupGlobalOccRn_maybe,
lookupOccRn_overloaded, lookupGlobalOccRn_overloaded, lookupExactOcc,
reportUnboundName, unknownNameSuggestions,
addNameClashErrRn,
HsSigCtxt(..), lookupLocalTcNames, lookupSigOccRn,
lookupSigCtxtOccRn,
lookupFixityRn, lookupFixityRn_help,
lookupFieldFixityRn, lookupTyFixityRn,
lookupInstDeclBndr, lookupRecFieldOcc, lookupFamInstName,
lookupConstructorFields,
lookupSyntaxName, lookupSyntaxName', lookupSyntaxNames,
lookupIfThenElse,
lookupGreAvailRn,
getLookupOccRn,mkUnboundName, mkUnboundNameRdr, isUnboundName,
addUsedGRE, addUsedGREs, addUsedDataCons,
newLocalBndrRn, newLocalBndrsRn,
bindLocalNames, bindLocalNamesFV,
MiniFixityEnv,
addLocalFixities,
bindLocatedLocalsFV, bindLocatedLocalsRn,
extendTyVarEnvFVRn,
-- Role annotations
RoleAnnotEnv, emptyRoleAnnotEnv, mkRoleAnnotEnv,
lookupRoleAnnot, getRoleAnnots,
checkDupRdrNames, checkShadowedRdrNames,
checkDupNames, checkDupAndShadowedNames, dupNamesErr,
checkTupSize,
addFvRn, mapFvRn, mapMaybeFvRn, mapFvRnCPS,
warnUnusedMatches, warnUnusedTypePatterns,
warnUnusedTopBinds, warnUnusedLocalBinds,
mkFieldEnv,
dataTcOccs, kindSigErr, perhapsForallMsg, unknownSubordinateErr,
HsDocContext(..), pprHsDocContext,
inHsDocContext, withHsDocContext
) where
#include "HsVersions.h"
import LoadIface ( loadInterfaceForName, loadSrcInterface_maybe )
import IfaceEnv
import HsSyn
import RdrName
import HscTypes
import TcEnv
import TcRnMonad
import RdrHsSyn ( setRdrNameSpace )
import TysWiredIn ( starKindTyConName, unicodeStarKindTyConName )
import Name
import NameSet
import NameEnv
import Avail
import Module
import ConLike
import DataCon
import TyCon
import PrelNames ( mkUnboundName, isUnboundName, rOOT_MAIN, forall_tv_RDR )
import ErrUtils ( MsgDoc )
import BasicTypes ( Fixity(..), FixityDirection(..), minPrecedence, defaultFixity )
import SrcLoc
import Outputable
import Util
import Maybes
import BasicTypes ( TopLevelFlag(..) )
import ListSetOps ( removeDups )
import DynFlags
import FastString
import Control.Monad
import Data.List
import Data.Function ( on )
import ListSetOps ( minusList )
import Constants ( mAX_TUPLE_SIZE )
import qualified GHC.LanguageExtensions as LangExt
{-
*********************************************************
* *
Source-code binders
* *
*********************************************************
Note [Signature lazy interface loading]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC's lazy interface loading can be a bit confusing, so this Note is an
empirical description of what happens in one interesting case. When
compiling a signature module against its implementation, we do NOT
load interface files associated with its names until after the type
checking phase. For example:
module ASig where
data T
f :: T -> T
Suppose we compile this with -sig-of "A is ASig":
module B where
data T = T
f T = T
module A(module B) where
import B
During type checking, we'll load A.hi because we need to know what the
RdrEnv for the module is, but we DO NOT load the interface for B.hi!
It's wholly unnecessary: our local definition 'data T' in ASig is all
the information we need to finish type checking. This is contrast to
type checking of ordinary Haskell files, in which we would not have the
local definition "data T" and would need to consult B.hi immediately.
(Also, this situation never occurs for hs-boot files, since you're not
allowed to reexport from another module.)
After type checking, we then check that the types we provided are
consistent with the backing implementation (in checkHiBootOrHsigIface).
At this point, B.hi is loaded, because we need something to compare
against.
I discovered this behavior when trying to figure out why type class
instances for Data.Map weren't in the EPS when I was type checking a
test very much like ASig (sigof02dm): the associated interface hadn't
been loaded yet! (The larger issue is a moot point, since an instance
declared in a signature can never be a duplicate.)
This behavior might change in the future. Consider this
alternate module B:
module B where
{-# DEPRECATED T, f "Don't use" #-}
data T = T
f T = T
One might conceivably want to report deprecation warnings when compiling
ASig with -sig-of B, in which case we need to look at B.hi to find the
deprecation warnings during renaming. At the moment, you don't get any
warning until you use the identifier further downstream. This would
require adjusting addUsedGRE so that during signature compilation,
we do not report deprecation warnings for LocalDef. See also
Note [Handling of deprecations]
-}
newTopSrcBinder :: Located RdrName -> RnM Name
newTopSrcBinder (L loc rdr_name)
| Just name <- isExact_maybe rdr_name
= -- This is here to catch
-- (a) Exact-name binders created by Template Haskell
-- (b) The PrelBase defn of (say) [] and similar, for which
-- the parser reads the special syntax and returns an Exact RdrName
    -- We are at a binding site for the name, so check first that
    -- the current module is the correct one; otherwise GHC can get
-- very confused indeed. This test rejects code like
-- data T = (,) Int Int
-- unless we are in GHC.Tup
if isExternalName name then
do { this_mod <- getModule
; unless (this_mod == nameModule name)
(addErrAt loc (badOrigBinding rdr_name))
; return name }
else -- See Note [Binders in Template Haskell] in Convert.hs
do { this_mod <- getModule
; externaliseName this_mod name }
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { this_mod <- getModule
; unless (rdr_mod == this_mod || rdr_mod == rOOT_MAIN)
(addErrAt loc (badOrigBinding rdr_name))
-- When reading External Core we get Orig names as binders,
-- but they should agree with the module gotten from the monad
--
-- We can get built-in syntax showing up here too, sadly. If you type
-- data T = (,,,)
-- the constructor is parsed as a type, and then RdrHsSyn.tyConToDataCon
        -- uses setRdrNameSpace to make it into a data constructor. At that point
-- the nice Exact name for the TyCon gets swizzled to an Orig name.
-- Hence the badOrigBinding error message.
--
-- Except for the ":Main.main = ..." definition inserted into
-- the Main module; ugh!
-- Because of this latter case, we call newGlobalBinder with a module from
-- the RdrName, not from the environment. In principle, it'd be fine to
-- have an arbitrary mixture of external core definitions in a single module,
-- (apart from module-initialisation issues, perhaps).
; newGlobalBinder rdr_mod rdr_occ loc }
| otherwise
= do { unless (not (isQual rdr_name))
(addErrAt loc (badQualBndrErr rdr_name))
-- Binders should not be qualified; if they are, and with a different
          -- module name, we get a confusing "M.T is not in scope" error later
; stage <- getStage
; if isBrackStage stage then
-- We are inside a TH bracket, so make an *Internal* name
-- See Note [Top-level Names in Template Haskell decl quotes] in RnNames
do { uniq <- newUnique
; return (mkInternalName uniq (rdrNameOcc rdr_name) loc) }
else
do { this_mod <- getModule
; traceRn "newTopSrcBinder" (ppr this_mod $$ ppr rdr_name $$ ppr loc)
; newGlobalBinder this_mod (rdrNameOcc rdr_name) loc }
}
{-
*********************************************************
* *
Source code occurrences
* *
*********************************************************
Looking up a name in the RnEnv.
Note [Type and class operator definitions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to reject all of these unless we have -XTypeOperators (Trac #3265)
data a :*: b = ...
class a :*: b where ...
data (:*:) a b = ....
class (:*:) a b where ...
The latter two mean that we are not just looking for a
*syntactically-infix* declaration, but one that uses an operator
OccName. We use OccName.isSymOcc to detect that case, which isn't
terribly efficient, but there seems to be no better way.
-}
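-- For illustration (user-level code, not GHC source): with TypeOperators
-- enabled, the declarations rejected above become legal, e.g.
--
-- > {-# LANGUAGE TypeOperators #-}
-- > data a :*: b = a :*: b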
lookupTopBndrRn :: RdrName -> RnM Name
lookupTopBndrRn n = do nopt <- lookupTopBndrRn_maybe n
case nopt of
Just n' -> return n'
Nothing -> do traceRn "lookupTopBndrRn fail" (ppr n)
unboundName WL_LocalTop n
lookupLocatedTopBndrRn :: Located RdrName -> RnM (Located Name)
lookupLocatedTopBndrRn = wrapLocM lookupTopBndrRn
lookupTopBndrRn_maybe :: RdrName -> RnM (Maybe Name)
-- Look up a top-level source-code binder. We may be looking up an unqualified 'f',
-- and there may be several imported 'f's too, which must not confuse us.
-- For example, this is OK:
-- import Foo( f )
-- infix 9 f -- The 'f' here does not need to be qualified
-- f x = x -- Nor here, of course
-- So we have to filter out the non-local ones.
--
-- A separate function (importsFromLocalDecls) reports duplicate top level
-- decls, so here it's safe just to choose an arbitrary one.
--
-- There should never be a qualified name in a binding position in Haskell,
-- but there can be if we have read in an external-Core file.
-- The Haskell parser checks for the illegal qualified name in Haskell
-- source files, so we don't need to do so here.
lookupTopBndrRn_maybe rdr_name
| Just name <- isExact_maybe rdr_name
= do { name' <- lookupExactOcc name; return (Just name') }
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
-- This deals with the case of derived bindings, where
-- we don't bother to call newTopSrcBinder first
-- We assume there is no "parent" name
= do { loc <- getSrcSpanM
; n <- newGlobalBinder rdr_mod rdr_occ loc
; return (Just n)}
| otherwise
= do { -- Check for operators in type or class declarations
-- See Note [Type and class operator definitions]
let occ = rdrNameOcc rdr_name
; when (isTcOcc occ && isSymOcc occ)
(do { op_ok <- xoptM LangExt.TypeOperators
; unless op_ok (addErr (opDeclErr rdr_name)) })
; env <- getGlobalRdrEnv
; case filter isLocalGRE (lookupGRE_RdrName rdr_name env) of
[gre] -> return (Just (gre_name gre))
_ -> return Nothing -- Ambiguous (can't happen) or unbound
}
-----------------------------------------------
-- | Lookup an @Exact@ @RdrName@. See Note [Looking up Exact RdrNames].
-- This adds an error if the name cannot be found.
lookupExactOcc :: Name -> RnM Name
lookupExactOcc name
= do { result <- lookupExactOcc_either name
; case result of
Left err -> do { addErr err
; return name }
Right name' -> return name' }
-- | Lookup an @Exact@ @RdrName@. See Note [Looking up Exact RdrNames].
-- This never adds an error, but it may return one.
lookupExactOcc_either :: Name -> RnM (Either MsgDoc Name)
-- See Note [Looking up Exact RdrNames]
lookupExactOcc_either name
| Just thing <- wiredInNameTyThing_maybe name
, Just tycon <- case thing of
ATyCon tc -> Just tc
AConLike (RealDataCon dc) -> Just (dataConTyCon dc)
_ -> Nothing
, isTupleTyCon tycon
= do { checkTupSize (tyConArity tycon)
; return (Right name) }
| isExternalName name
= return (Right name)
| otherwise
= do { env <- getGlobalRdrEnv
; let -- See Note [Splicing Exact names]
main_occ = nameOccName name
demoted_occs = case demoteOccName main_occ of
Just occ -> [occ]
Nothing -> []
gres = [ gre | occ <- main_occ : demoted_occs
, gre <- lookupGlobalRdrEnv env occ
, gre_name gre == name ]
; case gres of
[gre] -> return (Right (gre_name gre))
[] -> -- See Note [Splicing Exact names]
do { lcl_env <- getLocalRdrEnv
; if name `inLocalRdrEnvScope` lcl_env
then return (Right name)
else
#ifdef GHCI
do { th_topnames_var <- fmap tcg_th_topnames getGblEnv
; th_topnames <- readTcRef th_topnames_var
; if name `elemNameSet` th_topnames
then return (Right name)
else return (Left exact_nm_err)
}
#else /* !GHCI */
return (Left exact_nm_err)
#endif /* !GHCI */
}
gres -> return (Left (sameNameErr gres)) -- Ugh! See Note [Template Haskell ambiguity]
}
where
exact_nm_err = hang (text "The exact Name" <+> quotes (ppr name) <+> ptext (sLit "is not in scope"))
2 (vcat [ text "Probable cause: you used a unique Template Haskell name (NameU), "
, text "perhaps via newName, but did not bind it"
, text "If that's it, then -ddump-splices might be useful" ])
sameNameErr :: [GlobalRdrElt] -> MsgDoc
sameNameErr [] = panic "addSameNameErr: empty list"
sameNameErr gres@(_ : _)
= hang (text "Same exact name in multiple name-spaces:")
2 (vcat (map pp_one sorted_names) $$ th_hint)
where
sorted_names = sortWith nameSrcLoc (map gre_name gres)
pp_one name
= hang (pprNameSpace (occNameSpace (getOccName name))
<+> quotes (ppr name) <> comma)
2 (text "declared at:" <+> ppr (nameSrcLoc name))
th_hint = vcat [ text "Probable cause: you bound a unique Template Haskell name (NameU),"
, text "perhaps via newName, in different name-spaces."
, text "If that's it, then -ddump-splices might be useful" ]
-----------------------------------------------
lookupInstDeclBndr :: Name -> SDoc -> RdrName -> RnM Name
-- This is called on the method name on the left-hand side of an
-- instance declaration binding. eg. instance Functor T where
-- fmap = ...
-- ^^^^ called on this
-- Regardless of how many unqualified fmaps are in scope, we want
-- the one that comes from the Functor class.
--
-- Furthermore, note that we take no account of whether the
-- name is only in scope qualified. I.e. even if method op is
-- in scope as M.op, we still allow plain 'op' on the LHS of
-- an instance decl
--
-- The "what" parameter says "method" or "associated type",
-- depending on what we are looking up
lookupInstDeclBndr cls what rdr
= do { when (isQual rdr)
(addErr (badQualBndrErr rdr))
-- In an instance decl you aren't allowed
-- to use a qualified name for the method
-- (Although it'd make perfect sense.)
; mb_name <- lookupSubBndrOcc
False -- False => we don't give deprecated
-- warnings when a deprecated class
-- method is defined. We only warn
-- when it's used
cls doc rdr
; case mb_name of
Left err -> do { addErr err; return (mkUnboundNameRdr rdr) }
Right nm -> return nm }
where
doc = what <+> text "of class" <+> quotes (ppr cls)
-----------------------------------------------
lookupFamInstName :: Maybe Name -> Located RdrName -> RnM (Located Name)
-- Used for TyData and TySynonym family instances only,
-- See Note [Family instance binders]
lookupFamInstName (Just cls) tc_rdr -- Associated type; c.f RnBinds.rnMethodBind
= wrapLocM (lookupInstDeclBndr cls (text "associated type")) tc_rdr
lookupFamInstName Nothing tc_rdr -- Family instance; tc_rdr is an *occurrence*
= lookupLocatedOccRn tc_rdr
-----------------------------------------------
lookupConstructorFields :: Name -> RnM [FieldLabel]
-- Look up the fields of a given constructor
-- * For constructors from this module, use the record field env,
-- which is itself gathered from the (as yet un-typechecked)
-- data type decls
--
-- * For constructors from imported modules, use the *type* environment
--      since imported modules are already compiled, the info is conveniently
-- right there
lookupConstructorFields con_name
= do { this_mod <- getModule
; if nameIsLocalOrFrom this_mod con_name then
do { field_env <- getRecFieldEnv
; traceTc "lookupCF" (ppr con_name $$ ppr (lookupNameEnv field_env con_name) $$ ppr field_env)
; return (lookupNameEnv field_env con_name `orElse` []) }
else
do { con <- tcLookupConLike con_name
; traceTc "lookupCF 2" (ppr con)
; return (conLikeFieldLabels con) } }
-----------------------------------------------
-- Used for record construction and pattern matching
-- When the -XDisambiguateRecordFields flag is on, take account of the
-- constructor name to disambiguate which field to use; it's just the
-- same as for instance decls
--
-- NB: Consider this:
-- module Foo where { data R = R { fld :: Int } }
-- module Odd where { import Foo; fld x = x { fld = 3 } }
-- Arguably this should work, because the reference to 'fld' is
-- unambiguous because there is only one field id 'fld' in scope.
-- But currently it's rejected.
lookupRecFieldOcc :: Maybe Name -- Nothing => just look it up as usual
-- Just tycon => use tycon to disambiguate
-> SDoc -> RdrName
-> RnM Name
lookupRecFieldOcc parent doc rdr_name
| Just tc_name <- parent
= do { mb_name <- lookupSubBndrOcc True tc_name doc rdr_name
; case mb_name of
Left err -> do { addErr err; return (mkUnboundNameRdr rdr_name) }
Right n -> return n }
| otherwise
= lookupGlobalOccRn rdr_name
lookupSubBndrOcc :: Bool
-> Name -- Parent
-> SDoc
-> RdrName
-> RnM (Either MsgDoc Name)
-- Find all the things the rdr-name maps to
-- and pick the one with the right parent name
lookupSubBndrOcc warn_if_deprec the_parent doc rdr_name
| Just n <- isExact_maybe rdr_name -- This happens in derived code
= do { n <- lookupExactOcc n
; return (Right n) }
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { n <- lookupOrig rdr_mod rdr_occ
; return (Right n) }
| isUnboundName the_parent
-- Avoid an error cascade from malformed decls:
-- instance Int where { foo = e }
-- We have already generated an error in rnLHsInstDecl
= return (Right (mkUnboundNameRdr rdr_name))
| otherwise
= do { env <- getGlobalRdrEnv
; let gres = lookupGlobalRdrEnv env (rdrNameOcc rdr_name)
-- NB: lookupGlobalRdrEnv, not lookupGRE_RdrName!
-- The latter does pickGREs, but we want to allow 'x'
-- even if only 'M.x' is in scope
; traceRn "lookupSubBndrOcc"
(vcat [ ppr the_parent, ppr rdr_name
, ppr gres, ppr (pick_gres rdr_name gres)])
; case pick_gres rdr_name gres of
(gre:_) -> do { addUsedGRE warn_if_deprec gre
-- Add a usage; this is an *occurrence* site
-- Note [Usage for sub-bndrs]
; return (Right (gre_name gre)) }
-- If there is more than one local GRE for the
-- same OccName 'f', that will be reported separately
-- as a duplicate top-level binding for 'f'
[] -> do { ns <- lookupQualifiedNameGHCi rdr_name
; case ns of
(n:_) -> return (Right n) -- Unlikely to be more than one...?
[] -> return (Left (unknownSubordinateErr doc rdr_name))
} }
where
-- If Parent = NoParent, just do a normal lookup
-- If Parent = Parent p then find all GREs that
-- (a) have parent p
-- (b) for Unqual, are in scope qualified or unqualified
-- for Qual, are in scope with that qualification
pick_gres rdr_name gres
| isUnqual rdr_name = filter right_parent gres
| otherwise = filter right_parent (pickGREs rdr_name gres)
right_parent (GRE { gre_par = p })
| ParentIs parent <- p = parent == the_parent
| FldParent { par_is = parent } <- p = parent == the_parent
| otherwise = False
{-
Note [Family instance binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data family F a
data instance F T = X1 | X2
The 'data instance' decl has an *occurrence* of F (and T), and *binds*
X1 and X2. (This is unlike a normal data type declaration which would
bind F too.) So we want an AvailTC F [X1,X2].
Now consider a similar pair:
class C a where
data G a
instance C S where
data G S = Y1 | Y2
The 'data G S' *binds* Y1 and Y2, and has an *occurrence* of G.
But there is a small complication: in an instance decl, we don't use
qualified names on the LHS; instead we use the class to disambiguate.
Thus:
module M where
import Blib( G )
class C a where
data G a
instance C S where
data G S = Y1 | Y2
Even though there are two G's in scope (M.G and Blib.G), the occurrence
of 'G' in the 'instance C S' decl is unambiguous, because C has only
one associated type called G. This is exactly what happens for methods,
and it is only consistent to do the same thing for types. That's the
role of the function lookupTcdName; the (Maybe Name) gives the class of
the enclosing instance decl, if any.
Note [Looking up Exact RdrNames]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Exact RdrNames are generated by Template Haskell. See Note [Binders
in Template Haskell] in Convert.
Data types and classes have Exact system Names in the binding
positions for constructors, TyCons etc. For example
[d| data T = MkT Int |]
when we splice in and Convert to HsSyn RdrName, we'll get
data (Exact (system Name "T")) = (Exact (system Name "MkT")) ...
These System names are generated by Convert.thRdrName
But, constructors and the like need External Names, not System Names!
So we do the following
* In RnEnv.newTopSrcBinder we spot Exact RdrNames that wrap a
non-External Name, and make an External name for it. This is
the name that goes in the GlobalRdrEnv
* When looking up an occurrence of an Exact name, done in
RnEnv.lookupExactOcc, we find the Name with the right unique in the
GlobalRdrEnv, and use the one from the envt -- it will be an
External Name in the case of the data type/constructor above.
* Exact names are also used for purely local binders generated
by TH, such as \x_33. x_33
Both binder and occurrence are Exact RdrNames. The occurrence
gets looked up in the LocalRdrEnv by RnEnv.lookupOccRn, and
misses, because lookupLocalRdrEnv always returns Nothing for
an Exact Name. Now we fall through to lookupExactOcc, which
will find the Name is not in the GlobalRdrEnv, so we just use
the Exact supplied Name.
Note [Splicing Exact names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the splice $(do { x <- newName "x"; return (VarE x) })
This will generate a (HsExpr RdrName) term that mentions the
Exact RdrName "x_56" (or whatever), but does not bind it. So
when looking up such Exact names we want to check that it's in scope,
otherwise the type checker will get confused. To do this we need to
keep track of all the Names in scope, and the LocalRdrEnv does just that;
we consult it with RdrName.inLocalRdrEnvScope.
There is another wrinkle. With TH and -XDataKinds, consider
$( [d| data Nat = Zero
data T = MkT (Proxy 'Zero) |] )
After splicing, but before renaming we get this:
data Nat_77{tc} = Zero_78{d}
data T_79{tc} = MkT_80{d} (Proxy 'Zero_78{tc}) |] )
The occurrence of 'Zero in the data type for T has the right unique,
but it has a TcClsName name-space in its OccName. (This is set by
the ctxt_ns argument of Convert.thRdrName.) When we check that it is
in scope in the GlobalRdrEnv, we need to look up the DataName namespace
too. (An alternative would be to make the GlobalRdrEnv also have
a Name -> GRE mapping.)
Note [Template Haskell ambiguity]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The GlobalRdrEnv invariant says that if
occ -> [gre1, ..., gren]
then the gres have distinct Names (INVARIANT 1 of GlobalRdrEnv).
This is guaranteed by extendGlobalRdrEnvRn (the dups check in add_gre).
So how can we get multiple gres in lookupExactOcc_maybe? Because in
TH we might use the same TH NameU in two different name spaces.
eg (Trac #7241):
$(newName "Foo" >>= \o -> return [DataD [] o [] [RecC o []] [''Show]])
Here we generate a type constructor and data constructor with the same
unique, but different name spaces.
It'd be nicer to rule this out in extendGlobalRdrEnvRn, but that would
mean looking up the OccName in every name-space, just in case, and that
seems a bit brutal. So it's just done here on lookup. But we might
need to revisit that choice.
Note [Usage for sub-bndrs]
~~~~~~~~~~~~~~~~~~~~~~~~~~
If you have this
import qualified M( C( f ) )
instance M.C T where
f x = x
then is the qualified import M.f used? Obviously yes.
But the RdrName used in the instance decl is unqualified. In effect,
we fill in the qualification by looking for f's whose class is M.C
But when adding to the UsedRdrNames we must make that qualification
explicit (saying "used M.f"), otherwise we get "Redundant import of M.f".
So we make up a suitable (fake) RdrName. But be careful
   import qualified M
import M( C(f) )
instance C T where
f x = x
Here we want to record a use of 'f', not of 'M.f', otherwise
we'll miss the fact that the qualified import is redundant.
--------------------------------------------------
-- Occurrences
--------------------------------------------------
-}
getLookupOccRn :: RnM (Name -> Maybe Name)
getLookupOccRn
= do local_env <- getLocalRdrEnv
return (lookupLocalRdrOcc local_env . nameOccName)
mkUnboundNameRdr :: RdrName -> Name
mkUnboundNameRdr rdr = mkUnboundName (rdrNameOcc rdr)
lookupLocatedOccRn :: Located RdrName -> RnM (Located Name)
lookupLocatedOccRn = wrapLocM lookupOccRn
lookupLocalOccRn_maybe :: RdrName -> RnM (Maybe Name)
-- Just look in the local environment
lookupLocalOccRn_maybe rdr_name
= do { local_env <- getLocalRdrEnv
; return (lookupLocalRdrEnv local_env rdr_name) }
lookupLocalOccThLvl_maybe :: Name -> RnM (Maybe (TopLevelFlag, ThLevel))
-- Just look in the local environment
lookupLocalOccThLvl_maybe name
= do { lcl_env <- getLclEnv
; return (lookupNameEnv (tcl_th_bndrs lcl_env) name) }
-- lookupOccRn looks up an occurrence of a RdrName
lookupOccRn :: RdrName -> RnM Name
lookupOccRn rdr_name
= do { mb_name <- lookupOccRn_maybe rdr_name
; case mb_name of
Just name -> return name
Nothing -> reportUnboundName rdr_name }
lookupKindOccRn :: RdrName -> RnM Name
-- Looking up a name occurring in a kind
lookupKindOccRn rdr_name
| isVarOcc (rdrNameOcc rdr_name) -- See Note [Promoted variables in types]
= badVarInType rdr_name
| otherwise
= do { typeintype <- xoptM LangExt.TypeInType
; if | typeintype -> lookupTypeOccRn rdr_name
-- With -XNoTypeInType, treat any usage of * in kinds as in scope
-- this is a dirty hack, but then again so was the old * kind.
| is_star rdr_name -> return starKindTyConName
| is_uni_star rdr_name -> return unicodeStarKindTyConName
| otherwise -> lookupOccRn rdr_name }
-- lookupPromotedOccRn looks up an optionally promoted RdrName.
lookupTypeOccRn :: RdrName -> RnM Name
-- see Note [Demotion]
lookupTypeOccRn rdr_name
| isVarOcc (rdrNameOcc rdr_name) -- See Note [Promoted variables in types]
= badVarInType rdr_name
| otherwise
= do { mb_name <- lookupOccRn_maybe rdr_name
; case mb_name of {
Just name -> return name ;
Nothing -> do { dflags <- getDynFlags
; lookup_demoted rdr_name dflags } } }
lookup_demoted :: RdrName -> DynFlags -> RnM Name
lookup_demoted rdr_name dflags
| Just demoted_rdr <- demoteRdrName rdr_name
-- Maybe it's the name of a *data* constructor
= do { data_kinds <- xoptM LangExt.DataKinds
; mb_demoted_name <- lookupOccRn_maybe demoted_rdr
; case mb_demoted_name of
Nothing -> unboundNameX WL_Any rdr_name star_info
Just demoted_name
| data_kinds ->
do { whenWOptM Opt_WarnUntickedPromotedConstructors $
addWarn (Reason Opt_WarnUntickedPromotedConstructors)
(untickedPromConstrWarn demoted_name)
; return demoted_name }
| otherwise -> unboundNameX WL_Any rdr_name suggest_dk }
| otherwise
= reportUnboundName rdr_name
where
suggest_dk = text "A data constructor of that name is in scope; did you mean DataKinds?"
untickedPromConstrWarn name =
text "Unticked promoted constructor" <> colon <+> quotes (ppr name) <> dot
$$
hsep [ text "Use"
, quotes (char '\'' <> ppr name)
, text "instead of"
, quotes (ppr name) <> dot ]
star_info
| is_star rdr_name || is_uni_star rdr_name
= if xopt LangExt.TypeInType dflags
then text "NB: With TypeInType, you must import" <+>
ppr rdr_name <+> text "from Data.Kind"
else empty
| otherwise
= empty
is_star, is_uni_star :: RdrName -> Bool
is_star = (fsLit "*" ==) . occNameFS . rdrNameOcc
is_uni_star = (fsLit "★" ==) . occNameFS . rdrNameOcc
badVarInType :: RdrName -> RnM Name
badVarInType rdr_name
= do { addErr (text "Illegal promoted term variable in a type:"
<+> ppr rdr_name)
; return (mkUnboundNameRdr rdr_name) }
{- Note [Promoted variables in types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (Trac #12686):
x = True
data Bad = Bad 'x
The parser treats the quote in 'x as saying "use the term
namespace", so we'll get (Bad x{v}), with 'x' in the
VarName namespace. If we don't test for this, the renamer
will happily rename it to the x bound at top level, and then
the typechecker falls over because it doesn't have 'x' in scope
when kind-checking.
Note [Demotion]
~~~~~~~~~~~~~~~
When the user writes:
data Nat = Zero | Succ Nat
foo :: f Zero -> Int
'Zero' in the type signature of 'foo' is parsed as:
HsTyVar ("Zero", TcClsName)
When the renamer hits this occurrence of 'Zero' it's going to realise
that it's not in scope. But because it is renaming a type, it knows
that 'Zero' might be a promoted data constructor, so it will demote
its namespace to DataName and do a second lookup.
The final result (after the renamer) will be:
HsTyVar ("Zero", DataName)
-}
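-- A user-level illustration of the demotion path above (a hypothetical
-- example, not GHC source): with DataKinds the signature renames by demoting
-- 'Zero', and the unticked use may trigger -Wunticked-promoted-constructors.
--
-- > {-# LANGUAGE DataKinds #-}
-- > data Nat = Zero | Succ Nat
-- >
-- > foo :: f Zero -> Int
-- > foo _ = 0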
-- Use this version to get tracing
--
-- lookupOccRn_maybe, lookupOccRn_maybe' :: RdrName -> RnM (Maybe Name)
-- lookupOccRn_maybe rdr_name
-- = do { mb_res <- lookupOccRn_maybe' rdr_name
-- ; gbl_rdr_env <- getGlobalRdrEnv
-- ; local_rdr_env <- getLocalRdrEnv
-- ; traceRn $ text "lookupOccRn_maybe" <+>
-- vcat [ ppr rdr_name <+> ppr (getUnique (rdrNameOcc rdr_name))
-- , ppr mb_res
-- , text "Lcl env" <+> ppr local_rdr_env
-- , text "Gbl env" <+> ppr [ (getUnique (nameOccName (gre_name (head gres'))),gres') | gres <- occEnvElts gbl_rdr_env
-- , let gres' = filter isLocalGRE gres, not (null gres') ] ]
-- ; return mb_res }
lookupOccRn_maybe :: RdrName -> RnM (Maybe Name)
-- lookupOccRn looks up an occurrence of a RdrName
lookupOccRn_maybe rdr_name
= do { local_env <- getLocalRdrEnv
; case lookupLocalRdrEnv local_env rdr_name of {
Just name -> return (Just name) ;
Nothing -> do
; lookupGlobalOccRn_maybe rdr_name } }
lookupGlobalOccRn_maybe :: RdrName -> RnM (Maybe Name)
-- Looks up a RdrName occurrence in the top-level
-- environment, including using lookupQualifiedNameGHCi
-- for the GHCi case
-- No filter function; does not report an error on failure
-- Uses addUsedRdrName to record use and deprecations
lookupGlobalOccRn_maybe rdr_name
| Just n <- isExact_maybe rdr_name -- This happens in derived code
= do { n' <- lookupExactOcc n; return (Just n') }
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { n <- lookupOrig rdr_mod rdr_occ
; return (Just n) }
| otherwise
= do { mb_gre <- lookupGreRn_maybe rdr_name
; case mb_gre of {
Just gre -> return (Just (gre_name gre)) ;
Nothing ->
do { ns <- lookupQualifiedNameGHCi rdr_name
-- This test is not expensive,
-- and only happens for failed lookups
; case ns of
(n:_) -> return (Just n) -- Unlikely to be more than one...?
[] -> return Nothing } } }
lookupGlobalOccRn :: RdrName -> RnM Name
-- lookupGlobalOccRn is like lookupOccRn, except that it looks in the global
-- environment. Adds an error message if the RdrName is not in scope.
lookupGlobalOccRn rdr_name
= do { mb_name <- lookupGlobalOccRn_maybe rdr_name
; case mb_name of
Just n -> return n
Nothing -> do { traceRn "lookupGlobalOccRn" (ppr rdr_name)
; unboundName WL_Global rdr_name } }
lookupInfoOccRn :: RdrName -> RnM [Name]
-- lookupInfoOccRn is intended for use in GHCi's ":info" command
-- It finds all the GREs that RdrName could mean, not complaining
-- about ambiguity, but rather returning them all
-- C.f. Trac #9881
lookupInfoOccRn rdr_name
| Just n <- isExact_maybe rdr_name -- e.g. (->)
= return [n]
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { n <- lookupOrig rdr_mod rdr_occ
; return [n] }
| otherwise
= do { rdr_env <- getGlobalRdrEnv
; let ns = map gre_name (lookupGRE_RdrName rdr_name rdr_env)
; qual_ns <- lookupQualifiedNameGHCi rdr_name
; return (ns ++ (qual_ns `minusList` ns)) }
-- | Like 'lookupOccRn_maybe', but with a more informative result if
-- the 'RdrName' happens to be a record selector:
--
-- * Nothing -> name not in scope (no error reported)
-- * Just (Left x) -> name uniquely refers to x,
-- or there is a name clash (reported)
-- * Just (Right xs) -> name refers to one or more record selectors;
-- if overload_ok was False, this list will be
-- a singleton.
lookupOccRn_overloaded :: Bool -> RdrName -> RnM (Maybe (Either Name [FieldOcc Name]))
lookupOccRn_overloaded overload_ok rdr_name
= do { local_env <- getLocalRdrEnv
; case lookupLocalRdrEnv local_env rdr_name of {
Just name -> return (Just (Left name)) ;
Nothing -> do
{ mb_name <- lookupGlobalOccRn_overloaded overload_ok rdr_name
; case mb_name of {
Just name -> return (Just name) ;
Nothing -> do
{ ns <- lookupQualifiedNameGHCi rdr_name
-- This test is not expensive,
-- and only happens for failed lookups
; case ns of
(n:_) -> return $ Just $ Left n -- Unlikely to be more than one...?
[] -> return Nothing } } } } }
lookupGlobalOccRn_overloaded :: Bool -> RdrName -> RnM (Maybe (Either Name [FieldOcc Name]))
lookupGlobalOccRn_overloaded overload_ok rdr_name
| Just n <- isExact_maybe rdr_name -- This happens in derived code
= do { n' <- lookupExactOcc n; return (Just (Left n')) }
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { n <- lookupOrig rdr_mod rdr_occ
; return (Just (Left n)) }
| otherwise
= do { env <- getGlobalRdrEnv
; case lookupGRE_RdrName rdr_name env of
[] -> return Nothing
[gre] | isRecFldGRE gre
-> do { addUsedGRE True gre
; let
fld_occ :: FieldOcc Name
fld_occ
= FieldOcc (noLoc rdr_name) (gre_name gre)
; return (Just (Right [fld_occ])) }
| otherwise
-> do { addUsedGRE True gre
; return (Just (Left (gre_name gre))) }
gres | all isRecFldGRE gres && overload_ok
-- Don't record usage for ambiguous selectors
-- until we know which is meant
-> return
(Just (Right
(map (FieldOcc (noLoc rdr_name) . gre_name)
gres)))
gres -> do { addNameClashErrRn rdr_name gres
; return (Just (Left (gre_name (head gres)))) } }
--------------------------------------------------
-- Lookup in the Global RdrEnv of the module
--------------------------------------------------
lookupGreRn_maybe :: RdrName -> RnM (Maybe GlobalRdrElt)
-- Look up the RdrName in the GlobalRdrEnv
-- Exactly one binding: records it as "used", return (Just gre)
-- No bindings: return Nothing
-- Many bindings: report "ambiguous", return an arbitrary (Just gre)
-- (This API is a bit strange; lookupGreRn2_maybe is simpler.
-- But it works and I don't want to fiddle too much.)
-- Uses addUsedRdrName to record use and deprecations
lookupGreRn_maybe rdr_name
= do { env <- getGlobalRdrEnv
; case lookupGRE_RdrName rdr_name env of
[] -> return Nothing
[gre] -> do { addUsedGRE True gre
; return (Just gre) }
gres -> do { addNameClashErrRn rdr_name gres
; traceRn "lookupGreRn:name clash"
(ppr rdr_name $$ ppr gres $$ ppr env)
; return (Just (head gres)) } }
lookupGreRn2_maybe :: RdrName -> RnM (Maybe GlobalRdrElt)
-- Look up the RdrName in the GlobalRdrEnv
-- Exactly one binding: record it as "used", return (Just gre)
-- No bindings: report "not in scope", return Nothing
-- Many bindings: report "ambiguous", return Nothing
-- Uses addUsedRdrName to record use and deprecations
lookupGreRn2_maybe rdr_name
= do { env <- getGlobalRdrEnv
; case lookupGRE_RdrName rdr_name env of
[] -> do { _ <- unboundName WL_Global rdr_name
; return Nothing }
[gre] -> do { addUsedGRE True gre
; return (Just gre) }
gres -> do { addNameClashErrRn rdr_name gres
; traceRn "lookupGreRn_maybe:name clash"
(ppr rdr_name $$ ppr gres $$ ppr env)
; return Nothing } }
lookupGreAvailRn :: RdrName -> RnM (Name, AvailInfo)
-- Used in export lists
-- If not found or ambiguous, add error message, and fake with UnboundName
-- Uses addUsedRdrName to record use and deprecations
lookupGreAvailRn rdr_name
= do { mb_gre <- lookupGreRn2_maybe rdr_name
; case mb_gre of {
Just gre -> return (gre_name gre, availFromGRE gre) ;
Nothing ->
do { traceRn "lookupGreAvailRn" (ppr rdr_name)
; let name = mkUnboundNameRdr rdr_name
; return (name, avail name) } } }
{-
*********************************************************
* *
Deprecations
* *
*********************************************************
Note [Handling of deprecations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* We report deprecations at each *occurrence* of the deprecated thing
(see Trac #5867)
* We do not report deprecations for locally-defined names. For a
start, we may be exporting a deprecated thing. Also we may use a
  deprecated thing in the defn of another deprecated thing. We may
even use a deprecated thing in the defn of a non-deprecated thing,
when changing a module's interface.
* addUsedGREs: we do not report deprecations for sub-binders:
- the ".." completion for records
- the ".." in an export item 'T(..)'
- the things exported by a module export 'module M'
-}
addUsedDataCons :: GlobalRdrEnv -> TyCon -> RnM ()
-- Remember use of in-scope data constructors (Trac #7969)
addUsedDataCons rdr_env tycon
= addUsedGREs [ gre
| dc <- tyConDataCons tycon
, Just gre <- [lookupGRE_Name rdr_env (dataConName dc)] ]
addUsedGRE :: Bool -> GlobalRdrElt -> RnM ()
-- Called for both local and imported things
-- Add usage *and* warn if deprecated
addUsedGRE warn_if_deprec gre
= do { when warn_if_deprec (warnIfDeprecated gre)
; unless (isLocalGRE gre) $
do { env <- getGblEnv
; traceRn "addUsedGRE" (ppr gre)
; updMutVar (tcg_used_gres env) (gre :) } }
addUsedGREs :: [GlobalRdrElt] -> RnM ()
-- Record uses of any *imported* GREs
-- Used for recording used sub-bndrs
-- NB: no call to warnIfDeprecated; see Note [Handling of deprecations]
addUsedGREs gres
| null imp_gres = return ()
| otherwise = do { env <- getGblEnv
; traceRn "addUsedGREs" (ppr imp_gres)
; updMutVar (tcg_used_gres env) (imp_gres ++) }
where
imp_gres = filterOut isLocalGRE gres
warnIfDeprecated :: GlobalRdrElt -> RnM ()
warnIfDeprecated gre@(GRE { gre_name = name, gre_imp = iss })
| (imp_spec : _) <- iss
= do { dflags <- getDynFlags
; this_mod <- getModule
; when (wopt Opt_WarnWarningsDeprecations dflags &&
not (nameIsLocalOrFrom this_mod name)) $
-- See Note [Handling of deprecations]
do { iface <- loadInterfaceForName doc name
; case lookupImpDeprec iface gre of
Just txt -> addWarn (Reason Opt_WarnWarningsDeprecations)
(mk_msg imp_spec txt)
Nothing -> return () } }
| otherwise
= return ()
where
occ = greOccName gre
name_mod = ASSERT2( isExternalName name, ppr name ) nameModule name
doc = text "The name" <+> quotes (ppr occ) <+> ptext (sLit "is mentioned explicitly")
mk_msg imp_spec txt
= sep [ sep [ text "In the use of"
<+> pprNonVarNameSpace (occNameSpace occ)
<+> quotes (ppr occ)
, parens imp_msg <> colon ]
, ppr txt ]
where
imp_mod = importSpecModule imp_spec
imp_msg = text "imported from" <+> ppr imp_mod <> extra
extra | imp_mod == moduleName name_mod = Outputable.empty
| otherwise = text ", but defined in" <+> ppr name_mod
lookupImpDeprec :: ModIface -> GlobalRdrElt -> Maybe WarningTxt
lookupImpDeprec iface gre
= mi_warn_fn iface (greOccName gre) `mplus` -- Bleat if the thing,
case gre_par gre of -- or its parent, is warn'd
ParentIs p -> mi_warn_fn iface (nameOccName p)
FldParent { par_is = p } -> mi_warn_fn iface (nameOccName p)
NoParent -> Nothing
{-
Note [Used names with interface not loaded]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's (just) possible to find a used
Name whose interface hasn't been loaded:
a) It might be a WiredInName; in that case we may not load
its interface (although we could).
b) It might be GHC.Real.fromRational, or GHC.Num.fromInteger
     These are seen as "used" by the renamer (if -XRebindableSyntax
is on), but the typechecker may discard their uses
if in fact the in-scope fromRational is GHC.Read.fromRational,
(see tcPat.tcOverloadedLit), and the typechecker sees that the type
is fixed, say, to GHC.Base.Float (see Inst.lookupSimpleInst).
In that obscure case it won't force the interface in.
In both cases we simply don't permit deprecations;
this is, after all, wired-in stuff.
*********************************************************
* *
GHCi support
* *
*********************************************************
A qualified name on the command line can refer to any module at
all: we try to load the interface if we don't already have it, just
as if there was an "import qualified M" declaration for every
module.
If we fail we just return Nothing, rather than bleating
about "attempting to use module ‘D’ (./D.hs) which is not loaded"
which is what loadSrcInterface does.
Note [Safe Haskell and GHCi]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We DON'T do this for Safe Haskell, as we need to check imports. We can
and should instead check the qualified import but at the moment
this requires some refactoring so leave as a TODO
-}
lookupQualifiedNameGHCi :: RdrName -> RnM [Name]
lookupQualifiedNameGHCi rdr_name
= -- We want to behave as we would for a source file import here,
-- and respect hiddenness of modules/packages, hence loadSrcInterface.
do { dflags <- getDynFlags
; is_ghci <- getIsGHCi
; go_for_it dflags is_ghci }
where
go_for_it dflags is_ghci
| Just (mod,occ) <- isQual_maybe rdr_name
, is_ghci
, gopt Opt_ImplicitImportQualified dflags -- Enables this GHCi behaviour
, not (safeDirectImpsReq dflags) -- See Note [Safe Haskell and GHCi]
= do { res <- loadSrcInterface_maybe doc mod False Nothing
; case res of
Succeeded iface
-> return [ name
| avail <- mi_exports iface
, name <- availNames avail
, nameOccName name == occ ]
_ -> -- Either we couldn't load the interface, or
-- we could but we didn't find the name in it
do { traceRn "lookupQualifiedNameGHCi" (ppr rdr_name)
; return [] } }
| otherwise
= do { traceRn "lookupQualifedNameGHCi: off" (ppr rdr_name)
; return [] }
doc = text "Need to find" <+> ppr rdr_name
{-
Note [Looking up signature names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
lookupSigOccRn is used for type signatures and pragmas
Is this valid?
module A
import M( f )
f :: Int -> Int
f x = x
It's clear that the 'f' in the signature must refer to A.f
The Haskell98 report does not stipulate this, but it will!
So we must treat the 'f' in the signature in the same way
as the binding occurrence of 'f', using lookupBndrRn
However, consider this case:
import M( f )
f :: Int -> Int
g x = x
We don't want to say 'f' is out of scope; instead, we want to
return the imported 'f', so that later on the renamer will
correctly report "misplaced type sig".
Note [Signatures for top level things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
data HsSigCtxt = ... | TopSigCtxt NameSet | ....
* The NameSet says what is bound in this group of bindings.
We can't use isLocalGRE from the GlobalRdrEnv, because of this:
f x = x
$( ...some TH splice... )
f :: Int -> Int
When we encounter the signature for 'f', the binding for 'f'
will be in the GlobalRdrEnv, and will be a LocalDef. Yet the
signature is mis-placed
* For type signatures the NameSet should be the names bound by the
value bindings; for fixity declarations, the NameSet should also
include class sigs and record selectors
infix 3 `f` -- Yes, ok
f :: C a => a -> a -- No, not ok
class C a where
f :: a -> a
-}
data HsSigCtxt
= TopSigCtxt NameSet -- At top level, binding these names
-- See Note [Signatures for top level things]
| LocalBindCtxt NameSet -- In a local binding, binding these names
| ClsDeclCtxt Name -- Class decl for this class
| InstDeclCtxt NameSet -- Instance decl whose user-written method
-- bindings are for these methods
| HsBootCtxt NameSet -- Top level of a hs-boot file, binding these names
| RoleAnnotCtxt NameSet -- A role annotation, with the names of all types
-- in the group
instance Outputable HsSigCtxt where
ppr (TopSigCtxt ns) = text "TopSigCtxt" <+> ppr ns
ppr (LocalBindCtxt ns) = text "LocalBindCtxt" <+> ppr ns
ppr (ClsDeclCtxt n) = text "ClsDeclCtxt" <+> ppr n
ppr (InstDeclCtxt ns) = text "InstDeclCtxt" <+> ppr ns
ppr (HsBootCtxt ns) = text "HsBootCtxt" <+> ppr ns
ppr (RoleAnnotCtxt ns) = text "RoleAnnotCtxt" <+> ppr ns
lookupSigOccRn :: HsSigCtxt
-> Sig RdrName
-> Located RdrName -> RnM (Located Name)
lookupSigOccRn ctxt sig = lookupSigCtxtOccRn ctxt (hsSigDoc sig)
-- | Lookup a name in relation to the names in a 'HsSigCtxt'
lookupSigCtxtOccRn :: HsSigCtxt
-> SDoc -- ^ description of thing we're looking up,
-- like "type family"
-> Located RdrName -> RnM (Located Name)
lookupSigCtxtOccRn ctxt what
= wrapLocM $ \ rdr_name ->
do { mb_name <- lookupBindGroupOcc ctxt what rdr_name
; case mb_name of
Left err -> do { addErr err; return (mkUnboundNameRdr rdr_name) }
Right name -> return name }
lookupBindGroupOcc :: HsSigCtxt
-> SDoc
-> RdrName -> RnM (Either MsgDoc Name)
-- Looks up the RdrName, expecting it to resolve to one of the
-- bound names passed in. If not, return an appropriate error message
--
-- See Note [Looking up signature names]
lookupBindGroupOcc ctxt what rdr_name
| Just n <- isExact_maybe rdr_name
= lookupExactOcc_either n -- allow for the possibility of missing Exacts;
-- see Note [dataTcOccs and Exact Names]
-- Maybe we should check the side conditions
-- but it's a pain, and Exact things only show
-- up when you know what you are doing
| Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name
= do { n' <- lookupOrig rdr_mod rdr_occ
; return (Right n') }
| otherwise
= case ctxt of
HsBootCtxt ns -> lookup_top (`elemNameSet` ns)
TopSigCtxt ns -> lookup_top (`elemNameSet` ns)
RoleAnnotCtxt ns -> lookup_top (`elemNameSet` ns)
LocalBindCtxt ns -> lookup_group ns
ClsDeclCtxt cls -> lookup_cls_op cls
InstDeclCtxt ns -> lookup_top (`elemNameSet` ns)
where
lookup_cls_op cls
= lookupSubBndrOcc True cls doc rdr_name
where
doc = text "method of class" <+> quotes (ppr cls)
lookup_top keep_me
= do { env <- getGlobalRdrEnv
; let all_gres = lookupGlobalRdrEnv env (rdrNameOcc rdr_name)
; case filter (keep_me . gre_name) all_gres of
[] | null all_gres -> bale_out_with Outputable.empty
| otherwise -> bale_out_with local_msg
(gre:_) -> return (Right (gre_name gre)) }
lookup_group bound_names -- Look in the local envt (not top level)
= do { local_env <- getLocalRdrEnv
; case lookupLocalRdrEnv local_env rdr_name of
Just n
| n `elemNameSet` bound_names -> return (Right n)
| otherwise -> bale_out_with local_msg
Nothing -> bale_out_with Outputable.empty }
bale_out_with msg
= return (Left (sep [ text "The" <+> what
<+> text "for" <+> quotes (ppr rdr_name)
, nest 2 $ text "lacks an accompanying binding"]
$$ nest 2 msg))
local_msg = parens $ text "The" <+> what <+> ptext (sLit "must be given where")
<+> quotes (ppr rdr_name) <+> text "is declared"
---------------
lookupLocalTcNames :: HsSigCtxt -> SDoc -> RdrName -> RnM [(RdrName, Name)]
-- GHC extension: look up both the tycon and data con or variable.
-- Used for top-level fixity signatures and deprecations.
-- Complain if neither is in scope.
-- See Note [Fixity signature lookup]
lookupLocalTcNames ctxt what rdr_name
= do { mb_gres <- mapM lookup (dataTcOccs rdr_name)
; let (errs, names) = splitEithers mb_gres
; when (null names) $ addErr (head errs) -- Bleat about one only
; return names }
where
lookup rdr = do { name <- lookupBindGroupOcc ctxt what rdr
; return (fmap ((,) rdr) name) }
dataTcOccs :: RdrName -> [RdrName]
-- Return both the given name and the same name promoted to the TcClsName
-- namespace. This is useful when we aren't sure which we are looking at.
-- See also Note [dataTcOccs and Exact Names]
dataTcOccs rdr_name
| isDataOcc occ || isVarOcc occ
= [rdr_name, rdr_name_tc]
| otherwise
= [rdr_name]
where
occ = rdrNameOcc rdr_name
rdr_name_tc = setRdrNameSpace rdr_name tcName
{-
Note [dataTcOccs and Exact Names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Exact RdrNames can occur in code generated by Template Haskell, and generally
those references are, well, exact. However, the TH `Name` type isn't expressive
enough to always track the correct namespace information, so we sometimes get
the right Unique but wrong namespace. Thus, we still have to do the double-lookup
for Exact RdrNames.
There is also an awkward situation for built-in syntax. Example in GHCi
:info []
This parses as the Exact RdrName for nilDataCon, but we also want
the list type constructor.
Note that setRdrNameSpace on an Exact name requires the Name to be External,
which it always is for built in syntax.
*********************************************************
* *
Fixities
* *
*********************************************************
Note [Fixity signature lookup]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A fixity declaration like
infixr 2 ?
can refer to a value-level operator, e.g.:
(?) :: String -> String -> String
or a type-level operator, like:
data (?) a b = A a | B b
so we extend the lookup of the reader name '?' to the TcClsName namespace, as
well as the original namespace.
The extended lookup is also used in other places, like resolution of
deprecation declarations, and lookup of names in GHCi.
-}
--------------------------------
type MiniFixityEnv = FastStringEnv (Located Fixity)
-- Mini fixity env for the names we're about
-- to bind, in a single binding group
--
-- It is keyed by the *FastString*, not the *OccName*, because
-- the single fixity decl infix 3 T
-- affects both the data constructor T and the type constructor T
--
-- We keep the location so that if we find
-- a duplicate, we can report it sensibly
--------------------------------
-- Used for nested fixity decls to bind names along with their fixities.
-- the fixities are given as a UFM from an OccName's FastString to a fixity decl
addLocalFixities :: MiniFixityEnv -> [Name] -> RnM a -> RnM a
addLocalFixities mini_fix_env names thing_inside
= extendFixityEnv (mapMaybe find_fixity names) thing_inside
where
find_fixity name
= case lookupFsEnv mini_fix_env (occNameFS occ) of
Just (L _ fix) -> Just (name, FixItem occ fix)
Nothing -> Nothing
where
occ = nameOccName name
{-
--------------------------------
lookupFixity is a bit strange.
* Nested local fixity decls are put in the local fixity env, which we
  find with getFixityEnv
* Imported fixities are found in the PIT
* Top-level fixity decls in this module may be for Names that are
either Global (constructors, class operations)
or Local/Exported (everything else)
(See notes with RnNames.getLocalDeclBinders for why we have this split.)
We put them all in the local fixity environment
-}
lookupFixityRn :: Name -> RnM Fixity
lookupFixityRn name = lookupFixityRn' name (nameOccName name)
lookupFixityRn' :: Name -> OccName -> RnM Fixity
lookupFixityRn' name = fmap snd . lookupFixityRn_help' name
-- | 'lookupFixityRn_help' returns @(True, fixity)@ if it finds a 'Fixity'
-- in a local environment or from an interface file. Otherwise, it returns
-- @(False, fixity)@ (e.g., for unbound 'Name's or 'Name's without
-- user-supplied fixity declarations).
lookupFixityRn_help :: Name
-> RnM (Bool, Fixity)
lookupFixityRn_help name =
lookupFixityRn_help' name (nameOccName name)
lookupFixityRn_help' :: Name
-> OccName
-> RnM (Bool, Fixity)
lookupFixityRn_help' name occ
| isUnboundName name
= return (False, Fixity (show minPrecedence) minPrecedence InfixL)
      -- Minimise errors from unbound names; eg
-- a>0 `foo` b>0
-- where 'foo' is not in scope, should not give an error (Trac #7937)
| otherwise
= do { local_fix_env <- getFixityEnv
; case lookupNameEnv local_fix_env name of {
Just (FixItem _ fix) -> return (True, fix) ;
Nothing ->
do { this_mod <- getModule
; if nameIsLocalOrFrom this_mod name
-- Local (and interactive) names are all in the
-- fixity env, and don't have entries in the HPT
then return (False, defaultFixity)
else lookup_imported } } }
where
lookup_imported
-- For imported names, we have to get their fixities by doing a
      -- loadInterfaceForName, and consulting the interface that comes back
-- from that, because the interface file for the Name might not
-- have been loaded yet. Why not? Suppose you import module A,
-- which exports a function 'f', thus;
-- module CurrentModule where
-- import A( f )
-- module A( f ) where
-- import B( f )
-- Then B isn't loaded right away (after all, it's possible that
-- nothing from B will be used). When we come across a use of
-- 'f', we need to know its fixity, and it's then, and only
-- then, that we load B.hi. That is what's happening here.
--
-- loadInterfaceForName will find B.hi even if B is a hidden module,
-- and that's what we want.
= do { iface <- loadInterfaceForName doc name
; let mb_fix = mi_fix_fn iface occ
; let msg = case mb_fix of
Nothing ->
text "looking up name" <+> ppr name
<+> text "in iface, but found no fixity for it."
<+> text "Using default fixity instead."
Just f ->
text "looking up name in iface and found:"
<+> vcat [ppr name, ppr f]
; traceRn "lookupFixityRn_either:" msg
; return (maybe (False, defaultFixity) (\f -> (True, f)) mb_fix) }
doc = text "Checking fixity for" <+> ppr name
---------------
lookupTyFixityRn :: Located Name -> RnM Fixity
lookupTyFixityRn (L _ n) = lookupFixityRn n
-- | Look up the fixity of a (possibly ambiguous) occurrence of a record field
-- selector. We use 'lookupFixityRn'' so that we can specify the 'OccName' as
-- the field label, which might be different to the 'OccName' of the selector
-- 'Name' if @DuplicateRecordFields@ is in use (Trac #1173). If there are
-- multiple possible selectors with different fixities, generate an error.
lookupFieldFixityRn :: AmbiguousFieldOcc Name -> RnM Fixity
lookupFieldFixityRn (Unambiguous (L _ rdr) n)
= lookupFixityRn' n (rdrNameOcc rdr)
lookupFieldFixityRn (Ambiguous (L _ rdr) _) = get_ambiguous_fixity rdr
where
get_ambiguous_fixity :: RdrName -> RnM Fixity
get_ambiguous_fixity rdr_name = do
traceRn "get_ambiguous_fixity" (ppr rdr_name)
rdr_env <- getGlobalRdrEnv
let elts = lookupGRE_RdrName rdr_name rdr_env
fixities <- groupBy ((==) `on` snd) . zip elts
<$> mapM lookup_gre_fixity elts
case fixities of
-- There should always be at least one fixity.
-- Something's very wrong if there are no fixity candidates, so panic
[] -> panic "get_ambiguous_fixity: no candidates for a given RdrName"
[ (_, fix):_ ] -> return fix
ambigs -> addErr (ambiguous_fixity_err rdr_name ambigs)
>> return (Fixity(show minPrecedence) minPrecedence InfixL)
lookup_gre_fixity gre = lookupFixityRn' (gre_name gre) (greOccName gre)
ambiguous_fixity_err rn ambigs
= vcat [ text "Ambiguous fixity for record field" <+> quotes (ppr rn)
, hang (text "Conflicts: ") 2 . vcat .
map format_ambig $ concat ambigs ]
format_ambig (elt, fix) = hang (ppr fix)
2 (pprNameProvenance elt)
{- *********************************************************************
* *
Role annotations
* *
********************************************************************* -}
type RoleAnnotEnv = NameEnv (LRoleAnnotDecl Name)
mkRoleAnnotEnv :: [LRoleAnnotDecl Name] -> RoleAnnotEnv
mkRoleAnnotEnv role_annot_decls
= mkNameEnv [ (name, ra_decl)
| ra_decl <- role_annot_decls
, let name = roleAnnotDeclName (unLoc ra_decl)
, not (isUnboundName name) ]
-- Some of the role annots will be unbound;
-- we don't wish to include these
emptyRoleAnnotEnv :: RoleAnnotEnv
emptyRoleAnnotEnv = emptyNameEnv
lookupRoleAnnot :: RoleAnnotEnv -> Name -> Maybe (LRoleAnnotDecl Name)
lookupRoleAnnot = lookupNameEnv
getRoleAnnots :: [Name] -> RoleAnnotEnv -> ([LRoleAnnotDecl Name], RoleAnnotEnv)
getRoleAnnots bndrs role_env
= ( mapMaybe (lookupRoleAnnot role_env) bndrs
, delListFromNameEnv role_env bndrs )
{-
************************************************************************
* *
Rebindable names
Dealing with rebindable syntax is driven by the
Opt_RebindableSyntax dynamic flag.
In "deriving" code we don't want to use rebindable syntax
so we switch off the flag locally
* *
************************************************************************
Haskell 98 says that when you say "3" you get the "fromInteger" from the
Standard Prelude, regardless of what is in scope. However, to experiment
with having a language that is less coupled to the standard prelude, we're
trying a non-standard extension that instead gives you whatever "Prelude.fromInteger"
happens to be in scope. Then you can
import Prelude ()
import MyPrelude as Prelude
to get the desired effect.
At the moment this just happens for
* fromInteger, fromRational on literals (in expressions and patterns)
* negate (in expressions)
* minus (arising from n+k patterns)
* "do" notation
We store the relevant Name in the HsSyn tree, in
* HsIntegral/HsFractional/HsIsString
* NegApp
* NPlusKPat
* HsDo
respectively. Initially, we just store the "standard" name (PrelNames.fromIntegralName,
fromRationalName etc), but the renamer changes this to the appropriate user
name if Opt_NoImplicitPrelude is on. That is what lookupSyntaxName does.
We treat the original (standard) names as free-vars too, because the type checker
checks the type of the user thing against the type of the standard thing.
-}
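{- An illustrative, hedged sketch of the scheme described above (the module
   and function names below are made up for the example, not taken from GHC):

     {-# LANGUAGE RebindableSyntax #-}
     module Example where

     import Prelude ()
     import MyPrelude as Prelude  -- supplies its own fromInteger, negate, (>>=), ...

     three = 3      -- the renamer resolves this to the in-scope (MyPrelude) fromInteger
-}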
lookupIfThenElse :: RnM (Maybe (SyntaxExpr Name), FreeVars)
-- Different to lookupSyntaxName because in the non-rebindable
-- case we desugar directly rather than calling an existing function
-- Hence the (Maybe (SyntaxExpr Name)) return type
lookupIfThenElse
= do { rebindable_on <- xoptM LangExt.RebindableSyntax
; if not rebindable_on
then return (Nothing, emptyFVs)
else do { ite <- lookupOccRn (mkVarUnqual (fsLit "ifThenElse"))
; return ( Just (mkRnSyntaxExpr ite)
, unitFV ite ) } }
lookupSyntaxName' :: Name -- ^ The standard name
-> RnM Name -- ^ Possibly a non-standard name
lookupSyntaxName' std_name
= do { rebindable_on <- xoptM LangExt.RebindableSyntax
; if not rebindable_on then
return std_name
else
-- Get the similarly named thing from the local environment
lookupOccRn (mkRdrUnqual (nameOccName std_name)) }
lookupSyntaxName :: Name -- The standard name
-> RnM (SyntaxExpr Name, FreeVars) -- Possibly a non-standard name
lookupSyntaxName std_name
= do { rebindable_on <- xoptM LangExt.RebindableSyntax
; if not rebindable_on then
return (mkRnSyntaxExpr std_name, emptyFVs)
else
-- Get the similarly named thing from the local environment
do { usr_name <- lookupOccRn (mkRdrUnqual (nameOccName std_name))
; return (mkRnSyntaxExpr usr_name, unitFV usr_name) } }
lookupSyntaxNames :: [Name] -- Standard names
-> RnM ([HsExpr Name], FreeVars) -- See comments with HsExpr.ReboundNames
-- this works with CmdTop, which wants HsExprs, not SyntaxExprs
lookupSyntaxNames std_names
= do { rebindable_on <- xoptM LangExt.RebindableSyntax
; if not rebindable_on then
return (map (HsVar . noLoc) std_names, emptyFVs)
else
do { usr_names <- mapM (lookupOccRn . mkRdrUnqual . nameOccName) std_names
; return (map (HsVar . noLoc) usr_names, mkFVs usr_names) } }
{-
*********************************************************
* *
\subsection{Binding}
* *
*********************************************************
-}
newLocalBndrRn :: Located RdrName -> RnM Name
-- Used for non-top-level binders. These should
-- never be qualified.
newLocalBndrRn (L loc rdr_name)
| Just name <- isExact_maybe rdr_name
= return name -- This happens in code generated by Template Haskell
-- See Note [Binders in Template Haskell] in Convert.hs
| otherwise
= do { unless (isUnqual rdr_name)
(addErrAt loc (badQualBndrErr rdr_name))
; uniq <- newUnique
; return (mkInternalName uniq (rdrNameOcc rdr_name) loc) }
newLocalBndrsRn :: [Located RdrName] -> RnM [Name]
newLocalBndrsRn = mapM newLocalBndrRn
---------------------
bindLocatedLocalsRn :: [Located RdrName]
-> ([Name] -> RnM a)
-> RnM a
bindLocatedLocalsRn rdr_names_w_loc enclosed_scope
= do { checkDupRdrNames rdr_names_w_loc
; checkShadowedRdrNames rdr_names_w_loc
-- Make fresh Names and extend the environment
; names <- newLocalBndrsRn rdr_names_w_loc
; bindLocalNames names (enclosed_scope names) }
bindLocalNames :: [Name] -> RnM a -> RnM a
bindLocalNames names enclosed_scope
= do { lcl_env <- getLclEnv
; let th_level = thLevel (tcl_th_ctxt lcl_env)
th_bndrs' = extendNameEnvList (tcl_th_bndrs lcl_env)
[ (n, (NotTopLevel, th_level)) | n <- names ]
rdr_env' = extendLocalRdrEnvList (tcl_rdr lcl_env) names
; setLclEnv (lcl_env { tcl_th_bndrs = th_bndrs'
, tcl_rdr = rdr_env' })
enclosed_scope }
bindLocalNamesFV :: [Name] -> RnM (a, FreeVars) -> RnM (a, FreeVars)
bindLocalNamesFV names enclosed_scope
= do { (result, fvs) <- bindLocalNames names enclosed_scope
; return (result, delFVs names fvs) }
-------------------------------------
-- bindLocatedLocalsFV is the same as bindLocatedLocalsRn
-- except that it deals with free vars
bindLocatedLocalsFV :: [Located RdrName]
-> ([Name] -> RnM (a,FreeVars)) -> RnM (a, FreeVars)
bindLocatedLocalsFV rdr_names enclosed_scope
= bindLocatedLocalsRn rdr_names $ \ names ->
do (thing, fvs) <- enclosed_scope names
return (thing, delFVs names fvs)
-------------------------------------
extendTyVarEnvFVRn :: [Name] -> RnM (a, FreeVars) -> RnM (a, FreeVars)
-- This function is used only in rnSourceDecl on InstDecl
extendTyVarEnvFVRn tyvars thing_inside = bindLocalNamesFV tyvars thing_inside
-------------------------------------
checkDupRdrNames :: [Located RdrName] -> RnM ()
-- Check for duplicated names in a binding group
checkDupRdrNames rdr_names_w_loc
= mapM_ (dupNamesErr getLoc) dups
where
(_, dups) = removeDups (\n1 n2 -> unLoc n1 `compare` unLoc n2) rdr_names_w_loc
checkDupNames :: [Name] -> RnM ()
-- Check for duplicated names in a binding group
checkDupNames names = check_dup_names (filterOut isSystemName names)
-- See Note [Binders in Template Haskell] in Convert
check_dup_names :: [Name] -> RnM ()
check_dup_names names
= mapM_ (dupNamesErr nameSrcSpan) dups
where
(_, dups) = removeDups (\n1 n2 -> nameOccName n1 `compare` nameOccName n2) names
---------------------
checkShadowedRdrNames :: [Located RdrName] -> RnM ()
checkShadowedRdrNames loc_rdr_names
= do { envs <- getRdrEnvs
; checkShadowedOccs envs get_loc_occ filtered_rdrs }
where
filtered_rdrs = filterOut (isExact . unLoc) loc_rdr_names
-- See Note [Binders in Template Haskell] in Convert
get_loc_occ (L loc rdr) = (loc,rdrNameOcc rdr)
checkDupAndShadowedNames :: (GlobalRdrEnv, LocalRdrEnv) -> [Name] -> RnM ()
checkDupAndShadowedNames envs names
= do { check_dup_names filtered_names
; checkShadowedOccs envs get_loc_occ filtered_names }
where
filtered_names = filterOut isSystemName names
-- See Note [Binders in Template Haskell] in Convert
get_loc_occ name = (nameSrcSpan name, nameOccName name)
-------------------------------------
checkShadowedOccs :: (GlobalRdrEnv, LocalRdrEnv)
-> (a -> (SrcSpan, OccName))
-> [a] -> RnM ()
checkShadowedOccs (global_env,local_env) get_loc_occ ns
= whenWOptM Opt_WarnNameShadowing $
do { traceRn "checkShadowedOccs:shadow" (ppr (map get_loc_occ ns))
; mapM_ check_shadow ns }
where
check_shadow n
| startsWithUnderscore occ = return () -- Do not report shadowing for "_x"
-- See Trac #3262
| Just n <- mb_local = complain [text "bound at" <+> ppr (nameSrcLoc n)]
| otherwise = do { gres' <- filterM is_shadowed_gre gres
; complain (map pprNameProvenance gres') }
where
(loc,occ) = get_loc_occ n
mb_local = lookupLocalRdrOcc local_env occ
gres = lookupGRE_RdrName (mkRdrUnqual occ) global_env
-- Make an Unqualified RdrName and look that up, so that
-- we don't find any GREs that are in scope qualified-only
complain [] = return ()
complain pp_locs = addWarnAt (Reason Opt_WarnNameShadowing)
loc
(shadowedNameWarn occ pp_locs)
is_shadowed_gre :: GlobalRdrElt -> RnM Bool
-- Returns False for record selectors that are shadowed, when
-- punning or wild-cards are on (cf Trac #2723)
is_shadowed_gre gre | isRecFldGRE gre
= do { dflags <- getDynFlags
; return $ not (xopt LangExt.RecordPuns dflags
|| xopt LangExt.RecordWildCards dflags) }
is_shadowed_gre _other = return True
{-
************************************************************************
* *
What to do when a lookup fails
* *
************************************************************************
-}
data WhereLooking = WL_Any -- Any binding
| WL_Global -- Any top-level binding (local or imported)
| WL_LocalTop -- Any top-level binding in this module
reportUnboundName :: RdrName -> RnM Name
reportUnboundName rdr = unboundName WL_Any rdr
unboundName :: WhereLooking -> RdrName -> RnM Name
unboundName wl rdr = unboundNameX wl rdr Outputable.empty
unboundNameX :: WhereLooking -> RdrName -> SDoc -> RnM Name
unboundNameX where_look rdr_name extra
= do { dflags <- getDynFlags
; let show_helpful_errors = gopt Opt_HelpfulErrors dflags
what = pprNonVarNameSpace (occNameSpace (rdrNameOcc rdr_name))
err = unknownNameErr what rdr_name $$ extra
; if not show_helpful_errors
then addErr err
else do { local_env <- getLocalRdrEnv
; global_env <- getGlobalRdrEnv
; impInfo <- getImports
; let suggestions = unknownNameSuggestions_ where_look
dflags global_env local_env impInfo rdr_name
; addErr (err $$ suggestions) }
; return (mkUnboundNameRdr rdr_name) }
unknownNameErr :: SDoc -> RdrName -> SDoc
unknownNameErr what rdr_name
= vcat [ hang (text "Not in scope:")
2 (what <+> quotes (ppr rdr_name))
, extra ]
where
extra | rdr_name == forall_tv_RDR = perhapsForallMsg
| otherwise = Outputable.empty
type HowInScope = Either SrcSpan ImpDeclSpec
-- Left loc => locally bound at loc
-- Right ispec => imported as specified by ispec
-- | Called from the typechecker (TcErrors) when we find an unbound variable
unknownNameSuggestions :: DynFlags
-> GlobalRdrEnv -> LocalRdrEnv -> ImportAvails
-> RdrName -> SDoc
unknownNameSuggestions = unknownNameSuggestions_ WL_Any
unknownNameSuggestions_ :: WhereLooking -> DynFlags
-> GlobalRdrEnv -> LocalRdrEnv -> ImportAvails
-> RdrName -> SDoc
unknownNameSuggestions_ where_look dflags global_env local_env imports tried_rdr_name =
similarNameSuggestions where_look dflags global_env local_env tried_rdr_name $$
importSuggestions dflags imports tried_rdr_name
similarNameSuggestions :: WhereLooking -> DynFlags
-> GlobalRdrEnv -> LocalRdrEnv
-> RdrName -> SDoc
similarNameSuggestions where_look dflags global_env
local_env tried_rdr_name
= case suggest of
[] -> Outputable.empty
[p] -> perhaps <+> pp_item p
ps -> sep [ perhaps <+> text "one of these:"
, nest 2 (pprWithCommas pp_item ps) ]
where
all_possibilities :: [(String, (RdrName, HowInScope))]
all_possibilities
= [ (showPpr dflags r, (r, Left loc))
| (r,loc) <- local_possibilities local_env ]
++ [ (showPpr dflags r, rp) | (r, rp) <- global_possibilities global_env ]
suggest = fuzzyLookup (showPpr dflags tried_rdr_name) all_possibilities
perhaps = text "Perhaps you meant"
pp_item :: (RdrName, HowInScope) -> SDoc
pp_item (rdr, Left loc) = pp_ns rdr <+> quotes (ppr rdr) <+> loc' -- Locally defined
where loc' = case loc of
UnhelpfulSpan l -> parens (ppr l)
RealSrcSpan l -> parens (text "line" <+> int (srcSpanStartLine l))
pp_item (rdr, Right is) = pp_ns rdr <+> quotes (ppr rdr) <+> -- Imported
parens (text "imported from" <+> ppr (is_mod is))
pp_ns :: RdrName -> SDoc
pp_ns rdr | ns /= tried_ns = pprNameSpace ns
| otherwise = Outputable.empty
where ns = rdrNameSpace rdr
tried_occ = rdrNameOcc tried_rdr_name
tried_is_sym = isSymOcc tried_occ
tried_ns = occNameSpace tried_occ
tried_is_qual = isQual tried_rdr_name
correct_name_space occ = nameSpacesRelated (occNameSpace occ) tried_ns
&& isSymOcc occ == tried_is_sym
-- Treat operator and non-operators as non-matching
-- This heuristic avoids things like
-- Not in scope 'f'; perhaps you meant '+' (from Prelude)
local_ok = case where_look of { WL_Any -> True; _ -> False }
local_possibilities :: LocalRdrEnv -> [(RdrName, SrcSpan)]
local_possibilities env
| tried_is_qual = []
| not local_ok = []
| otherwise = [ (mkRdrUnqual occ, nameSrcSpan name)
| name <- localRdrEnvElts env
, let occ = nameOccName name
, correct_name_space occ]
gre_ok :: GlobalRdrElt -> Bool
gre_ok = case where_look of
WL_LocalTop -> isLocalGRE
_ -> \_ -> True
global_possibilities :: GlobalRdrEnv -> [(RdrName, (RdrName, HowInScope))]
global_possibilities global_env
| tried_is_qual = [ (rdr_qual, (rdr_qual, how))
| gre <- globalRdrEnvElts global_env
, gre_ok gre
, let name = gre_name gre
occ = nameOccName name
, correct_name_space occ
, (mod, how) <- quals_in_scope gre
, let rdr_qual = mkRdrQual mod occ ]
| otherwise = [ (rdr_unqual, pair)
| gre <- globalRdrEnvElts global_env
, gre_ok gre
, let name = gre_name gre
occ = nameOccName name
rdr_unqual = mkRdrUnqual occ
, correct_name_space occ
, pair <- case (unquals_in_scope gre, quals_only gre) of
(how:_, _) -> [ (rdr_unqual, how) ]
([], pr:_) -> [ pr ] -- See Note [Only-quals]
([], []) -> [] ]
-- Note [Only-quals]
-- The second alternative returns those names with the same
-- OccName as the one we tried, but live in *qualified* imports
-- e.g. if you have:
--
-- > import qualified Data.Map as Map
-- > foo :: Map
--
-- then we suggest @Map.Map@.
--------------------
unquals_in_scope :: GlobalRdrElt -> [HowInScope]
unquals_in_scope (GRE { gre_name = n, gre_lcl = lcl, gre_imp = is })
| lcl = [ Left (nameSrcSpan n) ]
| otherwise = [ Right ispec
| i <- is, let ispec = is_decl i
, not (is_qual ispec) ]
--------------------
quals_in_scope :: GlobalRdrElt -> [(ModuleName, HowInScope)]
-- Ones for which the qualified version is in scope
quals_in_scope (GRE { gre_name = n, gre_lcl = lcl, gre_imp = is })
| lcl = case nameModule_maybe n of
Nothing -> []
Just m -> [(moduleName m, Left (nameSrcSpan n))]
| otherwise = [ (is_as ispec, Right ispec)
| i <- is, let ispec = is_decl i ]
--------------------
quals_only :: GlobalRdrElt -> [(RdrName, HowInScope)]
-- Ones for which *only* the qualified version is in scope
quals_only (GRE { gre_name = n, gre_imp = is })
= [ (mkRdrQual (is_as ispec) (nameOccName n), Right ispec)
| i <- is, let ispec = is_decl i, is_qual ispec ]
-- | Generate helpful suggestions if a qualified name Mod.foo is not in scope.
importSuggestions :: DynFlags -> ImportAvails -> RdrName -> SDoc
importSuggestions _dflags imports rdr_name
| not (isQual rdr_name || isUnqual rdr_name) = Outputable.empty
| null interesting_imports
, Just name <- mod_name
= hsep
[ text "No module named"
, quotes (ppr name)
, text "is imported."
]
| is_qualified
, null helpful_imports
, [(mod,_)] <- interesting_imports
= hsep
[ text "Module"
, quotes (ppr mod)
, text "does not export"
, quotes (ppr occ_name) <> dot
]
| is_qualified
, null helpful_imports
, mods <- map fst interesting_imports
= hsep
[ text "Neither"
, quotedListWithNor (map ppr mods)
, text "exports"
, quotes (ppr occ_name) <> dot
]
| [(mod,imv)] <- helpful_imports_non_hiding
= fsep
[ text "Perhaps you want to add"
, quotes (ppr occ_name)
, text "to the import list"
, text "in the import of"
, quotes (ppr mod)
, parens (ppr (imv_span imv)) <> dot
]
| not (null helpful_imports_non_hiding)
= fsep
[ text "Perhaps you want to add"
, quotes (ppr occ_name)
, text "to one of these import lists:"
]
$$
nest 2 (vcat
[ quotes (ppr mod) <+> parens (ppr (imv_span imv))
| (mod,imv) <- helpful_imports_non_hiding
])
| [(mod,imv)] <- helpful_imports_hiding
= fsep
[ text "Perhaps you want to remove"
, quotes (ppr occ_name)
, text "from the explicit hiding list"
, text "in the import of"
, quotes (ppr mod)
, parens (ppr (imv_span imv)) <> dot
]
| not (null helpful_imports_hiding)
= fsep
[ text "Perhaps you want to remove"
, quotes (ppr occ_name)
, text "from the hiding clauses"
, text "in one of these imports:"
]
$$
nest 2 (vcat
[ quotes (ppr mod) <+> parens (ppr (imv_span imv))
| (mod,imv) <- helpful_imports_hiding
])
| otherwise
= Outputable.empty
where
is_qualified = isQual rdr_name
(mod_name, occ_name) = case rdr_name of
Unqual occ_name -> (Nothing, occ_name)
Qual mod_name occ_name -> (Just mod_name, occ_name)
_ -> error "importSuggestions: dead code"
-- What import statements provide "Mod" at all
-- or, if this is an unqualified name, are not qualified imports
interesting_imports = [ (mod, imp)
| (mod, mod_imports) <- moduleEnvToList (imp_mods imports)
, Just imp <- return $ pick mod_imports
]
-- We want to keep only one for each original module; preferably one with an
-- explicit import list (for no particularly good reason)
pick :: [ImportedModsVal] -> Maybe ImportedModsVal
pick = listToMaybe . sortBy (compare `on` prefer) . filter select
where select imv = case mod_name of Just name -> imv_name imv == name
Nothing -> not (imv_qualified imv)
prefer imv = (imv_is_hiding imv, imv_span imv)
-- Which of these would export a 'foo'
-- (all of these are restricted imports, because if they were not, we
-- wouldn't have an out-of-scope error in the first place)
helpful_imports = filter helpful interesting_imports
where helpful (_,imv)
= not . null $ lookupGlobalRdrEnv (imv_all_exports imv) occ_name
-- Which of these do that because of an explicit hiding list resp. an
-- explicit import list
(helpful_imports_hiding, helpful_imports_non_hiding)
= partition (imv_is_hiding . snd) helpful_imports
{-
************************************************************************
* *
\subsection{Free variable manipulation}
* *
************************************************************************
-}
-- A useful utility
addFvRn :: FreeVars -> RnM (thing, FreeVars) -> RnM (thing, FreeVars)
addFvRn fvs1 thing_inside = do { (res, fvs2) <- thing_inside
; return (res, fvs1 `plusFV` fvs2) }
mapFvRn :: (a -> RnM (b, FreeVars)) -> [a] -> RnM ([b], FreeVars)
mapFvRn f xs = do stuff <- mapM f xs
case unzip stuff of
(ys, fvs_s) -> return (ys, plusFVs fvs_s)
mapMaybeFvRn :: (a -> RnM (b, FreeVars)) -> Maybe a -> RnM (Maybe b, FreeVars)
mapMaybeFvRn _ Nothing = return (Nothing, emptyFVs)
mapMaybeFvRn f (Just x) = do { (y, fvs) <- f x; return (Just y, fvs) }
-- because some of the rename functions are CPSed:
-- maps the function across the list from left to right;
-- collects all the free vars into one set
mapFvRnCPS :: (a -> (b -> RnM c) -> RnM c)
-> [a] -> ([b] -> RnM c) -> RnM c
mapFvRnCPS _ [] cont = cont []
mapFvRnCPS f (x:xs) cont = f x $ \ x' ->
mapFvRnCPS f xs $ \ xs' ->
cont (x':xs')
{-
************************************************************************
* *
\subsection{Envt utility functions}
* *
************************************************************************
-}
warnUnusedTopBinds :: [GlobalRdrElt] -> RnM ()
warnUnusedTopBinds gres
= whenWOptM Opt_WarnUnusedTopBinds
$ do env <- getGblEnv
let isBoot = tcg_src env == HsBootFile
let noParent gre = case gre_par gre of
NoParent -> True
_ -> False
-- Don't warn about unused bindings with parents in
-- .hs-boot files, as you are sometimes required to give
-- unused bindings (trac #3449).
-- HOWEVER, in a signature file, you are never obligated to put a
-- definition in the main text. Thus, if you define something
-- and forget to export it, we really DO want to warn.
gres' = if isBoot then filter noParent gres
else gres
warnUnusedGREs gres'
warnUnusedLocalBinds, warnUnusedMatches, warnUnusedTypePatterns
:: [Name] -> FreeVars -> RnM ()
warnUnusedLocalBinds = check_unused Opt_WarnUnusedLocalBinds
warnUnusedMatches = check_unused Opt_WarnUnusedMatches
warnUnusedTypePatterns = check_unused Opt_WarnUnusedTypePatterns
check_unused :: WarningFlag -> [Name] -> FreeVars -> RnM ()
check_unused flag bound_names used_names
= whenWOptM flag (warnUnused flag (filterOut (`elemNameSet` used_names)
bound_names))
-------------------------
-- Helpers
warnUnusedGREs :: [GlobalRdrElt] -> RnM ()
warnUnusedGREs gres = mapM_ warnUnusedGRE gres
warnUnused :: WarningFlag -> [Name] -> RnM ()
warnUnused flag names = do
fld_env <- mkFieldEnv <$> getGlobalRdrEnv
mapM_ (warnUnused1 flag fld_env) names
warnUnused1 :: WarningFlag -> NameEnv (FieldLabelString, Name) -> Name -> RnM ()
warnUnused1 flag fld_env name
= when (reportable name occ) $
addUnusedWarning flag
occ (nameSrcSpan name)
(text "Defined but not used")
where
occ = case lookupNameEnv fld_env name of
Just (fl, _) -> mkVarOccFS fl
Nothing -> nameOccName name
warnUnusedGRE :: GlobalRdrElt -> RnM ()
warnUnusedGRE gre@(GRE { gre_name = name, gre_lcl = lcl, gre_imp = is })
| lcl = do fld_env <- mkFieldEnv <$> getGlobalRdrEnv
warnUnused1 Opt_WarnUnusedTopBinds fld_env name
| otherwise = when (reportable name occ) (mapM_ warn is)
where
occ = greOccName gre
warn spec = addUnusedWarning Opt_WarnUnusedTopBinds occ span msg
where
span = importSpecLoc spec
pp_mod = quotes (ppr (importSpecModule spec))
msg = text "Imported from" <+> pp_mod <+> ptext (sLit "but not used")
-- | Make a map from selector names to field labels and parent tycon
-- names, to be used when reporting unused record fields.
mkFieldEnv :: GlobalRdrEnv -> NameEnv (FieldLabelString, Name)
mkFieldEnv rdr_env = mkNameEnv [ (gre_name gre, (lbl, par_is (gre_par gre)))
| gres <- occEnvElts rdr_env
, gre <- gres
, Just lbl <- [greLabel gre]
]
-- | Should we report the fact that this 'Name' is unused? The
-- 'OccName' may differ from 'nameOccName' due to
-- DuplicateRecordFields.
reportable :: Name -> OccName -> Bool
reportable name occ
| isWiredInName name = False -- Don't report unused wired-in names
-- Otherwise we get a zillion warnings
-- from Data.Tuple
| otherwise = not (startsWithUnderscore occ)
addUnusedWarning :: WarningFlag -> OccName -> SrcSpan -> SDoc -> RnM ()
addUnusedWarning flag occ span msg
= addWarnAt (Reason flag) span $
sep [msg <> colon,
nest 2 $ pprNonVarNameSpace (occNameSpace occ)
<+> quotes (ppr occ)]
addNameClashErrRn :: RdrName -> [GlobalRdrElt] -> RnM ()
addNameClashErrRn rdr_name gres
| all isLocalGRE gres && not (all isRecFldGRE gres)
-- If there are two or more *local* defns, we'll have reported
= return () -- that already, and we don't want an error cascade
| otherwise
= addErr (vcat [text "Ambiguous occurrence" <+> quotes (ppr rdr_name),
text "It could refer to" <+> vcat (msg1 : msgs)])
where
(np1:nps) = gres
msg1 = ptext (sLit "either") <+> mk_ref np1
msgs = [text " or" <+> mk_ref np | np <- nps]
mk_ref gre = sep [nom <> comma, pprNameProvenance gre]
where nom = case gre_par gre of
FldParent { par_lbl = Just lbl } -> text "the field" <+> quotes (ppr lbl)
_ -> quotes (ppr (gre_name gre))
shadowedNameWarn :: OccName -> [SDoc] -> SDoc
shadowedNameWarn occ shadowed_locs
= sep [text "This binding for" <+> quotes (ppr occ)
<+> text "shadows the existing binding" <> plural shadowed_locs,
nest 2 (vcat shadowed_locs)]
perhapsForallMsg :: SDoc
perhapsForallMsg
= vcat [ text "Perhaps you intended to use ExplicitForAll or similar flag"
, text "to enable explicit-forall syntax: forall <tvs>. <type>"]
unknownSubordinateErr :: SDoc -> RdrName -> SDoc
unknownSubordinateErr doc op -- Doc is "method of class" or
-- "field of constructor"
= quotes (ppr op) <+> text "is not a (visible)" <+> doc
badOrigBinding :: RdrName -> SDoc
badOrigBinding name
= text "Illegal binding of built-in syntax:" <+> ppr (rdrNameOcc name)
-- The rdrNameOcc is because we don't want to print Prelude.(,)
dupNamesErr :: Outputable n => (n -> SrcSpan) -> [n] -> RnM ()
dupNamesErr get_loc names
= addErrAt big_loc $
vcat [text "Conflicting definitions for" <+> quotes (ppr (head names)),
locations]
where
locs = map get_loc names
big_loc = foldr1 combineSrcSpans locs
locations = text "Bound at:" <+> vcat (map ppr (sort locs))
kindSigErr :: Outputable a => a -> SDoc
kindSigErr thing
= hang (text "Illegal kind signature for" <+> quotes (ppr thing))
2 (text "Perhaps you intended to use KindSignatures")
badQualBndrErr :: RdrName -> SDoc
badQualBndrErr rdr_name
= text "Qualified name in binding position:" <+> ppr rdr_name
opDeclErr :: RdrName -> SDoc
opDeclErr n
= hang (text "Illegal declaration of a type or class operator" <+> quotes (ppr n))
2 (text "Use TypeOperators to declare operators in type and declarations")
checkTupSize :: Int -> RnM ()
checkTupSize tup_size
| tup_size <= mAX_TUPLE_SIZE
= return ()
| otherwise
= addErr (sep [text "A" <+> int tup_size <> ptext (sLit "-tuple is too large for GHC"),
nest 2 (parens (text "max size is" <+> int mAX_TUPLE_SIZE)),
nest 2 (text "Workaround: use nested tuples or define a data type")])
{-
************************************************************************
* *
\subsection{Contexts for renaming errors}
* *
************************************************************************
-}
-- AZ:TODO: Change these all to be Name instead of RdrName.
-- Merge TcType.UserTypeContext in to it.
data HsDocContext
= TypeSigCtx SDoc
| PatCtx
| SpecInstSigCtx
| DefaultDeclCtx
| ForeignDeclCtx (Located RdrName)
| DerivDeclCtx
| RuleCtx FastString
| TyDataCtx (Located RdrName)
| TySynCtx (Located RdrName)
| TyFamilyCtx (Located RdrName)
| FamPatCtx (Located RdrName) -- The patterns of a type/data family instance
| ConDeclCtx [Located Name]
| ClassDeclCtx (Located RdrName)
| ExprWithTySigCtx
| TypBrCtx
| HsTypeCtx
| GHCiCtx
| SpliceTypeCtx (LHsType RdrName)
| ClassInstanceCtx
| VectDeclCtx (Located RdrName)
| GenericCtx SDoc -- Maybe we want to use this more!
withHsDocContext :: HsDocContext -> SDoc -> SDoc
withHsDocContext ctxt doc = doc $$ inHsDocContext ctxt
inHsDocContext :: HsDocContext -> SDoc
inHsDocContext ctxt = text "In" <+> pprHsDocContext ctxt
pprHsDocContext :: HsDocContext -> SDoc
pprHsDocContext (GenericCtx doc) = doc
pprHsDocContext (TypeSigCtx doc) = text "the type signature for" <+> doc
pprHsDocContext PatCtx = text "a pattern type-signature"
pprHsDocContext SpecInstSigCtx = text "a SPECIALISE instance pragma"
pprHsDocContext DefaultDeclCtx = text "a `default' declaration"
pprHsDocContext DerivDeclCtx = text "a deriving declaration"
pprHsDocContext (RuleCtx name) = text "the transformation rule" <+> ftext name
pprHsDocContext (TyDataCtx tycon) = text "the data type declaration for" <+> quotes (ppr tycon)
pprHsDocContext (FamPatCtx tycon) = text "a type pattern of family instance for" <+> quotes (ppr tycon)
pprHsDocContext (TySynCtx name) = text "the declaration for type synonym" <+> quotes (ppr name)
pprHsDocContext (TyFamilyCtx name) = text "the declaration for type family" <+> quotes (ppr name)
pprHsDocContext (ClassDeclCtx name) = text "the declaration for class" <+> quotes (ppr name)
pprHsDocContext ExprWithTySigCtx = text "an expression type signature"
pprHsDocContext TypBrCtx = text "a Template-Haskell quoted type"
pprHsDocContext HsTypeCtx = text "a type argument"
pprHsDocContext GHCiCtx = text "GHCi input"
pprHsDocContext (SpliceTypeCtx hs_ty) = text "the spliced type" <+> quotes (ppr hs_ty)
pprHsDocContext ClassInstanceCtx = text "TcSplice.reifyInstances"
pprHsDocContext (ForeignDeclCtx name)
= text "the foreign declaration for" <+> quotes (ppr name)
pprHsDocContext (ConDeclCtx [name])
= text "the definition of data constructor" <+> quotes (ppr name)
pprHsDocContext (ConDeclCtx names)
= text "the definition of data constructors" <+> interpp'SP names
pprHsDocContext (VectDeclCtx tycon)
= text "the VECTORISE pragma for type constructor" <+> quotes (ppr tycon)
|
mettekou/ghc
|
compiler/rename/RnEnv.hs
|
bsd-3-clause
| 97,041 | 92 | 35 | 28,187 | 16,274 | 8,394 | 7,880 | 1,206 | 10 |
{-# LANGUAGE TemplateHaskell #-}
module Gitter.Types
( Gitter (..)
, ResourcePath
, Room (..)
, RoomId
, RoomUri
) where
import Data.Aeson.TH (SumEncoding (TaggedObject), contentsFieldName,
defaultOptions, deriveJSON, fieldLabelModifier,
sumEncoding, tagFieldName)
import Data.String.X (dropPrefix)
import Data.Text (Text)
type ResourcePath = [Text]
type UserName = Text
type RepoName = Text
type RoomId = Text
type RoomUri = Text
data Room
= ONETOONE UserName
| REPO UserName RepoName
deriving Show
deriveJSON
defaultOptions
{ sumEncoding =
TaggedObject{tagFieldName = "type", contentsFieldName = "uri"}
}
''Room
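-- Illustrative note (an assumption based on aeson's TaggedObject encoding, not
-- taken from this package's docs): with the options above, @REPO "alice" "repo"@
-- should encode roughly as @{"type":"REPO","uri":["alice","repo"]}@ and
-- @ONETOONE "alice"@ as @{"type":"ONETOONE","uri":"alice"}@.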
data Gitter = Gitter
{ gitterBaseUrl :: String
, gitterRoom :: Room
, gitterTokenFile :: FilePath
}
deriveJSON defaultOptions{fieldLabelModifier = dropPrefix "gitter"} ''Gitter
|
cblp/haskell-gitter
|
src/Gitter/Types.hs
|
bsd-3-clause
| 939 | 0 | 9 | 251 | 218 | 135 | 83 | 31 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Lens.Internal.Level
-- Copyright : (C) 2012-2016 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-- This module provides implementation details of the combinators in
-- "Control.Lens.Level", which provides for the breadth-first 'Control.Lens.Traversal.Traversal' of
-- an arbitrary 'Control.Lens.Traversal.Traversal'.
----------------------------------------------------------------------------
module Control.Lens.Internal.Level
(
-- * Levels
Level(..)
, Deepening(..), deepening
, Flows(..)
) where
import Prelude ()
import Control.Lens.Internal.Prelude
import Data.Functor.Apply
------------------------------------------------------------------------------
-- Levels
------------------------------------------------------------------------------
-- | This data type represents a path-compressed copy of one level of a source
-- data structure. We can safely use path-compression because we know the depth
-- of the tree.
--
-- Path compression is performed by viewing a 'Level' as a PATRICIA trie of the
-- paths into the structure to leaves at a given depth, similar in many ways
-- to a 'Data.IntMap.IntMap', but unlike a regular PATRICIA trie we do not need
-- to store the mask bits merely the depth of the fork.
--
-- One invariant of this structure is that underneath a 'Two' node you will not
-- find any 'Zero' nodes, so 'Zero' can only occur at the root.
data Level i a
= Two {-# UNPACK #-} !Word !(Level i a) !(Level i a)
| One i a
| Zero
deriving (Eq,Ord,Show,Read)
-- | Append a pair of 'Level' values to get a new 'Level' with path compression.
--
-- As the 'Level' type is user-visible, we do not expose this as an illegal
-- 'Semigroup' instance, and just use it directly in 'Deepening' as needed.
lappend :: Level i a -> Level i a -> Level i a
lappend Zero Zero = Zero
lappend Zero r@One{} = r
lappend l@One{} Zero = l
lappend Zero (Two n l r) = Two (n + 1) l r
lappend (Two n l r) Zero = Two (n + 1) l r
lappend l r = Two 0 l r
{-# INLINE lappend #-}
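-- An illustrative sketch of how 'lappend' compresses paths (not part of the
-- original module):
--
-- @
-- lappend (One 'a' 1) (One 'b' 2)              == Two 0 (One 'a' 1) (One 'b' 2)
-- lappend Zero (Two 0 (One 'a' 1) (One 'b' 2)) == Two 1 (One 'a' 1) (One 'b' 2)
-- @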
instance Functor (Level i) where
fmap f = go where
go (Two n l r) = Two n (go l) (go r)
go (One i a) = One i (f a)
go Zero = Zero
{-# INLINE fmap #-}
instance Foldable (Level i) where
foldMap f = go where
go (Two _ l r) = go l `mappend` go r
go (One _ a) = f a
go Zero = mempty
{-# INLINE foldMap #-}
instance Traversable (Level i) where
traverse f = go where
go (Two n l r) = Two n <$> go l <*> go r
go (One i a) = One i <$> f a
go Zero = pure Zero
{-# INLINE traverse #-}
------------------------------------------------------------------------------
-- Generating Levels
------------------------------------------------------------------------------
-- | This is an illegal 'Monoid' used to construct a single 'Level'.
newtype Deepening i a = Deepening { runDeepening :: forall r. Int -> (Level i a -> Bool -> r) -> r }
instance Semigroup (Deepening i a) where
Deepening l <> Deepening r = Deepening $ \ n k -> case n of
0 -> k Zero True
_ -> let n' = n - 1 in l n' $ \x a -> r n' $ \y b -> k (lappend x y) (a || b)
{-# INLINE (<>) #-}
-- | This is an illegal 'Monoid'.
instance Monoid (Deepening i a) where
mempty = Deepening $ \ _ k -> k Zero False
{-# INLINE mempty #-}
mappend (Deepening l) (Deepening r) = Deepening $ \ n k -> case n of
0 -> k Zero True
_ -> let n' = n - 1 in l n' $ \x a -> r n' $ \y b -> k (lappend x y) (a || b)
{-# INLINE mappend #-}
-- | Generate the leaf of a given 'Deepening' based on whether or not we're at the correct depth.
deepening :: i -> a -> Deepening i a
deepening i a = Deepening $ \n k -> k (if n == 0 then One i a else Zero) False
{-# INLINE deepening #-}
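-- Illustrative only: @runDeepening (deepening 'x' 1) 0 (,)@ gives
-- @(One 'x' 1, False)@, while at any depth other than 0 it gives @(Zero, False)@.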
------------------------------------------------------------------------------
-- Reassembling Levels
------------------------------------------------------------------------------
-- | This is an illegal 'Applicative' used to replace the contents of a list of consecutive 'Level' values
-- representing each layer of a structure into the original shape that they were derived from.
--
-- Attempting to 'Flow' something back into a shape other than the one it was taken from will fail.
newtype Flows i b a = Flows { runFlows :: [Level i b] -> a }
instance Functor (Flows i b) where
fmap f (Flows g) = Flows (f . g)
{-# INLINE fmap #-}
-- | Walk down one constructor in a 'Level', veering left.
triml :: Level i b -> Level i b
triml (Two 0 l _) = l
triml (Two n l r) = Two (n - 1) l r
triml x = x
{-# INLINE triml #-}
-- | Walk down one constructor in a 'Level', veering right.
trimr :: Level i b -> Level i b
trimr (Two 0 _ r) = r
trimr (Two n l r) = Two (n - 1) l r
trimr x = x
{-# INLINE trimr #-}
instance Apply (Flows i b) where
Flows mf <.> Flows ma = Flows $ \ xss -> case xss of
[] -> mf [] (ma [])
(_:xs) -> mf (triml <$> xs) $ ma (trimr <$> xs)
{-# INLINE (<.>) #-}
-- | This is an illegal 'Applicative'.
instance Applicative (Flows i b) where
pure a = Flows (const a)
{-# INLINE pure #-}
Flows mf <*> Flows ma = Flows $ \ xss -> case xss of
[] -> mf [] (ma [])
(_:xs) -> mf (triml <$> xs) $ ma (trimr <$> xs)
{-# INLINE (<*>) #-}
|
ddssff/lens
|
src/Control/Lens/Internal/Level.hs
|
bsd-3-clause
| 5,767 | 0 | 19 | 1,313 | 1,459 | 775 | 684 | 92 | 2 |
-- A right fold with an explicit base case; despite the name, this mirrors
-- Prelude's foldr rather than foldr1 (which takes no seed value).
foldr1 f z [] = z
foldr1 f z (x:xs) = f x (foldr1 f z xs)
|
abhinav-mehta/CipherSolver
|
src/other/Foldr1.hs
|
bsd-3-clause
| 60 | 4 | 7 | 19 | 53 | 26 | 27 | 2 | 1 |
module PackageTests.Tests(tests) where
import PackageTests.PackageTester
import qualified PackageTests.BenchmarkStanza.Check
import qualified PackageTests.TestStanza.Check
import qualified PackageTests.DeterministicAr.Check
import qualified PackageTests.TestSuiteTests.ExeV10.Check
import Control.Monad
import Data.Version
import Test.Tasty (mkTimeout, localOption)
import Test.Tasty.HUnit (testCase)
tests :: SuiteConfig -> TestTreeM ()
tests config = do
---------------------------------------------------------------------
-- * External tests
-- Test that Cabal parses 'benchmark' sections correctly
tc "BenchmarkStanza" PackageTests.BenchmarkStanza.Check.suite
-- Test that Cabal parses 'test' sections correctly
tc "TestStanza" PackageTests.TestStanza.Check.suite
-- Test that Cabal deterministically generates object archives
tc "DeterministicAr" PackageTests.DeterministicAr.Check.suite
---------------------------------------------------------------------
-- * Test suite tests
groupTests "TestSuiteTests" $ do
-- Test exitcode-stdio-1.0 test suites (and HPC)
groupTests "ExeV10"
(PackageTests.TestSuiteTests.ExeV10.Check.tests config)
-- Test detailed-0.9 test suites
groupTests "LibV09" $
let
tcs :: FilePath -> TestM a -> TestTreeM ()
tcs name m
= testTree' $ testCase name (runTestM config
"TestSuiteTests/LibV09" (Just name) m)
in do
-- Test if detailed-0.9 builds correctly
tcs "Build" $ cabal_build ["--enable-tests"]
-- Tests for #2489, stdio deadlock
mapTestTrees (localOption (mkTimeout $ 10 ^ (8 :: Int)))
. tcs "Deadlock" $ do
cabal_build ["--enable-tests"]
shouldFail $ cabal "test" []
---------------------------------------------------------------------
-- * Inline tests
-- Test if exitcode-stdio-1.0 benchmark builds correctly
tc "BenchmarkExeV10" $ cabal_build ["--enable-benchmarks"]
-- Test --benchmark-option(s) flags on ./Setup bench
tc "BenchmarkOptions" $ do
cabal_build ["--enable-benchmarks"]
cabal "bench" [ "--benchmark-options=1 2 3" ]
cabal "bench" [ "--benchmark-option=1"
, "--benchmark-option=2"
, "--benchmark-option=3"
]
-- Test --test-option(s) flags on ./Setup test
tc "TestOptions" $ do
cabal_build ["--enable-tests"]
cabal "test" ["--test-options=1 2 3"]
cabal "test" [ "--test-option=1"
, "--test-option=2"
, "--test-option=3"
]
-- Test attempt to have executable depend on internal
-- library, but cabal-version is too old.
tc "BuildDeps/InternalLibrary0" $ do
r <- shouldFail $ cabal' "configure" []
-- Should tell you how to enable the desired behavior
let sb = "library which is defined within the same package."
assertOutputContains sb r
-- Test executable depends on internal library.
tc "BuildDeps/InternalLibrary1" $ cabal_build []
-- Test that internal library is preferred to an installed one
-- with the same name and version
tc "BuildDeps/InternalLibrary2" $ internal_lib_test "internal"
-- Test that internal library is preferred to an installed one
-- with the same name and LATER version
tc "BuildDeps/InternalLibrary3" $ internal_lib_test "internal"
-- Test that an explicit dependency constraint which doesn't
-- match the internal library causes us to use external library
tc "BuildDeps/InternalLibrary4" $ internal_lib_test "installed"
-- Test "old build-dep behavior", where we should get the
-- same package dependencies on all targets if cabal-version
-- is sufficiently old.
tc "BuildDeps/SameDepsAllRound" $ cabal_build []
-- Test "new build-dep behavior", where each target gets
-- separate dependencies. This tests that an executable
-- dep does not leak into the library.
tc "BuildDeps/TargetSpecificDeps1" $ do
cabal "configure" []
r <- shouldFail $ cabal' "build" []
assertRegex "error should be in MyLibrary.hs" "^MyLibrary.hs:" r
assertRegex
"error should be \"Could not find module `Text\\.PrettyPrint\""
"Could not find module.*Text\\.PrettyPrint" r
-- This is a control on TargetSpecificDeps1; it should
-- succeed.
tc "BuildDeps/TargetSpecificDeps2" $ cabal_build []
-- Test "new build-dep behavior", where each target gets
-- separate dependencies. This tests that a library
-- dep does not leak into the executable.
tc "BuildDeps/TargetSpecificDeps3" $ do
cabal "configure" []
r <- shouldFail $ cabal' "build" []
assertRegex "error should be in lemon.hs" "^lemon.hs:" r
assertRegex
"error should be \"Could not find module `Text\\.PrettyPrint\""
"Could not find module.*Text\\.PrettyPrint" r
-- Test that Paths module is generated and available for executables.
tc "PathsModule/Executable" $ cabal_build []
-- Test that Paths module is generated and available for libraries.
tc "PathsModule/Library" $ cabal_build []
-- Check that preprocessors (hsc2hs) are run
tc "PreProcess" $ cabal_build ["--enable-tests", "--enable-benchmarks"]
-- Check that preprocessors that generate extra C sources are handled
tc "PreProcessExtraSources" $ cabal_build ["--enable-tests",
"--enable-benchmarks"]
-- Test building a vanilla library/executable which uses Template Haskell
tc "TemplateHaskell/vanilla" $ cabal_build []
-- Test building a profiled library/executable which uses Template Haskell
-- (Cabal has to build the non-profiled version first)
tc "TemplateHaskell/profiling" $ cabal_build ["--enable-library-profiling",
"--enable-profiling"]
-- Test building a dynamic library/executable which uses Template
-- Haskell
testWhen (hasSharedLibraries config) $
tc "TemplateHaskell/dynamic" $ cabal_build ["--enable-shared",
"--enable-executable-dynamic"]
-- Test building an executable whose main() function is defined in a C
-- file
tc "CMain" $ cabal_build []
-- Test build when the library is empty, for #1241
tc "EmptyLib" $
withPackage "empty" $ cabal_build []
-- Test that "./Setup haddock" works correctly
tc "Haddock" $ do
dist_dir <- distDir
let haddocksDir = dist_dir </> "doc" </> "html" </> "Haddock"
cabal "configure" []
cabal "haddock" []
let docFiles
= map (haddocksDir </>)
["CPP.html", "Literate.html", "NoCPP.html", "Simple.html"]
mapM_ (assertFindInFile "For hiding needles.") docFiles
-- Test that Haddock with a newline in synopsis works correctly, #3004
tc "HaddockNewline" $ do
cabal "configure" []
cabal "haddock" []
-- Test that Cabal properly orders GHC flags passed to GHC (when
-- there are multiple ghc-options fields.)
tc "OrderFlags" $ cabal_build []
-- Test that reexported modules build correctly
-- TODO: should also test that they import OK!
tc "ReexportedModules" $ do
whenGhcVersion (>= Version [7,9] []) $ cabal_build []
-- Test that Cabal computes different IPIDs when the source changes.
tc "UniqueIPID" . withPackageDb $ do
withPackage "P1" $ cabal "configure" []
withPackage "P2" $ cabal "configure" []
withPackage "P1" $ cabal "build" []
withPackage "P1" $ cabal "build" [] -- rebuild should work
r1 <- withPackage "P1" $ cabal' "register" ["--print-ipid", "--inplace"]
withPackage "P2" $ cabal "build" []
r2 <- withPackage "P2" $ cabal' "register" ["--print-ipid", "--inplace"]
let exIPID s = takeWhile (/= '\n') $
head . filter (isPrefixOf $ "UniqueIPID-0.1-") $ (tails s)
when ((exIPID $ resultOutput r1) == (exIPID $ resultOutput r2)) $
assertFailure $ "cabal has not calculated different Installed " ++
"package ID when source is changed."
tc "DuplicateModuleName" $ do
cabal_build ["--enable-tests"]
r1 <- shouldFail $ cabal' "test" ["foo"]
assertOutputContains "test B" r1
assertOutputContains "test A" r1
r2 <- shouldFail $ cabal' "test" ["foo2"]
assertOutputContains "test C" r2
assertOutputContains "test A" r2
tc "TestNameCollision" $ do
withPackageDb $ do
withPackage "parent" $ cabal_install []
withPackage "child" $ do
cabal_build ["--enable-tests"]
cabal "test" []
-- Test that '--allow-newer' works via the 'Setup.hs configure' interface.
tc "AllowNewer" $ do
shouldFail $ cabal "configure" []
cabal "configure" ["--allow-newer"]
shouldFail $ cabal "configure" ["--allow-newer=baz,quux"]
cabal "configure" ["--allow-newer=base", "--allow-newer=baz,quux"]
cabal "configure" ["--allow-newer=bar", "--allow-newer=base,baz"
,"--allow-newer=quux"]
shouldFail $ cabal "configure" ["--enable-tests"]
cabal "configure" ["--enable-tests", "--allow-newer"]
shouldFail $ cabal "configure" ["--enable-benchmarks"]
cabal "configure" ["--enable-benchmarks", "--allow-newer"]
shouldFail $ cabal "configure" ["--enable-benchmarks", "--enable-tests"]
cabal "configure" ["--enable-benchmarks", "--enable-tests"
,"--allow-newer"]
shouldFail $ cabal "configure" ["--allow-newer=Foo:base"]
shouldFail $ cabal "configure" ["--allow-newer=Foo:base"
,"--enable-tests", "--enable-benchmarks"]
cabal "configure" ["--allow-newer=AllowNewer:base"]
cabal "configure" ["--allow-newer=AllowNewer:base"
,"--allow-newer=Foo:base"]
cabal "configure" ["--allow-newer=AllowNewer:base"
,"--allow-newer=Foo:base"
,"--enable-tests", "--enable-benchmarks"]
-- Test that Cabal can choose flags to disable building a component when that
-- component's dependencies are unavailable. The build should succeed without
-- requiring the component's dependencies or imports.
tc "BuildableField" $ do
r <- cabal' "configure" ["-v"]
assertOutputContains "Flags chosen: build-exe=False" r
cabal "build" []
-- TODO: Enable these tests on Windows
unlessWindows $ do
tc "GhcPkgGuess/SameDirectory" $ ghc_pkg_guess "ghc"
tc "GhcPkgGuess/SameDirectoryVersion" $ ghc_pkg_guess "ghc-7.10"
tc "GhcPkgGuess/SameDirectoryGhcVersion" $ ghc_pkg_guess "ghc-7.10"
unlessWindows $ do
tc "GhcPkgGuess/Symlink" $ ghc_pkg_guess "ghc"
tc "GhcPkgGuess/SymlinkVersion" $ ghc_pkg_guess "ghc"
tc "GhcPkgGuess/SymlinkGhcVersion" $ ghc_pkg_guess "ghc"
where
ghc_pkg_guess bin_name = do
cwd <- packageDir
with_ghc <- getWithGhcPath
r <- withEnv [("WITH_GHC", Just with_ghc)]
. shouldFail $ cabal' "configure" ["-w", cwd </> bin_name]
assertOutputContains "is version 9999999" r
return ()
-- Shared test function for BuildDeps/InternalLibrary* tests.
internal_lib_test expect = withPackageDb $ do
withPackage "to-install" $ cabal_install []
cabal_build []
r <- runExe' "lemon" []
assertEqual
("executable should have linked with the " ++ expect ++ " library")
("foo foo myLibFunc " ++ expect)
(concatOutput (resultOutput r))
assertRegex :: String -> String -> Result -> TestM ()
assertRegex msg regex r = let out = resultOutput r
in assertBool (msg ++ ",\nactual output:\n" ++ out)
(out =~ regex)
tc :: FilePath -> TestM a -> TestTreeM ()
tc name = testTree config name Nothing
|
garetxe/cabal
|
Cabal/tests/PackageTests/Tests.hs
|
bsd-3-clause
| 12,005 | 0 | 22 | 2,973 | 2,159 | 1,026 | 1,133 | 179 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Text.Digestive.Heist.Extras.List
( dfInputListStatic
, dfInputListCustom
, dfInputListSpan
) where
import Data.Map.Syntax ((##))
import qualified Data.Text as T
import Data.Text (Text)
import Heist
import Heist.Interpreted
import qualified Text.XmlHtml as X
import Text.Digestive.Form.List
import Text.Digestive.View
import Text.Digestive.Heist.Extras.Conditional (dfIfDisabled, dfIfEnabled)
import Text.Digestive.Heist.Extras.Internal.Attribute (getRefAttributes, appendAttr)
import Text.Digestive.Heist.Extras.Internal.View (disableView)
----------------------------------------------------------------------
-- this is an extremely condensed version of dfInputList that only generates the list items,
-- does not generate the indices input element or additional markup. it will also remove
-- the child nodes if the list is empty
dfInputListStatic :: Monad m => (View Text -> Splices (Splice m)) -> View Text -> Splice m
dfInputListStatic splices view = do
(ref, _) <- getRefAttributes Nothing
let
items = listSubViews ref view
case items of
[] -> return []
_ -> runChildrenWith $ "dfListItem" ## mapSplices (runChildrenWith . splices) items
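-- A hedged usage sketch (the tag name under which this splice is bound and the
-- "items" ref are assumptions, not taken from the library's docs):
--
-- > <dfInputListStatic ref="items">
-- >   <ul><dfListItem><li>...</li></dfListItem></ul>
-- > </dfInputListStatic>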
-- this is a variation on the dfInputList splice found in Text.Digestive.Heist
-- that does not generate any extra markup. Instead, multiple splices and
-- attribute splices are available for recreating it using the exact markup
-- you want.
--
-- If you need to recreate the add/remove controls attributes,
-- use the dfListPath and dfListItemPath splices.
{-
Attribute Splices:
listAttrs (intended for the list's container element; eg. div, fieldset)
Splices:
indices
dfListItem
Attribute Splices:
wrapperAttrs (intended for container elements; eg. li, tr)
itemAttrs (intended for form elements; eg. input, fieldset, textarea)
isDisabled
isHidden
Splices
dfListItemIndex (contains the index value for the list item; eg. 0)
dfListItemPath (contains the path to the list item; eg. form.list_name.0)
dfListItemType (indicates the item type; eg. inputListTemplate or inputListItem)
dfIfInputListItem (show content if it is an inputListItem)
dfIfInputListTemplate (show content if it is an inputListTemplate)
dfEditEnabled (similar to dfIfEnabled, but is always enabled for templates)
dfEditDisabled (similar to dfIfDisabled, but is always disabled for templates)
dfListPath (contains the path to the list; eg. form.list_name)
-}
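{- A hedged template sketch for dfInputListCustom (the tag names and the "items"
   ref are illustrative; only the splices documented above are relied upon):
<dfInputListCustom ref="items"><div listAttrs>
	<indices/>
	<ol>
		<dfListItem><li wrapperAttrs>
			Item <dfListItemIndex/> at <dfListItemPath/>
		</li></dfListItem>
	</ol>
</div></dfInputListCustom>
-}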
dfInputListCustom :: Monad m => (View Text -> Splices (Splice m)) -> View Text -> Splice m
dfInputListCustom splices view = do
(ref, _) <- getRefAttributes Nothing
let
listRef = absoluteRef ref view
listAttrs _ = return
[ ("id", listRef)
, ("class", "inputList")
]
items = listSubViews ref view
attrSplices = "listAttrs" ## listAttrs
localHS (bindAttributeSplices attrSplices) $ runChildrenWith $ do
"indices" ## indicesSplice listRef items
"dfListItem" ## onlyListItemsSplice splices (listTemplateView ref view) items
"dfListPath" ## return [X.TextNode listRef]
-- A variation on dfInputListCustom that expects to work with a list of forms
-- that contains a list of forms with the intent of having the outer list in
-- a table that spans the inner list. Note that it requires 2 ref attributes.
-- Also note that you don't want to use a dfInputList* splice inside of it for
-- the inner list, that's automatically handled for you. The inner list cannot
-- be dynamic (this is mostly due to a limitation with the JavaScript library).
{-
<dfInputListSpan group="outerlistref" ref="innerlistref"><div listAttrs>
<indices/><!-- outer list indices -->
<table>
<thead>
<th>Outer list content</th>
<th>Inner list content</th>
<th>Inner list indices</th>
</thead>
<dfListGroup><tbody wrapperAttrs>
<dfListItem><tr>
<dfGroupItem><td groupspan><!-- outer list content -->
This cell spans all the rows
</td></dfGroupItem>
<td>This cell exists in each row</td><!-- inner list content -->
<td><indices/></td><!-- inner list indices -->
</tr></dfListItem>
</tbody></dfListGroup>
</table>
</div></dfInputListSpan>
-}
dfInputListSpan :: Monad m => (View Text -> Splices (Splice m)) -> View Text -> Splice m
dfInputListSpan splices view = do
(groupRef, _) <- getRefAttributes $ Just "group"
(itemRef, _) <- getRefAttributes Nothing
let
groupListRef = absoluteRef groupRef view
listAttrs _ = return
[ ("id", groupListRef)
, ("class", "inputList inputGroup")
]
groupItems = listSubViews groupRef view
-- this splice is for the individual items in groupItems
groupItemSplices v = do
let
listRef = absoluteRef itemRef v
items = listSubViews itemRef v
totalItemsT = T.pack $ show $ length items
headAttrSplices = "groupspan" ## const $ return [("rowspan", totalItemsT)]
headSplice = localHS (bindAttributeSplices headAttrSplices) $ runChildrenWith $ do
splices v
"total_items" ## return [X.TextNode totalItemsT]
tailSplice = return []
splice s v' = runChildrenWith $ do
splices v'
"dfGroupItem" ## s
itemsSplice = case items of
(x:xs) -> do
x' <- splice headSplice x
xs' <- mapSplices (splice tailSplice) xs
return $ x' ++ xs'
_ -> mapSplices (splice headSplice) items
--splices v
"indices" ## indicesSplice listRef items
"dfListItem" ## itemsSplice
"dfListPath" ## return [X.TextNode listRef]
attrSplices = "listAttrs" ## listAttrs
localHS (bindAttributeSplices attrSplices) $ runChildrenWith $ do
"indices" ## indicesSplice groupListRef groupItems
"dfListGroup" ## onlyListItemsSplice groupItemSplices (listTemplateView groupRef view) groupItems
"dfListGroupPath" ## return [X.TextNode groupListRef]
{----------------------------------------------------------------------------------------------------{
| Common Helper Functions
}----------------------------------------------------------------------------------------------------}
indicesSplice :: Monad m => Text -> [View v] -> Splice m
indicesSplice listRef items =
return $ [ X.Element "input"
[ ("type", "hidden")
, ("name", T.intercalate "." [listRef, indicesRef])
, ("value", T.intercalate "," $ map
(last . ("0":) . viewContext) items)
] []
]
-- this splice looks for an "only" attribute that will let you pick
-- between only the template or only the data; omitting the attribute
-- will display both
onlyListItemsSplice :: Monad m => (View Text -> Splices (Splice m)) -> View Text -> [View Text] -> Splice m
onlyListItemsSplice splices template items = do
node <- getParamNode
let
listTemplate = listItemSplice splices True $ disableView template
listItems = mapSplices (listItemSplice splices False) items
case X.getAttribute "only" node of
Just "template" -> listTemplate
Just _ -> listItems
Nothing -> do
t <- listTemplate
xs <- listItems
return $ t ++ xs
listTemplateView :: Text -> View Text -> View Text
listTemplateView ref view = makeListSubView ref (-1) view
listItemSplice :: Monad m => (View Text -> Splices (Splice m)) -> Bool -> View Text -> Splice m
listItemSplice splices isTemplate v = localHS (bindAttributeSplices (listItemAttrs isTemplate v)) $ runChildrenWith $ do
splices v
"dfListItemIndex" ## return [X.TextNode $ last $ T.split (== '.') $ absoluteRef "" v]
"dfListItemPath" ## return [X.TextNode $ absoluteRef "" v]
"dfListItemType" ## return [X.TextNode $ ifElseTemplate "inputListTemplate" "inputListItem"]
"dfIfInputListItem" ## ifElseTemplate (return []) runChildren
"dfIfInputListTemplate" ## ifElseTemplate runChildren (return [])
"dfEditEnabled" ## ifElseTemplate runChildren $ dfIfEnabled v
"dfEditDisabled" ## ifElseTemplate (return []) $ dfIfDisabled v
where
ifElseTemplate forTemplate forListItem = if isTemplate then forTemplate else forListItem
--------------------------------------------------------------------- | Attribute Splices
listItemAttrs :: Monad m => Bool -> View Text -> Splices (AttrSplice m)
listItemAttrs isTemplate v = do
let
itemRef = absoluteRef "" v
"wrapperAttrs" ## \_ -> return $ appendAttr isTemplate ("style", "display: none")
[ ("id", itemRef)
, ("class", T.append itemRef $ if isTemplate then ".inputListTemplate inputListTemplate" else ".inputListItem inputListItem")
]
"itemAttrs" ## \_ -> return $ case isTemplate of
True ->
[ ("style", "display: none")
, ("disabled", "disabled")
]
False -> []
"isDisabled" ## const (return $ appendAttr isTemplate ("disabled", "disabled") [])
"isHidden" ## const (return $ appendAttr isTemplate ("style", "display: none") [])
|
cimmanon/digestive-functors-heist-extras
|
src/Text/Digestive/Heist/Extras/List.hs
|
bsd-3-clause
| 8,712 | 30 | 23 | 1,545 | 1,882 | 954 | 928 | 123 | 3 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
module Numeric.LinearAlgebra.Arnoldi
( Arpack, eig
-- * Options
, Options(..), Which(..)
-- * Exceptions
, MaxIterations(..)
, NoShifts(..)
, Reallocate(..)
, XYAUPD(..)
, XYEUPD(..)
, Unimplemented(..)
) where
import Data.Vector.Storable (Vector)
import Data.Vector.Storable.Mutable (IOVector)
import Numeric.LinearAlgebra (Matrix)
import System.IO.Unsafe (unsafePerformIO)
import Arpack.Exceptions
import Arpack.Foreign
import Arpack.Options
eig :: Arpack t => Options t -> Int -> (IOVector t -> IOVector t -> IO ())
-> (Vector t, Matrix t)
eig !options !dim !multiply = unsafePerformIO (arpack options dim multiply)
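-- A hedged usage sketch (not from the package docs; which callback argument is
-- the input and which is the output is an assumption here):
--
-- > (values, vectors) = eig opts n $ \x y ->
-- >   ...  -- write the matrix-vector product A * x into y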
|
ttuegel/arpack
|
src/Numeric/LinearAlgebra/Arnoldi.hs
|
bsd-3-clause
| 834 | 0 | 13 | 184 | 225 | 134 | 91 | 23 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
module Fragment.IsoRec.Rules.Kind.Infer.SyntaxDirected (
) where
|
dalaing/type-systems
|
src/Fragment/IsoRec/Rules/Kind/Infer/SyntaxDirected.hs
|
bsd-3-clause
| 220 | 0 | 3 | 36 | 14 | 11 | 3 | 1 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Arbitrary instances, and wrappers around types from Ray.Types,
-- for more useful Arbitrary instances.
module Types where
import qualified Data.Vector as V
import Ray.Imports
import Ray.Types
import Test.Tasty.QuickCheck as QC
import System.Random (Random)
nonZero :: (Num a, Ord a, Arbitrary a) => Gen a
nonZero = getNonZero <$> arbitrary
instance (Num a, Ord a, Arbitrary a) => Arbitrary (V2 a) where
arbitrary = V2 <$> arbitrary <*> arbitrary
instance (Num a, Ord a, Arbitrary a) => Arbitrary (V3 a) where
arbitrary = V3 <$> nonZero <*> nonZero <*> nonZero
instance (Num a, Ord a, Arbitrary a) => Arbitrary (V4 a) where
arbitrary = V4 <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
instance (Num a, Ord a, Arbitrary a, Arbitrary (v a)) => Arbitrary (Point v a)
where
arbitrary = P <$> arbitrary
instance Arbitrary Ray where
arbitrary = mkRay <$> arbitrary <*> (getNonZero <$> arbitrary) <*> arbitrary
data Sphere = SSphere Shape
deriving (Show)
getSphere :: Sphere -> Shape
getSphere (SSphere s) = s
instance Arbitrary Sphere where
arbitrary = SSphere <$> (Sphere <$> arbitrary <*> arbitrary)
data Plane = SPlane Shape
deriving (Show)
getPlane :: Plane -> Shape
getPlane (SPlane s) = s
instance Arbitrary Plane where
arbitrary = SPlane <$> (Plane <$> arbitrary <*> arbitrary)
instance Arbitrary Shape where
arbitrary = oneof [ getSphere <$> arbitrary, getPlane <$> arbitrary ]
-- | A Mesh with a single random triangle.
data Tri = STri Shape
deriving (Show)
getTri :: Tri -> Shape
getTri (STri s) = s
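-- | Wrap a single triangle in a mesh: the three points become the vertex
-- buffer and the index triple @V3 0 1 2@ refers back to exactly those
-- vertices, so the resulting 'Shape' contains just that one triangle.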
oneTriangleMesh :: Triangle -> Shape
oneTriangleMesh t = simpleMesh (V.fromList . toList $ t) (V.singleton $ V3 0 1 2)
instance Arbitrary Tri where
arbitrary = STri . oneTriangleMesh <$> arbitrary where
-- ps = V.fromList <$> QC.vector 3
-- vis = pure . V.singleton $ V3 0 1 2
-- no = pure Nothing
-- | A single triangle.
type Triangle = V3 (P3D)
firstTriangle :: Tri -> Triangle
firstTriangle (STri (Mesh ps vis _ _ _)) = (ps V.!) <$> (V.head vis)
firstTriangle _ = error "firstTriangle: the Tri wrapper does not contain a single-triangle mesh"
normalToTriangle :: Triangle -> V3D
normalToTriangle (V3 p1 p2 p3) = e1 `cross` e2 where
e1 = p2 .-. p1
e2 = p3 .-. p1
-- | The closed unit interval
unitInterval :: (Num a, Random a) => Gen a
unitInterval = choose (0,1)
newtype Barycentric a = Barycentric (V3 a)
deriving (Show)
getBary :: Barycentric a -> V3 a
getBary (Barycentric b) = b
instance (Num a, Random a) => Arbitrary (Barycentric a) where
arbitrary = do
x <- unitInterval
y <- choose (0, 1 - x)
let z = 1 - x - y
return . Barycentric $ V3 x y z
-- Barycentric <$> (V3 <$> unitInterval <*> unitInterval <*> unitInterval)
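-- | Combine three points using the given barycentric weights.  For example,
-- equal weights recover the centroid: @barycentric ps (Barycentric (V3 (1/3) (1/3) (1/3)))@
-- evaluates to @(p1 + p2 + p3) / 3@ when @ps = V3 p1 p2 p3@.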
barycentric :: (Functor f, Num (f a), Num a) => V3 (f a) -> Barycentric a -> f a
barycentric ps (Barycentric coords) = sum weighted where
weighted = (*^) <$> coords <*> ps
newtype OutOfGamut a = OutOfGamut (V3 a)
deriving (Show)
getOoG :: OutOfGamut a -> V3 a
getOoG (OutOfGamut b) = b
instance (Num a, Random a, Arbitrary a, Ord a) => Arbitrary (OutOfGamut a) where
arbitrary = OutOfGamut <$> arbitrary `suchThat` \u ->
maximum u > 1
|
bergey/panther
|
tests/Types.hs
|
bsd-3-clause
| 3,631 | 0 | 12 | 909 | 1,140 | 610 | 530 | 76 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module KnownCodes where
import Data.Word
import qualified Data.Vector.Storable as V
import BakedVector
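-- | Look up the raw code for a logical code.  The lookup uses 'V.!?', so it
-- returns 'Nothing' for any code at or beyond the length of 'knownRawCodes'.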
code2RawCode :: Word16 -> Maybe Word16
code2RawCode code = knownRawCodes V.!? fromIntegral code
firstObjectCode :: Word16
firstObjectCode = 1000
lastObjectCode :: Word16
-- Empirically, the pen does not recognize OIDs >= 15000:
lastObjectCode = 14999
-- Last object code according to the known raw codes:
-- lastObjectCode = fromIntegral $ V.length knownRawCodes - 1
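-- Illustrative helper (hypothetical; not referenced elsewhere in this
-- module): a code counts as an object code when it lies in the inclusive
-- range bounded by 'firstObjectCode' and 'lastObjectCode' above.
isObjectCode :: Word16 -> Bool
isObjectCode c = c >= firstObjectCode && c <= lastObjectCode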
knownRawCodes :: V.Vector Word16
knownRawCodes =
[word16Vector|
4
, 5
, 6
, 7
, 12
, 15
, 20
, 21
, 22
, 23
, 28
, 36
, 37
, 38
, 39
, 60
, 68
, 69
, 84
, 85
, 100
, 101
, 196
, 197
, 198
, 199
, 204
, 207
, 212
, 213
, 214
, 215
, 223
, 228
, 229
, 230
, 231
, 236
, 252
, 256
, 257
, 259
, 272
, 273
, 275
, 320
, 321
, 323
, 336
, 337
, 339
, 384
, 385
, 386
, 387
, 400
, 401
, 402
, 403
, 449
, 450
, 451
, 465
, 466
, 467
, 768
, 769
, 771
, 776
, 777
, 778
, 779
, 784
, 792
, 793
, 794
, 795
, 801
, 808
, 809
, 816
, 817
, 962
, 963
, 969
, 970
, 971
, 979
, 985
, 986
, 987
, 994
, 1001
, 1010
, 1011
, 1280
, 1297
, 1299
, 1305
, 1329
, 1331
, 1345
, 1347
, 1360
, 1368
, 1370
, 1392
, 1408
, 1410
, 1425
, 1427
, 1433
, 1435
, 1457
, 1459
, 1473
, 1475
, 1490
, 1498
, 1522
, 1542
, 1548
, 1549
, 1551
, 1565
, 1567
, 1596
, 1597
, 1604
, 1605
, 1606
, 1607
, 1612
, 1613
, 1615
, 1620
, 1622
, 1628
, 1660
, 1661
, 1670
, 1676
, 1677
, 1678
, 1679
, 1693
, 1695
, 1724
, 1725
, 1734
, 1740
, 1741
, 1742
, 1743
, 1750
, 1756
, 1758
, 1788
, 1789
, 1800
, 1802
, 1817
, 1819
, 1841
, 1993
, 1995
, 2010
, 2034
, 2100
, 2102
, 2108
, 2165
, 2293
, 2295
, 2304
, 2305
, 2307
, 2320
, 2330
, 2337
, 2339
, 2345
, 2352
, 2353
, 2355
, 2368
, 2369
, 2371
, 2385
, 2387
, 2393
, 2395
, 2400
, 2408
, 2416
, 2417
, 2419
, 2434
, 2435
, 2450
, 2456
, 2458
, 2465
, 2467
, 2473
, 2480
, 2481
, 2482
, 2483
, 2498
, 2499
, 2515
, 2521
, 2530
, 2545
, 2546
, 2547
, 2572
, 2573
, 2575
, 2605
, 2614
, 2620
, 2621
, 2636
, 2637
, 2639
, 2660
, 2662
, 2668
, 2676
, 2677
, 2678
, 2679
, 2684
, 2685
, 2702
, 2703
, 2733
, 2742
, 2748
, 2749
, 2766
, 2767
, 2790
, 2796
, 2806
, 2812
, 2813
, 2825
, 2827
, 2840
, 2842
, 2857
, 2864
, 2872
, 3018
, 3035
, 3059
, 3065
, 3076
, 3077
, 3078
, 3079
, 3084
, 3087
, 3092
, 3093
, 3094
, 3095
, 3103
, 3108
, 3109
, 3110
, 3111
, 3116
, 3124
, 3125
, 3126
, 3127
, 3132
, 3140
, 3141
, 3156
, 3157
, 3172
, 3173
, 3188
, 3189
, 3270
, 3271
, 3276
, 3279
, 3286
, 3287
, 3292
, 3300
, 3301
, 3302
, 3303
, 3316
, 3317
, 3318
, 3319
, 3324
, 3328
, 3329
, 3331
, 3344
, 3345
, 3347
, 3360
, 3361
, 3363
, 3376
, 3377
, 3379
, 3392
, 3393
, 3395
, 3408
, 3409
, 3411
, 3424
, 3425
, 3427
, 3440
, 3441
, 3443
, 3458
, 3459
, 3474
, 3475
, 3488
, 3489
, 3490
, 3491
, 3504
, 3505
, 3506
, 3507
, 3522
, 3523
, 3538
, 3539
, 3553
, 3554
, 3555
, 3569
, 3570
, 3571
, 3848
, 3849
, 3850
, 3851
, 3864
, 3865
, 3866
, 3867
, 3880
, 3881
, 3888
, 3889
, 3891
, 3896
, 3897
, 4042
, 4043
, 4058
, 4059
, 4073
, 4082
, 4083
, 4089
, 4096
, 4097
, 4099
, 4112
, 4120
, 4121
, 4122
, 4123
, 4129
, 4131
, 4136
, 4137
, 4144
, 4145
, 4147
, 4357
, 4358
, 4373
, 4374
, 4423
, 4439
, 5382
, 5388
, 5397
, 5407
, 5447
, 5455
, 6168
, 6170
, 6185
, 6192
, 6405
, 6406
, 6412
, 6415
, 6422
, 6428
, 6437
, 6453
, 6454
, 6460
, 6471
, 6479
, 6487
, 6495
, 6519
, 7168
, 7169
, 7171
, 7192
, 7193
, 7194
, 7195
, 7200
, 7208
, 7209
, 7216
, 7217
, 7219
, 7429
, 7430
, 7445
, 7446
, 7461
, 7462
, 7477
, 7478
, 7495
, 7511
, 7527
, 7543
, 8192
, 8193
, 8195
, 8209
, 8211
, 8216
, 8217
, 8218
, 8219
, 8224
, 8232
, 8233
, 8240
, 8241
, 8243
, 8323
, 8327
, 8339
, 8343
, 8359
, 8371
, 8384
, 8385
, 8386
, 8387
, 8388
, 8389
, 8390
, 8391
, 8396
, 8399
, 8400
, 8402
, 8404
, 8405
, 8406
, 8407
, 8408
, 8409
, 8410
, 8411
, 8415
, 8417
, 8419
, 8420
, 8421
, 8422
, 8423
, 8424
, 8425
, 8428
, 8432
, 8433
, 8434
, 8435
, 8444
, 8453
, 8454
, 8469
, 8470
, 8519
, 8535
, 8579
, 8583
, 8595
, 8599
, 8643
, 8645
, 8646
, 8659
, 8661
, 8662
, 9217
, 9219
, 9240
, 9242
, 9264
, 9347
, 9408
, 9410
, 9433
, 9435
, 9457
, 9459
, 9477
, 9478
, 9484
, 9487
, 9494
, 9500
, 9532
, 9543
, 9551
, 9559
, 9567
, 9607
, 9615
, 9619
, 9627
, 9651
, 9667
, 9669
, 9670
, 9676
, 9679
, 9685
, 9690
, 9695
, 9724
, 10240
, 10265
, 10267
, 10280
, 10289
, 10291
, 10292
, 10294
, 10300
, 10419
, 10435
, 10456
, 10458
, 10473
, 10480
, 10482
, 10485
, 10487
, 10496
, 10499
, 10502
, 10508
, 10512
, 10517
, 10522
, 10527
, 10531
, 10534
, 10537
, 10540
, 10544
, 10547
, 10549
, 10563
, 10567
, 10575
, 10579
, 10587
, 10599
, 10611
, 10627
, 10647
, 10655
, 10659
, 10675
, 10679
, 10691
, 10703
, 10707
, 10710
, 10713
, 10716
, 10725
, 10739
, 10742
, 10748
, 11264
, 11265
, 11267
, 11268
, 11269
, 11270
, 11271
, 11276
, 11279
, 11284
, 11285
, 11286
, 11287
, 11288
, 11289
, 11290
, 11291
, 11295
, 11297
, 11299
, 11300
, 11301
, 11302
, 11303
, 11304
, 11305
, 11308
, 11312
, 11313
, 11315
, 11316
, 11317
, 11318
, 11319
, 11324
, 11395
, 11399
, 11415
, 11427
, 11431
, 11443
, 11447
, 11458
, 11459
, 11462
, 11463
, 11468
, 11471
, 11478
, 11479
, 11480
, 11481
, 11482
, 11483
, 11484
, 11488
, 11490
, 11492
, 11493
, 11494
, 11495
, 11496
, 11497
, 11504
, 11505
, 11506
, 11507
, 11508
, 11509
, 11510
, 11511
, 11516
, 11520
, 11523
, 11536
, 11539
, 11552
, 11555
, 11568
, 11571
, 11587
, 11603
, 11619
, 11635
, 11651
, 11667
, 11683
, 11699
, 11715
, 11731
, 11747
, 11763
, 12292
, 12293
, 12294
, 12295
, 12300
, 12303
, 12308
, 12309
, 12310
, 12311
, 12319
, 12324
, 12325
, 12326
, 12327
, 12332
, 12348
, 12419
, 12423
, 12439
, 12451
, 12455
, 12467
, 12480
, 12481
, 12482
, 12483
, 12484
, 12485
, 12486
, 12487
, 12492
, 12495
, 12497
, 12499
, 12500
, 12501
, 12502
, 12503
, 12504
, 12505
, 12506
, 12507
, 12508
, 12512
, 12514
, 12516
, 12517
, 12518
, 12519
, 12520
, 12521
, 12528
, 12529
, 12530
, 12531
, 12540
, 12544
, 12545
, 12547
, 12560
, 12561
, 12563
, 12611
, 12627
, 12675
, 12679
, 12691
, 12695
, 12737
, 12738
, 12739
, 12741
, 12742
, 12753
, 12754
, 12755
, 12757
, 12758
, 13056
, 13057
, 13059
, 13061
, 13062
, 13063
, 13064
, 13065
, 13066
, 13067
, 13068
, 13069
, 13071
, 13073
, 13075
, 13077
, 13078
, 13079
, 13080
, 13081
, 13082
, 13083
, 13084
, 13088
, 13093
, 13096
, 13097
, 13101
, 13104
, 13105
, 13116
, 13117
, 13250
, 13251
, 13254
, 13255
, 13257
, 13258
, 13259
, 13260
, 13261
, 13262
, 13263
, 13266
, 13270
, 13271
, 13273
, 13274
, 13275
, 13277
, 13279
, 13283
, 13286
, 13287
, 13289
, 13292
, 13298
, 13299
, 13308
, 13309
, 13317
, 13319
, 13327
, 13332
, 13334
, 13372
, 13447
, 13508
, 13510
, 13516
, 13525
, 13527
, 13568
, 13569
, 13571
, 13584
, 13594
, 13616
, 13617
, 13619
, 13635
, 13651
, 13659
, 13683
, 13699
, 13719
, 13727
, 13747
, 13761
, 13762
, 13763
, 13765
, 13775
, 13777
, 13779
, 13782
, 13785
, 13788
, 13809
, 13810
, 13811
, 13820
, 13830
, 13833
, 13834
, 13836
, 13837
, 13839
, 13846
, 13849
, 13852
, 13872
, 13873
, 13884
, 13885
, 13891
, 13895
, 13899
, 13903
, 13911
, 13919
, 13963
, 13967
, 13979
, 14003
, 14018
, 14022
, 14026
, 14028
, 14029
, 14030
, 14031
, 14042
, 14045
, 14047
, 14066
, 14076
, 14077
, 14089
, 14091
, 14104
, 14106
, 14128
, 14141
, 14282
, 14297
, 14299
, 14323
, 14332
, 14340
, 14342
, 14348
, 14360
, 14362
, 14372
, 14374
, 14377
, 14384
, 14389
, 14391
, 14519
, 14535
, 14543
, 14553
, 14555
, 14565
, 14567
, 14568
, 14577
, 14579
, 14580
, 14582
, 14588
, 14592
, 14597
, 14598
, 14604
, 14607
, 14609
, 14611
, 14614
, 14617
, 14620
, 14624
, 14629
, 14641
, 14643
, 14645
, 14646
, 14652
, 14659
, 14663
, 14671
, 14679
, 14687
, 14691
, 14711
, 14727
, 14735
, 14739
, 14747
, 14759
, 14771
, 14775
, 14787
, 14790
, 14796
, 14799
, 14802
, 14810
, 14815
, 14817
, 14819
, 14822
, 14825
, 14828
, 14834
, 14837
, 14838
, 14844
, 14857
, 14858
, 14860
, 14874
, 14886
, 14889
, 14892
, 14896
, 14897
, 14899
, 14905
, 14909
, 14923
, 14927
, 14939
, 14951
, 14963
, 14987
, 15027
, 15050
, 15055
, 15074
, 15085
, 15090
, 15094
, 15100
, 15112
, 15114
, 15129
, 15131
, 15144
, 15153
, 15155
, 15158
, 15161
, 15164
, 15307
, 15322
, 15337
, 15346
, 15351
, 15357
, 15360
, 15361
, 15363
, 15364
, 15365
, 15366
, 15367
, 15372
, 15375
, 15380
, 15381
, 15382
, 15383
, 15384
, 15385
, 15386
, 15387
, 15388
, 15392
, 15396
, 15397
, 15398
, 15399
, 15400
, 15401
, 15408
, 15409
, 15411
, 15412
, 15413
, 15414
, 15415
, 15420
, 15491
, 15495
, 15511
, 15527
, 15539
, 15543
, 15554
, 15555
, 15558
, 15559
, 15564
, 15567
, 15574
, 15575
, 15576
, 15577
, 15578
, 15579
, 15583
, 15585
, 15587
, 15588
, 15589
, 15590
, 15591
, 15592
, 15593
, 15596
, 15600
, 15601
, 15602
, 15603
, 15604
, 15605
, 15606
, 15607
, 15612
, 15621
, 15622
, 15637
, 15638
, 15653
, 15654
, 15669
, 15670
, 15687
, 15703
, 15719
, 15735
, 15751
, 15767
, 15783
, 15799
, 15814
, 15830
, 15845
, 15846
, 15861
, 15862
, 16136
, 16137
, 16138
, 16139
, 16140
, 16141
, 16143
, 16152
, 16153
, 16154
, 16155
, 16157
, 16159
, 16166
, 16168
, 16169
, 16172
, 16176
, 16177
, 16179
, 16181
, 16182
, 16183
, 16184
, 16185
, 16188
, 16189
, 16330
, 16331
, 16334
, 16335
, 16346
, 16347
, 16350
, 16358
, 16361
, 16365
, 16370
, 16371
, 16374
, 16375
, 16377
, 16380
, 16381
, 16384
, 16385
, 16387
, 16400
, 16401
, 16403
, 16409
, 16410
, 16416
, 16417
, 16419
, 16425
, 16432
, 16433
, 16435
, 16448
, 16449
, 16464
, 16465
, 16480
, 16481
, 16496
, 16497
, 16576
, 16577
, 16578
, 16579
, 16592
, 16593
, 16594
, 16595
, 16601
, 16602
, 16608
, 16609
, 16610
, 16611
, 16617
, 16624
, 16625
, 16626
, 16627
, 16709
, 16725
, 16726
, 16774
, 16789
, 16790
, 17158
, 17164
, 17173
, 17181
, 17183
, 17196
, 17213
, 17357
, 17359
, 17366
, 17372
, 17374
, 17389
, 17404
, 17408
, 17409
, 17411
, 17433
, 17456
, 17457
, 17459
, 17472
, 17473
, 17520
, 17521
, 17600
, 17601
, 17602
, 17603
, 17626
, 17648
, 17649
, 17650
, 17651
, 17733
, 17734
, 17740
, 17743
, 17750
, 17756
, 17788
, 17797
, 17798
, 17804
, 17807
, 17813
, 17823
, 17852
, 17984
, 17994
, 18033
, 18057
, 18098
, 18189
, 18191
, 18236
, 18374
, 18380
, 18382
, 18429
, 18432
, 18433
, 18435
, 18458
, 18473
, 18480
, 18481
, 18483
, 18496
, 18497
, 18544
, 18545
, 18626
, 18627
, 18649
, 18672
, 18673
, 18674
, 18675
, 18757
, 18758
, 18764
, 18767
, 18773
, 18783
, 18790
, 18796
, 18805
, 18806
, 18812
, 18822
, 18828
, 18831
, 18838
, 18844
, 18853
, 18869
, 18870
, 18876
, 19017
, 19056
, 19082
, 19123
, 19129
, 19253
, 19261
, 19446
, 19452
, 19456
, 19457
, 19459
, 19481
, 19482
, 19488
, 19489
, 19491
, 19497
, 19504
, 19505
, 19507
, 19520
, 19521
, 19552
, 19553
, 19568
, 19569
, 19650
, 19651
, 19673
, 19674
, 19680
, 19681
, 19682
, 19683
, 19689
, 19696
, 19697
, 19698
, 19699
, 19782
, 19797
, 19798
, 19813
, 19814
, 19829
, 19862
, 19877
, 19878
, 19894
, 20237
, 20239
, 20252
, 20269
, 20278
, 20284
, 20430
, 20447
, 20454
, 20460
, 20477
, 20484
, 20485
, 20486
, 20487
, 20492
, 20495
, 20500
, 20501
, 20502
, 20503
, 20508
, 20511
, 20516
, 20517
, 20518
, 20519
, 20524
, 20540
, 20676
, 20677
, 20678
, 20679
, 20684
, 20687
, 20692
, 20693
, 20694
, 20695
, 20700
, 20703
, 20708
, 20709
, 20710
, 20711
, 20716
, 20732
, 20736
, 20737
, 20739
, 20752
, 20753
, 20755
, 20803
, 20819
, 20867
, 20883
, 20929
, 20930
, 20931
, 20945
, 20946
, 20947
, 21248
, 21256
, 21258
, 21265
, 21267
, 21273
, 21275
, 21280
, 21288
, 21297
, 21443
, 21449
, 21451
, 21458
, 21466
, 21475
, 21481
, 21490
, 21509
, 21511
, 21516
, 21519
, 21525
, 21527
, 21564
, 21700
, 21702
, 21708
, 21711
, 21716
, 21718
, 21756
, 21760
, 21761
, 21763
, 21777
, 21779
, 21785
, 21808
, 21809
, 21811
, 21827
, 21875
, 21891
, 21907
, 21915
, 21939
, 21953
, 21954
, 21955
, 21970
, 21978
, 22001
, 22002
, 22003
, 22029
, 22031
, 22076
, 22159
, 22214
, 22220
, 22222
, 22269
, 22281
, 22283
, 22320
, 22474
, 22515
, 22532
, 22534
, 22540
, 22543
, 22565
, 22567
, 22581
, 22583
, 22588
, 22727
, 22732
, 22735
, 22756
, 22758
, 22772
, 22774
, 22780
, 22784
, 22785
, 22787
, 22800
, 22810
, 22817
, 22819
, 22825
, 22832
, 22833
, 22835
, 22851
, 22867
, 22875
, 22899
, 22915
, 22947
, 22963
, 22978
, 22979
, 22995
, 23001
, 23010
, 23025
, 23026
, 23027
, 23052
, 23101
, 23119
, 23247
, 23286
, 23292
, 23304
, 23306
, 23345
, 23347
, 23353
, 23499
, 23538
, 23556
, 23557
, 23558
, 23559
, 23564
, 23567
, 23572
, 23573
, 23574
, 23575
, 23580
, 23583
, 23588
, 23589
, 23590
, 23591
, 23596
, 23604
, 23605
, 23606
, 23607
, 23612
, 23750
, 23751
, 23756
, 23759
, 23766
, 23767
, 23772
, 23775
, 23780
, 23781
, 23782
, 23783
, 23788
, 23796
, 23797
, 23798
, 23799
, 23804
, 23808
, 23809
, 23811
, 23824
, 23825
, 23827
, 23840
, 23841
, 23843
, 23856
, 23857
, 23859
, 23875
, 23891
, 23907
, 23923
, 23939
, 23955
, 23971
, 23987
, 24002
, 24003
, 24018
, 24019
, 24033
, 24034
, 24035
, 24049
, 24050
, 24051
, 24329
, 24331
, 24344
, 24346
, 24361
, 24368
, 24376
, 24522
, 24539
, 24563
, 24569
, 24580
, 24581
, 24582
, 24583
, 24588
, 24591
, 24596
, 24597
, 24598
, 24599
, 24604
, 24607
, 24612
, 24613
, 24614
, 24615
, 24620
, 24636
, 24707
, 24711
, 24723
, 24727
, 24739
, 24743
, 24755
, 24768
, 24769
, 24770
, 24771
, 24772
, 24773
, 24774
, 24775
, 24780
, 24783
, 24784
, 24785
, 24786
, 24787
, 24788
, 24789
, 24790
, 24791
, 24792
, 24793
, 24794
, 24795
, 24796
, 24799
, 24800
, 24801
, 24802
, 24803
, 24804
, 24805
, 24806
, 24807
, 24808
, 24809
, 24812
, 24816
, 24817
, 24818
, 24819
, 24828
, 24899
, 24915
, 24963
, 24979
, 24983
, 25026
, 25042
, 25046
, 25345
, 25347
, 25350
, 25353
, 25355
, 25356
, 25360
, 25365
, 25367
, 25368
, 25370
, 25373
, 25375
, 25377
, 25385
, 25388
, 25392
, 25405
, 25538
, 25543
, 25546
, 25549
, 25551
, 25555
, 25558
, 25561
, 25563
, 25564
, 25566
, 25570
, 25575
, 25581
, 25587
, 25596
, 25604
, 25606
, 25612
, 25615
, 25620
, 25622
, 25660
, 25731
, 25779
, 25792
, 25793
, 25794
, 25795
, 25797
, 25799
, 25804
, 25807
, 25813
, 25815
, 25816
, 25818
, 25840
, 25841
, 25842
, 25843
, 25852
, 25923
, 25939
, 25947
, 25971
, 25987
, 25991
, 25999
, 26007
, 26015
, 26035
, 26050
, 26054
, 26060
, 26063
, 26070
, 26076
, 26098
, 26108
, 26118
, 26124
, 26173
, 26183
, 26191
, 26251
, 26306
, 26314
, 26317
, 26319
, 26364
, 26376
, 26378
, 26381
, 26383
, 26417
, 26428
, 26566
, 26569
, 26571
, 26572
, 26574
, 26610
, 26621
, 26624
, 26625
, 26627
, 26648
, 26650
, 26665
, 26672
, 26673
, 26675
, 26755
, 26803
, 26818
, 26819
, 26841
, 26843
, 26856
, 26864
, 26865
, 26866
, 26867
, 26951
, 26959
, 26967
, 26975
, 26999
, 27015
, 27023
, 27047
, 27063
, 27078
, 27084
, 27087
, 27103
, 27110
, 27116
, 27126
, 27132
, 27146
, 27211
, 27315
, 27378
, 27445
, 27447
, 27453
, 27638
, 27644
, 27648
, 27649
, 27651
, 27652
, 27654
, 27660
, 27669
, 27671
, 27672
, 27673
, 27674
, 27675
, 27679
, 27680
, 27681
, 27683
, 27684
, 27686
, 27688
, 27689
, 27692
, 27696
, 27697
, 27699
, 27701
, 27703
, 27779
, 27799
, 27811
, 27827
, 27831
, 27842
, 27843
, 27847
, 27855
, 27862
, 27864
, 27865
, 27866
, 27867
, 27868
, 27872
, 27873
, 27874
, 27875
, 27877
, 27879
, 27880
, 27881
, 27888
, 27889
, 27890
, 27891
, 27892
, 27894
, 27900
, 27991
, 28007
, 28023
, 28039
, 28055
, 28071
, 28102
, 28118
, 28134
, 28424
, 28426
, 28429
, 28431
, 28441
, 28443
, 28444
, 28456
, 28461
, 28465
, 28467
, 28470
, 28473
, 28476
, 28619
, 28622
, 28634
, 28639
, 28646
, 28649
, 28652
, 28658
, 28663
, 28669
, 28803
, 28807
, 28819
, 28823
, 28835
, 28839
, 28851
, 28864
, 28865
, 28866
, 28867
, 28869
, 28870
, 28876
, 28879
, 28880
, 28881
, 28882
, 28883
, 28885
, 28886
, 28889
, 28890
, 28892
, 28895
, 28896
, 28897
, 28898
, 28899
, 28901
, 28902
, 28905
, 28908
, 28912
, 28913
, 28914
, 28915
, 28924
, 29059
, 29075
, 29440
, 29445
, 29450
, 29453
, 29455
, 29457
, 29459
, 29462
, 29465
, 29468
, 29472
, 29477
, 29485
, 29489
, 29500
, 29635
, 29638
, 29641
, 29644
, 29646
, 29650
, 29658
, 29661
, 29663
, 29667
, 29670
, 29673
, 29676
, 29682
, 29693
, 29827
, 29831
, 29847
, 29875
, 29888
, 29889
, 29890
, 29891
, 29894
, 29900
, 29903
, 29910
, 29913
, 29936
, 29937
, 29938
, 29939
, 29948
, 30031
, 30047
, 30083
, 30095
, 30099
, 30131
, 30275
, 30351
, 30387
, 30470
, 30473
, 30476
, 30512
, 30525
, 30666
, 30669
, 30671
, 30707
, 30716
, 30887
, 30903
, 30924
, 30927
, 30950
, 30966
, 30972
, 31043
, 31059
, 31091
, 31107
, 31139
, 31155
, 31311
, 31498
, 31537
, 31539
, 31545
, 31730
, 31879
, 31895
, 31911
, 31923
, 31927
, 31939
, 31942
, 31948
, 31951
, 31958
, 31962
, 31964
, 31967
, 31969
, 31971
, 31973
, 31974
, 31977
, 31980
, 31984
, 31986
, 31989
, 31990
, 31996
, 32067
, 32083
, 32099
, 32115
, 32131
, 32147
, 32163
, 32179
, 32521
, 32524
, 32538
, 32541
, 32543
, 32550
, 32553
, 32556
, 32560
, 32565
, 32573
, 32714
, 32719
, 32734
, 32749
, 32755
, 32758
, 32761
, 32764
, 32768
, 32785
, 32787
, 32793
, 32800
, 32817
, 32819
, 32833
, 32848
, 32865
, 32880
, 32961
, 32963
, 32976
, 32978
, 32986
, 32993
, 32995
, 33001
, 33008
, 33010
, 33157
, 33173
, 33174
, 33344
, 33345
, 33347
, 33349
, 33354
, 33356
, 33357
, 33359
, 33360
, 33361
, 33363
, 33365
, 33366
, 33369
, 33370
, 33372
, 33373
, 33375
, 33410
, 33411
, 33414
, 33417
, 33420
, 33421
, 33422
, 33423
, 33426
, 33427
, 33430
, 33433
, 33434
, 33436
, 33437
, 33438
, 33439
, 33536
, 33537
, 33539
, 33541
, 33542
, 33545
, 33546
, 33548
, 33549
, 33551
, 33552
, 33553
, 33555
, 33557
, 33558
, 33561
, 33562
, 33564
, 33565
, 33567
, 33568
, 33569
, 33573
, 33577
, 33580
, 33581
, 33584
, 33585
, 33596
, 33597
, 33600
, 33601
, 33604
, 33605
, 33608
, 33609
, 33612
, 33613
, 33616
, 33617
, 33620
, 33621
, 33624
, 33625
, 33628
, 33629
, 33632
, 33633
, 33636
, 33637
, 33640
, 33641
, 33644
, 33645
, 33648
, 33649
, 33660
, 33661
, 33730
, 33731
, 33734
, 33737
, 33738
, 33740
, 33741
, 33742
, 33743
, 33746
, 33747
, 33750
, 33753
, 33754
, 33756
, 33757
, 33758
, 33759
, 33762
, 33763
, 33766
, 33769
, 33772
, 33773
, 33778
, 33779
, 33788
, 33789
, 33793
, 33795
, 33818
, 33840
, 33856
, 33905
, 33984
, 33986
, 34009
, 34033
, 34035
, 34181
, 34182
, 34188
, 34191
, 34198
, 34204
, 34236
, 34368
, 34369
, 34371
, 34373
, 34374
, 34377
, 34378
, 34380
, 34381
, 34383
, 34390
, 34393
, 34396
, 34416
, 34417
, 34428
, 34429
, 34434
, 34438
, 34441
, 34442
, 34444
, 34445
, 34446
, 34447
, 34458
, 34461
, 34463
, 34482
, 34483
, 34492
, 34493
, 34573
, 34575
, 34582
, 34585
, 34609
, 34620
, 34621
, 34636
, 34645
, 34648
, 34672
, 34684
, 34685
, 34764
, 34766
, 34778
, 34802
, 34812
, 34813
, 35202
, 35203
, 35218
, 35226
, 35233
, 35235
, 35241
, 35248
, 35249
, 35250
, 35251
, 35404
, 35405
, 35407
, 35430
, 35436
, 35445
, 35446
, 35452
, 35453
, 35470
, 35471
, 35501
, 35510
, 35516
, 35517
, 35594
, 35610
, 35625
, 35632
, 35633
, 35635
, 35641
, 35657
, 35673
, 35688
, 35696
, 35697
, 35704
, 35826
, 35827
, 35840
, 35845
, 35846
, 35852
, 35855
, 35861
, 35862
, 35865
, 35868
, 35871
, 35872
, 35877
, 35878
, 35884
, 35889
, 35891
, 35893
, 35894
, 35900
, 35905
, 35908
, 35909
, 35924
, 35925
, 35937
, 35940
, 35941
, 35952
, 35956
, 35957
, 36038
, 36044
, 36047
, 36054
, 36060
, 36063
, 36069
, 36070
, 36076
, 36085
, 36086
, 36092
, 36226
, 36227
, 36242
, 36243
, 36256
, 36257
, 36258
, 36259
, 36272
, 36273
, 36274
, 36275
, 36428
, 36429
, 36431
, 36444
, 36445
, 36447
, 36453
, 36454
, 36460
, 36461
, 36469
, 36476
, 36477
, 36494
, 36495
, 36510
, 36511
, 36518
, 36524
, 36525
, 36534
, 36540
, 36541
, 36617
, 36618
, 36620
, 36633
, 36634
, 36637
, 36639
, 36646
, 36649
, 36652
, 36656
, 36657
, 36659
, 36661
, 36665
, 36669
, 36680
, 36681
, 36685
, 36696
, 36697
, 36700
, 36709
, 36712
, 36713
, 36717
, 36720
, 36721
, 36724
, 36728
, 36729
, 36732
, 36810
, 36815
, 36826
, 36830
, 36841
, 36845
, 36850
, 36851
, 36854
, 36857
, 36860
, 36868
, 36869
, 36870
, 36871
, 36876
, 36879
, 36884
, 36885
, 36886
, 36887
, 36892
, 36895
, 36900
, 36901
, 36902
, 36903
, 36908
, 36924
, 37060
, 37061
, 37062
, 37063
, 37068
, 37071
, 37076
, 37077
, 37078
, 37079
, 37084
, 37087
, 37092
, 37093
, 37094
, 37095
, 37100
, 37116
, 37120
, 37121
, 37123
, 37136
, 37137
, 37139
, 37187
, 37203
, 37251
, 37267
, 37313
, 37314
, 37315
, 37329
, 37330
, 37331
, 37376
, 37377
, 37379
, 37386
, 37388
, 37389
, 37391
, 37392
, 37393
, 37395
, 37398
, 37401
, 37402
, 37404
, 37405
, 37407
, 37443
, 37451
, 37455
, 37459
, 37463
, 37467
, 37471
, 37507
, 37519
, 37523
, 37531
, 37535
, 37570
, 37574
, 37580
, 37581
, 37582
, 37583
, 37586
, 37590
, 37594
, 37596
, 37597
, 37598
, 37599
, 37632
, 37633
, 37635
, 37637
, 37638
, 37639
, 37640
, 37641
, 37642
, 37643
, 37644
, 37645
, 37647
, 37648
, 37649
, 37651
, 37653
, 37654
, 37655
, 37656
, 37657
, 37658
, 37659
, 37660
, 37661
, 37663
, 37664
, 37665
, 37669
, 37672
, 37673
, 37676
, 37677
, 37680
, 37681
, 37692
, 37693
, 37826
, 37827
, 37830
, 37831
, 37833
, 37834
, 37835
, 37836
, 37837
, 37838
, 37839
, 37842
, 37843
, 37846
, 37847
, 37849
, 37850
, 37851
, 37852
, 37853
, 37854
, 37855
, 37858
, 37859
, 37862
, 37863
, 37865
, 37868
, 37869
, 37874
, 37875
, 37884
, 37885
, 37903
, 37908
, 37910
, 37948
, 38092
, 38101
, 38103
, 38144
, 38145
, 38147
, 38160
, 38170
, 38192
, 38193
, 38195
, 38211
, 38227
, 38235
, 38259
, 38275
, 38323
, 38337
, 38338
, 38339
, 38353
, 38355
, 38361
, 38385
, 38386
, 38387
, 38406
, 38409
, 38410
, 38412
, 38413
, 38415
, 38422
, 38425
, 38428
, 38448
, 38449
, 38460
, 38461
, 38467
, 38471
, 38475
, 38479
, 38487
, 38495
, 38539
, 38543
, 38555
, 38579
, 38594
, 38598
, 38602
, 38604
, 38605
, 38606
, 38607
, 38618
, 38621
, 38623
, 38642
, 38652
, 38653
, 38664
, 38666
, 38680
, 38682
, 38704
, 38705
, 38717
, 38857
, 38859
, 38873
, 38875
, 38898
, 38899
, 38908
, 38936
, 38938
, 38953
, 38960
, 38961
, 38963
, 39129
, 39131
, 39144
, 39152
, 39153
, 39154
, 39155
, 39173
, 39174
, 39180
, 39183
, 39190
, 39196
, 39205
, 39221
, 39222
, 39228
, 39239
, 39247
, 39255
, 39263
, 39287
, 39303
, 39311
, 39335
, 39351
, 39366
, 39372
, 39375
, 39391
, 39398
, 39404
, 39413
, 39414
, 39420
, 39433
, 39434
, 39450
, 39465
, 39472
, 39473
, 39475
, 39481
, 39499
, 39515
, 39539
, 39563
, 39603
, 39626
, 39650
, 39666
, 39733
, 39735
, 39740
, 39741
, 39926
, 39932
, 39933
, 39936
, 39937
, 39939
, 39940
, 39942
, 39948
, 39957
, 39959
, 39960
, 39961
, 39962
, 39963
, 39967
, 39968
, 39969
, 39971
, 39972
, 39974
, 39976
, 39977
, 39980
, 39984
, 39985
, 39987
, 39989
, 39991
, 40130
, 40131
, 40152
, 40153
, 40154
, 40155
, 40160
, 40161
, 40162
, 40163
, 40168
, 40169
, 40176
, 40177
, 40178
, 40179
, 40197
, 40213
, 40214
, 40229
, 40230
, 40246
, 40279
, 40295
, 40311
, 40327
, 40343
, 40359
, 40390
, 40406
, 40421
, 40422
, 40437
, 40457
, 40473
, 40474
, 40489
, 40496
, 40497
, 40499
, 40539
, 40547
, 40563
, 40587
, 40603
, 40627
, 40650
, 40666
, 40674
, 40690
, 40712
, 40714
, 40716
, 40717
, 40719
, 40729
, 40731
, 40732
, 40733
, 40735
, 40742
, 40744
, 40748
, 40749
, 40753
, 40755
, 40757
, 40758
, 40759
, 40761
, 40764
, 40765
, 40907
, 40910
, 40911
, 40922
, 40926
, 40927
, 40934
, 40937
, 40940
, 40941
, 40946
, 40950
, 40951
, 40956
, 40957
, 41091
, 41107
, 41123
, 41139
, 41153
, 41155
, 41168
, 41170
, 41176
, 41178
, 41185
, 41187
, 41193
, 41200
, 41202
, 41351
, 41367
, 41498
, 41539
, 41555
, 41563
, 41603
, 41611
, 41615
, 41619
, 41627
, 41631
, 41666
, 41674
, 41676
, 41677
, 41678
, 41679
, 41682
, 41686
, 41690
, 41692
, 41693
, 41694
, 41695
, 41733
, 41734
, 41735
, 41740
, 41741
, 41743
, 41749
, 41750
, 41751
, 41756
, 41757
, 41759
, 41765
, 41772
, 41773
, 41788
, 41789
, 41859
, 41863
, 41867
, 41871
, 41875
, 41879
, 41883
, 41887
, 41891
, 41895
, 41907
, 41922
, 41923
, 41926
, 41927
, 41929
, 41930
, 41931
, 41932
, 41933
, 41934
, 41935
, 41938
, 41939
, 41942
, 41943
, 41945
, 41946
, 41947
, 41948
, 41949
, 41950
, 41951
, 41954
, 41955
, 41958
, 41959
, 41961
, 41964
, 41965
, 41970
, 41971
, 41980
, 41981
, 42115
, 42176
, 42178
, 42201
, 42203
, 42225
, 42227
, 42375
, 42383
, 42506
, 42522
, 42563
, 42571
, 42587
, 42635
, 42639
, 42655
, 42675
, 42690
, 42694
, 42698
, 42700
, 42701
, 42702
, 42703
, 42710
, 42716
, 42718
, 42738
, 42748
, 42749
, 42765
, 42767
, 42774
, 42812
, 42813
, 42895
, 42907
, 42931
, 42956
, 42958
, 42970
, 42994
, 43004
, 43005
, 43191
, 43395
, 43427
, 43443
, 43532
, 43533
, 43535
, 43565
, 43574
, 43580
, 43581
, 43599
, 43639
, 43663
, 43726
, 43727
, 43750
, 43756
, 43766
, 43772
, 43773
, 43784
, 43786
, 43800
, 43802
, 43817
, 43824
, 43825
, 43827
, 43833
, 43955
, 43979
, 43995
, 44018
, 44019
, 44167
, 44183
, 44199
, 44211
, 44215
, 44227
, 44230
, 44231
, 44236
, 44239
, 44246
, 44247
, 44248
, 44250
, 44252
, 44255
, 44257
, 44259
, 44260
, 44261
, 44262
, 44263
, 44265
, 44268
, 44272
, 44274
, 44276
, 44277
, 44278
, 44279
, 44284
, 44419
, 44435
, 44451
, 44467
, 44556
, 44557
, 44559
, 44572
, 44573
, 44575
, 44582
, 44588
, 44589
, 44598
, 44604
, 44605
, 44623
, 44639
, 44647
, 44663
, 44687
, 44703
, 44750
, 44751
, 44766
, 44767
, 44774
, 44780
, 44781
, 44796
, 44797
, 44808
, 44809
, 44810
, 44811
, 44812
, 44824
, 44825
, 44826
, 44827
, 44829
, 44831
, 44838
, 44840
, 44841
, 44844
, 44848
, 44849
, 44851
, 44853
, 44855
, 44856
, 44857
, 44861
, 44939
, 44955
, 44959
, 44979
, 44983
, 45002
, 45003
, 45007
, 45018
, 45019
, 45022
, 45033
, 45037
, 45042
, 45043
, 45046
, 45049
, 45052
, 45191
, 45207
, 45223
, 45443
, 45459
, 45699
, 45711
, 45715
, 45727
, 45824
, 45825
, 45827
, 45833
, 45834
, 45840
, 45841
, 45843
, 45849
, 45850
, 45856
, 45857
, 45865
, 45872
, 45873
, 45955
, 45959
, 45963
, 45967
, 45971
, 45975
, 45979
, 45983
, 45987
, 45991
, 46003
, 46018
, 46019
, 46022
, 46025
, 46026
, 46028
, 46029
, 46030
, 46031
, 46034
, 46035
, 46038
, 46041
, 46042
, 46044
, 46045
, 46046
, 46047
, 46050
, 46051
, 46054
, 46057
, 46060
, 46061
, 46066
, 46067
, 46076
, 46077
, 46467
, 46515
, 46735
, 46771
, 46858
, 46874
, 46896
, 46897
, 47027
, 47049
, 47065
, 47090
, 47091
, 47100
, 47283
, 47503
, 47795
, 47925
, 47932
, 47933
, 48055
, 48118
, 48124
, 48125
, 48259
, 48279
, 48291
, 48307
, 48311
, 48819
, 48906
, 48908
, 48909
, 48911
, 48921
, 48924
, 48925
, 48927
, 48934
, 48940
, 48941
, 48945
, 48947
, 48949
, 48950
, 48953
, 48956
, 48957
, 49039
, 49051
, 49055
, 49075
, 49079
, 49102
, 49103
, 49114
, 49118
, 49119
, 49126
, 49129
, 49132
, 49133
, 49138
, 49142
, 49148
, 49149
, 49156
, 49158
, 49164
, 49173
, 49175
, 49183
, 49188
, 49190
, 49196
, 49221
, 49236
, 49253
, 49349
, 49351
, 49359
, 49364
, 49366
, 49372
, 49381
, 49383
, 49404
, 49473
, 49475
, 49488
, 49536
, 49537
, 49538
, 49539
, 49552
, 49553
, 49554
, 49555
, 49618
, 49670
, 49674
, 49676
, 49677
, 49679
, 49686
, 49690
, 49692
, 49693
, 49695
, 49728
, 49729
, 49731
, 49732
, 49733
, 49734
, 49735
, 49736
, 49737
, 49738
, 49739
, 49740
, 49741
, 49743
, 49744
, 49745
, 49747
, 49748
, 49749
, 49750
, 49751
, 49752
, 49753
, 49754
, 49755
, 49756
, 49757
, 49759
, 49794
, 49795
, 49798
, 49800
, 49801
, 49802
, 49803
, 49804
, 49805
, 49806
, 49807
, 49810
, 49811
, 49814
, 49816
, 49817
, 49818
, 49819
, 49820
, 49821
, 49822
, 49823
, 49858
, 49862
, 49866
, 49868
, 49869
, 49870
, 49871
, 49874
, 49878
, 49882
, 49884
, 49885
, 49886
, 49887
, 49920
, 49921
, 49923
, 49925
, 49927
, 49928
, 49929
, 49930
, 49931
, 49933
, 49935
, 49937
, 49939
, 49942
, 49944
, 49945
, 49946
, 49947
, 49948
, 49952
, 49957
, 49960
, 49961
, 49965
, 49968
, 49969
, 49980
, 49984
, 49985
, 49988
, 49992
, 49993
, 49996
, 50000
, 50005
, 50008
, 50009
, 50013
, 50017
, 50020
, 50024
, 50025
, 50028
, 50032
, 50033
, 50045
, 50114
, 50115
, 50118
, 50121
, 50122
, 50123
, 50124
, 50126
, 50130
, 50135
, 50137
, 50138
, 50139
, 50141
, 50143
, 50147
, 50150
, 50153
, 50156
, 50162
, 50163
, 50173
, 50181
, 50183
, 50191
, 50196
, 50198
, 50236
, 50244
, 50261
, 50372
, 50374
, 50380
, 50389
, 50391
, 50496
, 50513
, 50515
, 50521
, 50523
, 50545
, 50547
, 50560
, 50561
, 50562
, 50563
, 50576
, 50578
, 50584
, 50586
, 50608
, 50609
, 50610
, 50611
, 50626
, 50694
, 50698
, 50700
, 50701
, 50703
, 50710
, 50716
, 50748
, 50749
, 50752
, 50753
, 50755
, 50756
, 50757
, 50758
, 50759
, 50760
, 50761
, 50762
, 50763
, 50764
, 50765
, 50767
, 50773
, 50775
, 50776
, 50778
, 50781
, 50783
, 50800
, 50801
, 50812
, 50813
, 50818
, 50822
, 50824
, 50825
, 50826
, 50827
, 50828
, 50829
, 50830
, 50831
, 50838
, 50841
, 50843
, 50844
, 50846
, 50866
, 50867
, 50876
, 50877
, 50882
, 50886
, 50890
, 50892
, 50893
, 50894
, 50895
, 50906
, 50909
, 50911
, 50930
, 50940
, 50941
, 50953
, 50955
, 50968
, 50970
, 50992
, 51005
, 51016
, 51033
, 51057
, 51068
, 51146
, 51161
, 51163
, 51187
, 51196
, 51204
, 51206
, 51212
, 51236
, 51238
, 51253
, 51255
, 51269
, 51301
, 51316
, 51521
, 51523
, 51536
, 51544
, 51546
, 51553
, 51555
, 51561
, 51568
, 51591
, 51599
, 51606
, 51612
, 51621
, 51623
, 51638
, 51644
, 51722
, 51724
, 51738
, 51750
, 51756
, 51773
, 51784
, 51785
, 51786
, 51787
, 51789
, 51791
, 51801
, 51803
, 51808
, 51813
, 51815
, 51816
, 51821
, 51824
, 51825
, 51827
, 51828
, 51830
, 51832
, 51833
, 51836
, 51850
, 51851
, 51854
, 51866
, 51878
, 51881
, 51884
, 51890
, 51891
, 51896
, 51897
, 51901
, 51914
, 51919
, 51938
, 51949
, 51954
, 51958
, 51964
, 51976
, 51978
, 51993
, 51995
, 52008
, 52017
, 52019
, 52022
, 52025
, 52028
, 52041
, 52056
, 52073
, 52080
, 52085
, 52088
, 52093
, 52171
, 52186
, 52201
, 52210
, 52215
, 52221
, 52224
, 52225
, 52227
, 52228
, 52229
, 52230
, 52231
, 52236
, 52239
, 52244
, 52245
, 52246
, 52247
, 52248
, 52249
, 52250
, 52251
, 52252
, 52256
, 52260
, 52261
, 52262
, 52263
, 52264
, 52265
, 52272
, 52273
, 52275
, 52276
, 52277
, 52278
, 52279
, 52284
, 52288
, 52289
, 52292
, 52293
, 52308
, 52309
, 52321
, 52324
, 52325
, 52336
, 52337
, 52340
, 52341
, 52418
, 52419
, 52440
, 52441
, 52442
, 52443
, 52449
, 52451
, 52456
, 52457
, 52464
, 52465
, 52466
, 52467
, 52548
, 52549
, 52550
, 52551
, 52564
, 52565
, 52566
, 52567
, 52580
, 52581
, 52582
, 52583
, 52596
, 52597
, 52598
, 52599
, 52614
, 52615
, 52630
, 52631
, 52645
, 52646
, 52647
, 52661
, 52662
, 52663
, 52678
, 52694
, 52710
, 52726
, 52746
, 52762
, 52808
, 52809
, 52810
, 52811
, 52824
, 52825
, 52826
, 52827
, 52832
, 52833
, 52835
, 52840
, 52841
, 52848
, 52849
, 52851
, 52856
, 52857
, 52874
, 52875
, 52890
, 52891
, 52898
, 52904
, 52905
, 52914
, 52915
, 52920
, 52921
, 52938
, 52954
, 52962
, 52978
, 53000
, 53001
, 53002
, 53003
, 53004
, 53005
, 53007
, 53016
, 53017
, 53018
, 53019
, 53021
, 53023
, 53030
, 53032
, 53033
, 53036
, 53040
, 53041
, 53043
, 53045
, 53046
, 53047
, 53048
, 53049
, 53052
, 53053
, 53064
, 53065
, 53068
, 53069
, 53080
, 53081
, 53084
, 53092
, 53093
, 53096
, 53097
, 53101
, 53104
, 53105
, 53108
, 53109
, 53112
, 53113
, 53116
, 53117
, 53194
, 53195
, 53198
, 53199
, 53210
, 53211
, 53214
, 53222
, 53225
, 53229
, 53234
, 53235
, 53238
, 53239
, 53241
, 53244
, 53245
, 53248
, 53265
, 53267
, 53273
, 53275
, 53280
, 53288
, 53297
, 53299
, 53509
, 53510
, 53525
, 53526
, 53575
, 53591
, 53760
, 53763
, 53766
, 53769
, 53770
, 53772
, 53775
, 53776
, 53779
, 53782
, 53785
, 53786
, 53788
, 53791
, 53827
, 53831
, 53835
, 53839
, 53843
, 53847
, 53851
, 53855
, 54017
, 54019
, 54021
, 54022
, 54023
, 54025
, 54027
, 54028
, 54029
, 54031
, 54032
, 54037
, 54038
, 54039
, 54040
, 54042
, 54045
, 54047
, 54049
, 54053
, 54057
, 54060
, 54064
, 54076
, 54077
, 54273
, 54275
, 54296
, 54298
, 54320
, 54533
, 54534
, 54540
, 54543
, 54550
, 54556
, 54588
, 54599
, 54607
, 54615
, 54623
, 54790
, 54793
, 54794
, 54796
, 54799
, 54810
, 54815
, 54832
, 54844
, 54851
, 54855
, 54859
, 54863
, 54875
, 55048
, 55050
, 55053
, 55055
, 55062
, 55065
, 55067
, 55089
, 55100
, 55296
, 55321
, 55323
, 55336
, 55345
, 55347
, 55555
, 55558
, 55564
, 55568
, 55573
, 55578
, 55583
, 55587
, 55590
, 55593
, 55596
, 55600
, 55605
, 55623
, 55631
, 55635
, 55643
, 55655
, 55667
, 55818
, 55820
, 55823
, 55833
, 55859
, 55862
, 55865
, 55868
, 55883
, 55887
, 55907
, 55927
, 56073
, 56075
, 56088
, 56090
, 56105
, 56112
, 56117
, 56119
, 56120
, 56125
, 56320
, 56321
, 56323
, 56324
, 56325
, 56326
, 56327
, 56332
, 56335
, 56340
, 56341
, 56342
, 56343
, 56344
, 56345
, 56346
, 56347
, 56351
, 56353
, 56355
, 56356
, 56357
, 56358
, 56359
, 56360
, 56361
, 56364
, 56368
, 56369
, 56371
, 56372
, 56373
, 56374
, 56375
, 56380
, 56576
, 56579
, 56592
, 56595
, 56608
, 56611
, 56624
, 56627
, 56643
, 56659
, 56675
, 56691
, 56844
, 56847
, 56860
, 56863
, 56870
, 56876
, 56886
, 56892
, 56911
, 56927
, 56935
, 56951
, 57096
, 57097
, 57098
, 57099
, 57100
, 57101
, 57103
, 57112
, 57113
, 57114
, 57115
, 57116
, 57126
, 57128
, 57129
, 57133
, 57136
, 57137
, 57139
, 57141
, 57142
, 57143
, 57144
, 57145
, 57148
, 57149
, 57495
, 57541
, 57543
, 57551
, 57556
, 57558
, 57564
, 57573
, 57575
, 57596
, 57731
, 57747
, 57862
, 57868
, 57871
, 57878
, 57884
, 57887
, 57927
, 57935
, 57943
, 57951
, 57987
, 57995
, 57999
, 58003
, 58011
, 58015
, 58054
, 58058
, 58060
, 58063
, 58070
, 58074
, 58076
, 58079
, 58112
, 58113
, 58115
, 58120
, 58121
, 58122
, 58123
, 58129
, 58131
, 58136
, 58137
, 58138
, 58139
, 58144
, 58152
, 58153
, 58160
, 58161
, 58243
, 58247
, 58251
, 58255
, 58259
, 58267
, 58279
, 58291
, 58306
, 58307
, 58310
, 58313
, 58314
, 58315
, 58316
, 58318
, 58322
, 58327
, 58329
, 58330
, 58331
, 58333
, 58335
, 58339
, 58342
, 58345
, 58348
, 58354
, 58355
, 58365
, 58503
, 58564
, 58566
, 58572
, 58581
, 58583
, 58755
, 58803
, 58886
, 58892
, 58895
, 58902
, 58908
, 58940
, 58951
, 58959
, 58967
, 58975
, 59019
, 59023
, 59035
, 59059
, 59078
, 59082
, 59084
, 59087
, 59098
, 59103
, 59132
, 59145
, 59147
, 59160
, 59162
, 59184
, 59275
, 59338
, 59353
, 59355
, 59379
, 59388
, 59575
, 59591
, 59599
, 59621
, 59623
, 59636
, 59638
, 59644
, 59783
, 59791
, 59795
, 59803
, 59815
, 59827
, 59866
, 59914
, 59916
, 59930
, 59942
, 59948
, 59979
, 59983
, 59995
, 60007
, 60019
, 60043
, 60083
, 60106
, 60111
, 60150
, 60156
, 60168
, 60170
, 60185
, 60187
, 60200
, 60209
, 60211
, 60214
, 60217
, 60220
, 60315
, 60339
, 60363
, 60378
, 60393
, 60402
, 60407
, 60413
, 60547
, 60551
, 60567
, 60583
, 60595
, 60599
, 60610
, 60611
, 60614
, 60615
, 60620
, 60623
, 60630
, 60631
, 60632
, 60633
, 60634
, 60635
, 60639
, 60641
, 60643
, 60644
, 60645
, 60646
, 60647
, 60648
, 60649
, 60652
, 60656
, 60657
, 60658
, 60659
, 60660
, 60661
, 60662
, 60663
, 60668
, 60807
, 60823
, 60839
, 60855
, 60870
, 60886
, 60902
, 60918
, 60938
, 60954
, 61003
, 61019
, 61027
, 61043
, 61067
, 61083
, 61107
, 61130
, 61146
, 61192
, 61193
, 61194
, 61195
, 61196
, 61197
, 61199
, 61208
, 61209
, 61210
, 61211
, 61213
, 61215
, 61222
, 61224
, 61225
, 61228
, 61232
, 61233
, 61235
, 61237
, 61238
, 61239
, 61240
, 61241
, 61244
, 61245
, 61323
, 61327
, 61339
, 61343
, 61363
, 61367
, 61386
, 61387
, 61390
, 61391
, 61402
, 61403
, 61406
, 61414
, 61417
, 61421
, 61426
, 61427
, 61430
, 61431
, 61433
, 61436
, 61437
, 61587
, 61619
, 61633
, 61635
, 61648
, 61650
, 61656
, 61658
, 61665
, 61667
, 61673
, 61680
, 61682
, 61831
, 61847
, 61910
, 62019
, 62027
, 62035
, 62043
, 62083
, 62091
, 62095
, 62099
, 62107
, 62111
, 62146
, 62150
, 62154
, 62156
, 62157
, 62158
, 62159
, 62162
, 62166
, 62170
, 62172
, 62173
, 62174
, 62175
, 62213
, 62214
, 62215
, 62220
, 62221
, 62223
, 62229
, 62230
, 62231
, 62237
, 62239
, 62245
, 62252
, 62268
, 62269
, 62339
, 62343
, 62347
, 62351
, 62359
, 62367
, 62371
, 62375
, 62402
, 62406
, 62407
, 62410
, 62412
, 62413
, 62414
, 62415
, 62419
, 62422
, 62423
, 62425
, 62427
, 62428
, 62430
, 62434
, 62438
, 62439
, 62445
, 62451
, 62460
, 62461
, 62595
, 62656
, 62658
, 62681
, 62683
, 62705
, 62707
, 62855
, 62863
, 62918
, 62924
, 62943
, 63043
, 63051
, 63067
, 63115
, 63119
, 63135
, 63155
, 63170
, 63174
, 63178
, 63180
, 63181
, 63182
, 63183
, 63190
, 63196
, 63198
, 63218
, 63228
, 63229
, 63245
, 63247
, 63254
, 63292
, 63375
, 63387
, 63411
, 63430
, 63433
, 63435
, 63436
, 63438
, 63450
, 63474
, 63485
, 63667
, 63683
, 63704
, 63706
, 63721
, 63728
, 63730
, 63875
, 63895
, 63903
, 63907
, 63927
, 63951
, 63958
, 63964
, 63990
, 63996
, 64075
, 64079
, 64099
, 64119
, 64143
, 64155
, 64179
, 64206
, 64207
, 64218
, 64230
, 64236
, 64242
, 64246
, 64252
, 64253
, 64265
, 64267
, 64280
, 64282
, 64297
, 64304
, 64309
, 64311
, 64312
, 64317
, 64395
, 64439
, 64458
, 64475
, 64499
, 64502
, 64505
, 64508
, 64643
, 64647
, 64663
, 64675
, 64679
, 64691
, 64695
, 64706
, 64707
, 64710
, 64711
, 64716
, 64719
, 64726
, 64727
, 64728
, 64729
, 64730
, 64731
, 64732
, 64736
, 64738
, 64740
, 64741
, 64742
, 64743
, 64744
, 64745
, 64752
, 64753
, 64754
, 64755
, 64756
, 64757
, 64758
, 64759
, 64764
, 64899
, 64915
, 64931
, 64947
, 64962
, 64978
, 64994
, 65010
, 65103
, 65119
, 65127
, 65143
, 65167
, 65183
, 65230
, 65231
, 65246
, 65247
, 65254
, 65260
, 65261
, 65270
, 65276
, 65277
, 65288
, 65289
, 65290
, 65291
, 65292
, 65293
, 65295
, 65304
, 65305
, 65306
, 65307
, 65308
, 65318
, 65320
, 65321
, 65325
, 65328
, 65329
, 65331
, 65333
, 65334
, 65335
, 65336
, 65337
, 65340
, 65341
, 65419
, 65423
, 65435
, 65459
, 65463
, 65482
, 65483
, 65486
, 65487
, 65498
, 65499
, 65503
, 65510
, 65513
, 65516
, 65522
, 65523
, 65526
, 65527
, 65529
, 65532
, 65533
, 0
, 1
, 2
, 3
, 8
, 9
, 10
, 11
, 13
, 14
, 16
, 17
, 18
, 19
, 24
, 25
, 26
, 27
, 29
, 30
, 31
, 32
, 33
, 34
, 35
, 40
, 41
, 42
, 43
, 44
, 45
, 46
, 47
, 48
, 49
, 50
, 51
, 52
, 53
, 54
, 55
, 56
, 57
, 58
, 59
, 61
, 62
, 63
, 64
, 65
, 66
, 67
, 70
, 71
, 72
, 73
, 74
, 75
, 76
, 77
, 78
, 79
, 80
, 81
, 82
, 83
, 86
, 87
, 88
, 89
, 90
, 91
, 92
, 93
, 94
, 95
, 96
, 97
, 98
, 99
, 102
, 103
, 104
, 105
, 106
, 107
, 108
, 109
, 110
, 111
, 112
, 113
, 114
, 115
, 116
, 117
, 118
, 119
, 120
, 121
, 122
, 123
, 124
, 125
, 126
, 127
, 128
, 129
, 130
, 131
, 132
, 133
, 134
, 135
, 136
, 137
, 138
, 139
, 140
, 141
, 142
, 143
, 144
, 145
, 146
, 147
, 148
, 149
, 150
, 151
, 152
, 153
, 154
, 155
, 156
, 157
, 158
, 159
, 160
, 161
, 162
, 163
, 164
, 165
, 166
, 167
, 168
, 169
, 170
, 171
, 172
, 173
, 174
, 175
, 176
, 177
, 178
, 179
, 180
, 181
, 182
, 183
, 184
, 185
, 186
, 187
, 188
, 189
, 190
, 191
, 192
, 193
, 194
, 195
, 200
, 201
, 202
, 203
, 205
, 206
, 208
, 209
, 210
, 211
, 216
, 217
, 218
, 219
, 220
, 221
, 222
, 224
, 225
, 226
, 227
, 232
, 233
, 234
, 235
, 237
, 238
, 239
, 240
, 241
, 242
, 243
, 244
, 245
, 246
, 247
, 248
, 249
, 250
, 251
, 253
, 254
, 255
, 258
, 260
, 261
, 262
, 263
, 264
, 265
, 266
, 267
, 268
, 269
, 270
, 271
, 274
, 276
, 277
, 278
, 279
, 280
, 281
, 282
, 283
, 284
, 285
, 286
, 287
, 288
, 289
, 290
, 291
, 292
, 293
, 294
, 295
, 296
, 297
, 298
, 299
, 300
, 301
, 302
, 303
, 304
, 305
, 306
, 307
, 308
, 309
, 310
, 311
, 312
, 313
, 314
, 315
, 316
, 317
, 318
, 319
, 322
, 324
, 325
, 326
, 327
, 328
, 329
, 330
, 331
, 332
, 333
, 334
, 335
, 338
, 340
, 341
, 342
, 343
, 344
, 345
, 346
, 347
, 348
, 349
, 350
, 351
, 352
, 353
, 354
, 355
, 356
, 357
, 358
, 359
, 360
, 361
, 362
, 363
, 364
, 365
, 366
, 367
, 368
, 369
, 370
, 371
, 372
, 373
, 374
, 375
, 376
, 377
, 378
, 379
, 380
, 381
, 382
, 383
, 388
, 389
, 390
, 391
, 392
, 393
, 394
, 395
, 396
, 397
, 398
, 399
, 404
, 405
, 406
, 407
, 408
, 409
, 410
, 411
, 412
, 413
, 414
, 415
, 416
, 417
, 418
, 419
, 420
, 421
, 422
, 423
, 424
, 425
, 426
, 427
, 428
, 429
, 430
, 431
, 432
, 433
, 434
, 435
, 436
, 437
, 438
, 439
, 440
, 441
, 442
, 443
, 444
, 445
, 446
, 447
, 448
, 452
, 453
, 454
, 455
, 456
, 457
, 458
, 459
, 460
, 461
, 462
, 463
, 464
, 468
, 469
, 470
, 471
, 472
, 473
, 474
, 475
, 476
, 477
, 478
, 479
, 480
, 481
, 482
, 483
, 484
, 485
, 486
, 487
, 488
, 489
, 490
, 491
, 492
, 493
, 494
, 495
, 496
, 497
, 498
, 499
, 500
, 501
, 502
, 503
, 504
, 505
, 506
, 507
, 508
, 509
, 510
, 511
, 512
, 513
, 514
, 515
, 516
, 517
, 518
, 519
, 520
, 521
, 522
, 523
, 524
, 525
, 526
, 527
, 528
, 529
, 530
, 531
, 532
, 533
, 534
, 535
, 536
, 537
, 538
, 539
, 540
, 541
, 542
, 543
, 544
, 545
, 546
, 547
, 548
, 549
, 550
, 551
, 552
, 553
, 554
, 555
, 556
, 557
, 558
, 559
, 560
, 561
, 562
, 563
, 564
, 565
, 566
, 567
, 568
, 569
, 570
, 571
, 572
, 573
, 574
, 575
, 576
, 577
, 578
, 579
, 580
, 581
, 582
, 583
, 584
, 585
, 586
, 587
, 588
, 589
, 590
, 591
, 592
, 593
, 594
, 595
, 596
, 597
, 598
, 599
, 600
, 601
, 602
, 603
, 604
, 605
, 606
, 607
, 608
, 609
, 610
, 611
, 612
, 613
, 614
, 615
, 616
, 617
, 618
, 619
, 620
, 621
, 622
, 623
, 624
, 625
, 626
, 627
, 628
, 629
, 630
, 631
, 632
, 633
, 634
, 635
, 636
, 637
, 638
, 639
, 640
, 641
, 642
, 643
, 644
, 645
, 646
, 647
, 648
, 649
, 650
, 651
, 652
, 653
, 654
, 655
, 656
, 657
, 658
, 659
, 660
, 661
, 662
, 663
, 664
, 665
, 666
, 667
, 668
, 669
, 670
, 671
, 672
, 673
, 674
, 675
, 676
, 677
, 678
, 679
, 680
, 681
, 682
, 683
, 684
, 685
, 686
, 687
, 688
, 689
, 690
, 691
, 692
, 693
, 694
, 695
, 696
, 697
, 698
, 699
, 700
, 701
, 702
, 703
, 704
, 705
, 706
, 707
, 708
, 709
, 710
, 711
, 712
, 713
, 714
, 715
, 716
, 717
, 718
, 719
, 720
, 721
, 722
, 723
, 724
, 725
, 726
, 727
, 728
, 729
, 730
, 731
, 732
, 733
, 734
, 735
, 736
, 737
, 738
, 739
, 740
, 741
, 742
, 743
, 744
, 745
, 746
, 747
, 748
, 749
, 750
, 751
, 752
, 753
, 754
, 755
, 756
, 757
, 758
, 759
, 760
, 761
, 762
, 763
, 764
, 765
, 766
, 767
, 770
, 772
, 773
, 774
, 775
, 780
, 781
, 782
, 783
, 785
, 786
, 787
, 788
, 789
, 790
, 791
, 796
, 797
, 798
, 799
, 800
, 802
, 803
, 804
, 805
, 806
, 807
, 810
, 811
, 812
, 813
, 814
, 815
, 818
, 819
, 820
, 821
, 822
, 823
, 824
, 825
, 826
, 827
, 828
, 829
, 830
, 831
, 832
, 833
, 834
, 835
, 836
, 837
, 838
, 839
, 840
, 841
, 842
, 843
, 844
, 845
, 846
, 847
, 848
, 849
, 850
, 851
, 852
, 853
, 854
, 855
, 856
, 857
, 858
, 859
, 860
, 861
, 862
, 863
, 864
, 865
, 866
, 867
, 868
, 869
, 870
, 871
, 872
, 873
, 874
, 875
, 876
, 877
, 878
, 879
, 880
, 881
, 882
, 883
, 884
, 885
, 886
, 887
, 888
, 889
, 890
, 891
, 892
, 893
, 894
, 895
, 896
, 897
, 898
, 899
, 900
, 901
, 902
, 903
, 904
, 905
, 906
, 907
, 908
, 909
, 910
, 911
, 912
, 913
, 914
, 915
, 916
, 917
, 918
, 919
, 920
, 921
, 922
, 923
, 924
, 925
, 926
, 927
, 928
, 929
, 930
, 931
, 932
, 933
, 934
, 935
, 936
, 937
, 938
, 939
, 940
, 941
, 942
, 943
, 944
, 945
, 946
, 947
, 948
, 949
, 950
, 951
, 952
, 953
, 954
, 955
, 956
, 957
, 958
, 959
, 960
, 961
, 964
, 965
, 966
, 967
, 968
, 972
, 973
, 974
, 975
, 976
, 977
, 978
, 980
, 981
, 982
, 983
, 984
, 988
, 989
, 990
, 991
, 992
, 993
, 995
, 996
, 997
, 998
, 999
, 1000
, 1002
, 1003
, 1004
, 1005
, 1006
, 1007
, 1008
, 1009
, 1012
, 1013
, 1014
, 1015
, 1016
, 1017
, 1018
, 1019
, 1020
, 1021
, 1022
, 1023
, 1024
, 1025
, 1026
, 1027
, 1028
, 1029
, 1030
, 1031
, 1032
, 1033
, 1034
, 1035
, 1036
, 1037
, 1038
, 1039
, 1040
, 1041
, 1042
, 1043
, 1044
, 1045
, 1046
, 1047
, 1048
, 1049
, 1050
, 1051
, 1052
, 1053
, 1054
, 1055
, 1056
, 1057
, 1058
, 1059
, 1060
, 1061
, 1062
, 1063
, 1064
, 1065
, 1066
, 1067
, 1068
, 1069
, 1070
, 1071
, 1072
, 1073
, 1074
, 1075
, 1076
, 1077
, 1078
, 1079
, 1080
, 1081
, 1082
, 1083
, 1084
, 1085
, 1086
, 1087
, 1088
, 1089
, 1090
, 1091
, 1092
, 1093
, 1094
, 1095
, 1096
, 1097
, 1098
, 1099
, 1100
, 1101
, 1102
, 1103
, 1104
, 1105
, 1106
, 1107
, 1108
, 1109
, 1110
, 1111
, 1112
, 1113
, 1114
, 1115
, 1116
, 1117
, 1118
, 1119
, 1120
, 1121
, 1122
, 1123
, 1124
, 1125
, 1126
, 1127
, 1128
, 1129
, 1130
, 1131
, 1132
, 1133
, 1134
, 1135
, 1136
, 1137
, 1138
, 1139
, 1140
, 1141
, 1142
, 1143
, 1144
, 1145
, 1146
, 1147
, 1148
, 1149
, 1150
, 1151
, 1152
, 1153
, 1154
, 1155
, 1156
, 1157
, 1158
, 1159
, 1160
, 1161
, 1162
, 1163
, 1164
, 1165
, 1166
, 1167
, 1168
, 1169
, 1170
, 1171
, 1172
, 1173
, 1174
, 1175
, 1176
, 1177
, 1178
, 1179
, 1180
, 1181
, 1182
, 1183
, 1184
, 1185
, 1186
, 1187
, 1188
, 1189
, 1190
, 1191
, 1192
, 1193
, 1194
, 1195
, 1196
, 1197
, 1198
, 1199
, 1200
, 1201
, 1202
, 1203
, 1204
, 1205
, 1206
, 1207
, 1208
, 1209
, 1210
, 1211
, 1212
, 1213
, 1214
, 1215
, 1216
, 1217
, 1218
, 1219
, 1220
, 1221
, 1222
, 1223
, 1224
, 1225
, 1226
, 1227
, 1228
, 1229
, 1230
, 1231
, 1232
, 1233
, 1234
, 1235
, 1236
, 1237
, 1238
, 1239
, 1240
, 1241
, 1242
, 1243
, 1244
, 1245
, 1246
, 1247
, 1248
, 1249
, 1250
, 1251
, 1252
, 1253
, 1254
, 1255
, 1256
, 1257
, 1258
, 1259
, 1260
, 1261
, 1262
, 1263
, 1264
, 1265
, 1266
, 1267
, 1268
, 1269
, 1270
, 1271
, 1272
, 1273
, 1274
, 1275
, 1276
, 1277
, 1278
, 1279
, 1281
, 1282
, 1283
, 1284
, 1285
, 1286
, 1287
, 1288
, 1289
, 1290
, 1291
, 1292
, 1293
, 1294
, 1295
, 1296
, 1298
, 1300
, 1301
, 1302
, 1303
, 1304
, 1306
, 1307
, 1308
, 1309
, 1310
, 1311
, 1312
, 1313
, 1314
, 1315
, 1316
, 1317
, 1318
, 1319
, 1320
, 1321
, 1322
, 1323
, 1324
, 1325
, 1326
, 1327
, 1328
, 1330
, 1332
, 1333
, 1334
, 1335
, 1336
, 1337
, 1338
, 1339
, 1340
, 1341
, 1342
, 1343
, 1344
, 1346
, 1348
, 1349
, 1350
, 1351
, 1352
, 1353
, 1354
, 1355
, 1356
, 1357
, 1358
, 1359
, 1361
, 1362
, 1363
, 1364
, 1365
, 1366
, 1367
, 1369
, 1371
, 1372
, 1373
, 1374
, 1375
, 1376
, 1377
, 1378
, 1379
, 1380
, 1381
, 1382
, 1383
, 1384
, 1385
, 1386
, 1387
, 1388
, 1389
, 1390
, 1391
, 1393
, 1394
, 1395
, 1396
, 1397
, 1398
, 1399
, 1400
, 1401
, 1402
, 1403
, 1404
, 1405
, 1406
, 1407
, 1409
, 1411
, 1412
, 1413
, 1414
, 1415
, 1416
, 1417
, 1418
, 1419
, 1420
, 1421
, 1422
, 1423
, 1424
, 1426
, 1428
, 1429
, 1430
, 1431
, 1432
, 1434
, 1436
, 1437
, 1438
, 1439
, 1440
, 1441
, 1442
, 1443
, 1444
, 1445
, 1446
, 1447
, 1448
, 1449
, 1450
, 1451
, 1452
, 1453
, 1454
, 1455
, 1456
, 1458
, 1460
, 1461
, 1462
, 1463
, 1464
, 1465
, 1466
, 1467
, 1468
, 1469
, 1470
, 1471
, 1472
, 1474
, 1476
, 1477
, 1478
, 1479
, 1480
, 1481
, 1482
, 1483
, 1484
, 1485
, 1486
, 1487
, 1488
, 1489
, 1491
, 1492
, 1493
, 1494
, 1495
, 1496
, 1497
, 1499
, 1500
, 1501
, 1502
, 1503
, 1504
, 1505
, 1506
, 1507
, 1508
, 1509
, 1510
, 1511
, 1512
, 1513
, 1514
, 1515
, 1516
, 1517
, 1518
, 1519
, 1520
, 1521
, 1523
, 1524
, 1525
, 1526
, 1527
, 1528
, 1529
, 1530
, 1531
, 1532
, 1533
, 1534
, 1535
, 1536
, 1537
, 1538
, 1539
, 1540
, 1541
, 1543
, 1544
, 1545
, 1546
, 1547
, 1550
, 1552
, 1553
, 1554
, 1555
, 1556
, 1557
, 1558
, 1559
, 1560
, 1561
, 1562
, 1563
, 1564
, 1566
, 1568
, 1569
, 1570
, 1571
, 1572
, 1573
, 1574
, 1575
, 1576
, 1577
, 1578
, 1579
, 1580
, 1581
, 1582
, 1583
, 1584
, 1585
, 1586
, 1587
, 1588
, 1589
, 1590
, 1591
, 1592
, 1593
, 1594
, 1595
, 1598
, 1599
, 1600
, 1601
, 1602
, 1603
, 1608
, 1609
, 1610
, 1611
, 1614
, 1616
, 1617
, 1618
, 1619
, 1621
, 1623
, 1624
, 1625
, 1626
, 1627
, 1629
, 1630
, 1631
, 1632
, 1633
, 1634
, 1635
, 1636
, 1637
, 1638
, 1639
, 1640
, 1641
, 1642
, 1643
, 1644
, 1645
, 1646
, 1647
, 1648
, 1649
, 1650
, 1651
, 1652
, 1653
, 1654
, 1655
, 1656
, 1657
, 1658
, 1659
, 1662
, 1663
, 1664
, 1665
, 1666
, 1667
, 1668
, 1669
, 1671
, 1672
, 1673
, 1674
, 1675
, 1680
, 1681
, 1682
, 1683
, 1684
, 1685
, 1686
, 1687
, 1688
, 1689
, 1690
, 1691
, 1692
, 1694
, 1696
, 1697
, 1698
, 1699
, 1700
, 1701
, 1702
, 1703
, 1704
, 1705
, 1706
, 1707
, 1708
, 1709
, 1710
, 1711
, 1712
, 1713
, 1714
, 1715
, 1716
, 1717
, 1718
, 1719
, 1720
, 1721
, 1722
, 1723
, 1726
, 1727
, 1728
, 1729
, 1730
, 1731
, 1732
, 1733
, 1735
, 1736
, 1737
, 1738
, 1739
, 1744
, 1745
, 1746
, 1747
, 1748
, 1749
, 1751
, 1752
, 1753
, 1754
, 1755
, 1757
, 1759
, 1760
, 1761
, 1762
, 1763
, 1764
, 1765
, 1766
, 1767
, 1768
, 1769
, 1770
, 1771
, 1772
, 1773
, 1774
, 1775
, 1776
, 1777
, 1778
, 1779
, 1780
, 1781
, 1782
, 1783
, 1784
, 1785
, 1786
, 1787
, 1790
, 1791
, 1792
, 1793
, 1794
, 1795
, 1796
, 1797
, 1798
, 1799
, 1801
, 1803
, 1804
, 1805
, 1806
, 1807
, 1808
, 1809
, 1810
, 1811
, 1812
, 1813
, 1814
, 1815
, 1816
, 1818
, 1820
, 1821
, 1822
, 1823
, 1824
, 1825
, 1826
, 1827
, 1828
, 1829
, 1830
, 1831
, 1832
, 1833
, 1834
, 1835
, 1836
, 1837
, 1838
, 1839
, 1840
, 1842
, 1843
, 1844
, 1845
, 1846
, 1847
, 1848
, 1849
, 1850
, 1851
, 1852
, 1853
, 1854
, 1855
, 1856
, 1857
, 1858
, 1859
, 1860
, 1861
, 1862
, 1863
, 1864
, 1865
, 1866
, 1867
, 1868
, 1869
, 1870
, 1871
, 1872
, 1873
, 1874
, 1875
, 1876
, 1877
, 1878
, 1879
, 1880
, 1881
, 1882
, 1883
, 1884
, 1885
, 1886
, 1887
, 1888
, 1889
, 1890
, 1891
, 1892
, 1893
, 1894
, 1895
, 1896
, 1897
, 1898
, 1899
, 1900
, 1901
, 1902
, 1903
, 1904
, 1905
, 1906
, 1907
, 1908
, 1909
, 1910
, 1911
, 1912
, 1913
, 1914
, 1915
, 1916
, 1917
, 1918
, 1919
, 1920
, 1921
, 1922
, 1923
, 1924
, 1925
, 1926
, 1927
, 1928
, 1929
, 1930
, 1931
, 1932
, 1933
, 1934
, 1935
, 1936
, 1937
, 1938
, 1939
, 1940
, 1941
, 1942
, 1943
, 1944
, 1945
, 1946
, 1947
, 1948
, 1949
, 1950
, 1951
, 1952
, 1953
, 1954
, 1955
, 1956
, 1957
, 1958
, 1959
, 1960
, 1961
, 1962
, 1963
, 1964
, 1965
, 1966
, 1967
, 1968
, 1969
, 1970
, 1971
, 1972
, 1973
, 1974
, 1975
, 1976
, 1977
, 1978
, 1979
, 1980
, 1981
, 1982
, 1983
, 1984
, 1985
, 1986
, 1987
, 1988
, 1989
, 1990
, 1991
, 1992
, 1994
, 1996
, 1997
, 1998
, 1999
, 2000
, 2001
, 2002
, 2003
, 2004
, 2005
, 2006
, 2007
, 2008
, 2009
, 2011
, 2012
, 2013
, 2014
, 2015
, 2016
, 2017
, 2018
, 2019
, 2020
, 2021
, 2022
, 2023
, 2024
, 2025
, 2026
, 2027
, 2028
, 2029
, 2030
, 2031
, 2032
, 2033
, 2035
, 2036
, 2037
, 2038
, 2039
, 2040
, 2041
, 2042
, 2043
, 2044
, 2045
, 2046
, 2047
, 2048
, 2049
, 2050
, 2051
, 2052
, 2053
, 2054
, 2055
, 2056
, 2057
, 2058
, 2059
, 2060
, 2061
, 2062
, 2063
, 2064
, 2065
, 2066
, 2067
, 2068
, 2069
, 2070
, 2071
, 2072
, 2073
, 2074
, 2075
, 2076
, 2077
, 2078
, 2079
, 2080
, 2081
, 2082
, 2083
, 2084
, 2085
, 2086
, 2087
, 2088
, 2089
, 2090
, 2091
, 2092
, 2093
, 2094
, 2095
, 2096
, 2097
, 2098
, 2099
, 2101
, 2103
, 2104
, 2105
, 2106
, 2107
, 2109
, 2110
, 2111
, 2112
, 2113
, 2114
, 2115
, 2116
, 2117
, 2118
, 2119
, 2120
, 2121
, 2122
, 2123
, 2124
, 2125
, 2126
, 2127
, 2128
, 2129
, 2130
, 2131
, 2132
, 2133
, 2134
, 2135
, 2136
, 2137
, 2138
, 2139
, 2140
, 2141
, 2142
, 2143
, 2144
, 2145
, 2146
, 2147
, 2148
, 2149
, 2150
, 2151
, 2152
, 2153
, 2154
, 2155
, 2156
, 2157
, 2158
, 2159
, 2160
, 2161
, 2162
, 2163
, 2164
, 2166
, 2167
, 2168
, 2169
, 2170
, 2171
, 2172
, 2173
, 2174
, 2175
, 2176
, 2177
, 2178
, 2179
, 2180
, 2181
, 2182
, 2183
, 2184
, 2185
, 2186
, 2187
, 2188
, 2189
, 2190
, 2191
, 2192
, 2193
, 2194
, 2195
, 2196
, 2197
, 2198
, 2199
, 2200
, 2201
, 2202
, 2203
, 2204
, 2205
, 2206
, 2207
, 2208
, 2209
, 2210
, 2211
, 2212
, 2213
, 2214
, 2215
, 2216
, 2217
, 2218
, 2219
, 2220
, 2221
, 2222
, 2223
, 2224
, 2225
, 2226
, 2227
, 2228
, 2229
, 2230
, 2231
, 2232
, 2233
, 2234
, 2235
, 2236
, 2237
, 2238
, 2239
, 2240
, 2241
, 2242
, 2243
, 2244
, 2245
, 2246
, 2247
, 2248
, 2249
, 2250
, 2251
, 2252
, 2253
, 2254
, 2255
, 2256
, 2257
, 2258
, 2259
, 2260
, 2261
, 2262
, 2263
, 2264
, 2265
, 2266
, 2267
, 2268
, 2269
, 2270
, 2271
, 2272
, 2273
, 2274
, 2275
, 2276
, 2277
, 2278
, 2279
, 2280
, 2281
, 2282
, 2283
, 2284
, 2285
, 2286
, 2287
, 2288
, 2289
, 2290
, 2291
, 2292
, 2294
, 11738
, 11739
, 11740
, 11741
, 11742
, 11743
, 11744
, 11745
, 11746
, 11748
, 11749
, 11750
, 11751
, 11752
, 11753
, 11754
, 11755
, 11756
, 11757
, 11758
, 11759
, 11760
, 11761
, 11762
, 11764
, 11765
, 11766
, 11767
, 11768
, 11769
, 11770
, 11771
, 11772
, 11773
, 11774
, 11775
, 11776
, 11777
, 11778
, 11779
, 11780
, 11781
, 11782
, 11783
, 11784
, 11785
, 11786
, 11787
, 11788
, 11789
, 11790
, 11791
, 11792
, 11793
, 11794
, 11795
, 11796
, 11797
, 11798
, 11799
, 11800
, 11801
, 11802
, 11803
, 11804
, 11805
, 11806
, 11807
, 11808
, 11809
, 11810
, 11811
, 11812
, 11813
, 11814
, 11815
, 11816
, 11817
, 11818
, 11819
, 11820
, 11821
, 11822
, 11823
, 11824
, 11825
, 11826
, 11827
, 11828
, 11829
, 11830
, 11831
, 11832
, 11833
, 11834
, 11835
, 11836
, 11837
, 11838
, 11839
, 11840
, 11841
, 11842
, 11843
, 11844
, 11845
, 11846
, 11847
, 11848
, 11849
, 11850
, 11851
, 11852
, 11853
, 11854
, 11855
, 11856
, 11857
, 11858
, 11859
, 11860
, 11861
, 11862
, 11863
, 11864
, 11865
, 11866
, 11867
, 11868
, 11869
, 11870
, 11871
, 11872
, 11873
, 11874
, 11875
, 11876
, 11877
, 11878
, 11879
, 11880
, 11881
, 11882
, 11883
, 11884
, 11885
, 11886
, 11887
, 11888
, 11889
, 11890
, 11891
, 11892
, 11893
, 11894
, 11895
, 11896
, 11897
, 11898
, 11899
, 11900
, 11901
, 11902
, 11903
, 11904
, 11905
, 11906
, 11907
, 11908
, 11909
, 11910
, 11911
, 11912
, 11913
, 11914
, 11915
, 11916
, 11917
, 11918
, 11919
, 11920
, 11921
, 11922
, 11923
, 11924
, 11925
, 11926
, 11927
, 11928
, 11929
, 11930
, 11931
, 11932
, 11933
, 11934
, 11935
, 11936
, 11937
, 11938
, 11939
, 11940
, 11941
, 11942
, 11943
, 11944
, 11945
, 11946
, 11947
, 11948
, 11949
, 11950
, 11951
, 11952
, 11953
, 11954
, 11955
, 11956
, 11957
, 11958
, 11959
, 11960
, 11961
, 11962
, 11963
, 11964
, 11965
, 11966
, 11967
, 11968
, 11969
, 11970
, 11971
, 11972
, 11973
, 11974
, 11975
, 11976
, 11977
, 11978
, 11979
, 11980
, 11981
, 11982
, 11983
, 11984
, 11985
, 11986
, 11987
, 11988
, 11989
, 11990
, 11991
, 11992
, 11993
, 11994
, 11995
, 11996
, 11997
, 11998
, 11999
, 12000
, 12001
, 12002
, 12003
, 12004
, 12005
, 12006
, 12007
, 12008
, 12009
, 12010
, 12011
, 12012
, 12013
, 12014
, 12015
, 12016
, 12017
, 12018
, 12019
, 12020
, 12021
, 12022
, 12023
, 12024
, 12025
, 12026
, 12027
, 12028
, 12029
, 12030
, 12031
, 12032
, 12033
, 12034
, 12035
, 12036
, 12037
, 12038
, 12039
, 12040
, 12041
, 12042
, 12043
, 12044
, 12045
, 12046
, 12047
, 12048
, 12049
, 12050
, 12051
, 12052
, 12053
, 12054
, 12055
, 12056
, 12057
, 12058
, 12059
, 12060
, 12061
, 12062
, 12063
, 12064
, 12065
, 12066
, 12067
, 12068
, 12069
, 12070
, 12071
, 12072
, 12073
, 12074
, 12075
, 12076
, 12077
, 12078
, 12079
, 12080
, 12081
, 12082
, 12083
, 12084
, 12085
, 12086
, 12087
, 12088
, 12089
, 12090
, 12091
, 12092
, 12093
, 12094
, 12095
, 12096
, 12097
, 12098
, 12099
, 12100
, 12101
, 12102
, 12103
, 12104
, 12105
, 12106
, 12107
, 12108
, 12109
, 12110
, 12111
, 12112
, 12113
, 12114
, 12115
, 12116
, 12117
, 12118
, 12119
, 12120
, 12121
, 12122
, 12123
, 12124
, 12125
, 12126
, 12127
, 12128
, 12129
, 12130
, 12131
, 12132
, 12133
, 12134
, 12135
, 12136
, 12137
, 12138
, 12139
, 12140
, 12141
, 12142
, 12143
, 12144
, 12145
, 12146
, 12147
, 12148
, 12149
, 12150
, 12151
, 12152
, 12153
, 12154
, 12155
, 12156
, 12157
, 12158
, 12159
, 12160
, 12161
, 12162
, 12163
, 12164
, 12165
, 12166
, 12167
, 12168
, 12169
, 12170
, 12171
, 12172
, 12173
, 12174
, 12175
, 12176
, 12177
, 12178
, 12179
, 12180
, 12181
, 12182
, 12183
, 12184
, 12185
, 12186
, 12187
, 12188
, 12189
, 12190
, 12191
, 12192
, 12193
, 12194
, 12195
, 12196
, 12197
, 12198
, 12199
, 12200
, 12201
, 12202
, 12203
, 12204
, 12205
, 12206
, 12207
, 12208
, 12209
, 12210
, 12211
, 12212
, 12213
, 12214
, 12215
, 12216
, 12217
, 12218
, 12219
, 12220
, 12221
, 12222
, 12223
, 12224
, 12225
, 12226
, 12227
, 12228
, 12229
, 12230
, 12231
, 12232
, 12233
, 12234
, 12235
, 12236
, 12237
, 12238
, 12239
, 12240
, 12241
, 12242
, 12243
, 12244
, 12245
, 12246
, 12247
, 12248
, 12249
, 12250
, 12251
, 12252
, 12253
, 12254
, 12255
, 12256
, 12257
, 12258
, 12259
, 12260
, 12261
, 12262
, 12263
, 12264
, 12265
, 12266
, 12267
, 12268
, 12269
, 12270
, 12271
, 12272
, 12273
, 12274
, 12275
, 12276
, 12277
, 12278
, 12279
, 12280
, 12281
, 12282
, 12283
, 12284
, 12285
, 12286
, 12287
, 12288
, 12289
, 12290
, 12291
, 12296
, 12297
, 12298
, 12299
, 12301
, 12302
, 12304
, 12305
, 12306
, 12307
, 12312
, 12313
, 12314
, 12315
, 12316
, 12317
, 12318
, 12320
, 12321
, 12322
, 12323
, 12328
, 12329
, 12330
, 12331
, 12333
, 12334
, 12335
, 12336
, 12337
, 12338
, 12339
, 12340
, 12341
, 12342
, 12343
, 12344
, 12345
, 12346
, 12347
, 12349
, 12350
, 12351
, 12352
, 12353
, 12354
, 12355
, 12356
, 12357
, 12358
, 12359
, 12360
, 12361
, 12362
, 12363
, 12364
, 12365
, 12366
, 12367
, 12368
, 12369
, 12370
, 12371
, 12372
, 12373
, 12374
, 12375
, 12376
, 12377
, 12378
, 12379
, 12380
, 12381
, 12382
, 12383
, 12384
, 12385
, 12386
|]
|
colinba/tip-toi-reveng
|
src/KnownCodes.hs
|
mit
| 186,077 | 0 | 6 | 98,397 | 93 | 57 | 36 | 14 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module System.HFind.Expr.Parser
( parsePred
, parseExpr
, parseLetBinding
, parseStringInterp
, parseCmdLineArg
, ParseError
) where
import Data.Char
import Data.Functor
import Data.Functor.Identity
import Data.Monoid
import Data.List (foldl', partition)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.ICU (Regex)
import qualified Data.Text.ICU as ICU
import qualified Data.Text.ICU.Error as ICU
import Text.Parsec
import Text.Parsec.Pos
import Text.Parsec.Text (Parser)
import qualified Text.Parsec.Error as PE
import qualified Text.Parsec.Expr as PExpr
import qualified Text.Parsec.Prim as Prim
import qualified Text.Parsec.Token as Tok
import System.HFind.Expr.Types
parsePred :: IsPred pre => SourceName -> Text -> Either ParseError pre
parsePred = runParser (whitespace *> predicate <* eof) ()
parseExpr :: IsExpr expr => SourceName -> Text -> Either ParseError expr
parseExpr = runParser (whitespace *> expr <* eof) ()
parseLetBinding :: IsExpr expr => SourceName -> Text -> Either ParseError (Name, expr)
parseLetBinding = runParser (whitespace *> letBinding <* eof) ()
parseStringInterp :: IsExpr expr => SourceName -> Text -> Either ParseError expr
parseStringInterp = runParser (whitespace *> parser <* eof) ()
where
parser = located $ do
stringInterp =<< consumeEverything
parseCmdLineArg :: IsExpr expr => SourceName -> Text -> Either ParseError expr
parseCmdLineArg = runParser (whitespace *> cmdLineArg <* eof) ()
langDef :: Monad m => Tok.GenLanguageDef Text () m
langDef = Tok.LanguageDef
{ Tok.commentStart = ""
, Tok.commentEnd = ""
, Tok.commentLine = ""
, Tok.nestedComments = False
, Tok.identStart = letter
, Tok.identLetter = alphaNum <|> char '_'
, Tok.opStart = Tok.opLetter langDef
, Tok.opLetter = oneOf ":!#%&*+./<=>?@\\^|-~"
, Tok.reservedOpNames = ["+", "-", "*", "/",
"==", "=~", "<=", ">=", "<", ">"]
, Tok.reservedNames = ["true", "false", "and", "or", "not", "scope"]
, Tok.caseSensitive = True
}
lang :: Monad m => Tok.GenTokenParser Text () m
lang = Tok.makeTokenParser langDef
whitespace :: Parser ()
whitespace = Tok.whiteSpace lang
identifier :: Parser Text
identifier = T.pack <$> Tok.identifier lang
parens :: Parser a -> Parser a
parens = Tok.parens lang
-- braces :: Parser a -> Parser a
-- braces = Tok.braces lang
symbol :: String -> Parser ()
symbol s = Tok.symbol lang s $> ()
reserved :: String -> Parser ()
reserved s = Tok.reserved lang s
reservedOp :: String -> Parser ()
reservedOp s = Tok.reservedOp lang s $> ()
naturalLiteral :: Num a => Parser a
naturalLiteral = fromInteger <$> Tok.natural lang
stringLiteral :: Parser Text
stringLiteral = T.pack <$> Tok.stringLiteral lang
cmdLineArg :: IsExpr expr => Parser expr
cmdLineArg = located $ do
stringInterp =<< consumeEverything
predicate :: forall pre. IsPred pre => Parser pre
predicate = do
i1 <- getInput
loc <- getPosition
p1 <- predicateValue
rest <- many (item i1 loc)
-- [op_rhs2, op_rhs3, ...] -> ... op_rhs3 (op_rhs2 p1)
return $ foldl' (\lhs op_rhs -> op_rhs lhs) p1 rest
where
item :: Src -> SourcePos -> Parser (pre -> pre)
item i1 loc = try $ do
op <- reserved "and" $> andP
<|> reserved "or" $> orP
rhs <- predicateValue
ik <- getInput
let src = T.take (T.length i1 - T.length ik) i1
return (\lhs -> op lhs rhs (SrcLoc src loc))
predicateValue :: forall pre. IsPred pre => Parser pre
predicateValue = parens predicate
<|> (scope <?> "explicit scope")
<|> (negation <?> "negation")
<|> (exprPred <?> "expression predicate")
<?> "atomic predicate"
where
scope :: Parser pre
scope = located $
scopeP <$> (reserved "scope" *> parens predicate)
negation :: Parser pre
negation = located $
notP <$> (reserved "not" *> predicateValue)
exprPred :: Parser pre
exprPred = located $ do
e1 <- expr
optionMaybe comparator >>= \case
Just op -> do
e2 <- expr
return (opP op e1 e2)
Nothing -> choice
[ do reservedOp "=~"
(rx, capMode) <- regex
return (matchP e1 rx capMode)
, return (exprP e1)
]
comparator :: Parser Op
comparator = (reservedOp "==" $> OpEQ)
<|> (reservedOp "<=" $> OpLE)
<|> (reservedOp ">=" $> OpGE)
<|> (reservedOp "<" $> OpLT)
<|> (reservedOp ">" $> OpGT)
<?> "comparison operator"
letBinding :: forall expr. IsExpr expr => Parser (Name, expr)
letBinding = do
ident <- identifier
whitespace
symbol "="
e <- expr
return (ident, e)
expr :: forall expr. IsExpr expr => Parser expr
expr = located arithExpr
-- HACK: Parsec's expression parser limits operator types to
-- Parser (a -> a -> a),
-- but we need
-- Parser (a -> a -> SrcLoc -> a)
-- or similar, so we use a = (SrcLoc -> expr):
-- Parser ((SrcLoc -> expr) -> (SrcLoc -> expr) -> SrcLoc -> expr)
-- Hopefully most of them will not be used (intermediate values),
-- while the base atoms are actually
-- fmap const (located ...),
-- so they do contain the SrcLoc
arithExpr :: forall expr. IsExpr expr => Parser (SrcLoc -> expr)
arithExpr = PExpr.buildExpressionParser table (fmap const exprAtom)
where
table :: PExpr.OperatorTable T.Text () Identity (SrcLoc -> expr)
table = [ [ prefix pos, prefix neg ]
, [ binary mult ]
, [ binary plus, binary minus ] ]
binary p = PExpr.Infix p PExpr.AssocLeft
prefix p = PExpr.Prefix p
plus = reservedOp "+" $> \x y l -> plusE (x l) (y l) l
minus = reservedOp "-" $> \x y l -> plusE (x l) (negE (y l) l) l
mult = reservedOp "*" $> \x y l -> multE (x l) (y l) l
pos = reservedOp "+" $> \x l -> x l
neg = reservedOp "-" $> \x l -> negE (x l) l
exprAtom :: forall expr. IsExpr expr => Parser expr
exprAtom =
parens (located arithExpr)
<|> choice
[ located $ varE <$> var
, located $ stringInterp =<< stringLiteral
, located $ litE <$> litNoString
, located $ app
]
<?> "expression"
where
app :: Parser (SrcLoc -> expr)
app = appE <$> identifier <*> exprAtom <?> "function application"
stringInterp :: forall expr. IsExpr expr => Text -> Parser (SrcLoc -> expr)
stringInterp input = do
e <- either throwParseError return (parseInterp input)
return (simplInterp e)
where
simplInterp :: [Interp (ExprVar expr)] -> SrcLoc -> expr
simplInterp pieces =
case contract pieces of
[] -> \src -> litE (stringL "" src) src
[InterpLit s] -> \src -> litE (stringL s src) src
pieces' -> interpE pieces'
contract :: [Interp (ExprVar expr)] -> [Interp (ExprVar expr)]
contract [] = []
contract (InterpLit "" : ps) = contract ps
contract (p:ps) =
case (p, contract ps) of
(InterpLit s1, InterpLit s2 : ps') -> InterpLit (s1 <> s2) : ps'
(_, ps') -> p:ps'
parseInterp :: Text -> Either (SourcePos -> ParseError) [Interp (ExprVar expr)]
parseInterp s =
case T.break (=='$') s of
(prefix, "") ->
return [InterpLit prefix]
(prefix, s')
| "$$" `T.isPrefixOf` s' ->
fmap (InterpLit (prefix `T.snoc` '$') :)
(parseInterp (T.drop 2 s'))
| otherwise ->
case parseWithLeftovers varNoWhitespace "string literal" s' of
Right (v, s'') ->
fmap ([InterpLit prefix, InterpVar v] ++)
(parseInterp s'')
Left e ->
let msg = "Could not parse interpolated string"
err p = foldr PE.addErrorMessage
(parseError msg p)
(PE.errorMessages e)
in Left err
regex :: Parser (Regex, RxCaptureMode)
regex = flip label "regular expression" $ do
void (char 'm')
delim <- oneOf "/_@%#!,;|"
let go acc escaped = do
c <- anyChar
case c of
'\\' | escaped -> go ('\\':'\\':acc) False
| otherwise -> go acc True
_ | c == delim -> if escaped then go (c:acc) False
else return acc
| otherwise -> if escaped then go (c:'\\':acc) False
else go (c:acc) False
pattern <- T.pack . reverse <$> go [] False
(cap, opts) <- regexOpts
case ICU.regex' opts pattern of
Right rx -> return (rx, cap)
Left e -> let err = parseError (ICU.errorName (ICU.errError e))
in throwParseError $ \pos ->
case ICU.errOffset e of
Just off -> err (incSourceColumn pos off)
Nothing -> err pos
where
regexOpts :: Parser (RxCaptureMode, [ICU.MatchOption])
regexOpts = do
opts <- many letter <* whitespace
let translate = \case 'm' -> return ICU.Multiline
'x' -> return ICU.Comments
's' -> return ICU.DotAll
'i' -> return ICU.CaseInsensitive
c -> unexpected ("regex option " ++ show c)
case partition (/='n') opts of
(opts', []) -> (,) Capture <$> mapM translate opts'
(opts', _ ) -> (,) NoCapture <$> mapM translate opts'
litNoString :: IsLit lit => Parser lit
litNoString = located (boolL True <$ reserved "true"
<|> boolL False <$ reserved "false"
<|> numL <$> naturalLiteral
<?> "literal")
where
numLit = try date <|> try size <?> "numeric literal"
date = undefined
size = undefined
var :: IsVar var => Parser var
var = varNoWhitespace <* whitespace
<?> "variable"
varNoWhitespace :: IsVar var => Parser var
varNoWhitespace = located $ do
void (char '$')
let rawVar = rxCapVar <$> rxCapIndex
<|> namedVar <$> ident
<?> "variable name or regex capture index"
between (symbol "{") (char '}') (rawVar <* whitespace)
<|> rawVar
where
rxCapIndex = do
digits <- many1 (digitToInt <$> digit)
return $ foldl' (\acc x -> acc*10 + x) 0 digits
ident = do
c <- Tok.identStart langDef
cs <- many (Tok.identLetter langDef)
return (T.pack (c:cs))
-- low-level Text.Parsec.Prim-based utilities
located :: Parser (SrcLoc -> a) -> Parser a
located p = do
loc <- getPosition
prev <- getInput
f <- p
cur <- getInput
let src = T.take (T.length prev - T.length cur) prev
return (f (SrcLoc src loc))
consumeEverything :: Parser Text
consumeEverything = do
input <- getInput
setInput ""
return input
parseWithLeftovers :: Parser a
-> SourceName
-> Text
-> Either ParseError (a, Text)
parseWithLeftovers p srcName s = runIdentity $ do
cons <- Prim.runParsecT p (State s (initialPos srcName) ())
rep <- case cons of
Consumed mrep -> mrep
Empty mrep -> mrep
case rep of
Ok a st _ -> return $ Right (a, Prim.stateInput st)
Error err -> return $ Left err
parseError :: String -> SourcePos -> ParseError
parseError msg = PE.newErrorMessage (PE.Message msg)
throwParseError :: (SourcePos -> ParseError) -> Parser a
throwParseError err =
Prim.mkPT $ \st ->
let pos = Prim.statePos st
in return (Prim.Empty (return (Prim.Error (err pos))))
|
xcv-/hfind
|
src/System/HFind/Expr/Parser.hs
|
mit
| 12,087 | 0 | 21 | 3,760 | 3,917 | 1,986 | 1,931 | 283 | 11 |
import Euterpea
shared = [
-- Wise men / Shall I / Take my
f 5 hn,
c 6 hn,
-- say only / stay? Would it / hand, take my
f 5 $ hn + qn,
g 5 en,
a 5 en,
-- fools rush / be a / whole life
as 5 hn,
a 5 hn,
-- in, But / sin if / too. For
g 5 $ hn + qn + en,
c 5 en,
-- I can't / I can't / I can't
d 5 hn,
e 5 hn,
-- help falling in / help falling in / help falling in
f 5 hn,
g 5 $ hn / 3,
a 5 $ hn / 3,
as 5 $ hn / 3,
-- love with / love with / love with
a 5 hn,
g 5 hn
]
verse = line $ shared ++ [f 5 wn]
bridge = line [
-- Like a river flows
e 5 en,
a 5 en,
c 6 en,
e 6 en,
d 6 hn,
-- surely to the sea,
e 5 en,
a 5 en,
c 6 en,
e 6 en,
d 6 hn,
-- Darling, so it goes.
e 5 en,
a 5 en,
c 6 en,
e 6 en,
d 6 hn,
-- Some things are meant to
c 6 qn,
c 6 $ qn + en,
a 5 en,
c 6 en,
a 5 en,
-- be.
as 5 wn
]
ending = line $ shared ++ [
-- you. For
f 5 $ hn + qn + en,
c 5 en,
-- I can't
d 5 hn,
e 5 hn,
-- help falling in
f 5 hn,
g 5 $ hn / 3,
a 5 $ hn / 3,
as 5 $ hn / 3,
-- love with
a 5 hn,
g 5 hn,
-- you.
f 5 bn
]
{--
bridge = line [
-- Like a river flows
e 5 en,
a 5 en,
c 6 en,
e 6 en,
d 6 hn,
-- surely to the sea,
e 5 en,
a 5 en,
c 6 en,
e 6 en,
d 6 hn,
-- Darling, so it goes.
e 5 en,
a 5 en,
c 6 en,
e 6 en,
d 6 hn,
-- Some things are meant to
c 6 qn,
c 6 $ qn + en,
a 5 en,
c 6 en,
a 5 en,
-- be.
as 5 wn]
--}
cantHelp = verse :+: verse :+: bridge :+: ending
main = play $ Modify (Tempo 2) $ Modify (Instrument RhodesPiano) cantHelp
|
steinz/CantHelpFallingInLove
|
hs/cant_stop_falling_in_love.hs
|
mit
| 1,840 | 0 | 10 | 836 | 579 | 300 | 279 | 56 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.RDS.DescribeAccountAttributes
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Lists all of the attributes for a customer account. The attributes include
-- Amazon RDS quotas for the account, such as the number of DB instances
-- allowed. The description for a quota includes the quota name, current usage
-- toward that quota, and the quota's maximum value.
--
-- This command does not take any parameters.
--
-- <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeAccountAttributes.html>
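-- A rough usage sketch (hypothetical: 'send' and the '^.' accessor come from
-- the wider amazonka and lens libraries, not from this module, so their exact
-- names and signatures here are assumptions):
--
-- > do rs <- send describeAccountAttributes
-- >    return (rs ^. daarAccountQuotas)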
module Network.AWS.RDS.DescribeAccountAttributes
(
-- * Request
DescribeAccountAttributes
-- ** Request constructor
, describeAccountAttributes
-- * Response
, DescribeAccountAttributesResponse
-- ** Response constructor
, describeAccountAttributesResponse
-- ** Response lenses
, daarAccountQuotas
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.RDS.Types
import qualified GHC.Exts
data DescribeAccountAttributes = DescribeAccountAttributes
deriving (Eq, Ord, Read, Show, Generic)
-- | 'DescribeAccountAttributes' constructor.
describeAccountAttributes :: DescribeAccountAttributes
describeAccountAttributes = DescribeAccountAttributes
newtype DescribeAccountAttributesResponse = DescribeAccountAttributesResponse
{ _daarAccountQuotas :: List "member" AccountQuota
} deriving (Eq, Read, Show, Monoid, Semigroup)
instance GHC.Exts.IsList DescribeAccountAttributesResponse where
type Item DescribeAccountAttributesResponse = AccountQuota
fromList = DescribeAccountAttributesResponse . GHC.Exts.fromList
toList = GHC.Exts.toList . _daarAccountQuotas
-- | 'DescribeAccountAttributesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'daarAccountQuotas' @::@ ['AccountQuota']
--
describeAccountAttributesResponse :: DescribeAccountAttributesResponse
describeAccountAttributesResponse = DescribeAccountAttributesResponse
{ _daarAccountQuotas = mempty
}
-- | A list of 'AccountQuota' objects. Within this list, each quota has a name, a
-- count of usage toward the quota maximum, and a maximum value for the quota.
daarAccountQuotas :: Lens' DescribeAccountAttributesResponse [AccountQuota]
daarAccountQuotas =
lens _daarAccountQuotas (\s a -> s { _daarAccountQuotas = a })
. _List
instance ToPath DescribeAccountAttributes where
toPath = const "/"
instance ToQuery DescribeAccountAttributes where
toQuery = const mempty
instance ToHeaders DescribeAccountAttributes
instance AWSRequest DescribeAccountAttributes where
type Sv DescribeAccountAttributes = RDS
type Rs DescribeAccountAttributes = DescribeAccountAttributesResponse
request = post "DescribeAccountAttributes"
response = xmlResponse
instance FromXML DescribeAccountAttributesResponse where
parseXML = withElement "DescribeAccountAttributesResult" $ \x -> DescribeAccountAttributesResponse
<$> x .@? "AccountQuotas" .!@ mempty
|
kim/amazonka
|
amazonka-rds/gen/Network/AWS/RDS/DescribeAccountAttributes.hs
|
mpl-2.0
| 3,986 | 0 | 10 | 733 | 397 | 243 | 154 | 52 | 1 |
module GramLab.Data.Diff.FuzzyEditTree ( make
, apply
, check
)
where
import GramLab.Data.Diff.EditTree (split3)
import qualified GramLab.Data.Diff.EditTree2 as ET2
import qualified GramLab.Data.Diff.EditTree2Rev as ET2Rev
import GramLab.Data.CommonSubstrings hiding (lcs)
import qualified Data.Map as Map
import Data.List (sortBy)
import Data.Ord (comparing)
import qualified GramLab.Data.StringLike as S
import Test.QuickCheck hiding (check)
import Data.Char
import Data.Binary
import Control.Monad (liftM,liftM2,liftM3,liftM4)
make = fuzzySplit
apply = applyFuzzySplit
check s w = let (w_prefix,w_root,w_suffix) = uncurry (split3 w) (split s)
in ET2.check (prefix s) w_prefix
&& ET2Rev.check (root s) w_root
&& ET2Rev.check (suffix s) w_suffix
data FuzzyMatch s = FuzzyMatch s s [Int] [Int] Double deriving (Show,Eq,Ord)
data FuzzySplit s a = FuzzySplit { split :: (Int,Int)
, prefix :: ET2.EditTree s a
, root :: ET2Rev.EditTree s a
, suffix :: ET2Rev.EditTree s a }
deriving (Show,Eq,Ord)
fuzzyScore (FuzzyMatch _ _ _ _ d) = d
substringDistances d xs ys =
sortBy (flip (comparing fuzzyScore))
[ FuzzyMatch x y i_x i_y (d x y) | (x,i_x) <- (Map.toList . toMap . substrings) xs
, (y,i_y) <- (Map.toList . toMap . substrings) ys
, head x == head y && last x == last y ]
distance xs ys = fromIntegral ((length xs + length ys) - ET2Rev.size (ET2Rev.make xs ys)) / 2
lcs :: (Monad m, Ord a) => [a] -> [a] -> m (FuzzyMatch [a])
lcs xs ys = case substringDistances distance xs ys of
[] -> fail "lcs: no fuzzy strings"
(FuzzyMatch _ _ _ _ 0):_ -> fail "lcs: no fuzzy strings"
x:_ -> return x
fuzzySplit xs ys = case lcs xs ys of
Nothing ->
FuzzySplit { split = (0,length xs)
, prefix = ET2.make "" ""
, root = ET2Rev.make "" ""
, suffix = ET2Rev.make xs ys }
Just (FuzzyMatch x y (i_x:_) (i_y:_) d) ->
let (xs_prefix,xs_root,xs_suffix) = split3 xs i_x (length xs - i_x - length x)
(ys_prefix,ys_root,ys_suffix) = split3 ys i_y (length ys - i_y - length y)
in FuzzySplit { split = (i_x,length xs - i_x - length x)
, prefix = ET2.make xs_prefix ys_prefix
, root = ET2Rev.make xs_root ys_root
, suffix = ET2Rev.make xs_suffix ys_suffix }
applyFuzzySplit s xs = ET2.apply (prefix s) xs_prefix
++ ET2Rev.apply (root s) xs_root
++ ET2Rev.apply (suffix s) xs_suffix
where (xs_prefix,xs_root,xs_suffix) = uncurry (split3 xs) (split s)
-- QuickCheck props are the same for all edit script types;
-- runtests should be in a separate module taking a record of functions
-- to test against these properties.
instance Arbitrary Char where
arbitrary = choose ('a','d')
coarbitrary c = variant (ord c `rem` 4)
prop_apply :: (String,String) -> Bool
prop_apply (w,w') = w' == apply (make w w') w
prop_make_apply_rev :: (String,String) -> Bool
prop_make_apply_rev (w,w') = reverse (apply (make (reverse w) (reverse w')) (reverse w)) == w'
prop_check :: (String,String) -> Bool
prop_check (w,w') = check (make w w') w
runtests = do
q "apply" prop_apply
q "make_apply_rev" prop_make_apply_rev
q "check" prop_check
where q str prop = do putStr $ str ++ " "
quickCheck prop
instance Binary s => Binary (ET2.EditTree s a) where
put (ET2.Replace xs ys) = put (0::Word8) >> put xs >> put ys
put (ET2.Split i j lt rt) = put (1::Word8) >> put i >> put j >> put lt >> put rt
get = do
tag <- get
case tag::Word8 of
0 -> liftM2 ET2.Replace get get
1 -> liftM4 ET2.Split get get get get
instance Binary s => Binary (ET2Rev.EditTree s a) where
put (ET2Rev.ETR x) = put x
get = liftM ET2Rev.ETR get
instance Binary s => Binary (FuzzySplit s a) where
put (FuzzySplit a b c d) = put a >> put b >> put c >> put d
get = liftM4 FuzzySplit get get get get
|
gchrupala/morfette
|
src/GramLab/Data/Diff/FuzzyEditTree.hs
|
bsd-2-clause
| 4,919 | 0 | 16 | 1,926 | 1,636 | 852 | 784 | 85 | 3 |
{-# LANGUAGE RankNTypes #-}
module Main where
import Control.Monad (when, join, forever)
import Control.Monad.IO.Class
import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString.Internal as BI
import qualified Data.ByteString.Lazy as LB
import qualified Network.Pcap as P
import qualified Network.Pcap.Base as PB
import Data.Word (Word8, Word16)
import Data.Bits ((.&.), (.|.), shiftR, shiftL)
import qualified Foreign.Ptr as FFI
import qualified Foreign.Storable as FFI
import qualified Pipes as PI
import qualified Pipes.ByteString as PI
import Pipes ((>->))
import qualified Pipes.Network.TCP as PI
import qualified Pipes.HTTP as PI
import qualified Network.Simple.TCP as N
import Options.Applicative
import qualified Network.URI as URI
import Data.Maybe (fromJust)
import qualified System.IO as SIO
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types.Status as HTTP
import qualified Network.HTTP.Client.TLS as HTTP
import qualified Control.Concurrent.Async as Async
import qualified Network.Wai as Wai
import qualified Network.Wai.Handler.Warp as Warp
data ClientOptions = ClientOptions
{
clientOutput :: Maybe String
}
data ServerOptions = ServerOptions
{
serverOutput :: String
, serverInput :: String
}
data Command = Server ServerOptions
| Client ClientOptions
clientOptions :: Parser ClientOptions
clientOptions = ClientOptions
<$> optional (strOption
( long "output"
<> metavar "DEST"
<> help "Destination server"
))
serverOptions :: Parser ServerOptions
serverOptions = ServerOptions
<$> (strOption
( long "output"
<> metavar "DEST"
<> help "destination server"
))
<*> (strOption
( long "input"
<> metavar "ORG"
<> help "listener"
))
main :: IO ()
main = do
join $ execParser (info options (fullDesc <> progDesc "Relay HTTP traffic" <> header "harley - swiss army knife to replay http traffic"))
where
options :: Parser (IO ())
options = subparser
( command "client" (info (runClient <$> clientOptions) (fullDesc <> progDesc "Relay HTTP traffic" <> header "harley - swiss army knife to replay http traffic"))
<> command "server" (info (runServer <$> serverOptions) (fullDesc <> progDesc "Relay HTTP traffic" <> header "harley - swiss army knife to replay http traffic"))
)
dispatchClient :: URI.URI -> P.PcapHandle -> IO ()
dispatchClient uri handle = do
let scheme = URI.uriScheme uri
case scheme of
"tcp:" -> forwardTcp uri handle
"file:" -> forwardFile uri handle
"http:" -> forwardHttp uri handle
_ -> error $ "protocol " ++ scheme ++ " not supported."
forwardFile :: URI.URI -> P.PcapHandle -> IO ()
forwardFile uri handle = do
let filename = URI.uriPath uri
let producer = fromPcapHandle handle
SIO.withFile filename SIO.WriteMode $ \h -> do
SIO.hSetBuffering h (SIO.BlockBuffering (Just 1024))
PI.runEffect $ producer >-> (PI.toHandle h)
replayHttp :: HTTP.Request -> IO LB.ByteString
replayHttp http_req =
if HTTP.secure http_req
then
HTTP.withManager HTTP.tlsManagerSettings $ \m ->
PI.withHTTP http_req m $ \resp -> PI.toLazyM $ HTTP.responseBody resp
else
HTTP.withManager HTTP.defaultManagerSettings $ \m ->
PI.withHTTP http_req m $ \resp -> PI.toLazyM $ HTTP.responseBody resp
mkBackendRequest :: Wai.Request -> String -> IO HTTP.Request
mkBackendRequest wai_req backend_host = do
lbs_req_body <- Wai.lazyRequestBody wai_req
init_req <- HTTP.parseUrl backend_host
let http_req = init_req { HTTP.path = (Wai.rawPathInfo wai_req)
, HTTP.queryString = (Wai.rawQueryString wai_req)
, HTTP.requestBody = HTTP.RequestBodyLBS (lbs_req_body)
} :: HTTP.Request
return http_req
forwardHttp :: URI.URI -> P.PcapHandle -> IO ()
forwardHttp uri handle = undefined
forwardTcp :: URI.URI -> P.PcapHandle -> IO ()
forwardTcp uri handle = do
let scheme = init $ URI.uriScheme $ uri
let port = drop 1 $ URI.uriPort $ fromJust (URI.uriAuthority uri)
let regName = URI.uriRegName $ fromJust (URI.uriAuthority uri)
(sock, sockAddr) <- N.connectSock regName port
let producer = fromPcapHandle handle
PI.runEffect $ producer >-> (PI.toSocket sock)
N.closeSock sock
reverseProxy :: [String] -> Wai.Application
reverseProxy backend_hosts wai_req respond = do
http_reqs <- mapM (mkBackendRequest wai_req) backend_hosts
responses <- Async.mapConcurrently replayHttp http_reqs
let lbs_resp = head responses
respond $ Wai.responseLBS HTTP.status200 [] lbs_resp
relayProxy :: String -> Int -> IO ()
relayProxy hostname port = do
N.listen (N.Host hostname) (show port) $ \(listeningSocket, listeningAddr) -> do
putStrLn $ "Listening for incoming connections at " ++ show listeningAddr
forever . N.acceptFork listeningSocket $ \(connectionSocket, remoteAddr) -> do
putStrLn $ "Connection established from " ++ show remoteAddr
dispatchServer :: ServerOptions -> URI.URI -> IO ()
dispatchServer config uri = do
let scheme = URI.uriScheme uri
let listenRegName = URI.uriRegName $ fromJust (URI.uriAuthority uri)
let listenPort = read $ drop 1 $ URI.uriPort $ fromJust (URI.uriAuthority uri)
case scheme of
"http:" -> do
let backends = [(serverOutput config)]
Warp.run listenPort (reverseProxy backends)
"tcp:" -> do
relayProxy listenRegName listenPort
_ -> error $ "protocol " ++ scheme ++ " not supported."
runServer :: ServerOptions -> IO ()
runServer config = do
case URI.parseURI (serverInput config) of
Just uri -> dispatchServer config uri
Nothing -> putStrLn "Invalid URI"
{- |
Run client subcommand.
-}
runClient :: ClientOptions-> IO ()
runClient config = do
let device_name = "en0"
network <- P.lookupNet device_name
handle <- P.openLive device_name 65535 True 0
P.setFilter handle "tcp dst port 8000" False (PB.netMask network)
case (clientOutput config) of
Just maybeUri -> do
case URI.parseURI maybeUri of
Just uri -> dispatchClient uri handle
Nothing -> putStrLn "Invalid URI"
Nothing -> putStrLn "No output specified"
toBS :: (Int, FFI.Ptr Word8) -> IO C8.ByteString
toBS (len, ptr) = do
s <- BI.create (fromIntegral len) $ \p -> BI.memcpy p ptr (fromIntegral len)
return s
fromPcapHandle :: MonadIO m => P.PcapHandle -> PI.Producer' C8.ByteString m ()
fromPcapHandle handle = go handle
where
go :: MonadIO m => P.PcapHandle -> PI.Producer' C8.ByteString m ()
go handle = do
(hdr, ptr) <- liftIO $ P.next handle
let ethernet_size = 14
let ip_packet = FFI.plusPtr ptr ethernet_size
ip_packet_first_byte <- liftIO $ FFI.peek ip_packet
ip_packet_third_byte <- liftIO $ FFI.peekByteOff ip_packet 2
ip_packet_fourth_byte <- liftIO $ FFI.peekByteOff ip_packet 3
let ip_total_len = fromIntegral ( (((fromIntegral (ip_packet_third_byte :: Word8)) `shiftL` 8) .|. (fromIntegral (ip_packet_fourth_byte :: Word8))) :: Word16 )
let ip_hdr_size = 4 * fromIntegral ((ip_packet_first_byte .&. 0x0F) :: Word8)
when (ip_hdr_size < 20) $ return ()
let tcp_packet = FFI.plusPtr ip_packet ip_hdr_size
let tcp_payload_offset = FFI.plusPtr tcp_packet 12
tcp_payload_offset_byte <- liftIO $ FFI.peek tcp_payload_offset
let tcp_hdr_size = 4 * fromIntegral (shiftR ((tcp_payload_offset_byte .&. 0xF0) :: Word8) 4)
let tcp_payload = FFI.plusPtr tcp_packet tcp_hdr_size
liftIO $ putStrLn $ "IP header size: " ++ (show ip_hdr_size)
liftIO $ putStrLn $ "IP total len: " ++ (show ip_total_len)
liftIO $ putStrLn $ "TCP header size: " ++ (show tcp_hdr_size)
let payload_size = ip_total_len - ip_hdr_size - tcp_hdr_size
when (payload_size > 0) $ do
bs <- liftIO $ toBS (payload_size, tcp_payload)
PI.yield bs
go handle
|
BrandKarma/harley
|
executable/Main.hs
|
bsd-3-clause
| 8,439 | 0 | 20 | 2,133 | 2,489 | 1,263 | 1,226 | 172 | 4 |
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -Wall #-}
-- | Utility code for compiling to LLVM. (This may be merged into
-- SimpleIR itself)
module IR.FlatIR.LLVMGen.Utils(
booltype,
getGlobalType,
getGlobalMutability,
getActualType
) where
import Data.Array.IArray
import Data.Graph.Inductive
import Data.Interval
import Data.Pos
import IR.FlatIR.Syntax
-- | The Flat IR type representing booleans.
booltype :: Pos -> Type
booltype p = IntType { intSigned = False, intSize = 1, intPos = p,
intIntervals = fromIntervalList [Interval 0 1] }
-- | Get the type of a global, constructing a function type if need
-- be.
getGlobalType :: Graph gr => Module gr -> Globalname -> Type
getGlobalType (Module { modGlobals = globals}) name =
case globals ! name of
Function { funcRetTy = retty, funcValTys = valtys,
funcParams = params, funcPos = p } ->
FuncType { funcTyRetTy = retty, funcTyPos = p,
funcTyArgTys = (map ((!) valtys) params) }
GlobalVar { gvarTy = ty } -> ty
-- | Get the mutability of a global. Note that all functions are
-- immutable.
getGlobalMutability :: Graph gr => Module gr -> Globalname -> Mutability
getGlobalMutability (Module { modGlobals = globals }) name =
case globals ! name of
GlobalVar { gvarMutability = mut } -> mut
Function {} -> Immutable
-- | Chase down references and get a concrete type (if it
-- leads to an opaque type, then return the named type).
getActualType :: Graph gr => Module gr -> Type -> Type
getActualType irmodule @ (Module { modTypes = types })
idty @ (IdType { idName = tyname }) =
case types ! tyname of
(_, Just ty) -> getActualType irmodule ty
_ -> idty
getActualType _ ty = ty
|
emc2/chill
|
src/IR/FlatIR/LLVMGen/Utils.hs
|
bsd-3-clause
| 3,302 | 4 | 13 | 690 | 420 | 261 | 159 | 34 | 2 |
-----------------------------------------------------------------------------------------
{-|
Module : Print
Copyright : (c) Daan Leijen 2003
License : wxWindows
Maintainer : [email protected]
Stability : provisional
Portability : portable
Printer abstraction layer. See @samples\/wx\/Print.hs@ for a demo.
The application should create a 'pageSetupDialog' to hold the printer
settings of the user.
> f <- frame [text := "Print demo"]
>
> -- Create a pageSetup dialog with an initial margin of 25 mm.
> pageSetup <- pageSetupDialog f 25
The dialog can be shown using 'pageSetupShowModal'. Furthermore, the
functions 'printDialog' and 'printPreview' can be used to show a print dialog
and a preview window.
> mprint <- menuItem file
> [ text := "&Print..."
> , help := "Print a test"
> , on command := printDialog pageSetup "Test" pageFun printFun
> ]
> mpreview <- menuItem file
> [ text := "&Print preview"
> , help := "Print preview"
> , on command := printPreview pageSetup "Test" pageFun printFun
> ]
These functions take a 'PageFunction' and a 'PrintFunction', which are called
to determine the number of pages needed and to draw on the printer DC, respectively.
The framework automatically takes care of printer margins, preview scaling, etc.
-}
-----------------------------------------------------------------------------------------
module Graphics.UI.WXCore.Print( -- * Printing
pageSetupDialog
, pageSetupShowModal
, printDialog
, printPreview
-- * Callbacks
, PageFunction
, PrintFunction
-- * Page and printer info
, PageInfo(..)
, PrintInfo(..)
-- * Internal
, pageSetupDataGetPageInfo, pageSetupDataSetPageInfo
, printOutGetPrintInfo
, pageSetupDialogGetFrame
) where
import Graphics.UI.WXCore.WxcClasses
import Graphics.UI.WXCore.WxcClassInfo
import Graphics.UI.WXCore.Types
import Graphics.UI.WXCore.Events
import Graphics.UI.WXCore.Frame
-- | Return a page range given page info, print info, and the printable size.
-- The printable size is the number of pixels available for printing
-- without the page margins.
type PageFunction = PageInfo -> PrintInfo -> Size -> (Int,Int)
-- | Print a page given page info, print info, the printable size, the
-- printer device context and the current page.
-- The printable size is the number of pixels available for printing
-- without the page margins
type PrintFunction = PageInfo -> PrintInfo -> Size -> DC () -> Int -> IO ()
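-- The module header above refers to a 'pageFun' and 'printFun' pair without
-- defining them. A minimal sketch of such callbacks might look as follows
-- (illustrative only: the fixed three-page range and the use of 'dcDrawText'
-- at point (10,10) are assumptions, not part of this module):
--
-- > pageFun :: PageFunction
-- > pageFun _pageInfo _printInfo _printableSize = (1,3)   -- pages 1 to 3
-- >
-- > printFun :: PrintFunction
-- > printFun _pageInfo _printInfo _printableSize dc page
-- >   = dcDrawText dc ("Page " ++ show page) (pt 10 10)   -- stamp the page number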
{--------------------------------------------------------------------------
Handle print events
--------------------------------------------------------------------------}
-- | The standard print event handler
onPrint :: Bool {- preview? -}
-> PageInfo -> Printout (CWXCPrintout a)
-> PageFunction
-> PrintFunction
-> EventPrint -> IO ()
onPrint isPreview pageInfo printOut pageRangeFunction printFunction ev
= case ev of
PrintPrepare ->
printOutInitPageRange printOut pageInfo pageRangeFunction >>
return ()
PrintPage _cancel dc n ->
do{ printInfo <- printOutGetPrintInfo printOut
; let io info size = printFunction pageInfo info size dc n
; if isPreview
then do let previewInfo = toScreenInfo printInfo
(scaleX,scaleY) <- getPreviewZoom pageInfo previewInfo dc
dcScale dc scaleX scaleY (respectMargin pageInfo previewInfo dc (io previewInfo))
else respectMargin pageInfo printInfo dc (io printInfo)
}
_ -> return ()
-- | Set a clipping region and device origin according to the margin
respectMargin :: PageInfo -> PrintInfo -> DC a -> (Size -> IO b) -> IO b
respectMargin pageInfo printInfo dc io
= do let ((left,top),printSize) = printableArea pageInfo printInfo
-- the device origin is in unscaled coordinates
scaleX <- dcGetUserScaleX dc
scaleY <- dcGetUserScaleY dc
dcSetDeviceOrigin dc (pt (round (scaleX*left)) (round (scaleY*top)))
-- the clipping respects the scaling
dcSetClippingRegion dc (rect (pt 0 0) printSize)
io printSize
-- | Calculate the printable area
printableArea :: PageInfo -> PrintInfo -> ((Double,Double),Size)
printableArea pageInfo printInfo
= let (printW,printH) = pixelToMM (printerPPI printInfo) (printPageSize printInfo)
(ppmmW,ppmmH) = ppiToPPMM (printerPPI printInfo)
-- calculate minimal printer margin
minX = (toDouble (sizeW (pageSize pageInfo)) - printW)/2
minY = (toDouble (sizeH (pageSize pageInfo)) - printH)/2
-- top-left margin
top = ppmmH * (max minY (toDouble $ rectTop $ pageArea pageInfo))
left = ppmmW * (max minX (toDouble $ rectLeft $ pageArea pageInfo))
-- bottom-right margin
(Point mright mbottom)
= pointSub (pointFromSize (pageSize pageInfo)) (rectBottomRight (pageArea pageInfo))
bottom= ppmmH * (max minY (toDouble mbottom))
right = ppmmW * (max minX (toDouble mright))
dw = round (right + left)
dh = round (bottom + top)
(dw', dh') = if sizeW (printPageSize printInfo) < sizeH (printPageSize printInfo)
then (dw, dh)
else (dh, dw)
-- the actual printable page size
printSize = sz (sizeW (printPageSize printInfo) - dw')
(sizeH (printPageSize printInfo) - dh')
in ((left,top),printSize)
-- | Get the zoom factor from the preview
getPreviewZoom :: PageInfo -> PrintInfo -> DC a -> IO (Double,Double)
getPreviewZoom _pageInfo printInfo dc
= do size <- dcGetSize dc
let (printW,printH) = pixelToMM (printerPPI printInfo) (printPageSize printInfo)
(screenW,screenH) = pixelToMM (screenPPI printInfo) size
scaleX = screenW / printW
scaleY = screenH / printH
return (scaleX,scaleY)
-- | Transform printer info to screen printer info (for the preview).
toScreenInfo :: PrintInfo -> PrintInfo
toScreenInfo printInfo
= let scaleX = (toDouble (sizeW (screenPPI printInfo))) / (toDouble (sizeW (printerPPI printInfo)))
scaleY = (toDouble (sizeH (screenPPI printInfo))) / (toDouble (sizeH (printerPPI printInfo)))
pxX = round (scaleX * (toDouble (sizeW (printPageSize printInfo))))
pxY = round (scaleY * (toDouble (sizeH (printPageSize printInfo))))
in printInfo{ printerPPI = screenPPI printInfo
, printPageSize = sz pxX pxY
}
-- | Pixels to millimeters given a PPI
pixelToMM :: Size -> Size -> (Double,Double)
pixelToMM ppi size
= let convert f = toDouble (f size) / (toDouble (f ppi) / 25.4)
in (convert sizeW, convert sizeH)
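-- As a rough worked example (illustrative figures, not from the original
-- source): at 600 pixels per inch, a 4960x7016 pixel page comes out as
-- 4960 / (600/25.4) by 7016 / (600/25.4), i.e. roughly 210 x 297 mm (A4).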
-- | pixels per inch to pixels per millimeter
ppiToPPMM :: Size -> (Double,Double)
ppiToPPMM ppi
= let convert f = toDouble (f ppi) / 25.4
in (convert sizeW, convert sizeH)
-- | Convert an 'Int' to a 'Double'.
toDouble :: Int -> Double
toDouble i = fromIntegral i
-- | Scale the 'DC'.
dcScale :: DC a -> Double -> Double -> IO b -> IO b
dcScale dc scaleX scaleY io
= do oldX <- dcGetUserScaleX dc
oldY <- dcGetUserScaleY dc
dcSetUserScale dc (oldX*scaleX) (oldY*scaleY)
x <- io
dcSetUserScale dc oldX oldY
return x
{--------------------------------------------------------------------------
preview and printIt
--------------------------------------------------------------------------}
-- | Show a print dialog.
printDialog :: PageSetupDialog a
-> String
-> PageFunction
-> PrintFunction
-> IO ()
printDialog pageSetupDialog' title pageRangeFunction printFunction =
do{ pageSetupData <- pageSetupDialogGetPageSetupData pageSetupDialog'
; printData <- pageSetupDialogDataGetPrintData pageSetupData
; printDialogData <- printDialogDataCreateFromData printData
; printDialogDataSetAllPages printDialogData True
; printer <- printerCreate printDialogData
; printout <- wxcPrintoutCreate title
; pageInfo <- pageSetupDataGetPageInfo pageSetupData
; _ <- printOutInitPageRange printout pageInfo pageRangeFunction
; printOutOnPrint printout (onPrint False pageInfo printout pageRangeFunction printFunction)
; frame <- pageSetupDialogGetFrame pageSetupDialog'
; _ <- printerPrint printer frame printout True {- show printer setup? -}
; objectDelete printDialogData
; objectDelete printout
; objectDelete printer
}
-- | Show a preview window
printPreview :: PageSetupDialog a
-> String
-> PageFunction
-> PrintFunction
-> IO ()
printPreview pageSetupDialog' title pageRangeFunction printFunction =
do{ pageSetupData <- pageSetupDialogGetPageSetupData pageSetupDialog'
; pageInfo <- pageSetupDataGetPageInfo pageSetupData
; printout1 <- wxcPrintoutCreate "Print to preview"
; printout2 <- wxcPrintoutCreate "Print to printer"
; startPage <- printOutInitPageRange printout1 pageInfo pageRangeFunction
; _ <- printOutInitPageRange printout2 pageInfo pageRangeFunction
; printOutOnPrint printout1 (onPrint True pageInfo printout1 pageRangeFunction printFunction)
; printOutOnPrint printout2 (onPrint False pageInfo printout2 pageRangeFunction printFunction)
; printData <- pageSetupDialogDataGetPrintData pageSetupData
; printDialogData <- printDialogDataCreateFromData printData
; printDialogDataSetAllPages printDialogData True
; preview <- printPreviewCreateFromDialogData printout1 printout2 printDialogData
; _ <- printPreviewSetCurrentPage preview startPage
; frame <- pageSetupDialogGetFrame pageSetupDialog'
; previewFrame <- previewFrameCreate preview frame title rectNull frameDefaultStyle title
; previewFrameInitialize previewFrame
; _ <- windowShow previewFrame
; windowRaise previewFrame
}
{--------------------------------------------------------------------------
Class helpers
--------------------------------------------------------------------------}
-- | Set the correct page range for a printout.
printOutInitPageRange :: WXCPrintout a -> PageInfo -> PageFunction -> IO Int
printOutInitPageRange printOut pageInfo pageRangeFunction
= do{ printInfo <- printOutGetPrintInfo printOut
; let (_,size) = printableArea pageInfo printInfo
(start,end) = pageRangeFunction pageInfo printInfo size
; wxcPrintoutSetPageLimits printOut start end start end
; return start
}
-- | Get the parent frame of a 'PageSetupDialog'.
pageSetupDialogGetFrame :: PageSetupDialog a -> IO (Frame ())
pageSetupDialogGetFrame pageSetupDialog'
= do p <- windowGetParent pageSetupDialog'
case (safeCast p classFrame) of
Just frame -> return frame
Nothing -> do w <- wxcAppGetTopWindow
case (safeCast w classFrame) of
Just frame -> return frame
Nothing -> error "pageSetupDialogGetFrame: no parent frame found!"
{--------------------------------------------------------------------------
PageSetupDialog
--------------------------------------------------------------------------}
-- | Create a (hidden) page setup dialog that remembers printer settings.
-- It is a parameter to the functions 'printDialog' and 'printPreview'.
-- The creation function takes a parent frame and the initial page margins
-- (in millimeters) as an argument.
pageSetupDialog :: Frame a -> Int -> IO (PageSetupDialog ())
pageSetupDialog f margin
= do pageSetupData <- pageSetupDialogDataCreate
if (margin > 0)
then do pageInfo <- pageSetupDataGetPageInfo pageSetupData
let p0 = pt margin margin
p1 = pointSub (pointFromSize (pageSize pageInfo)) p0
newInfo = pageInfo{ pageArea = rectBetween p0 p1 }
pageSetupDataSetPageInfo pageSetupData newInfo
else return ()
pageSetupDialog' <- pageSetupDialogCreate f pageSetupData
prev <- windowGetOnClose f
windowOnClose f (do{ objectDelete pageSetupDialog'; prev })
objectDelete pageSetupData
return pageSetupDialog'
-- | Show the page setup dialog
pageSetupShowModal :: PageSetupDialog a -> IO ()
pageSetupShowModal p
= dialogShowModal p >> return ()
{--------------------------------------------------------------------------
PageInfo and PrintInfo
--------------------------------------------------------------------------}
-- | Information from the page setup dialog.
-- All measurements are in millimeters.
data PageInfo = PageInfo{ pageSize :: Size -- ^ The page size (in millimeters)
, pageArea :: Rect -- ^ The available page area (=margins) (in millimeters)
}
deriving Show
-- | Get page info
pageSetupDataGetPageInfo :: PageSetupDialogData a -> IO PageInfo
pageSetupDataGetPageInfo pageSetupData
= do{ topLeft <- pageSetupDialogDataGetMarginTopLeft pageSetupData
; bottomRight <- pageSetupDialogDataGetMarginBottomRight pageSetupData
; paperSize <- pageSetupDialogDataGetPaperSize pageSetupData
; return (PageInfo
{ pageSize = paperSize
, pageArea = rectBetween topLeft (pointSub (pointFromSize paperSize) bottomRight)
})
}
-- | Set page info
pageSetupDataSetPageInfo :: PageSetupDialogData a -> PageInfo -> IO ()
pageSetupDataSetPageInfo pageSetupData pageInfo
= do{ let topLeft = rectTopLeft (pageArea pageInfo)
bottomRight = pointSub (pointFromSize (pageSize pageInfo)) (rectBottomRight (pageArea pageInfo))
; pageSetupDialogDataSetMarginTopLeft pageSetupData topLeft
; pageSetupDialogDataSetMarginBottomRight pageSetupData bottomRight
; pageSetupDialogDataSetPaperSize pageSetupData (pageSize pageInfo)
}
-- | Printer information.
data PrintInfo = PrintInfo { screenPPI :: Size -- ^ screen pixels per inch
, printerPPI :: Size -- ^ printer pixels per inch
, printPageSize :: Size -- ^ printable area (in pixels) = PageInfo pageSize minus printer margins
} deriving Show
-- | Extract print info
printOutGetPrintInfo :: Printout a -> IO PrintInfo
printOutGetPrintInfo printOut
= do{ thePrinterPPI <- printoutGetPPIPrinter printOut
; theScreenPPI <- printoutGetPPIScreen printOut
; thePageSizePixels <- printoutGetPageSizePixels printOut
; return (PrintInfo
{ printerPPI = sizeFromPoint thePrinterPPI
, screenPPI = sizeFromPoint theScreenPPI
, printPageSize = thePageSizePixels
})
}
|
sherwoodwang/wxHaskell
|
wxcore/src/haskell/Graphics/UI/WXCore/Print.hs
|
lgpl-2.1
| 15,717 | 0 | 17 | 4,249 | 2,981 | 1,512 | 1,469 | 209 | 4 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hi-IN">
<title>Context Alert Filters | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/alertFilters/resources/help_hi_IN/helpset_hi_IN.hs
|
apache-2.0
| 983 | 85 | 52 | 161 | 400 | 211 | 189 | -1 | -1 |
{-# LANGUAGE TemplateHaskell, Rank2Types, CPP #-}
#ifndef NO_SAFE_HASKELL
{-# LANGUAGE Trustworthy #-}
#endif
-- | Test all properties in the current module, using Template Haskell.
-- You need to have a @{-\# LANGUAGE TemplateHaskell \#-}@ pragma in
-- your module for any of these to work.
module Test.QuickCheck.All(
-- ** Testing all properties in a module
quickCheckAll,
verboseCheckAll,
forAllProperties,
-- ** Testing polymorphic properties
polyQuickCheck,
polyVerboseCheck,
monomorphic) where
import Language.Haskell.TH
import Test.QuickCheck.Property hiding (Result)
import Test.QuickCheck.Test
import Data.Char
import Data.List
import Control.Monad
import qualified System.IO as S
-- | Test a polymorphic property, defaulting all type variables to 'Integer'.
--
-- Invoke as @$('polyQuickCheck' 'prop)@, where @prop@ is a property.
-- Note that just evaluating @'quickCheck' prop@ in GHCi will seem to
-- work, but will silently default all type variables to @()@!
--
-- @$('polyQuickCheck' \'prop)@ means the same as
-- @'quickCheck' $('monomorphic' \'prop)@.
-- If you want to supply custom arguments to 'polyQuickCheck',
-- you will have to combine 'quickCheckWith' and 'monomorphic' yourself.
--
-- If you want to use 'polyQuickCheck' in the same file where you defined the
-- property, the same scoping problems pop up as in 'quickCheckAll':
-- see the note there about @return []@.
polyQuickCheck :: Name -> ExpQ
polyQuickCheck x = [| quickCheck $(monomorphic x) |]
-- | Test a polymorphic property, defaulting all type variables to 'Integer'.
-- This is just a convenience function that combines 'verboseCheck' and 'monomorphic'.
--
-- If you want to use 'polyVerboseCheck' in the same file where you defined the
-- property, the same scoping problems pop up as in 'quickCheckAll':
-- see the note there about @return []@.
polyVerboseCheck :: Name -> ExpQ
polyVerboseCheck x = [| verboseCheck $(monomorphic x) |]
type Error = forall a. String -> a
-- | Monomorphise an arbitrary property by defaulting all type variables to 'Integer'.
--
-- For example, if @f@ has type @'Ord' a => [a] -> [a]@
-- then @$('monomorphic' 'f)@ has type @['Integer'] -> ['Integer']@.
--
-- If you want to use 'monomorphic' in the same file where you defined the
-- property, the same scoping problems pop up as in 'quickCheckAll':
-- see the note there about @return []@.
monomorphic :: Name -> ExpQ
monomorphic t = do
ty0 <- fmap infoType (reify t)
let err msg = error $ msg ++ ": " ++ pprint ty0
(polys, ctx, ty) <- deconstructType err ty0
case polys of
[] -> return (expName t)
_ -> do
integer <- [t| Integer |]
ty' <- monomorphiseType err integer ty
return (SigE (expName t) ty')
expName :: Name -> Exp
expName n = if isVar n then VarE n else ConE n
-- See section 2.4 of the Haskell 2010 Language Report, plus support for "[]"
isVar :: Name -> Bool
isVar = let isVar' (c:_) = not (isUpper c || c `elem` ":[")
isVar' _ = True
in isVar' . nameBase
infoType :: Info -> Type
#if __GLASGOW_HASKELL__ >= 711
infoType (ClassOpI _ ty _) = ty
infoType (DataConI _ ty _) = ty
infoType (VarI _ ty _) = ty
#else
infoType (ClassOpI _ ty _ _) = ty
infoType (DataConI _ ty _ _) = ty
infoType (VarI _ ty _ _) = ty
#endif
deconstructType :: Error -> Type -> Q ([Name], Cxt, Type)
deconstructType err ty0@(ForallT xs ctx ty) = do
let plain (PlainTV _) = True
#ifndef MIN_VERSION_template_haskell
plain (KindedTV _ StarT) = True
#else
#if MIN_VERSION_template_haskell(2,8,0)
plain (KindedTV _ StarT) = True
#else
plain (KindedTV _ StarK) = True
#endif
#endif
plain _ = False
unless (all plain xs) $ err "Higher-kinded type variables in type"
return (map (\(PlainTV x) -> x) xs, ctx, ty)
deconstructType _ ty = return ([], [], ty)
monomorphiseType :: Error -> Type -> Type -> TypeQ
monomorphiseType err mono ty@(VarT n) = return mono
monomorphiseType err mono (AppT t1 t2) = liftM2 AppT (monomorphiseType err mono t1) (monomorphiseType err mono t2)
monomorphiseType err mono ty@(ForallT _ _ _) = err $ "Higher-ranked type"
monomorphiseType err mono ty = return ty
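-- A small worked example (illustrative): with @mono = Integer@, the type
-- @[a] -> [a]@, i.e.
-- @AppT (AppT ArrowT (AppT ListT (VarT a))) (AppT ListT (VarT a))@,
-- is rewritten to @[Integer] -> [Integer]@ by replacing every 'VarT' and
-- recursing through the 'AppT' spine; a 'ForallT' anywhere in the type
-- triggers the "Higher-ranked type" error instead.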
-- | Test all properties in the current module, using a custom
-- 'quickCheck' function. The same caveats as with 'quickCheckAll'
-- apply.
--
-- @$'forAllProperties'@ has type @('Property' -> 'IO' 'Result') -> 'IO' 'Bool'@.
-- An example invocation is @$'forAllProperties' 'quickCheckResult'@,
-- which does the same thing as @$'quickCheckAll'@.
--
-- 'forAllProperties' has the same issue with scoping as 'quickCheckAll':
-- see the note there about @return []@.
forAllProperties :: Q Exp -- :: (Property -> IO Result) -> IO Bool
forAllProperties = do
Loc { loc_filename = filename } <- location
when (filename == "<interactive>") $ error "don't run this interactively"
ls <- runIO (fmap lines (readUTF8File filename))
let prefixes = map (takeWhile (\c -> isAlphaNum c || c == '_' || c == '\'') . dropWhile (\c -> isSpace c || c == '>')) ls
idents = nubBy (\x y -> snd x == snd y) (filter (("prop_" `isPrefixOf`) . snd) (zip [1..] prefixes))
#if __GLASGOW_HASKELL__ > 705
warning x = reportWarning ("Name " ++ x ++ " found in source file but was not in scope")
#else
warning x = report False ("Name " ++ x ++ " found in source file but was not in scope")
#endif
quickCheckOne :: (Int, String) -> Q [Exp]
quickCheckOne (l, x) = do
exists <- (warning x >> return False) `recover` (reify (mkName x) >> return True)
if exists then sequence [ [| ($(stringE $ x ++ " from " ++ filename ++ ":" ++ show l),
property $(monomorphic (mkName x))) |] ]
else return []
[| runQuickCheckAll $(fmap (ListE . concat) (mapM quickCheckOne idents)) |]
readUTF8File name = S.openFile name S.ReadMode >>=
set_utf8_io_enc >>=
S.hGetContents
-- Deal with UTF-8 input and output.
set_utf8_io_enc :: S.Handle -> IO S.Handle
#if __GLASGOW_HASKELL__ > 611
-- possibly if MIN_VERSION_base(4,2,0)
set_utf8_io_enc h = do S.hSetEncoding h S.utf8; return h
#else
set_utf8_io_enc h = return h
#endif
-- | Test all properties in the current module.
-- The name of the property must begin with @prop_@.
-- Polymorphic properties will be defaulted to 'Integer'.
-- Returns 'True' if all tests succeeded, 'False' otherwise.
--
-- To use 'quickCheckAll', add a definition to your module along
-- the lines of
--
-- > return []
-- > runTests = $quickCheckAll
--
-- and then execute @runTests@.
--
-- Note: the bizarre @return []@ in the example above is needed on
-- GHC 7.8; without it, 'quickCheckAll' will not be able to find
-- any of the properties. For the curious, the @return []@ is a
-- Template Haskell splice that makes GHC insert the empty list
-- of declarations at that point in the program; GHC typechecks
-- everything before the @return []@ before it starts on the rest
-- of the module, which means that the later call to 'quickCheckAll'
-- can see everything that was defined before the @return []@. Yikes!
quickCheckAll :: Q Exp
quickCheckAll = [| $(forAllProperties) quickCheckResult |]
-- | Test all properties in the current module.
-- This is just a convenience function that combines 'quickCheckAll' and 'verbose'.
--
-- 'verboseCheckAll' has the same issue with scoping as 'quickCheckAll':
-- see the note there about @return []@.
verboseCheckAll :: Q Exp
verboseCheckAll = [| $(forAllProperties) verboseCheckResult |]
runQuickCheckAll :: [(String, Property)] -> (Property -> IO Result) -> IO Bool
runQuickCheckAll ps qc =
fmap and . forM ps $ \(xs, p) -> do
putStrLn $ "=== " ++ xs ++ " ==="
r <- qc p
putStrLn ""
return $ case r of
Success { } -> True
Failure { } -> False
NoExpectedFailure { } -> False
GaveUp { } -> False
InsufficientCoverage { } -> False
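-- A hedged usage sketch (not part of this module): driving 'forAllProperties'
-- with a custom runner, as its documentation above describes.
-- 'quickCheckWithResult' and 'stdArgs' come from Test.QuickCheck.Test.
--
-- > return []
-- > runTestsThoroughly :: IO Bool
-- > runTestsThoroughly =
-- >   $forAllProperties (quickCheckWithResult stdArgs{ maxSuccess = 500 })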
|
srhb/quickcheck
|
Test/QuickCheck/All.hs
|
bsd-3-clause
| 7,927 | 0 | 20 | 1,619 | 1,451 | 795 | 656 | 91 | 5 |
{-# LANGUAGE BangPatterns, CPP, NondecreasingIndentation, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
-- NB: we specifically ignore deprecations. GHC 7.6 marks the .QSem module as
-- deprecated, although it became un-deprecated later. As a result, using 7.6
-- as your bootstrap compiler throws annoying warnings.
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow, 2011
--
-- This module implements multi-module compilation, and is used
-- by --make and GHCi.
--
-- -----------------------------------------------------------------------------
module GhcMake(
depanal,
load, LoadHowMuch(..),
topSortModuleGraph,
noModError, cyclicModuleErr
) where
#include "HsVersions.h"
#ifdef GHCI
import qualified Linker ( unload )
#endif
import DriverPhases
import DriverPipeline
import DynFlags
import ErrUtils
import Finder
import GhcMonad
import HeaderInfo
import HsSyn
import HscTypes
import Module
import RdrName ( RdrName )
import TcIface ( typecheckIface )
import TcRnMonad ( initIfaceCheck )
import Bag ( listToBag )
import BasicTypes
import Digraph
import Exception ( tryIO, gbracket, gfinally )
import FastString
import Maybes ( expectJust )
import MonadUtils ( allM, MonadIO )
import Outputable
import Panic
import SrcLoc
import StringBuffer
import SysTools
import UniqFM
import Util
import Data.Either ( rights, partitionEithers )
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import qualified FiniteMap as Map ( insertListWith )
import Control.Concurrent ( forkIOWithUnmask, killThread )
import Control.Concurrent.MVar
import Control.Concurrent.QSem
import Control.Exception
import Control.Monad
import Data.IORef
import Data.List
import qualified Data.List as List
import Data.Maybe
import Data.Ord ( comparing )
import Data.Time
import System.Directory
import System.FilePath
import System.IO ( fixIO )
import System.IO.Error ( isDoesNotExistError )
import GHC.Conc ( getNumProcessors, getNumCapabilities, setNumCapabilities )
-- -----------------------------------------------------------------------------
-- Loading the program
-- | Perform a dependency analysis starting from the current targets
-- and update the session with the new module graph.
--
-- Dependency analysis entails parsing the @import@ directives and may
-- therefore require running certain preprocessors.
--
-- Note that each 'ModSummary' in the module graph caches its 'DynFlags'.
-- These 'DynFlags' are determined by the /current/ session 'DynFlags' and the
-- @OPTIONS@ and @LANGUAGE@ pragmas of the parsed module. Thus if you want
-- changes to the 'DynFlags' to take effect you need to call this function
-- again.
--
depanal :: GhcMonad m =>
[ModuleName] -- ^ excluded modules
-> Bool -- ^ allow duplicate roots
-> m ModuleGraph
depanal excluded_mods allow_dup_roots = do
hsc_env <- getSession
let
dflags = hsc_dflags hsc_env
targets = hsc_targets hsc_env
old_graph = hsc_mod_graph hsc_env
liftIO $ showPass dflags "Chasing dependencies"
liftIO $ debugTraceMsg dflags 2 (hcat [
text "Chasing modules from: ",
hcat (punctuate comma (map pprTarget targets))])
mod_graphE <- liftIO $ downsweep hsc_env old_graph excluded_mods allow_dup_roots
mod_graph <- reportImportErrors mod_graphE
modifySession $ \_ -> hsc_env { hsc_mod_graph = mod_graph }
return mod_graph
-- | Describes which modules of the module graph need to be loaded.
data LoadHowMuch
= LoadAllTargets
    -- ^ Load all targets and their dependencies.
| LoadUpTo ModuleName
-- ^ Load only the given module and its dependencies.
| LoadDependenciesOf ModuleName
-- ^ Load only the dependencies of the given module, but not the module
-- itself.
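-- A hedged sketch (not from this module) of how a GHC API client typically
-- drives these modes; 'runGhc', 'guessTarget' and 'setTargets' are assumed
-- from the GHC API front end, and @libdir@ is a hypothetical path:
--
-- > compileAll :: FilePath -> IO SuccessFlag
-- > compileAll libdir = runGhc (Just libdir) $ do
-- >   dflags <- getSessionDynFlags
-- >   _ <- setSessionDynFlags dflags
-- >   t <- guessTarget "Main.hs" Nothing
-- >   setTargets [t]
-- >   load LoadAllTargets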
-- | Try to load the program. See 'LoadHowMuch' for the different modes.
--
-- This function implements the core of GHC's @--make@ mode. It preprocesses,
-- compiles and loads the specified modules, avoiding re-compilation wherever
-- possible. Depending on the target (see 'DynFlags.hscTarget'), compiling
-- and loading may result in files being created on disk.
--
-- Calls the 'reportModuleCompilationResult' callback after compiling
-- each module, whether successful or not.
--
-- Throws a 'SourceError' if errors are encountered before the actual
-- compilation starts (e.g., during dependency analysis). All other errors
-- are reported using the callback.
--
load :: GhcMonad m => LoadHowMuch -> m SuccessFlag
load how_much = do
mod_graph <- depanal [] False
guessOutputFile
hsc_env <- getSession
let hpt1 = hsc_HPT hsc_env
let dflags = hsc_dflags hsc_env
-- The "bad" boot modules are the ones for which we have
-- B.hs-boot in the module graph, but no B.hs
-- The downsweep should have ensured this does not happen
-- (see msDeps)
let all_home_mods = [ms_mod_name s
| s <- mod_graph, not (isBootSummary s)]
bad_boot_mods = [s | s <- mod_graph, isBootSummary s,
not (ms_mod_name s `elem` all_home_mods)]
ASSERT( null bad_boot_mods ) return ()
-- check that the module given in HowMuch actually exists, otherwise
-- topSortModuleGraph will bomb later.
let checkHowMuch (LoadUpTo m) = checkMod m
checkHowMuch (LoadDependenciesOf m) = checkMod m
checkHowMuch _ = id
checkMod m and_then
| m `elem` all_home_mods = and_then
| otherwise = do
liftIO $ errorMsg dflags (text "no such module:" <+>
quotes (ppr m))
return Failed
checkHowMuch how_much $ do
-- mg2_with_srcimps drops the hi-boot nodes, returning a
-- graph with cycles. Among other things, it is used for
-- backing out partially complete cycles following a failed
-- upsweep, and for removing from hpt all the modules
-- not in strict downwards closure, during calls to compile.
let mg2_with_srcimps :: [SCC ModSummary]
mg2_with_srcimps = topSortModuleGraph True mod_graph Nothing
-- If we can determine that any of the {-# SOURCE #-} imports
-- are definitely unnecessary, then emit a warning.
warnUnnecessarySourceImports mg2_with_srcimps
let
-- check the stability property for each module.
stable_mods@(stable_obj,stable_bco)
= checkStability hpt1 mg2_with_srcimps all_home_mods
-- prune bits of the HPT which are definitely redundant now,
-- to save space.
pruned_hpt = pruneHomePackageTable hpt1
(flattenSCCs mg2_with_srcimps)
stable_mods
_ <- liftIO $ evaluate pruned_hpt
-- before we unload anything, make sure we don't leave an old
-- interactive context around pointing to dead bindings. Also,
-- write the pruned HPT to allow the old HPT to be GC'd.
modifySession $ \_ -> discardIC $ hsc_env { hsc_HPT = pruned_hpt }
liftIO $ debugTraceMsg dflags 2 (text "Stable obj:" <+> ppr stable_obj $$
text "Stable BCO:" <+> ppr stable_bco)
-- Unload any modules which are going to be re-linked this time around.
let stable_linkables = [ linkable
| m <- stable_obj++stable_bco,
Just hmi <- [lookupUFM pruned_hpt m],
Just linkable <- [hm_linkable hmi] ]
liftIO $ unload hsc_env stable_linkables
-- We could at this point detect cycles which aren't broken by
-- a source-import, and complain immediately, but it seems better
-- to let upsweep_mods do this, so at least some useful work gets
-- done before the upsweep is abandoned.
--hPutStrLn stderr "after tsort:\n"
--hPutStrLn stderr (showSDoc (vcat (map ppr mg2)))
-- Now do the upsweep, calling compile for each module in
-- turn. Final result is version 3 of everything.
-- Topologically sort the module graph, this time including hi-boot
-- nodes, and possibly just including the portion of the graph
-- reachable from the module specified in the 2nd argument to load.
-- This graph should be cycle-free.
-- If we're restricting the upsweep to a portion of the graph, we
-- also want to retain everything that is still stable.
let full_mg :: [SCC ModSummary]
full_mg = topSortModuleGraph False mod_graph Nothing
maybe_top_mod = case how_much of
LoadUpTo m -> Just m
LoadDependenciesOf m -> Just m
_ -> Nothing
partial_mg0 :: [SCC ModSummary]
partial_mg0 = topSortModuleGraph False mod_graph maybe_top_mod
-- LoadDependenciesOf m: we want the upsweep to stop just
-- short of the specified module (unless the specified module
-- is stable).
partial_mg
| LoadDependenciesOf _mod <- how_much
= ASSERT( case last partial_mg0 of
AcyclicSCC ms -> ms_mod_name ms == _mod; _ -> False )
List.init partial_mg0
| otherwise
= partial_mg0
stable_mg =
[ AcyclicSCC ms
| AcyclicSCC ms <- full_mg,
ms_mod_name ms `elem` stable_obj++stable_bco ]
-- the modules from partial_mg that are not also stable
-- NB. also keep cycles, we need to emit an error message later
unstable_mg = filter not_stable partial_mg
where not_stable (CyclicSCC _) = True
not_stable (AcyclicSCC ms)
= ms_mod_name ms `notElem` stable_obj++stable_bco
-- Load all the stable modules first, before attempting to load
-- an unstable module (#7231).
mg = stable_mg ++ unstable_mg
-- clean up between compilations
let cleanup hsc_env = intermediateCleanTempFiles (hsc_dflags hsc_env)
(flattenSCCs mg2_with_srcimps)
hsc_env
liftIO $ debugTraceMsg dflags 2 (hang (text "Ready for upsweep")
2 (ppr mg))
n_jobs <- case parMakeCount dflags of
Nothing -> liftIO getNumProcessors
Just n -> return n
let upsweep_fn | n_jobs > 1 = parUpsweep n_jobs
| otherwise = upsweep
setSession hsc_env{ hsc_HPT = emptyHomePackageTable }
(upsweep_ok, modsUpswept)
<- upsweep_fn pruned_hpt stable_mods cleanup mg
-- Make modsDone be the summaries for each home module now
-- available; this should equal the domain of hpt3.
    -- Get it in a roughly top .. bottom order (hence reverse).
let modsDone = reverse modsUpswept
-- Try and do linking in some form, depending on whether the
-- upsweep was completely or only partially successful.
if succeeded upsweep_ok
then
-- Easy; just relink it all.
do liftIO $ debugTraceMsg dflags 2 (text "Upsweep completely successful.")
-- Clean up after ourselves
hsc_env1 <- getSession
liftIO $ intermediateCleanTempFiles dflags modsDone hsc_env1
-- Issue a warning for the confusing case where the user
-- said '-o foo' but we're not going to do any linking.
-- We attempt linking if either (a) one of the modules is
-- called Main, or (b) the user said -no-hs-main, indicating
-- that main() is going to come from somewhere else.
--
let ofile = outputFile dflags
let no_hs_main = gopt Opt_NoHsMain dflags
let
main_mod = mainModIs dflags
a_root_is_Main = any ((==main_mod).ms_mod) mod_graph
do_linking = a_root_is_Main || no_hs_main || ghcLink dflags == LinkDynLib || ghcLink dflags == LinkStaticLib
when (ghcLink dflags == LinkBinary
&& isJust ofile && not do_linking) $
liftIO $ debugTraceMsg dflags 1 $
text ("Warning: output was redirected with -o, " ++
"but no output will be generated\n" ++
"because there is no " ++
moduleNameString (moduleName main_mod) ++ " module.")
-- link everything together
linkresult <- liftIO $ link (ghcLink dflags) dflags do_linking (hsc_HPT hsc_env1)
loadFinish Succeeded linkresult
else
-- Tricky. We need to back out the effects of compiling any
-- half-done cycles, both so as to clean up the top level envs
-- and to avoid telling the interactive linker to link them.
do liftIO $ debugTraceMsg dflags 2 (text "Upsweep partially successful.")
let modsDone_names
= map ms_mod modsDone
let mods_to_zap_names
= findPartiallyCompletedCycles modsDone_names
mg2_with_srcimps
let mods_to_keep
= filter ((`notElem` mods_to_zap_names).ms_mod)
modsDone
hsc_env1 <- getSession
let hpt4 = retainInTopLevelEnvs (map ms_mod_name mods_to_keep)
(hsc_HPT hsc_env1)
-- Clean up after ourselves
liftIO $ intermediateCleanTempFiles dflags mods_to_keep hsc_env1
-- there should be no Nothings where linkables should be, now
ASSERT(all (isJust.hm_linkable) (eltsUFM (hsc_HPT hsc_env))) do
-- Link everything together
linkresult <- liftIO $ link (ghcLink dflags) dflags False hpt4
modifySession $ \hsc_env -> hsc_env{ hsc_HPT = hpt4 }
loadFinish Failed linkresult
-- | Finish up after a load.
loadFinish :: GhcMonad m => SuccessFlag -> SuccessFlag -> m SuccessFlag
-- If the link failed, unload everything and return.
loadFinish _all_ok Failed
= do hsc_env <- getSession
liftIO $ unload hsc_env []
modifySession discardProg
return Failed
-- Empty the interactive context and set the module context to the topmost
-- newly loaded module, or the Prelude if none were loaded.
loadFinish all_ok Succeeded
= do modifySession discardIC
return all_ok
-- | Forget the current program, but retain the persistent info in HscEnv
discardProg :: HscEnv -> HscEnv
discardProg hsc_env
= discardIC $ hsc_env { hsc_mod_graph = emptyMG
, hsc_HPT = emptyHomePackageTable }
-- | Discard the contents of the InteractiveContext, but keep the DynFlags
discardIC :: HscEnv -> HscEnv
discardIC hsc_env
= hsc_env { hsc_IC = emptyInteractiveContext (ic_dflags (hsc_IC hsc_env)) }
intermediateCleanTempFiles :: DynFlags -> [ModSummary] -> HscEnv -> IO ()
intermediateCleanTempFiles dflags summaries hsc_env
= do notIntermediate <- readIORef (filesToNotIntermediateClean dflags)
cleanTempFilesExcept dflags (notIntermediate ++ except)
where
except =
-- Save preprocessed files. The preprocessed file *might* be
-- the same as the source file, but that doesn't do any
-- harm.
map ms_hspp_file summaries ++
-- Save object files for loaded modules. The point of this
-- is that we might have generated and compiled a stub C
-- file, and in the case of GHCi the object file will be a
-- temporary file which we must not remove because we need
-- to load/link it later.
hptObjs (hsc_HPT hsc_env)
-- | If there is no -o option, guess the name of the target executable
-- by using the top-level source file name as a base.
guessOutputFile :: GhcMonad m => m ()
guessOutputFile = modifySession $ \env ->
let dflags = hsc_dflags env
mod_graph = hsc_mod_graph env
mainModuleSrcPath :: Maybe String
mainModuleSrcPath = do
let isMain = (== mainModIs dflags) . ms_mod
[ms] <- return (filter isMain mod_graph)
ml_hs_file (ms_location ms)
name = fmap dropExtension mainModuleSrcPath
#if defined(mingw32_HOST_OS)
        -- we must add the .exe extension unconditionally here, otherwise
-- when name has an extension of its own, the .exe extension will
-- not be added by DriverPipeline.exeFileName. See #2248
name_exe = fmap (<.> "exe") name
#else
name_exe = name
#endif
in
case outputFile dflags of
Just _ -> env
Nothing -> env { hsc_dflags = dflags { outputFile = name_exe } }
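-- A small worked example (illustrative): if no -o option is given and the
-- main module lives in src/Main.hs, the guessed executable name is
-- "src/Main" ("src/Main.exe" on Windows, see the #if above).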
-- -----------------------------------------------------------------------------
--
-- | Prune the HomePackageTable
--
-- Before doing an upsweep, we can throw away:
--
-- - For non-stable modules:
-- - all ModDetails, all linked code
-- - all unlinked code that is out of date with respect to
-- the source file
--
-- This is VERY IMPORTANT otherwise we'll end up requiring 2x the
-- space at the end of the upsweep, because the topmost ModDetails of the
-- old HPT holds on to the entire type environment from the previous
-- compilation.
pruneHomePackageTable :: HomePackageTable
-> [ModSummary]
-> ([ModuleName],[ModuleName])
-> HomePackageTable
pruneHomePackageTable hpt summ (stable_obj, stable_bco)
= mapUFM prune hpt
where prune hmi
| is_stable modl = hmi'
| otherwise = hmi'{ hm_details = emptyModDetails }
where
modl = moduleName (mi_module (hm_iface hmi))
hmi' | Just l <- hm_linkable hmi, linkableTime l < ms_hs_date ms
= hmi{ hm_linkable = Nothing }
| otherwise
= hmi
where ms = expectJust "prune" (lookupUFM ms_map modl)
ms_map = listToUFM [(ms_mod_name ms, ms) | ms <- summ]
is_stable m = m `elem` stable_obj || m `elem` stable_bco
-- -----------------------------------------------------------------------------
--
-- | Return (names of) all those in modsDone who are part of a cycle as defined
-- by theGraph.
findPartiallyCompletedCycles :: [Module] -> [SCC ModSummary] -> [Module]
findPartiallyCompletedCycles modsDone theGraph
= chew theGraph
where
chew [] = []
chew ((AcyclicSCC _):rest) = chew rest -- acyclic? not interesting.
chew ((CyclicSCC vs):rest)
= let names_in_this_cycle = nub (map ms_mod vs)
mods_in_this_cycle
= nub ([done | done <- modsDone,
done `elem` names_in_this_cycle])
chewed_rest = chew rest
in
if notNull mods_in_this_cycle
&& length mods_in_this_cycle < length names_in_this_cycle
then mods_in_this_cycle ++ chewed_rest
else chewed_rest
-- ---------------------------------------------------------------------------
--
-- | Unloading
unload :: HscEnv -> [Linkable] -> IO ()
unload hsc_env stable_linkables -- Unload everything *except* 'stable_linkables'
= case ghcLink (hsc_dflags hsc_env) of
#ifdef GHCI
LinkInMemory -> Linker.unload (hsc_dflags hsc_env) stable_linkables
#else
LinkInMemory -> panic "unload: no interpreter"
-- urgh. avoid warnings:
hsc_env stable_linkables
#endif
_other -> return ()
-- -----------------------------------------------------------------------------
{- |
Stability tells us which modules definitely do not need to be recompiled.
There are two main reasons for having stability:
- avoid doing a complete upsweep of the module graph in GHCi when
modules near the bottom of the tree have not changed.
- to tell GHCi when it can load object code: we can only load object code
    for a module when we also load object code for all of the imports of the
module. So we need to know that we will definitely not be recompiling
any of these modules, and we can use the object code.
The stability check is as follows. Both stableObject and
stableBCO are used during the upsweep phase later.
@
stable m = stableObject m || stableBCO m
stableObject m =
all stableObject (imports m)
&& old linkable does not exist, or is == on-disk .o
&& date(on-disk .o) > date(.hs)
stableBCO m =
all stable (imports m)
&& date(BCO) > date(.hs)
@
These properties embody the following ideas:
- if a module is stable, then:
- if it has been compiled in a previous pass (present in HPT)
then it does not need to be compiled or re-linked.
- if it has not been compiled in a previous pass,
then we only need to read its .hi file from disk and
link it to produce a 'ModDetails'.
  - if a module is not stable, we will definitely be at least
re-linking, and possibly re-compiling it during the 'upsweep'.
All non-stable modules can (and should) therefore be unlinked
before the 'upsweep'.
- Note that objects are only considered stable if they only depend
on other objects. We can't link object code against byte code.
-}
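-- A minimal standalone sketch (illustrative, toy types rather than GHC's)
-- transcribing the equations above directly; the on-disk linkable checks are
-- collapsed into single Booleans, and unlike the real code (which works
-- SCC-by-SCC) this naive recursion assumes an acyclic import graph:
--
-- > data ModInfo = ModInfo { imports     :: [String]
-- >                        , objUpToDate :: Bool  -- .o newer than .hs, matches old linkable
-- >                        , bcoUpToDate :: Bool  -- BCO newer than .hs
-- >                        }
-- >
-- > stable, stableObject, stableBCO :: (String -> ModInfo) -> String -> Bool
-- > stable       env m = stableObject env m || stableBCO env m
-- > stableObject env m = all (stableObject env) (imports (env m)) && objUpToDate (env m)
-- > stableBCO    env m = all (stable env)       (imports (env m)) && bcoUpToDate (env m)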
checkStability
:: HomePackageTable -- HPT from last compilation
-> [SCC ModSummary] -- current module graph (cyclic)
-> [ModuleName] -- all home modules
-> ([ModuleName], -- stableObject
[ModuleName]) -- stableBCO
checkStability hpt sccs all_home_mods = foldl checkSCC ([],[]) sccs
where
checkSCC (stable_obj, stable_bco) scc0
| stableObjects = (scc_mods ++ stable_obj, stable_bco)
| stableBCOs = (stable_obj, scc_mods ++ stable_bco)
| otherwise = (stable_obj, stable_bco)
where
scc = flattenSCC scc0
scc_mods = map ms_mod_name scc
home_module m = m `elem` all_home_mods && m `notElem` scc_mods
scc_allimps = nub (filter home_module (concatMap ms_home_allimps scc))
-- all imports outside the current SCC, but in the home pkg
stable_obj_imps = map (`elem` stable_obj) scc_allimps
stable_bco_imps = map (`elem` stable_bco) scc_allimps
stableObjects =
and stable_obj_imps
&& all object_ok scc
stableBCOs =
and (zipWith (||) stable_obj_imps stable_bco_imps)
&& all bco_ok scc
object_ok ms
| gopt Opt_ForceRecomp (ms_hspp_opts ms) = False
| Just t <- ms_obj_date ms = t >= ms_hs_date ms
&& same_as_prev t
| otherwise = False
where
same_as_prev t = case lookupUFM hpt (ms_mod_name ms) of
Just hmi | Just l <- hm_linkable hmi
-> isObjectLinkable l && t == linkableTime l
_other -> True
-- why '>=' rather than '>' above? If the filesystem stores
          -- times to the nearest second, we may occasionally find that
-- the object & source have the same modification time,
-- especially if the source was automatically generated
-- and compiled. Using >= is slightly unsafe, but it matches
-- make's behaviour.
--
-- But see #5527, where someone ran into this and it caused
-- a problem.
bco_ok ms
| gopt Opt_ForceRecomp (ms_hspp_opts ms) = False
| otherwise = case lookupUFM hpt (ms_mod_name ms) of
Just hmi | Just l <- hm_linkable hmi ->
not (isObjectLinkable l) &&
linkableTime l >= ms_hs_date ms
_other -> False
{- Parallel Upsweep
-
- The parallel upsweep attempts to concurrently compile the modules in the
- compilation graph using multiple Haskell threads.
-
- The Algorithm
-
- A Haskell thread is spawned for each module in the module graph, waiting for
- its direct dependencies to finish building before it itself begins to build.
-
- Each module is associated with an initially empty MVar that stores the
- result of that particular module's compile. If the compile succeeded, then
- the HscEnv (synchronized by an MVar) is updated with the fresh HMI of that
- module, and the module's HMI is deleted from the old HPT (synchronized by an
- IORef) to save space.
-
- Instead of immediately outputting messages to the standard handles, all
- compilation output is deferred to a per-module TQueue. A QSem is used to
- limit the number of workers that are compiling simultaneously.
-
- Meanwhile, the main thread sequentially loops over all the modules in the
- module graph, outputting the messages stored in each module's TQueue.
-}
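-- A minimal standalone sketch (illustrative; plain base concurrency
-- libraries, not GHC's types) of the worker-limiting pattern described above:
-- one thread per job, a 'QSem' bounding how many run at once, and a per-job
-- 'MVar' carrying the result.  Unlike the real code it does not catch
-- exceptions, so a failing job would leave its MVar empty.
--
-- > import Control.Concurrent
-- > import Control.Concurrent.QSem
-- > import Control.Exception (bracket_)
-- > import Control.Monad (forM)
-- >
-- > boundedWorkers :: Int -> [IO a] -> IO [a]
-- > boundedWorkers n jobs = do
-- >   sem     <- newQSem n
-- >   results <- forM jobs $ \job -> do
-- >     mvar <- newEmptyMVar
-- >     _ <- forkIO (bracket_ (waitQSem sem) (signalQSem sem) job >>= putMVar mvar)
-- >     return mvar
-- >   mapM takeMVar results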
-- | Each module is given a unique 'LogQueue' to redirect compilation messages
-- to. A 'Nothing' value contains the result of compilation, and denotes the
-- end of the message queue.
data LogQueue = LogQueue !(IORef [Maybe (Severity, SrcSpan, PprStyle, MsgDoc)])
!(MVar ())
-- | The graph of modules to compile and their corresponding result 'MVar' and
-- 'LogQueue'.
type CompilationGraph = [(ModSummary, MVar SuccessFlag, LogQueue)]
-- | Build a 'CompilationGraph' out of a list of strongly-connected modules,
-- also returning the first, if any, encountered module cycle.
buildCompGraph :: [SCC ModSummary] -> IO (CompilationGraph, Maybe [ModSummary])
buildCompGraph [] = return ([], Nothing)
buildCompGraph (scc:sccs) = case scc of
AcyclicSCC ms -> do
mvar <- newEmptyMVar
log_queue <- do
ref <- newIORef []
sem <- newEmptyMVar
return (LogQueue ref sem)
(rest,cycle) <- buildCompGraph sccs
return ((ms,mvar,log_queue):rest, cycle)
CyclicSCC mss -> return ([], Just mss)
-- A Module and whether it is a boot module.
type BuildModule = (Module, Bool)
mkBuildModule :: ModSummary -> BuildModule
mkBuildModule ms = (ms_mod ms, isBootSummary ms)
-- | The entry point to the parallel upsweep.
--
-- See also the simpler, sequential 'upsweep'.
parUpsweep
:: GhcMonad m
=> Int
-- ^ The number of workers we wish to run in parallel
-> HomePackageTable
-> ([ModuleName],[ModuleName])
-> (HscEnv -> IO ())
-> [SCC ModSummary]
-> m (SuccessFlag,
[ModSummary])
parUpsweep n_jobs old_hpt stable_mods cleanup sccs = do
hsc_env <- getSession
let dflags = hsc_dflags hsc_env
-- The bits of shared state we'll be using:
-- The global HscEnv is updated with the module's HMI when a module
-- successfully compiles.
hsc_env_var <- liftIO $ newMVar hsc_env
-- The old HPT is used for recompilation checking in upsweep_mod. When a
    -- module successfully gets compiled, its HMI is pruned from the old HPT.
old_hpt_var <- liftIO $ newIORef old_hpt
-- What we use to limit parallelism with.
par_sem <- liftIO $ newQSem n_jobs
let updNumCapabilities = liftIO $ do
n_capabilities <- getNumCapabilities
unless (n_capabilities /= 1) $ setNumCapabilities n_jobs
return n_capabilities
-- Reset the number of capabilities once the upsweep ends.
let resetNumCapabilities orig_n = liftIO $ setNumCapabilities orig_n
gbracket updNumCapabilities resetNumCapabilities $ \_ -> do
-- Sync the global session with the latest HscEnv once the upsweep ends.
let finallySyncSession io = io `gfinally` do
hsc_env <- liftIO $ readMVar hsc_env_var
setSession hsc_env
finallySyncSession $ do
-- Build the compilation graph out of the list of SCCs. Module cycles are
-- handled at the very end, after some useful work gets done. Note that
-- this list is topologically sorted (by virtue of 'sccs' being sorted so).
(comp_graph,cycle) <- liftIO $ buildCompGraph sccs
let comp_graph_w_idx = zip comp_graph [1..]
-- The list of all loops in the compilation graph.
-- NB: For convenience, the last module of each loop (aka the module that
-- finishes the loop) is prepended to the beginning of the loop.
let comp_graph_loops = go (map fstOf3 (reverse comp_graph))
where
go [] = []
go (ms:mss) | Just loop <- getModLoop ms (ms:mss)
= map mkBuildModule (ms:loop) : go mss
| otherwise
= go mss
-- Build a Map out of the compilation graph with which we can efficiently
-- look up the result MVar associated with a particular home module.
let home_mod_map :: Map BuildModule (MVar SuccessFlag, Int)
home_mod_map =
Map.fromList [ (mkBuildModule ms, (mvar, idx))
| ((ms,mvar,_),idx) <- comp_graph_w_idx ]
-- For each module in the module graph, spawn a worker thread that will
-- compile this module.
let { spawnWorkers = forM comp_graph_w_idx $ \((mod,!mvar,!log_queue),!mod_idx) ->
forkIOWithUnmask $ \unmask -> do
-- Replace the default log_action with one that writes each
-- message to the module's log_queue. The main thread will
-- deal with synchronously printing these messages.
--
-- Use a local filesToClean var so that we can clean up
-- intermediate files in a timely fashion (as soon as
-- compilation for that module is finished) without having to
-- worry about accidentally deleting a simultaneous compile's
-- important files.
lcl_files_to_clean <- newIORef []
let lcl_dflags = dflags { log_action = parLogAction log_queue
, filesToClean = lcl_files_to_clean }
-- Unmask asynchronous exceptions and perform the thread-local
-- work to compile the module (see parUpsweep_one).
m_res <- try $ unmask $ prettyPrintGhcErrors lcl_dflags $
parUpsweep_one mod home_mod_map comp_graph_loops
lcl_dflags cleanup
par_sem hsc_env_var old_hpt_var
stable_mods mod_idx (length sccs)
res <- case m_res of
Right flag -> return flag
Left exc -> do
-- Don't print ThreadKilled exceptions: they are used
-- to kill the worker thread in the event of a user
-- interrupt, and the user doesn't have to be informed
-- about that.
when (fromException exc /= Just ThreadKilled)
(errorMsg lcl_dflags (text (show exc)))
return Failed
-- Populate the result MVar.
putMVar mvar res
-- Write the end marker to the message queue, telling the main
-- thread that it can stop waiting for messages from this
-- particular compile.
writeLogQueue log_queue Nothing
-- Add the remaining files that weren't cleaned up to the
-- global filesToClean ref, for cleanup later.
files_kept <- readIORef (filesToClean lcl_dflags)
addFilesToClean dflags files_kept
-- Kill all the workers, masking interrupts (since killThread is
-- interruptible). XXX: This is not ideal.
; killWorkers = uninterruptibleMask_ . mapM_ killThread }
-- Spawn the workers, making sure to kill them later. Collect the results
-- of each compile.
results <- liftIO $ bracket spawnWorkers killWorkers $ \_ ->
-- Loop over each module in the compilation graph in order, printing
-- each message from its log_queue.
forM comp_graph $ \(mod,mvar,log_queue) -> do
printLogs dflags log_queue
result <- readMVar mvar
if succeeded result then return (Just mod) else return Nothing
-- Collect and return the ModSummaries of all the successful compiles.
-- NB: Reverse this list to maintain output parity with the sequential upsweep.
let ok_results = reverse (catMaybes results)
-- Handle any cycle in the original compilation graph and return the result
-- of the upsweep.
case cycle of
Just mss -> do
liftIO $ fatalErrorMsg dflags (cyclicModuleErr mss)
return (Failed,ok_results)
Nothing -> do
let success_flag = successIf (all isJust results)
return (success_flag,ok_results)
where
writeLogQueue :: LogQueue -> Maybe (Severity,SrcSpan,PprStyle,MsgDoc) -> IO ()
writeLogQueue (LogQueue ref sem) msg = do
atomicModifyIORef ref $ \msgs -> (msg:msgs,())
_ <- tryPutMVar sem ()
return ()
-- The log_action callback that is used to synchronize messages from a
-- worker thread.
parLogAction :: LogQueue -> LogAction
parLogAction log_queue _dflags !severity !srcSpan !style !msg = do
writeLogQueue log_queue (Just (severity,srcSpan,style,msg))
-- Print each message from the log_queue using the log_action from the
-- session's DynFlags.
printLogs :: DynFlags -> LogQueue -> IO ()
printLogs !dflags (LogQueue ref sem) = read_msgs
where read_msgs = do
takeMVar sem
msgs <- atomicModifyIORef ref $ \xs -> ([], reverse xs)
print_loop msgs
print_loop [] = read_msgs
print_loop (x:xs) = case x of
Just (severity,srcSpan,style,msg) -> do
log_action dflags dflags severity srcSpan style msg
print_loop xs
-- Exit the loop once we encounter the end marker.
Nothing -> return ()
-- The interruptible subset of the worker threads' work.
parUpsweep_one
:: ModSummary
-- ^ The module we wish to compile
-> Map BuildModule (MVar SuccessFlag, Int)
-- ^ The map of home modules and their result MVar
-> [[BuildModule]]
-- ^ The list of all module loops within the compilation graph.
-> DynFlags
-- ^ The thread-local DynFlags
-> (HscEnv -> IO ())
-- ^ The callback for cleaning up intermediate files
-> QSem
-- ^ The semaphore for limiting the number of simultaneous compiles
-> MVar HscEnv
-- ^ The MVar that synchronizes updates to the global HscEnv
-> IORef HomePackageTable
-- ^ The old HPT
-> ([ModuleName],[ModuleName])
-- ^ Lists of stable objects and BCOs
-> Int
-- ^ The index of this module
-> Int
-- ^ The total number of modules
-> IO SuccessFlag
-- ^ The result of this compile
parUpsweep_one mod home_mod_map comp_graph_loops lcl_dflags cleanup par_sem
hsc_env_var old_hpt_var stable_mods mod_index num_mods = do
let this_build_mod = mkBuildModule mod
let home_imps = map unLoc $ ms_home_imps mod
let home_src_imps = map unLoc $ ms_home_srcimps mod
-- All the textual imports of this module.
let textual_deps = Set.fromList $ mapFst (mkModule (thisPackage lcl_dflags)) $
zip home_imps (repeat False) ++
zip home_src_imps (repeat True)
-- Dealing with module loops
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Not only do we have to deal with explicit textual dependencies, we also
-- have to deal with implicit dependencies introduced by import cycles that
-- are broken by an hs-boot file. We have to ensure that:
--
-- 1. A module that breaks a loop must depend on all the modules in the
-- loop (transitively or otherwise). This is normally always fulfilled
-- by the module's textual dependencies except in degenerate loops,
-- e.g.:
--
-- A.hs imports B.hs-boot
-- B.hs doesn't import A.hs
-- C.hs imports A.hs, B.hs
--
-- In this scenario, getModLoop will detect the module loop [A,B] but
-- the loop finisher B doesn't depend on A. So we have to explicitly add
-- A in as a dependency of B when we are compiling B.
--
-- 2. A module that depends on a module in an external loop can't proceed
-- until the entire loop is re-typechecked.
--
-- These two invariants have to be maintained to correctly build a
-- compilation graph with one or more loops.
-- The loop that this module will finish. After this module successfully
-- compiles, this loop is going to get re-typechecked.
let finish_loop = listToMaybe
[ tail loop | loop <- comp_graph_loops
, head loop == this_build_mod ]
-- If this module finishes a loop then it must depend on all the other
-- modules in that loop because the entire module loop is going to be
-- re-typechecked once this module gets compiled. These extra dependencies
-- are this module's "internal" loop dependencies, because this module is
-- inside the loop in question.
let int_loop_deps = Set.fromList $
case finish_loop of
Nothing -> []
Just loop -> filter (/= this_build_mod) loop
-- If this module depends on a module within a loop then it must wait for
-- that loop to get re-typechecked, i.e. it must wait on the module that
-- finishes that loop. These extra dependencies are this module's
-- "external" loop dependencies, because this module is outside of the
-- loop(s) in question.
let ext_loop_deps = Set.fromList
[ head loop | loop <- comp_graph_loops
, any (`Set.member` textual_deps) loop
, this_build_mod `notElem` loop ]
let all_deps = foldl1 Set.union [textual_deps, int_loop_deps, ext_loop_deps]
-- All of the module's home-module dependencies.
let home_deps_with_idx =
[ home_dep | dep <- Set.toList all_deps
, Just home_dep <- [Map.lookup dep home_mod_map] ]
-- Sort the list of dependencies in reverse-topological order. This way, by
-- the time we get woken up by the result of an earlier dependency,
-- subsequent dependencies are more likely to have finished. This step
-- effectively reduces the number of MVars that each thread blocks on.
let home_deps = map fst $ sortBy (flip (comparing snd)) home_deps_with_idx
    -- Wait for all the module's dependencies to finish building.
deps_ok <- allM (fmap succeeded . readMVar) home_deps
-- We can't build this module if any of its dependencies failed to build.
if not deps_ok
then return Failed
else do
-- Any hsc_env at this point is OK to use since we only really require
-- that the HPT contains the HMIs of our dependencies.
hsc_env <- readMVar hsc_env_var
old_hpt <- readIORef old_hpt_var
let logger err = printBagOfErrors lcl_dflags (srcErrorMessages err)
-- Limit the number of parallel compiles.
let withSem sem = bracket_ (waitQSem sem) (signalQSem sem)
mb_mod_info <- withSem par_sem $
handleSourceError (\err -> do logger err; return Nothing) $ do
-- Have the ModSummary and HscEnv point to our local log_action
-- and filesToClean var.
let lcl_mod = localize_mod mod
let lcl_hsc_env = localize_hsc_env hsc_env
-- Compile the module.
mod_info <- upsweep_mod lcl_hsc_env old_hpt stable_mods lcl_mod
mod_index num_mods
return (Just mod_info)
case mb_mod_info of
Nothing -> return Failed
Just mod_info -> do
let this_mod = ms_mod_name mod
-- Prune the old HPT unless this is an hs-boot module.
unless (isBootSummary mod) $
atomicModifyIORef old_hpt_var $ \old_hpt ->
(delFromUFM old_hpt this_mod, ())
-- Update and fetch the global HscEnv.
lcl_hsc_env' <- modifyMVar hsc_env_var $ \hsc_env -> do
let hsc_env' = hsc_env { hsc_HPT = addToUFM (hsc_HPT hsc_env)
this_mod mod_info }
-- If this module is a loop finisher, now is the time to
-- re-typecheck the loop.
hsc_env'' <- case finish_loop of
Nothing -> return hsc_env'
Just loop -> typecheckLoop lcl_dflags hsc_env' $
map (moduleName . fst) loop
return (hsc_env'', localize_hsc_env hsc_env'')
-- Clean up any intermediate files.
cleanup lcl_hsc_env'
return Succeeded
where
localize_mod mod
= mod { ms_hspp_opts = (ms_hspp_opts mod)
{ log_action = log_action lcl_dflags
, filesToClean = filesToClean lcl_dflags } }
localize_hsc_env hsc_env
= hsc_env { hsc_dflags = (hsc_dflags hsc_env)
{ log_action = log_action lcl_dflags
, filesToClean = filesToClean lcl_dflags } }
-- -----------------------------------------------------------------------------
--
-- | The upsweep
--
-- This is where we compile each module in the module graph, in a pass
-- from the bottom to the top of the graph.
--
-- There had better not be any cyclic groups here -- we check for them.
upsweep
:: GhcMonad m
=> HomePackageTable -- ^ HPT from last time round (pruned)
-> ([ModuleName],[ModuleName]) -- ^ stable modules (see checkStability)
-> (HscEnv -> IO ()) -- ^ How to clean up unwanted tmp files
-> [SCC ModSummary] -- ^ Mods to do (the worklist)
-> m (SuccessFlag,
[ModSummary])
-- ^ Returns:
--
-- 1. A flag whether the complete upsweep was successful.
-- 2. The 'HscEnv' in the monad has an updated HPT
-- 3. A list of modules which succeeded loading.
upsweep old_hpt stable_mods cleanup sccs = do
(res, done) <- upsweep' old_hpt [] sccs 1 (length sccs)
return (res, reverse done)
where
upsweep' _old_hpt done
[] _ _
= return (Succeeded, done)
upsweep' _old_hpt done
(CyclicSCC ms:_) _ _
= do dflags <- getSessionDynFlags
liftIO $ fatalErrorMsg dflags (cyclicModuleErr ms)
return (Failed, done)
upsweep' old_hpt done
(AcyclicSCC mod:mods) mod_index nmods
= do -- putStrLn ("UPSWEEP_MOD: hpt = " ++
-- show (map (moduleUserString.moduleName.mi_module.hm_iface)
-- (moduleEnvElts (hsc_HPT hsc_env)))
let logger _mod = defaultWarnErrLogger
hsc_env <- getSession
-- Remove unwanted tmp files between compilations
liftIO (cleanup hsc_env)
mb_mod_info
<- handleSourceError
(\err -> do logger mod (Just err); return Nothing) $ do
mod_info <- liftIO $ upsweep_mod hsc_env old_hpt stable_mods
mod mod_index nmods
logger mod Nothing -- log warnings
return (Just mod_info)
case mb_mod_info of
Nothing -> return (Failed, done)
Just mod_info -> do
let this_mod = ms_mod_name mod
-- Add new info to hsc_env
hpt1 = addToUFM (hsc_HPT hsc_env) this_mod mod_info
hsc_env1 = hsc_env { hsc_HPT = hpt1 }
-- Space-saving: delete the old HPT entry
-- for mod BUT if mod is a hs-boot
-- node, don't delete it. For the
                        -- interface, the HPT entry is probably for the
-- main Haskell source file. Deleting it
-- would force the real module to be recompiled
-- every time.
old_hpt1 | isBootSummary mod = old_hpt
| otherwise = delFromUFM old_hpt this_mod
done' = mod:done
-- fixup our HomePackageTable after we've finished compiling
-- a mutually-recursive loop. See reTypecheckLoop, below.
hsc_env2 <- liftIO $ reTypecheckLoop hsc_env1 mod done'
setSession hsc_env2
upsweep' old_hpt1 done' mods (mod_index+1) nmods
-- | Compile a single module. Always produce a Linkable for it if
-- successful. If no compilation happened, return the old Linkable.
upsweep_mod :: HscEnv
-> HomePackageTable
-> ([ModuleName],[ModuleName])
-> ModSummary
-> Int -- index of module
-> Int -- total number of modules
-> IO HomeModInfo
upsweep_mod hsc_env old_hpt (stable_obj, stable_bco) summary mod_index nmods
= let
this_mod_name = ms_mod_name summary
this_mod = ms_mod summary
mb_obj_date = ms_obj_date summary
obj_fn = ml_obj_file (ms_location summary)
hs_date = ms_hs_date summary
is_stable_obj = this_mod_name `elem` stable_obj
is_stable_bco = this_mod_name `elem` stable_bco
old_hmi = lookupUFM old_hpt this_mod_name
-- We're using the dflags for this module now, obtained by
-- applying any options in its LANGUAGE & OPTIONS_GHC pragmas.
dflags = ms_hspp_opts summary
prevailing_target = hscTarget (hsc_dflags hsc_env)
local_target = hscTarget dflags
-- If OPTIONS_GHC contains -fasm or -fllvm, be careful that
-- we don't do anything dodgy: these should only work to change
-- from -fllvm to -fasm and vice-versa, otherwise we could
-- end up trying to link object code to byte code.
target = if prevailing_target /= local_target
&& (not (isObjectTarget prevailing_target)
|| not (isObjectTarget local_target))
then prevailing_target
else local_target
-- store the corrected hscTarget into the summary
summary' = summary{ ms_hspp_opts = dflags { hscTarget = target } }
-- The old interface is ok if
-- a) we're compiling a source file, and the old HPT
-- entry is for a source file
-- b) we're compiling a hs-boot file
-- Case (b) allows an hs-boot file to get the interface of its
-- real source file on the second iteration of the compilation
-- manager, but that does no harm. Otherwise the hs-boot file
-- will always be recompiled
mb_old_iface
= case old_hmi of
Nothing -> Nothing
Just hm_info | isBootSummary summary -> Just iface
| not (mi_boot iface) -> Just iface
| otherwise -> Nothing
where
iface = hm_iface hm_info
compile_it :: Maybe Linkable -> SourceModified -> IO HomeModInfo
compile_it mb_linkable src_modified =
compileOne hsc_env summary' mod_index nmods
mb_old_iface mb_linkable src_modified
compile_it_discard_iface :: Maybe Linkable -> SourceModified
-> IO HomeModInfo
compile_it_discard_iface mb_linkable src_modified =
compileOne hsc_env summary' mod_index nmods
Nothing mb_linkable src_modified
-- With the HscNothing target we create empty linkables to avoid
-- recompilation. We have to detect these to recompile anyway if
-- the target changed since the last compile.
is_fake_linkable
| Just hmi <- old_hmi, Just l <- hm_linkable hmi =
null (linkableUnlinked l)
| otherwise =
-- we have no linkable, so it cannot be fake
False
implies False _ = True
implies True x = x
in
case () of
_
-- Regardless of whether we're generating object code or
-- byte code, we can always use an existing object file
-- if it is *stable* (see checkStability).
| is_stable_obj, Just hmi <- old_hmi -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "skipping stable obj mod:" <+> ppr this_mod_name)
return hmi
-- object is stable, and we have an entry in the
-- old HPT: nothing to do
| is_stable_obj, isNothing old_hmi -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling stable on-disk mod:" <+> ppr this_mod_name)
linkable <- liftIO $ findObjectLinkable this_mod obj_fn
(expectJust "upsweep1" mb_obj_date)
compile_it (Just linkable) SourceUnmodifiedAndStable
-- object is stable, but we need to load the interface
-- off disk to make a HMI.
| not (isObjectTarget target), is_stable_bco,
(target /= HscNothing) `implies` not is_fake_linkable ->
ASSERT(isJust old_hmi) -- must be in the old_hpt
let Just hmi = old_hmi in do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "skipping stable BCO mod:" <+> ppr this_mod_name)
return hmi
-- BCO is stable: nothing to do
| not (isObjectTarget target),
Just hmi <- old_hmi,
Just l <- hm_linkable hmi,
not (isObjectLinkable l),
(target /= HscNothing) `implies` not is_fake_linkable,
linkableTime l >= ms_hs_date summary -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling non-stable BCO mod:" <+> ppr this_mod_name)
compile_it (Just l) SourceUnmodified
-- we have an old BCO that is up to date with respect
-- to the source: do a recompilation check as normal.
-- When generating object code, if there's an up-to-date
-- object file on the disk, then we can use it.
-- However, if the object file is new (compared to any
-- linkable we had from a previous compilation), then we
-- must discard any in-memory interface, because this
-- means the user has compiled the source file
        -- separately and generated a new interface that we must
-- read from the disk.
--
| isObjectTarget target,
Just obj_date <- mb_obj_date,
obj_date >= hs_date -> do
case old_hmi of
Just hmi
| Just l <- hm_linkable hmi,
isObjectLinkable l && linkableTime l == obj_date -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling mod with new on-disk obj:" <+> ppr this_mod_name)
compile_it (Just l) SourceUnmodified
_otherwise -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling mod with new on-disk obj2:" <+> ppr this_mod_name)
linkable <- liftIO $ findObjectLinkable this_mod obj_fn obj_date
compile_it_discard_iface (Just linkable) SourceUnmodified
_otherwise -> do
liftIO $ debugTraceMsg (hsc_dflags hsc_env) 5
(text "compiling mod:" <+> ppr this_mod_name)
compile_it Nothing SourceModified
-- Filter modules in the HPT
retainInTopLevelEnvs :: [ModuleName] -> HomePackageTable -> HomePackageTable
retainInTopLevelEnvs keep_these hpt
= listToUFM [ (mod, expectJust "retain" mb_mod_info)
| mod <- keep_these
, let mb_mod_info = lookupUFM hpt mod
, isJust mb_mod_info ]
-- ---------------------------------------------------------------------------
-- Typecheck module loops
{-
See bug #930. This code fixes a long-standing bug in --make. The
problem is that when compiling the modules *inside* a loop, a data
type that is only defined at the top of the loop looks opaque; but
after the loop is done, the structure of the data type becomes
apparent.
The difficulty is then that two different bits of code have
different notions of what the data type looks like.
The idea is that after we compile a module which also has an .hs-boot
file, we re-generate the ModDetails for each of the modules that
depends on the .hs-boot file, so that everyone points to the proper
TyCons, Ids etc. defined by the real module, not the boot module.
Fortunately re-generating a ModDetails from a ModIface is easy: the
function TcIface.typecheckIface does exactly that.
Picking the modules to re-typecheck is slightly tricky. Starting from
the module graph consisting of the modules that have already been
compiled, we reverse the edges (so they point from the imported module
to the importing module), and depth-first-search from the .hs-boot
node. This gives us all the modules that depend transitively on the
.hs-boot module, and those are exactly the modules that we need to
re-typecheck.
Following this fix, GHC can compile itself with --make -O2.
-}
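-- A small worked example (illustrative): suppose A.hs imports B.hs, and B.hs
-- breaks the cycle with a {-# SOURCE #-} import of A, so B is compiled
-- against the opaque view in A.hs-boot.  Once A.hs itself has been compiled,
-- the loop is re-typechecked: B's ModDetails are re-generated from its
-- interface via 'typecheckIface', so B's references now point at the TyCons
-- and Ids defined by the real A.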
reTypecheckLoop :: HscEnv -> ModSummary -> ModuleGraph -> IO HscEnv
reTypecheckLoop hsc_env ms graph
| Just loop <- getModLoop ms graph
, let non_boot = filter (not.isBootSummary) loop
= typecheckLoop (hsc_dflags hsc_env) hsc_env (map ms_mod_name non_boot)
| otherwise
= return hsc_env
getModLoop :: ModSummary -> ModuleGraph -> Maybe [ModSummary]
getModLoop ms graph
| not (isBootSummary ms)
, any (\m -> ms_mod m == this_mod && isBootSummary m) graph
, let mss = reachableBackwards (ms_mod_name ms) graph
= Just mss
| otherwise
= Nothing
where
this_mod = ms_mod ms
typecheckLoop :: DynFlags -> HscEnv -> [ModuleName] -> IO HscEnv
typecheckLoop dflags hsc_env mods = do
debugTraceMsg dflags 2 $
text "Re-typechecking loop: " <> ppr mods
new_hpt <-
fixIO $ \new_hpt -> do
let new_hsc_env = hsc_env{ hsc_HPT = new_hpt }
mds <- initIfaceCheck new_hsc_env $
mapM (typecheckIface . hm_iface) hmis
let new_hpt = addListToUFM old_hpt
(zip mods [ hmi{ hm_details = details }
| (hmi,details) <- zip hmis mds ])
return new_hpt
return hsc_env{ hsc_HPT = new_hpt }
where
old_hpt = hsc_HPT hsc_env
hmis = map (expectJust "typecheckLoop" . lookupUFM old_hpt) mods
reachableBackwards :: ModuleName -> [ModSummary] -> [ModSummary]
reachableBackwards mod summaries
= [ ms | (ms,_,_) <- reachableG (transposeG graph) root ]
where -- the rest just sets up the graph:
(graph, lookup_node) = moduleGraphNodes False summaries
root = expectJust "reachableBackwards" (lookup_node HsBootFile mod)
-- ---------------------------------------------------------------------------
--
-- | Topological sort of the module graph
topSortModuleGraph
:: Bool
-- ^ Drop hi-boot nodes? (see below)
-> [ModSummary]
-> Maybe ModuleName
-- ^ Root module name. If @Nothing@, use the full graph.
-> [SCC ModSummary]
-- ^ Calculate SCCs of the module graph, possibly dropping the hi-boot nodes
-- The resulting list of strongly-connected-components is in topologically
-- sorted order, starting with the module(s) at the bottom of the
-- dependency graph (ie compile them first) and ending with the ones at
-- the top.
--
-- Drop hi-boot nodes (first boolean arg)?
--
-- - @False@: treat the hi-boot summaries as nodes of the graph,
-- so the graph must be acyclic
--
-- - @True@: eliminate the hi-boot nodes, and instead pretend
--      that a source-import of Foo is an import of Foo
-- The resulting graph has no hi-boot nodes, but can be cyclic
topSortModuleGraph drop_hs_boot_nodes summaries mb_root_mod
= map (fmap summaryNodeSummary) $ stronglyConnCompG initial_graph
where
(graph, lookup_node) = moduleGraphNodes drop_hs_boot_nodes summaries
initial_graph = case mb_root_mod of
Nothing -> graph
Just root_mod ->
-- restrict the graph to just those modules reachable from
-- the specified module. We do this by building a graph with
-- the full set of nodes, and determining the reachable set from
-- the specified node.
let root | Just node <- lookup_node HsSrcFile root_mod, graph `hasVertexG` node = node
| otherwise = throwGhcException (ProgramError "module does not exist")
in graphFromEdgedVertices (seq root (reachableG graph root))
type SummaryNode = (ModSummary, Int, [Int])
summaryNodeKey :: SummaryNode -> Int
summaryNodeKey (_, k, _) = k
summaryNodeSummary :: SummaryNode -> ModSummary
summaryNodeSummary (s, _, _) = s
moduleGraphNodes :: Bool -> [ModSummary]
-> (Graph SummaryNode, HscSource -> ModuleName -> Maybe SummaryNode)
moduleGraphNodes drop_hs_boot_nodes summaries = (graphFromEdgedVertices nodes, lookup_node)
where
numbered_summaries = zip summaries [1..]
lookup_node :: HscSource -> ModuleName -> Maybe SummaryNode
lookup_node hs_src mod = Map.lookup (mod, hs_src) node_map
lookup_key :: HscSource -> ModuleName -> Maybe Int
lookup_key hs_src mod = fmap summaryNodeKey (lookup_node hs_src mod)
node_map :: NodeMap SummaryNode
node_map = Map.fromList [ ((moduleName (ms_mod s), ms_hsc_src s), node)
| node@(s, _, _) <- nodes ]
-- We use integers as the keys for the SCC algorithm
nodes :: [SummaryNode]
nodes = [ (s, key, out_keys)
| (s, key) <- numbered_summaries
-- Drop the hi-boot ones if told to do so
, not (isBootSummary s && drop_hs_boot_nodes)
, let out_keys = out_edge_keys hs_boot_key (map unLoc (ms_home_srcimps s)) ++
out_edge_keys HsSrcFile (map unLoc (ms_home_imps s)) ++
(-- see [boot-edges] below
if drop_hs_boot_nodes || ms_hsc_src s == HsBootFile
then []
else case lookup_key HsBootFile (ms_mod_name s) of
Nothing -> []
Just k -> [k]) ]
-- [boot-edges] if this is a .hs and there is an equivalent
-- .hs-boot, add a link from the former to the latter. This
-- has the effect of detecting bogus cases where the .hs-boot
-- depends on the .hs, by introducing a cycle. Additionally,
-- it ensures that we will always process the .hs-boot before
-- the .hs, and so the HomePackageTable will always have the
-- most up to date information.
-- Drop hs-boot nodes by using HsSrcFile as the key
hs_boot_key | drop_hs_boot_nodes = HsSrcFile
| otherwise = HsBootFile
out_edge_keys :: HscSource -> [ModuleName] -> [Int]
out_edge_keys hi_boot ms = mapMaybe (lookup_key hi_boot) ms
    -- If we want to keep the hi-boot nodes, we call lookup_key with the
    -- IsBootInterface parameter True; otherwise False.
type NodeKey = (ModuleName, HscSource) -- The nodes of the graph are
type NodeMap a = Map.Map NodeKey a -- keyed by (mod, src_file_type) pairs
msKey :: ModSummary -> NodeKey
msKey (ModSummary { ms_mod = mod, ms_hsc_src = boot }) = (moduleName mod,boot)
mkNodeMap :: [ModSummary] -> NodeMap ModSummary
mkNodeMap summaries = Map.fromList [ (msKey s, s) | s <- summaries]
nodeMapElts :: NodeMap a -> [a]
nodeMapElts = Map.elems
-- | If there are {-# SOURCE #-} imports between strongly connected
-- components in the topological sort, then those imports can
-- definitely be replaced by ordinary non-SOURCE imports: if SOURCE
-- were necessary, then the edge would be part of a cycle.
warnUnnecessarySourceImports :: GhcMonad m => [SCC ModSummary] -> m ()
warnUnnecessarySourceImports sccs = do
dflags <- getDynFlags
logWarnings (listToBag (concatMap (check dflags . flattenSCC) sccs))
where check dflags ms =
let mods_in_this_cycle = map ms_mod_name ms in
[ warn dflags i | m <- ms, i <- ms_home_srcimps m,
unLoc i `notElem` mods_in_this_cycle ]
warn :: DynFlags -> Located ModuleName -> WarnMsg
warn dflags (L loc mod) =
mkPlainErrMsg dflags loc
(ptext (sLit "Warning: {-# SOURCE #-} unnecessary in import of ")
<+> quotes (ppr mod))
reportImportErrors :: MonadIO m => [Either ErrMsg b] -> m [b]
reportImportErrors xs | null errs = return oks
| otherwise = throwManyErrors errs
where (errs, oks) = partitionEithers xs
throwManyErrors :: MonadIO m => [ErrMsg] -> m ab
throwManyErrors errs = liftIO $ throwIO $ mkSrcErr $ listToBag errs
-----------------------------------------------------------------------------
--
-- | Downsweep (dependency analysis)
--
-- Chase downwards from the specified root set, returning summaries
-- for all home modules encountered. Only follow source-import
-- links.
--
-- We pass in the previous collection of summaries, which is used as a
-- cache to avoid recalculating a module summary if the source is
-- unchanged.
--
-- The returned list of [ModSummary] nodes has one node for each home-package
-- module, plus one for any hs-boot files. The imports of these nodes
-- are all there, including the imports of non-home-package modules.
downsweep :: HscEnv
-> [ModSummary] -- Old summaries
-> [ModuleName] -- Ignore dependencies on these; treat
-- them as if they were package modules
-> Bool -- True <=> allow multiple targets to have
-- the same module name; this is
-- very useful for ghc -M
-> IO [Either ErrMsg ModSummary]
-- The elts of [ModSummary] all have distinct
-- (Modules, IsBoot) identifiers, unless the Bool is true
-- in which case there can be repeats
downsweep hsc_env old_summaries excl_mods allow_dup_roots
= do
rootSummaries <- mapM getRootSummary roots
rootSummariesOk <- reportImportErrors rootSummaries
let root_map = mkRootMap rootSummariesOk
checkDuplicates root_map
summs <- loop (concatMap msDeps rootSummariesOk) root_map
return summs
where
dflags = hsc_dflags hsc_env
roots = hsc_targets hsc_env
old_summary_map :: NodeMap ModSummary
old_summary_map = mkNodeMap old_summaries
getRootSummary :: Target -> IO (Either ErrMsg ModSummary)
getRootSummary (Target (TargetFile file mb_phase) obj_allowed maybe_buf)
= do exists <- liftIO $ doesFileExist file
if exists
then Right `fmap` summariseFile hsc_env old_summaries file mb_phase
obj_allowed maybe_buf
else return $ Left $ mkPlainErrMsg dflags noSrcSpan $
text "can't find file:" <+> text file
getRootSummary (Target (TargetModule modl) obj_allowed maybe_buf)
= do maybe_summary <- summariseModule hsc_env old_summary_map False
(L rootLoc modl) obj_allowed
maybe_buf excl_mods
case maybe_summary of
Nothing -> return $ Left $ packageModErr dflags modl
Just s -> return s
rootLoc = mkGeneralSrcSpan (fsLit "<command line>")
-- In a root module, the filename is allowed to diverge from the module
-- name, so we have to check that there aren't multiple root files
-- defining the same module (otherwise the duplicates will be silently
-- ignored, leading to confusing behaviour).
checkDuplicates :: NodeMap [Either ErrMsg ModSummary] -> IO ()
checkDuplicates root_map
| allow_dup_roots = return ()
| null dup_roots = return ()
| otherwise = liftIO $ multiRootsErr dflags (head dup_roots)
where
dup_roots :: [[ModSummary]] -- Each at least of length 2
dup_roots = filterOut isSingleton $ map rights $ nodeMapElts root_map
loop :: [(Located ModuleName,IsBootInterface)]
-- Work list: process these modules
-> NodeMap [Either ErrMsg ModSummary]
-- Visited set; the range is a list because
-- the roots can have the same module names
-- if allow_dup_roots is True
-> IO [Either ErrMsg ModSummary]
-- The result includes the worklist, except
-- for those mentioned in the visited set
loop [] done = return (concat (nodeMapElts done))
loop ((wanted_mod, is_boot) : ss) done
| Just summs <- Map.lookup key done
= if isSingleton summs then
loop ss done
else
do { multiRootsErr dflags (rights summs); return [] }
| otherwise
= do mb_s <- summariseModule hsc_env old_summary_map
is_boot wanted_mod True
Nothing excl_mods
case mb_s of
Nothing -> loop ss done
Just (Left e) -> loop ss (Map.insert key [Left e] done)
Just (Right s)-> loop (msDeps s ++ ss) (Map.insert key [Right s] done)
where
key = (unLoc wanted_mod, if is_boot then HsBootFile else HsSrcFile)
mkRootMap :: [ModSummary] -> NodeMap [Either ErrMsg ModSummary]
mkRootMap summaries = Map.insertListWith (flip (++))
[ (msKey s, [Right s]) | s <- summaries ]
Map.empty
-- | Returns the dependencies of the ModSummary s.
-- A wrinkle is that for a {-# SOURCE #-} import we return
-- *both* the hs-boot file
-- *and* the source file
-- as "dependencies". That ensures that the list of all relevant
-- modules always contains B.hs if it contains B.hs-boot.
-- Remember, this pass isn't doing the topological sort. It's
-- just gathering the list of all relevant ModSummaries
msDeps :: ModSummary -> [(Located ModuleName, IsBootInterface)]
msDeps s =
concat [ [(m,True), (m,False)] | m <- ms_home_srcimps s ]
++ [ (m,False) | m <- ms_home_imps s ]
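-- For illustration (hypothetical module with a {-# SOURCE #-} import of B and an
-- ordinary home import of C): msDeps yields, modulo the Located wrappers,
--   [(B, True), (B, False), (C, False)]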
home_imps :: [Located (ImportDecl RdrName)] -> [Located ModuleName]
home_imps imps = [ ideclName i | L _ i <- imps, isLocal (ideclPkgQual i) ]
where isLocal Nothing = True
isLocal (Just pkg) | pkg == fsLit "this" = True -- "this" is special
isLocal _ = False
ms_home_allimps :: ModSummary -> [ModuleName]
ms_home_allimps ms = map unLoc (ms_home_srcimps ms ++ ms_home_imps ms)
ms_home_srcimps :: ModSummary -> [Located ModuleName]
ms_home_srcimps = home_imps . ms_srcimps
ms_home_imps :: ModSummary -> [Located ModuleName]
ms_home_imps = home_imps . ms_imps
-----------------------------------------------------------------------------
-- Summarising modules
-- We have two types of summarisation:
--
-- * Summarise a file. This is used for the root module(s) passed to
-- cmLoadModules. The file is read, and used to determine the root
-- module name. The module name may differ from the filename.
--
-- * Summarise a module. We are given a module name, and must provide
-- a summary. The finder is used to locate the file in which the module
-- resides.
summariseFile
:: HscEnv
-> [ModSummary] -- old summaries
-> FilePath -- source file name
-> Maybe Phase -- start phase
-> Bool -- object code allowed?
-> Maybe (StringBuffer,UTCTime)
-> IO ModSummary
summariseFile hsc_env old_summaries file mb_phase obj_allowed maybe_buf
-- We can use a cached summary if one is available and the
-- source file hasn't changed, but we have to look up the summary
-- by source file, rather than by module name as summariseModule does.
| Just old_summary <- findSummaryBySourceFile old_summaries file
= do
let location = ms_location old_summary
src_timestamp <- get_src_timestamp
-- The file exists; we checked in getRootSummary above.
-- If it gets removed subsequently, then this
-- getModificationUTCTime may fail, but that's the right
-- behaviour.
-- return the cached summary if the source didn't change
if ms_hs_date old_summary == src_timestamp &&
not (gopt Opt_ForceRecomp (hsc_dflags hsc_env))
then do -- update the object-file timestamp
obj_timestamp <-
if isObjectTarget (hscTarget (hsc_dflags hsc_env))
|| obj_allowed -- bug #1205
then liftIO $ getObjTimestamp location False
else return Nothing
return old_summary{ ms_obj_date = obj_timestamp }
else
new_summary src_timestamp
| otherwise
= do src_timestamp <- get_src_timestamp
new_summary src_timestamp
where
get_src_timestamp = case maybe_buf of
Just (_,t) -> return t
Nothing -> liftIO $ getModificationUTCTime file
-- getModificationUTCTime may fail
new_summary src_timestamp = do
let dflags = hsc_dflags hsc_env
(dflags', hspp_fn, buf)
<- preprocessFile hsc_env file mb_phase maybe_buf
(srcimps,the_imps, L _ mod_name) <- getImports dflags' buf hspp_fn file
-- Make a ModLocation for this file
location <- liftIO $ mkHomeModLocation dflags mod_name file
-- Tell the Finder cache where it is, so that subsequent calls
-- to findModule will find it, even if it's not on any search path
mod <- liftIO $ addHomeModuleToFinder hsc_env mod_name location
-- when the user asks to load a source file by name, we only
-- use an object file if -fobject-code is on. See #1205.
obj_timestamp <-
if isObjectTarget (hscTarget (hsc_dflags hsc_env))
|| obj_allowed -- bug #1205
then liftIO $ modificationTimeIfExists (ml_obj_file location)
else return Nothing
return (ModSummary { ms_mod = mod, ms_hsc_src = HsSrcFile,
ms_location = location,
ms_hspp_file = hspp_fn,
ms_hspp_opts = dflags',
ms_hspp_buf = Just buf,
ms_srcimps = srcimps, ms_textual_imps = the_imps,
ms_hs_date = src_timestamp,
ms_obj_date = obj_timestamp })
findSummaryBySourceFile :: [ModSummary] -> FilePath -> Maybe ModSummary
findSummaryBySourceFile summaries file
= case [ ms | ms <- summaries, HsSrcFile <- [ms_hsc_src ms],
expectJust "findSummaryBySourceFile" (ml_hs_file (ms_location ms)) == file ] of
[] -> Nothing
(x:_) -> Just x
-- Summarise a module, and pick up source and timestamp.
summariseModule
:: HscEnv
-> NodeMap ModSummary -- Map of old summaries
-> IsBootInterface -- True <=> a {-# SOURCE #-} import
-> Located ModuleName -- Imported module to be summarised
-> Bool -- object code allowed?
-> Maybe (StringBuffer, UTCTime)
-> [ModuleName] -- Modules to exclude
-> IO (Maybe (Either ErrMsg ModSummary)) -- Its new summary
summariseModule hsc_env old_summary_map is_boot (L loc wanted_mod)
obj_allowed maybe_buf excl_mods
| wanted_mod `elem` excl_mods
= return Nothing
| Just old_summary <- Map.lookup (wanted_mod, hsc_src) old_summary_map
= do -- Find its new timestamp; all the
-- ModSummaries in the old map have valid ml_hs_files
let location = ms_location old_summary
src_fn = expectJust "summariseModule" (ml_hs_file location)
-- check the modification time on the source file, and
-- return the cached summary if it hasn't changed. If the
-- file has disappeared, we need to call the Finder again.
case maybe_buf of
Just (_,t) -> check_timestamp old_summary location src_fn t
Nothing -> do
m <- tryIO (getModificationUTCTime src_fn)
case m of
Right t -> check_timestamp old_summary location src_fn t
Left e | isDoesNotExistError e -> find_it
| otherwise -> ioError e
| otherwise = find_it
where
dflags = hsc_dflags hsc_env
hsc_src = if is_boot then HsBootFile else HsSrcFile
check_timestamp old_summary location src_fn src_timestamp
| ms_hs_date old_summary == src_timestamp &&
not (gopt Opt_ForceRecomp dflags) = do
-- update the object-file timestamp
obj_timestamp <-
if isObjectTarget (hscTarget (hsc_dflags hsc_env))
|| obj_allowed -- bug #1205
then getObjTimestamp location is_boot
else return Nothing
return (Just (Right old_summary{ ms_obj_date = obj_timestamp }))
| otherwise =
-- source changed: re-summarise.
new_summary location (ms_mod old_summary) src_fn src_timestamp
find_it = do
-- Don't use the Finder's cache this time. If the module was
-- previously a package module, it may have now appeared on the
-- search path, so we want to consider it to be a home module. If
-- the module was previously a home module, it may have moved.
uncacheModule hsc_env wanted_mod
found <- findImportedModule hsc_env wanted_mod Nothing
case found of
Found location mod
| isJust (ml_hs_file location) ->
-- Home package
just_found location mod
| otherwise ->
-- Drop external-pkg
ASSERT(modulePackageId mod /= thisPackage dflags)
return Nothing
err -> return $ Just $ Left $ noModError dflags loc wanted_mod err
-- Not found
just_found location mod = do
-- Adjust location to point to the hs-boot source file,
-- hi file, object file, when is_boot says so
let location' | is_boot = addBootSuffixLocn location
| otherwise = location
src_fn = expectJust "summarise2" (ml_hs_file location')
-- Check that it exists
-- It might have been deleted since the Finder last found it
maybe_t <- modificationTimeIfExists src_fn
case maybe_t of
Nothing -> return $ Just $ Left $ noHsFileErr dflags loc src_fn
Just t -> new_summary location' mod src_fn t
new_summary location mod src_fn src_timestamp
= do
-- Preprocess the source file and get its imports
-- The dflags' contains the OPTIONS pragmas
(dflags', hspp_fn, buf) <- preprocessFile hsc_env src_fn Nothing maybe_buf
(srcimps, the_imps, L mod_loc mod_name) <- getImports dflags' buf hspp_fn src_fn
when (mod_name /= wanted_mod) $
throwOneError $ mkPlainErrMsg dflags' mod_loc $
text "File name does not match module name:"
$$ text "Saw:" <+> quotes (ppr mod_name)
$$ text "Expected:" <+> quotes (ppr wanted_mod)
-- Find the object timestamp, and return the summary
obj_timestamp <-
if isObjectTarget (hscTarget (hsc_dflags hsc_env))
|| obj_allowed -- bug #1205
then getObjTimestamp location is_boot
else return Nothing
return (Just (Right (ModSummary { ms_mod = mod,
ms_hsc_src = hsc_src,
ms_location = location,
ms_hspp_file = hspp_fn,
ms_hspp_opts = dflags',
ms_hspp_buf = Just buf,
ms_srcimps = srcimps,
ms_textual_imps = the_imps,
ms_hs_date = src_timestamp,
ms_obj_date = obj_timestamp })))
getObjTimestamp :: ModLocation -> Bool -> IO (Maybe UTCTime)
getObjTimestamp location is_boot
= if is_boot then return Nothing
else modificationTimeIfExists (ml_obj_file location)
preprocessFile :: HscEnv
-> FilePath
-> Maybe Phase -- ^ Starting phase
-> Maybe (StringBuffer,UTCTime)
-> IO (DynFlags, FilePath, StringBuffer)
preprocessFile hsc_env src_fn mb_phase Nothing
= do
(dflags', hspp_fn) <- preprocess hsc_env (src_fn, mb_phase)
buf <- hGetStringBuffer hspp_fn
return (dflags', hspp_fn, buf)
preprocessFile hsc_env src_fn mb_phase (Just (buf, _time))
= do
let dflags = hsc_dflags hsc_env
let local_opts = getOptions dflags buf src_fn
(dflags', leftovers, warns)
<- parseDynamicFilePragma dflags local_opts
checkProcessArgsResult dflags leftovers
handleFlagWarnings dflags' warns
let needs_preprocessing
| Just (Unlit _) <- mb_phase = True
| Nothing <- mb_phase, Unlit _ <- startPhase src_fn = True
-- note: local_opts is only required if there's no Unlit phase
| xopt Opt_Cpp dflags' = True
| gopt Opt_Pp dflags' = True
| otherwise = False
when needs_preprocessing $
throwGhcExceptionIO (ProgramError "buffer needs preprocessing; interactive check disabled")
return (dflags', src_fn, buf)
-----------------------------------------------------------------------------
-- Error messages
-----------------------------------------------------------------------------
noModError :: DynFlags -> SrcSpan -> ModuleName -> FindResult -> ErrMsg
-- ToDo: we don't have a proper line number for this error
noModError dflags loc wanted_mod err
= mkPlainErrMsg dflags loc $ cannotFindModule dflags wanted_mod err
noHsFileErr :: DynFlags -> SrcSpan -> String -> ErrMsg
noHsFileErr dflags loc path
= mkPlainErrMsg dflags loc $ text "Can't find" <+> text path
packageModErr :: DynFlags -> ModuleName -> ErrMsg
packageModErr dflags mod
= mkPlainErrMsg dflags noSrcSpan $
text "module" <+> quotes (ppr mod) <+> text "is a package module"
multiRootsErr :: DynFlags -> [ModSummary] -> IO ()
multiRootsErr _ [] = panic "multiRootsErr"
multiRootsErr dflags summs@(summ1:_)
= throwOneError $ mkPlainErrMsg dflags noSrcSpan $
text "module" <+> quotes (ppr mod) <+>
text "is defined in multiple files:" <+>
sep (map text files)
where
mod = ms_mod summ1
files = map (expectJust "checkDup" . ml_hs_file . ms_location) summs
cyclicModuleErr :: [ModSummary] -> SDoc
-- From a strongly connected component we find
-- a single cycle to report
cyclicModuleErr mss
= ASSERT( not (null mss) )
case findCycle graph of
Nothing -> ptext (sLit "Unexpected non-cycle") <+> ppr mss
Just path -> vcat [ ptext (sLit "Module imports form a cycle:")
, nest 2 (show_path path) ]
where
graph :: [Node NodeKey ModSummary]
graph = [(ms, msKey ms, get_deps ms) | ms <- mss]
get_deps :: ModSummary -> [NodeKey]
get_deps ms = ([ (unLoc m, HsBootFile) | m <- ms_home_srcimps ms ] ++
[ (unLoc m, HsSrcFile) | m <- ms_home_imps ms ])
show_path [] = panic "show_path"
show_path [m] = ptext (sLit "module") <+> ppr_ms m
<+> ptext (sLit "imports itself")
show_path (m1:m2:ms) = vcat ( nest 7 (ptext (sLit "module") <+> ppr_ms m1)
: nest 6 (ptext (sLit "imports") <+> ppr_ms m2)
: go ms )
where
go [] = [ptext (sLit "which imports") <+> ppr_ms m1]
go (m:ms) = (ptext (sLit "which imports") <+> ppr_ms m) : go ms
ppr_ms :: ModSummary -> SDoc
ppr_ms ms = quotes (ppr (moduleName (ms_mod ms))) <+>
(parens (text (msHsFilePath ms)))
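-- For illustration (hypothetical two-module cycle between A and B), the rendered
-- report looks roughly like:
--   Module imports form a cycle:
--     module 'A' (A.hs)
--     imports 'B' (B.hs)
--     which imports 'A' (A.hs)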
|
frantisekfarka/ghc-dsi
|
compiler/main/GhcMake.hs
|
bsd-3-clause
| 82,584 | 0 | 35 | 26,184 | 13,864 | 7,087 | 6,777 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module T16895a where
import Language.Haskell.TH
main = print $(uInfixE [|1|] [|id id|] [|2|])
|
sdiehl/ghc
|
testsuite/tests/th/T16895a.hs
|
bsd-3-clause
| 130 | 0 | 8 | 20 | 39 | 26 | 13 | 4 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE OverloadedStrings #-}
module Reporting.Error where
import Data.Aeson ((.=))
import qualified Data.Aeson as Json
import Prelude hiding (print)
import qualified Reporting.Annotation as A
import qualified Reporting.Error.Canonicalize as Canonicalize
import qualified Reporting.Error.Docs as Docs
import qualified Reporting.Error.Syntax as Syntax
import qualified Reporting.Error.Type as Type
import qualified Reporting.PrettyPrint as P
import qualified Reporting.Report as Report
-- ALL POSSIBLE ERRORS
data Error
= Syntax Syntax.Error
| Canonicalize Canonicalize.Error
| Type Type.Error
| Docs Docs.Error
-- TO REPORT
toReport :: P.Dealiaser -> Error -> Report.Report
toReport dealiaser err =
case err of
Syntax syntaxError ->
Syntax.toReport dealiaser syntaxError
Canonicalize canonicalizeError ->
Canonicalize.toReport dealiaser canonicalizeError
Type typeError ->
Type.toReport dealiaser typeError
Docs docsError ->
Docs.toReport docsError
-- TO STRING
toString :: P.Dealiaser -> String -> String -> A.Located Error -> String
toString dealiaser location source (A.A region err) =
Report.toString location region (toReport dealiaser err) source
print :: P.Dealiaser -> String -> String -> A.Located Error -> IO ()
print dealiaser location source (A.A region err) =
Report.printError location region (toReport dealiaser err) source
-- TO JSON
toJson :: P.Dealiaser -> FilePath -> A.Located Error -> Json.Value
toJson dealiaser filePath (A.A region err) =
let
(maybeRegion, additionalFields) =
case err of
Syntax syntaxError ->
Report.toJson [] (Syntax.toReport dealiaser syntaxError)
Canonicalize canonicalizeError ->
let
suggestions =
maybe []
(\s -> ["suggestions" .= s])
(Canonicalize.extractSuggestions canonicalizeError)
in
Report.toJson suggestions (Canonicalize.toReport dealiaser canonicalizeError)
Type typeError ->
Report.toJson [] (Type.toReport dealiaser typeError)
Docs docsError ->
Report.toJson [] (Docs.toReport docsError)
in
Json.object $
[ "file" .= filePath
, "region" .= region
, "subregion" .= maybeRegion
, "type" .= ("error" :: String)
]
++ additionalFields
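-- For illustration, the resulting object always carries the "file", "region",
-- "subregion" and "type" ("error") fields; Canonicalize errors may additionally
-- carry a "suggestions" field through additionalFields.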
|
johnpmayer/elm-compiler
|
src/Reporting/Error.hs
|
bsd-3-clause
| 2,485 | 0 | 20 | 635 | 638 | 341 | 297 | 59 | 4 |
{-# OPTIONS_GHC -fwarn-unused-imports -fno-warn-missing-methods #-}
-- Check that although 'index' is apparently only used
-- unqualified, we nevertheless do not get a redundant-import warning
-- #3776
module T3776 where
import qualified Data.Ix( Ix(index) )
instance Data.Ix.Ix Float where
index = error "urk"
|
sdiehl/ghc
|
testsuite/tests/module/T3776.hs
|
bsd-3-clause
| 319 | 0 | 6 | 50 | 42 | 27 | 15 | 5 | 0 |
module HAD.Y2014.M03.D20.Exercise where
import System.Random (Random, StdGen)
-- | addNoise
-- add "noise" to a list of number: each number is modified by a
-- random value between -x and x (where x is one of the parameter of the
-- function.
--
-- Quite easy
-- I didn't try it pointfree
-- I didn't try to find a one-liner
--
-- The function signature follows the idea of the methods in the System.Random
-- module: given a standard generator, you return the modified list and the
-- generator in an altered state.
--
addNoise :: (Num a, Random a) => a -> [a] -> StdGen -> ([a], StdGen)
addNoise = undefined
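-- A possible (untested) sketch, assuming System.Random.randomR and
-- Data.List.mapAccumL are in scope:
--
--   addNoise x xs g0 =
--     let step g n = let (d, g') = randomR (negate x, x) g in (g', n + d)
--         (gN, ys) = mapAccumL step g0 xs
--     in (ys, gN)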
|
1HaskellADay/1HAD
|
exercises/HAD/Y2014/M03/D20/Exercise.hs
|
mit
| 574 | 0 | 10 | 109 | 71 | 48 | 23 | 3 | 1 |
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies, GADTs,
ConstraintKinds, DataKinds, KindSignatures,
FlexibleInstances #-}
{-# OPTIONS -fno-warn-redundant-constraints #-}
module T10195 where
import GHC.Exts
data Foo m zp r'q = Foo zp
data Dict :: Constraint -> * where
Dict :: a => Dict a
type family BarFamily a b :: Bool
class Bar m m'
instance (BarFamily m m' ~ 'True) => Bar m m'
magic :: (Bar m m') => c m zp -> Foo m zp (c m' zq)
magic = undefined
getDict :: a -> Dict (Num a)
getDict _ = undefined
fromScalar :: r -> c m r
fromScalar = undefined
foo :: (Bar m m')
=> c m zp -> Foo m zp (c m' zq) -> Foo m zp (c m' zq)
foo b (Foo sc) =
let scinv = fromScalar sc
in case getDict scinv of
Dict -> magic $ scinv * b
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_compile/T10195.hs
|
bsd-3-clause
| 764 | 0 | 11 | 191 | 293 | 152 | 141 | -1 | -1 |
{-# LANGUAGE TupleSections, FlexibleContexts #-}
module Kachushi.Decision
(
chooseFirstFive
, chooseOne
) where
import Grabble.Grabble
import Kachushi.Cards (Card (..))
import Kachushi.OFCP (Board (..), FilledBoard (..), Row (..), Slot (..), toBoard, scoreGame)
import Kachushi.KState (KState (..), boards, deck, putCard, putCards)
import Kachushi.Util (splice)
import Control.Parallel.Strategies (parMap, rdeepseq)
import Control.DeepSeq (force)
import Control.Monad.Random (MonadRandom (..))
import Control.Monad (liftM, replicateM)
import qualified Control.Monad.Parallel as MP (mapM, MonadParallel (..))
import Control.Monad.State (MonadState (..), StateT (..), runStateT, evalStateT, execState, modify)
import Control.Lens (view, (&), ix, (.~), set)
import Control.Arrow (second)
import Data.List (maximumBy, sortBy, (\\))
import Data.Function (on)
import Data.Array (elems)
---------------------------
-- Instances
---------------------------
instance MP.MonadParallel m => MP.MonadParallel (StateT s m) where
bindM2 f ma mb =
StateT (\s -> let f' a b = runStateT (f a b) s
in MP.bindM2 f' (evalStateT ma s) (evalStateT mb s))
---------------------------
-- Constructors
---------------------------
randomFills :: (MonadRandom m) => [Card] -> [Board] -> m [FilledBoard]
randomFills deck boards = do
let cardsLefts = map (\board -> 27 - nextTop board
- nextMiddle board
- nextBottom board) $ boards
[newCards] <- liftM force $ grabble deck 1 (sum cardsLefts)
let newCardss = splice newCards cardsLefts
let oldRowss = map (\board -> map (map (\(Filled c) -> c)
. takeWhile (/= Empty))
. splice (elems . asArray $ board)
$ [3,5,5]) boards
additionCounts = (flip map) boards (\board ->
[4 - nextTop board, 9 - nextMiddle board, 14 - nextBottom board])
additionss = zipWith splice newCardss additionCounts
tmbs = zipWith (zipWith (++)) oldRowss additionss
return $ force $ map (\[t, m, b] -> FilledBoard t m b) tmbs
initialChoices :: [[Row]]
initialChoices = go [[]] 3 5
where
go xs _ 0 = xs
go xs t n = (if t == 0 then []
else go (map (Top:) xs) (t-1) (n-1))
++ go (map (Middle:) xs) t (n-1)
++ go (map (Bottom:) xs) t (n-1)
---------------------------
-- Choosers
---------------------------
rowsToBoard rs h = toBoard t m b where
cs = zip rs h
t = map snd . filter ((== Top) . fst) $ cs
m = map snd . filter ((== Middle) . fst) $ cs
b = map snd . filter ((== Bottom) . fst) $ cs
chooseFirstFive :: (MonadState KState m, MonadRandom m, MP.MonadParallel m)
=> Int -> [Card] -> m ()
chooseFirstFive n hand = do
s <- get
let d = (view deck s) \\ hand
sb = view boards s
xs <- (flip MP.mapM) initialChoices $ \rs -> do
let b = force $ rowsToBoard rs hand
liftM (b,) $ replicateM 10 $ randomFills d (sb & ix n .~ b)
let scoredRows = parMap rdeepseq
(second (sum . map ((!! n) . scoreGame))) xs
best = maximumBy (compare `on` snd) scoredRows
modify (set boards (sb & ix n .~ (fst best)) . set deck d)
rowsToCheck n st
| 4 == (nextTop . (!! n) . view boards $ st) = [Middle, Bottom]
| 9 == (nextMiddle . (!! n) . view boards $ st) = [Top, Bottom]
| 14 == (nextBottom . (!! n) . view boards $ st) = [Top, Middle]
| otherwise = [Top, Middle, Bottom]
rowToBoard n r s c = (!! n) . view boards $ execState (putCard n c r) s
chooseOne :: (MonadState KState m, MonadRandom m, MP.MonadParallel m)
=> Int -> Card -> m ()
chooseOne n card = do
s <- get
case (!! n) . view boards $ s of
Board _ _ 9 14 -> putCard n card Top
Board _ 4 _ 14 -> putCard n card Middle
Board _ 4 9 _ -> putCard n card Bottom
otherwise -> do
let d = view deck s \\ [card]
rbs = map (\r -> (r, rowToBoard n r s card)) (rowsToCheck n s)
sb = view boards s
xs <- MP.mapM (\(r,b) -> liftM (b,) $ replicateM 500
$ randomFills d (sb & ix n .~ b)) rbs
let scoredRows = map
(second (sum . parMap rdeepseq ((!! n) . scoreGame))) xs
best = maximumBy (compare `on` snd) scoredRows
modify (set boards (sb & ix n .~ (fst best)) . set deck d)
|
ScrambledEggsOnToast/Kachushi
|
Kachushi/Decision.hs
|
mit
| 4,621 | 0 | 23 | 1,416 | 1,906 | 1,017 | 889 | 90 | 4 |
import Text.Regex.PCRE
import Data.List
import System.Environment
main :: IO ()
main = do
input <- getContents
args <- getArgs
let pass = args !! 0
putStrLn . unscramble pass . reverse . lines $ input
unscramble pass [] = pass
unscramble pass (l : ls) = unscramble pass' ls
where
action = head [ a | (p, a) <- rules, l =~ p]
pass' = action pass l
rules = [r1', r2', r3', r4', r5', r6']
r1' = (pattern, action)
where
pattern = "swap position (\\d+) with position (\\d+)"
action :: String -> String -> String
action pass line = pre ++ [cy] ++ mid ++ [cx] ++ end
where
[[_, xs, ys]] = line =~ pattern :: [[String]]
[x, y] = sort [read xs, read ys]
(pre, (cx : rest)) = splitAt x pass
(mid, (cy : end)) = splitAt (y - x - 1) rest
r2' = (pattern, action)
where
pattern = "swap letter (\\w) with letter (\\w)"
action :: String -> String -> String
action pass line = map swapper pass
where
[[_, xs, ys]] = line =~ pattern :: [[String]]
[x, y] = map head [xs, ys]
swapper z = if z == x
then y
else if z == y
then x
else z
r3' = (pattern, action)
where
pattern = "rotate (left|right) (\\d+) steps?"
action :: String -> String -> String
action pass line = if dir == "right"
then let (l, r) = splitAt n pass in r ++ l
else let (l, r) = splitAt (length pass - n) pass
in r ++ l
where
[[_, dir, ns]] = line =~ pattern :: [[String]]
n = read ns `mod` length pass
r4' = (pattern, action)
where
pattern = "rotate based on position of letter (\\w)"
action :: String -> String -> String
action pass line = let (l, r) = splitAt k pass in r ++ l
where
[[_, xs]] = line =~ pattern :: [[String]]
x = head xs
i = head $ elemIndices x pass
l = length pass
case1 = [ k + 1
| j <- [i, i + l, i + 2 * l]
, (j - 1) `mod` 2 == 0
, let k = (j - 1) `div` 2
, k >= 0
, k < l
, k < 4 ]
case2 = [ k + 2
| j <- [i, i + l, i + 2 * l]
, j `mod` 2 == 0
, let k = (j - 2) `div` 2
, k >= 0
, k < l
, k >= 4 ]
k = (head (case1 ++ case2)) `mod` l
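-- Note on r4': the forward scramble rotates the password right by (k + 1),
-- or by (k + 2) when k >= 4, where k is the letter's index before rotating.
-- case1/case2 enumerate those rotation amounts for every original index k
-- consistent with the observed index i, and the inverse is then a left
-- rotation by that amount (hence the splitAt above).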
r5' = (pattern, action)
where
pattern = "reverse positions (\\d+) through (\\d+)"
action :: String -> String -> String
action pass line = pre ++ reverse mid ++ end
where
[[_, xs, ys]] = line =~ pattern :: [[String]]
[x, y] = sort . map read $ [xs, ys]
(pre, rest) = splitAt x pass
(mid, end) = splitAt (y - x + 1) rest
r6' = (pattern, action)
where
pattern = "move position (\\d+) to position (\\d+)"
action :: String -> String -> String
action pass line = l ++ [xc] ++ r
where
[[_, xs, ys]] = line =~ pattern :: [[String]]
[x, y] = map read [xs, ys]
xc = pass !! y
pass' = delete xc pass
(l, r) = splitAt x pass'
|
lzlarryli/advent_of_code_2016
|
day21/part2.hs
|
mit
| 3,225 | 6 | 14 | 1,274 | 1,375 | 737 | 638 | 82 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Cric.UsersSpec
( test
) where
import Test.Hspec
import SpecHelpers
import Data.List
import Cric
import Cric.Users
test :: Spec
test = do
describe "createUser" $ do
it "builds the command with different parameters" $ do
let opts = defaultUserOptions
{ loginGroup = Just "logingroup"
, groups = ["group1", "group2"]
, uid = Just 1337
, shell = Just "/bin/sh"
}
cric = createUser "testusername" opts
isCmdGood cmd = all (`isInfixOf` cmd)
["adduser"
, "testusername"
, "-s /bin/sh"
, "-u 1337"
, "-G group1,group2"
, "-g logingroup"]
mockFunc cmd = if isCmdGood cmd
then Just $ return (Left 0, "good", "")
else Nothing
sshMock = SshMock [mockFunc] []
result <- testCricWith sshMock cric
result `shouldBe` Success "good" ""
it "doesn't specify anything if the options are set to Nothing/[]" $ do
let opts = defaultUserOptions
{ loginGroup = Nothing
, groups = []
, uid = Nothing
, shell = Nothing
}
cric = createUser "testusername" opts
isCmdGood cmd = not $ any (`isInfixOf` cmd) [ "-s" , "-u" , "-G" , "-g"]
mockFunc cmd = if "adduser" `isInfixOf` cmd
then if isCmdGood cmd
then Just $ return (Left 0, "good", "")
else Just $ return (Left 1, "bad", "")
else Nothing
sshMock = SshMock [mockFunc] []
result <- testCricWith sshMock cric
result `shouldBe` Success "good" ""
describe "removeUser" $ do
it "detects and uses rmuser" $ do
let mock = mockCommand "rmuser" (0, "rmuser called")
. mockCommand "which rmuser" (0, "/bin/rmuser")
$ defaultSshMock
result <- testCricWith mock $ removeUser "username"
result `shouldBe` Success "rmuser called" ""
it "uses deluser if rmuser is not found" $ do
let mock = mockCommand "deluser" (0, "deluser called") defaultSshMock
result <- testCricWith mock $ removeUser "username"
result `shouldBe` Success "deluser called" ""
|
thoferon/cric
|
tests/Cric/UsersSpec.hs
|
mit
| 2,502 | 0 | 21 | 995 | 596 | 313 | 283 | 58 | 4 |
{-# OPTIONS_GHC -Wall #-}
module FailureExercise where
import Test.QuickCheck
-- Failure
-- Find out why this property fails.
-- for a function
square x = x * x
-- Why does this property not hold? Examine the type of sqrt.
squareIdentity = square . sqrt
prop_squareIdentity :: (Floating a, Eq a) => a -> Bool
prop_squareIdentity x = squareIdentity x == x
|
NickAger/LearningHaskell
|
HaskellProgrammingFromFirstPrinciples/Chapter14.hsproj/FailureExercise.hs
|
mit
| 358 | 0 | 6 | 66 | 74 | 42 | 32 | 7 | 1 |
module DataTypes where
import Data.Foldable hiding (msum)
import Data.Map (Map)
import Data.Monoid
import Control.Monad.State
import Data.Traversable
import Data.Functor
import Control.Applicative
type Modality = String
data Type = TAtomic String
| TMonadic Modality Type
| TPair Type Type
| TFunctional Type Type deriving (Eq, Show, Ord)
data Linearity = Linear
| Classical deriving (Eq,Show,Ord)
data Formula = Atom String Type Linearity
| Var String Type Linearity
| M Formula Modality Type Linearity
| P Formula Formula Type Linearity
| I Formula Formula Type Linearity deriving (Eq, Show, Ord)
isLinear :: Formula -> Bool
isLinear (Atom _ _ Linear) = True
isLinear (Var _ _ Linear) = True
isLinear (M _ _ _ Linear) = True
isLinear (P _ _ _ Linear) = True
isLinear (I _ _ _ Linear) = True
isLinear _ = False
defAtom s t = Atom s t Linear
defVar s t = Var s t Linear
defMonad f m t = M f m t Linear
defPair f g t = P f g t Linear
defImpl f g t = I f g t Linear
changeLinearity :: Linearity -> Formula -> Formula
changeLinearity l (Atom s t _) = Atom s t l
changeLinearity l (Var s t _) = Var s t l
changeLinearity l (M f m t _) = M f m t l
changeLinearity l (P f g t _) = P f g t l
changeLinearity l (I f g t _) = I f g t l
getType :: Formula -> Type
getType (Atom _ t _) = t
getType (Var _ t _) = t
getType (M _ _ t _) = t
getType (I _ _ t _) = t
getType (P _ _ t _) = t
type Context = [Formula]
type Sequent = (Context, Formula)
-- infixr 7 :->:
data BinTree a = Branch Label (BinTree a) a (BinTree a)
| Unary Label a (BinTree a)
| Leaf Label a deriving (Eq, Show)
instance Foldable BinTree where
foldMap f (Leaf _ a) = f a
foldMap f (Unary _ a c) = f a `mappend` foldMap f c
foldMap f (Branch _ l a r) = foldMap f l `mappend` f a `mappend` foldMap f r
instance Functor BinTree where
fmap f (Leaf l a) = Leaf l (f a)
fmap f (Unary l a c) = Unary l (f a) (fmap f c)
fmap f (Branch lab l a r) = Branch lab (fmap f l) (f a) (fmap f r)
instance Traversable BinTree where
traverse f (Leaf l a) = (Leaf l) <$> f a
traverse f (Unary l a c) = (Unary l) <$> f a <*> traverse f c
traverse f (Branch lab l a r) = (Branch lab) <$> traverse f l <*> f a <*> traverse f r
getVal :: BinTree a -> a
getVal (Leaf _ a) = a
getVal (Branch _ _ a _) = a
getVal (Unary _ a _) = a
setVal :: a -> BinTree a -> BinTree a
setVal a (Leaf l _) = Leaf l a
setVal a (Branch lab l _ r) = Branch lab l a r
setVal a (Unary l _ c) = Unary l a c
data Label = Id
| ImpL
| ImpR
| TensL
| TensR
| MonL
| MonR deriving (Eq, Show)
-- Curry Howard
data LambdaTerm = V Int
| C String
| Lambda LambdaTerm LambdaTerm -- this definition sucks because we want only variables but it'll do for now
| App LambdaTerm LambdaTerm
| Pair LambdaTerm LambdaTerm
| FirstProjection LambdaTerm
| SecondProjection LambdaTerm
| Eta LambdaTerm
| LambdaTerm :*: LambdaTerm deriving (Eq, Show, Ord) -- also this one is a poor definition
infixr 7 :*:
data DecoratedFormula = DF { identifier :: Int
, term :: LambdaTerm
, formula :: Formula } deriving Show
instance Eq DecoratedFormula where
f == g = (identifier f) == (identifier g)
type DecoratedSequent = ([DecoratedFormula],DecoratedFormula)
data S = S { counter :: Int
, vars :: Map Formula Formula} deriving Show
sanevars = ["x","y","z","w","v","k","h","l","m","n","a","b","c","d","e"]
type NonDeterministicState s a = StateT s [] a
failure :: NonDeterministicState s a
failure = StateT $ \_ -> []
every :: [NonDeterministicState s a] -> NonDeterministicState s a
every = msum
evaluateState :: NonDeterministicState s a -> s -> [a]
evaluateState = evalStateT
-- | Translates a lambda term into a Haskell expression. The output is not meant to be pretty. We could have used Language.Haskell.Syntax, but it seems overkill for our purposes
toHaskell :: LambdaTerm -> String
toHaskell (V i) = "__v" ++ (show i) ++ "__"
toHaskell (C s) = s
toHaskell (Lambda x t) = "(\\ " ++ toHaskell x ++ " -> " ++ toHaskell t ++ ")"
toHaskell (App f x) = "(" ++ toHaskell f ++ " " ++ toHaskell x ++ ")"
toHaskell (Pair t u) = "(" ++ toHaskell t ++ "," ++ toHaskell u ++ ")"
toHaskell (FirstProjection p) = "(fst " ++ toHaskell p ++ ")"
toHaskell (SecondProjection p) = "(snd " ++ toHaskell p ++ ")"
toHaskell (Eta t) = "(return " ++ toHaskell t ++ ")"
toHaskell (m :*: k) = "(" ++ toHaskell m ++ " >>= " ++ toHaskell k ++ ")"
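-- For example:
--   toHaskell (Lambda (V 1) (Eta (V 1))) == "(\\ __v1__ -> (return __v1__))"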
|
gianlucagiorgolo/glue-tp
|
DataTypes.hs
|
mit
| 4,740 | 0 | 10 | 1,313 | 1,944 | 1,011 | 933 | 112 | 1 |
{-# LANGUAGE TupleSections, ViewPatterns #-}
module Y2018.M05.D04.Solution where
{--
We're going to ease back into ETL-like functionality again. The endgame is this:
We want to create a daily upload process that:
1. Queries the database for the most recent entry into the audit logs (date)
2. Fetches a set of (articles) from the REST endpoint up to a week before (date)
3. Triages (articles) into:
a. redundant
b. updated
c. new
when compared against what's in the data store.
4. Takes the new articles and
a. discards them
b. updates them
c. inserts them
based upon their triaged state
5. Logs everything and audits the process for review and the next go-'round.
So, let's break this problem into bite-sized pieces, recollecting what we've
done before, reusing what we can, and repurposing what we can.
The vein of these Haskell problems is to use available code/algorithms, even
when data types don't quite match what we've done before.
So: 1. get the most recent audit log entry and offset that by one week.
--}
import Control.Arrow ((&&&))
import Control.Monad (guard)
import Control.Monad.Writer
import Data.Aeson
import Data.Maybe
import Data.Monoid
import Data.Time
import Database.PostgreSQL.Simple
-- below imports available via 1HaskellADay git repository
import Control.DList (dlToList)
import Data.Logger
import Data.Time.Stamped
import Store.SQL.Connection
import Store.SQL.Util.AuditLogging (oneWeekAgo)
-- Y2018.M01.D29.Solution (oneWeekAgo)
-- we can't use:
-- Y2018.M01.D26.Solution
-- because that's based on PILOT data structures, not WPJ ones. We fetch
-- WPJ packets from:
import Y2018.M04.D11.Solution (PageNumber)
import Y2018.M04.D13.Solution hiding (packetReader, errOut)
-- Packet is declared in Y2018.M04.D13.Solution, but we need to convert the
-- Value values to Article values to get the date published
import Y2018.M04.D02.Solution -- for FromJSON Article
-- Say the result of oneWeekAgo for the WPJ database is (date). Did you have
-- to change anything to get that result?
-- Okay, that was too easy.
-- 2. Fetch a set of articles from the rest endpoint upto (date)
data ParsedPacket = PP (Packet Value, [(Value, Article)])
deriving Show
-- the monoid instance for list-processing comes in handy for dealing with
-- multiple parsed packets later.
-- with a seed packet of
seedPacket :: ParsedPacket
seedPacket = PP (Pack [], [])
-- a very weird-lookin' monoid, indeed! ... but it is a monoid, so:
instance Monoid ParsedPacket where
mempty = seedPacket
mappend (PP (Pack as, xs)) (PP (Pack bs, ys)) = PP (Pack (as ++ bs), xs ++ ys)
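-- For example (with hypothetical values v1, v2 and articles a1, a2):
--   PP (Pack [v1], [(v1, a1)]) `mappend` PP (Pack [v2], [(v2, a2)])
-- yields PP (Pack [v1, v2], [(v1, a1), (v2, a2)])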
pack2arts :: Day -> Packet Value -> [(Value, Article)]
pack2arts day (Pack arts) = mapMaybe (r2m . (id &&& fromJSON)) arts
where r2m (y, Success x) = Just (y, x)
r2m _ = Nothing
-- but we also need to weed out articles too old in the packet:
include :: Day -> Article -> Bool
include day (date . art -> d) = i' day d
i' :: Day -> Maybe ZonedTime -> Bool
i' day Nothing = False
i' day (Just d) = localDay (zonedTimeToLocalTime d) >= day
-- and from there you can convert a packet to a parsed packet
packetReader :: Day -> Tries -> StampedWriter LogEntry [ParsedPacket]
packetReader = pr' 1 []
pr' :: PageNumber -> [ParsedPacket] -> Day -> Tries
-> StampedWriter LogEntry [ParsedPacket]
pr' pn accum day tries = if tries > 3
then error ("Tried three times to load packet " ++ show pn ++ "; quitting")
else lift (readPacket pn) >>=
let nex = succ pn in
either (accumPacket day nex accum)
(errOut nex day (succ tries) accum)
-- this time packetReader reads all packets up to (weekAgo)
accumPacket :: Day -> PageNumber -> [ParsedPacket] -> Packet Value
-> StampedWriter LogEntry [ParsedPacket]
accumPacket day pn accum pack =
let arts = pack2arts day pack in
if null arts then return accum
else
let today = fmap (localDay . zonedTimeToLocalTime) . date . art . snd
downloadedDay = minimum (mapMaybe today arts)
pruned = prune day arts
newaccum = (PP (Pack (map fst pruned), pruned)):accum in
if downloadedDay < day then return newaccum
else loggerr ("Loaded packet " ++ show pn) >> pr' pn newaccum day 0
prune :: Day -> [(a, Article)] -> [(a, Article)]
prune day = filter (include day . snd)
-- the accumulator function on successful read
errOut :: PageNumber -> Day -> Tries -> [ParsedPacket] -> String
-> StampedWriter LogEntry [ParsedPacket]
errOut pn weekAgo retries accum errmsg =
loggerr ("Error reading packet " ++ show pn ++ ": " ++ errmsg) >>
pr' pn accum weekAgo retries
loggerr :: String -> StampedWriter LogEntry ()
loggerr msg = sayIO (Entry ERROR "daily upload" "Y2018.M05.D04.Solution" msg) >>
lift (putStrLn msg)
-- How many packets did you consume for a week's worth of articles from today?
downloader :: Connection -> IO (Day, [ParsedPacket])
downloader conn = -- connectInfo WPJ >>= connect >>= \conn ->
oneWeekAgo conn >>= \day ->
fmap fst (runWriterT (packetReader day 0)) >>=
return . (day,)
|
geophf/1HaskellADay
|
exercises/HAD/Y2018/M05/D04/Solution.hs
|
mit
| 5,127 | 0 | 18 | 1,056 | 1,124 | 608 | 516 | 70 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module System.TrailDB.Error
( TrailDBException(..) )
where
import Control.Exception
import Data.Data
import Foreign.C.Types
import GHC.Generics
-- | Exceptions that may happen with TrailDBs.
--
-- Some programming errors may throw with `error` instead.
data TrailDBException
= CannotAllocateTrailDBCons -- ^ Failed to allocate `TdbCons`.
| CannotAllocateTrailDB -- ^ Failed to allocate `Tdb`.
| TrailDBError !CInt String -- ^ Errors reported by error code from TrailDB C library.
-- includes numerical error and human-readable error.
| NoSuchTrailID -- ^ A `TrailID` was used that doesn't exist in `Tdb`.
| NoSuchUUID -- ^ A `UUID` was used that doesn't exist in `Tdb`.
| NoSuchFieldID -- ^ A `FieldID` was used that doesn't exist in `Tdb`.
| NoSuchValue -- ^ A `Feature` was used that doesn't contain a valid value.
| NoSuchFeature -- ^ Attempted to find `Feature` for human readable name that doesn't exist.
| FinalizationFailure -- ^ For some reason, finalizing a `TdbCons` failed.
deriving ( Eq, Ord, Show, Read, Typeable, Generic )
instance Exception TrailDBException
|
traildb/traildb-haskell
|
System/TrailDB/Error.hs
|
mit
| 1,296 | 0 | 7 | 327 | 122 | 78 | 44 | 22 | 0 |
module Prepare.Perseus.TeiEpidocParser where
import Prelude hiding (Word)
import Control.Lens (over, _Just)
import Data.Text (Text)
import qualified Data.Text as Text
import Prepare.Perseus.TeiEpidocModel
import qualified Prepare.Perseus.TeiEpidocHeaderParser as Header
import Prepare.Perseus.TeiEpidocParserCommon
import Prepare.Xml.Parser (NodeParser, (<|>), many, optional)
import qualified Prepare.Xml.Parser as Xml
import qualified Text.Megaparsec.Char as MP
import qualified Text.Megaparsec.Lexer as MP
import qualified Text.Megaparsec.Prim as MP
milestoneParagraph :: NodeParser Milestone
milestoneParagraph = build <$> Xml.elementAttrNS (teiNS "milestone") attributes Xml.end
where
build (x, _) = x
attributes = do
ed <- optional (Xml.attribute "ed")
u <- Xml.attribute "unit"
_ <- Xml.parseNested ("milestone unit para") (MP.string "para") u
return $ MilestoneParagraph ed
milestoneCard :: NodeParser Milestone
milestoneCard = build <$> Xml.elementAttrNS (teiNS "milestone") attributes Xml.end
where
build (x, _) = x
attributes = do
n <- Xml.attribute "n"
num <- Xml.parseNested "milestone card n" MP.integer n
u <- Xml.attribute "unit"
_ <- Xml.parseNested "milestone unit card" (MP.string "card") u
return $ MilestoneCard num
milestone :: NodeParser Milestone
milestone
= MP.try milestoneParagraph
<|> milestoneCard
apparatusAdd :: NodeParser ApparatusAdd
apparatusAdd = ApparatusAdd <$> Xml.elementContentNS (teiNS "add")
apparatusDel :: NodeParser ApparatusDel
apparatusDel = ApparatusDel <$> Xml.elementContentNS (teiNS "del")
apparatusCorr :: NodeParser ApparatusCorr
apparatusCorr = ApparatusCorr <$> Xml.elementContentNS (teiNS "corr")
term :: NodeParser Term
term = Term <$> Xml.elementContentNS (teiNS "term")
gap :: NodeParser Gap
gap = build <$> Xml.elementAttrNS (teiNS "gap") (optional $ Xml.attribute "reason") Xml.end
where
build (x, _) = Gap x
plainText :: NodeParser Text
plainText = Xml.content
bibl :: NodeParser Bibl
bibl = build <$> Xml.elementContentAttrNS (teiNS "bibl") attributes
where
build ((d, n), t) = Bibl n t d
attributes = do
d <- optional (Xml.attribute "default")
n <- optional (Xml.attribute "n")
return (d, n)
quoteLine :: NodeParser QuoteLine
quoteLine = build <$> Xml.elementContentAttrNS (teiNS "l") attributes
where
build ((a, m), c) = QuoteLine m c a
attributes = do
a <- optional (Xml.attribute "ana")
m <- optional (Xml.attribute "met")
return (a, m)
quote :: NodeParser Quote
quote = build <$> Xml.elementAttrNS (teiNS "quote") attributes children
where
build (x, y) = Quote x y
attributes = Xml.attribute "type"
children = many quoteLine
cit :: NodeParser Cit
cit = Xml.elementNS (teiNS "cit") (Cit <$> quote <*> bibl)
sic :: NodeParser Sic
sic =
let
name = teiNS "sic"
parse
= MP.try (fmap Just (Xml.elementContentNS name))
<|> fmap (const Nothing) (Xml.elementEmptyNS name)
in Sic <$> parse
speaker :: NodeParser Speaker
speaker = Xml.elementNS (teiNS "sp") children
where
children = do
s <- xmlContent "speaker"
p <- xmlContent "p"
return $ Speaker s p
speakerContents :: NodeParser [Content]
speakerContents = pure . ContentSpeaker <$> speaker
content :: NodeParser Content
content
= MP.try (ContentText <$> plainText)
<|> (ContentAdd <$> apparatusAdd)
<|> (ContentDel <$> apparatusDel)
<|> (ContentCorr <$> apparatusCorr)
<|> (ContentTerm <$> term)
<|> (ContentMilestone <$> milestone)
<|> (ContentGap <$> gap)
<|> (ContentQuote <$> quote)
<|> (ContentBibl <$> bibl)
<|> (ContentCit <$> cit)
<|> (ContentSic <$> sic)
textPartSubtype :: Text -> Xml.AttributeParser Integer
textPartSubtype v = do
n <- Xml.attribute "n"
num <- Xml.parseNested (Text.unpack v ++ " number") MP.integer n
_ <- Xml.attributeValue "subtype" v
_ <- Xml.attributeValue "type" "textpart"
return num
divType :: Text -> Xml.AttributeParser Integer
divType v = do
n <- Xml.attribute "n"
num <- Xml.parseNested (Text.unpack v ++ " number") MP.integer n
_ <- Xml.attributeValue "type" v
return num
divTypeOrSubtype :: Text -> Xml.AttributeParser Integer
divTypeOrSubtype v
= MP.try (textPartSubtype v)
<|> divType v
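-- For illustration, divTypeOrSubtype "book" accepts either of these forms:
--   <div type="textpart" subtype="book" n="1"> or <div type="book" n="1">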
paragraph :: NodeParser [Content]
paragraph = Xml.elementNS (teiNS "p") (many content)
section :: NodeParser Section
section = build <$> Xml.elementAttrNS (teiNS "div") attributes children
where
build (x, y) = Section x y
attributes = divTypeOrSubtype "section"
children = concat <$> many (paragraph <|> speakerContents)
chapter :: NodeParser Chapter
chapter = build <$> Xml.elementAttrNS (teiNS "div") attributes children
where
build (x, y) = Chapter x y
attributes = divTypeOrSubtype "chapter"
children = many section
book :: NodeParser Book
book = build <$> Xml.elementAttrNS (teiNS "div") attributes children
where
build (x, (y, z)) = Book x y z
attributes = divTypeOrSubtype "book"
children = do
h <- optional (Xml.elementContentNS (teiNS "head"))
cs <- many chapter
return (h, cs)
lineContent :: NodeParser LineContent
lineContent
= MP.try (LineContentMilestone <$> milestone)
<|> MP.try (LineContentText <$> plainText)
<|> (LineContentDel <$> apparatusDel)
line :: NodeParser Line
line = build <$> Xml.elementAttrNS (teiNS "l") attributes children
where
build ((n, r), cs) = Line n r cs
attributes = do
n <- optional (Xml.attribute "n")
num <- _Just (Xml.parseNested "line number" MP.integer) n
rend <- optional (Xml.attribute "rend")
r <- _Just (Xml.parseNested "line rend" $ MP.string "displayNumAndIndent") rend
return (num, over _Just (const LineRender_DisplayNumAndIndent) r)
children = many lineContent
bookLineContent :: NodeParser BookLineContent
bookLineContent
= MP.try (BookLineContentMilestone <$> milestone)
<|> (BookLineContentLine <$> line)
bookLines :: NodeParser BookLines
bookLines = build <$> Xml.elementAttrNS (teiNS "div") attributes children
where
build (x, y) = BookLines x y
attributes = divTypeOrSubtype "book"
children = many bookLineContent
division :: NodeParser Division
division
= MP.try (DivisionBooks <$> many book)
<|> MP.try (DivisionChapters <$> many chapter)
<|> MP.try (DivisionSections <$> many section)
<|> (DivisionBookLines <$> many bookLines)
edition :: NodeParser Edition
edition = build <$> Xml.elementAttrNS (teiNS "div") attributes children
where
build ((n, l), y) = Edition n l y
attributes = do
n <- Xml.attribute "n"
_ <- Xml.attributeValue "type" "edition"
l <- optional (Xml.attributeXml "lang")
return (n, l)
children = division
body :: NodeParser Body
body = Xml.elementNS (teiNS "body") children
where
children
= MP.try (BodyEdition <$> edition)
<|> (BodyDivision <$> division)
interp :: NodeParser Interp
interp = build <$> Xml.elementContentAttrNS (teiNS "interp") attributes
where
build (i, v) = Interp i v
attributes = Xml.attributeXml "id"
interpGrp :: NodeParser InterpGrp
interpGrp = build <$> Xml.elementAttrNS (teiNS "interpGrp") attributes children
where
build ((t, l), v) = InterpGrp t l v
attributes = do
t <- Xml.attribute "type"
l <- Xml.attributeXml "lang"
return (t, l)
children = many interp
teiText :: NodeParser TeiText
teiText = build <$> Xml.elementAttrNS (teiNS "text") attributes children
where
build ((n, l), (i, b)) = TeiText l b n i
attributes = do
n <- optional (Xml.attribute "n")
l <- optional (Xml.attributeXml "lang")
return (n, l)
children = do
i <- optional interpGrp
b <- body
return (i, b)
tei :: NodeParser Tei
tei = Xml.elementNS (teiNS "TEI") children
where
children = pure Tei
<*> Header.teiHeader
<*> teiText
|
ancientlanguage/haskell-analysis
|
prepare/src/Prepare/Perseus/TeiEpidocParser.hs
|
mit
| 7,770 | 0 | 17 | 1,442 | 2,736 | 1,385 | 1,351 | 206 | 1 |
module Shapes.Linear.Class where
-- TODO: use class to make matrix/vector/scalar multiplication fit together nicely?
-- NOTE: Can't make type family of kind #. (confirm?)
|
ublubu/shapes
|
shapes-math/src/Shapes/Linear/Class.hs
|
mit
| 172 | 0 | 3 | 25 | 9 | 7 | 2 | 1 | 0 |
module DynamoDbEventStore.ProjectPrelude
(traceM
,module X
,NonEmpty(..)
,fmap2
,Foldable.traverse_
) where
import BasicPrelude as X
import GHC.Natural as X
import Control.Monad.Except as X
import Pipes as X (Producer, yield, await, (>->), Pipe)
import Data.List.NonEmpty (NonEmpty(..))
import Data.Foldable as Foldable
import qualified Debug.Trace
import Data.Text as T
{-# WARNING traceM "traceM still in code" #-}
traceM :: Monad m => T.Text -> m ()
traceM = Debug.Trace.traceM . T.unpack
fmap2
:: (Functor f, Functor f1)
=> (a -> b) -> f (f1 a) -> f (f1 b)
fmap2 = fmap . fmap
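-- For example: fmap2 (+1) (Just [1, 2]) == Just [2, 3]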
|
adbrowne/dynamodb-eventstore
|
dynamodb-eventstore/src/DynamoDbEventStore/ProjectPrelude.hs
|
mit
| 605 | 0 | 10 | 114 | 209 | 128 | 81 | 21 | 1 |
module Network.Levin.Command.Ping where
import Network.Levin.Peer (PeerId)
-- net_node.h
-- p2p_protocol_defs.h
-- Used to make "callback" connection, to be sure that opponent node
-- have accessible connection point. Only other nodes can add peer to peerlist,
-- and ONLY in case when peer has accepted connection and answered to ping.
-- #define P2P_COMMANDS_POOL_BASE 1000
-- const static int ID = P2P_COMMANDS_POOL_BASE + 3;
data CommandPing = CommandPingReq
| CommandPingResp
{ status :: String
, peerId :: PeerId
}
deriving (Show, Eq)
|
nvmd/hs-cryptonote
|
src/Network/Levin/Command/Ping.hs
|
mit
| 638 | 0 | 8 | 177 | 61 | 41 | 20 | 7 | 0 |
{-# OPTIONS_HADDOCK hide, prune #-}
module Handler.About
( getAboutR
) where
import Import
getAboutR :: Handler Html
getAboutR =
fullLayout Nothing "About us" $ do
setTitle "About qua-kit"
toWidgetBody $
[hamlet|
<div class="row">
<div class="col-lg-6 col-md-5 col-sm-8 col-xs-9">
<div.card.card-orange>
<div.card-main>
<div.card-header>
<div.card-inner>
<h5.h5.margin-bottom-no.margin-top-no>
Chair of Information Architecture
<div.card-inner>
<p>
The project is developed within our chair at ETH Zurich.
<blockquote>
“We develop visual methods for the analysis, design and simulation #
of urban systems for a sustainable future”
<div.card-action>
<div.card-action-btn.pull-left>
<a.btn.btn-flat.waves-attach.waves-light.waves-effect href="http://www.ia.arch.ethz.ch/" target="_blank">
<span.icon>check
Visit our site
<div class="col-lg-6 col-md-5 col-sm-8 col-xs-9">
<div.card>
<div.card-main>
<div.card-header>
<div.card-inner>
<h5.h5.margin-bottom-no.margin-top-no>
Qua-kit
<div.card-inner>
<p>
Qua-kit is a web platform for viewing and manipulating simple urban geometry. #
<p>
Logged in via edX platform you can work on a single design problem, #
share your ideas together with design proposals, view and discuss works of others.
<p>
Local clients can use qua-kit as a presentation tool.
Connected to a computational server backend #
it can use registered remote services for visual analysis of geometry.
<p>
The platform is an open source project; #
therefore, everyone is welcome to contribute.
<div.card-action>
<div.card-action-btn.pull-left>
<a.btn.btn-flat.waves-attach.waves-light.waves-effect href="https://github.com/achirkin/qua-kit/" target="_blank">
<span.icon>check
Visit project's page
<div class="col-lg-6 col-md-5 col-sm-8 col-xs-9">
<div.card.card-brand>
<div.card-main>
<div.card-header>
<div.card-inner>
<h5.h5.margin-bottom-no.margin-top-no>
ADvISE
<div.card-inner>
<p>
One of this platform's goals is to gather data for ADvISE research project.
<p>
ADvISE stands for #
Data Analysis for understanding the Impact of urban design on Social pErformance of a city.
<p>
We aim to produce an open framework with basic functionality #
to efficiently search for compromise solutions for complex planning problems #
and an experimental software prototype #
with an intuitive user interface for representing planning problems #
and presenting optimal solutions at various stages of the design process.
<div.card-action>
<div.card-action-btn.pull-left>
<a.btn.btn-flat.waves-attach.waves-light.waves-effect href="http://www.ia.arch.ethz.ch/advise/" target="_blank">
<span.icon>check
Visit project's page
<div class="col-lg-6 col-md-5 col-sm-8 col-xs-9">
<div.card.card-red>
<div.card-main>
<div.card-header>
<div.card-inner>
<h5.h5.margin-bottom-no.margin-top-no>
Empower Shack by U-TT ETH Zurich
<div.card-inner>
<p>
Our first case study for the online course on edX platform is provided by Urban-ThinkTank group at ETH Zurich.
<p>
Empower Shack is an interdisciplinary development project directed by U-TT, #
ETH Zurich and the local NGO Ikhayalami Development Services, in collaboration with the BT-Section #
community and associated local and international partners.
<p>
The ongoing pilot phase is focused on a cluster of 68 houses within the BT-Section of Khayelitsha. #
Through innovative design and organisational models, the project aims to develop a comprehensive and #
sustainable informal settlement upgrading strategy centred on four core components: a two-story housing prototype, #
participatory spatial planning, ecological landscape management, and integrated livelihoods programming.
<div.card-action>
<div.card-action-btn.pull-left>
<a.btn.btn-flat.waves-attach.waves-light.waves-effect href="http://u-tt.com/project/empower-shack/" target="_blank">
<span.icon>check
Visit Empower Shack page
|]
|
mb21/qua-kit
|
apps/hs/qua-server/src/Handler/About.hs
|
mit
| 5,757 | 0 | 8 | 2,339 | 54 | 30 | 24 | -1 | -1 |
module Example1 (
main
) where
import Data.List (intercalate)
import Control.Arrow ((&&&))
import LogicProblem
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
data ID = A | B | C | D | E
deriving (Show, Eq, Ord, Enum, Bounded)
data Color = Roja | Verde | Azul | Marfil | Amarilla
deriving (Eq, Show)
data Nacion = Ingles | Espanol | Ruso | Noruego | Japones
deriving (Eq, Show)
data Animal = Perro | Caracoles | Zorro | Caballo | Cebra
deriving (Eq, Show)
data Bebida = Cafe | Te | Leche | Naranjada | Agua
deriving (Eq, Show)
data Musica = Piano | Bateria | Guitarra | Teclado | Violin
deriving (Eq, Show)
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
facts :: KnownFacts AnEntry
facts = rules [
"№2" -: Ingles <==> Roja |:: "El inglés vive en la casa roja."
, "№3" -: Espanol <==> Perro |:: "El español es el proprietario del perro."
, "№4" -: Verde <==> Cafe |:: "En la casa verde se bebe café."
, "№5" -: Ruso <==> Te |:: "El ruso bebe té."
, "№6" -: Verde <==> Marfil |?> aDerechaDe |:: "La casa verde está junto y a la derecha de la casa de marfil."
, "№7" -: Piano <==> Caracoles |:: "El pianista tiene caracoles."
, "№8" -: Amarilla <==> Bateria |:: "En la casa amarilla se toca la batería."
, "№9" -: C <==> Leche |:: "En la casa del centro se vende leche."
, "№10" -: Noruego <==> A |:: "El noruego vive en la primera casa de la izquierda."
, "№11" -: Guitarra <==> Zorro |?> enseguida |:: "El hombre que toca guitarra vive en la casa" ++
" contigua a la del dueño del zorro."
, "№12" -: Caballo <==> Bateria |?> enseguida |:: "En la casa contigua a aquella donde se encuentra el caballo" ++
" se toca la batería."
, "№13" -: Violin <==> Naranjada |:: "El violinista bebe naranjada."
, "№14" -: Japones <==> Teclado |:: "El japonés toca el teclado."
, "№15" -: Noruego <==> Azul |?> enseguida |:: "El noruego vive a lado de la casa azul."
, "№16" -: Japones !? tieneUnSoloVicino |:: "El japonés solo tiene un vicino."
, "№17" -: Perro <==> Leche |?> enseguida |:: "El quien tiene perro vive junto al quien toma leche."
]
type CondFunc1 v = Maybe v -> Maybe v -> Bool
enseguida :: CondFunc1 ID
Just x `enseguida` Just y = x /= y &&
((maxBound :: ID) /= x && succ x == y ||
(minBound :: ID) /= x && pred x == y )
aDerechaDe :: CondFunc1 ID
Just x `aDerechaDe` Just y = x /= y && (maxBound :: ID) /= y && succ y == x
aIzquierdaDe = flip aDerechaDe
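-- For example: Just A `enseguida` Just B == True (adjacent houses), while
-- Just B `aDerechaDe` Just A == True and Just A `aDerechaDe` Just B == False.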
tieneUnSoloVicino :: Maybe ID -> Bool
tieneUnSoloVicino (Just id) = id == maxBound || id == minBound
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
data AnEntry = AnEntry { casaId :: ID
, color :: Maybe Color
, nacion :: Maybe Nacion
, animal :: Maybe Animal
, bebida :: Maybe Bebida
, musica :: Maybe Musica
}
deriving Show
instance IdRepr ID where getRepr = show
getOrd = fromEnum
instance Entry AnEntry where
getId (AnEntry i _ _ _ _ _) = Id i
instance AccessibleEntry AnEntry ID where getV _ = Just . casaId
setV _ _ = Nothing
clearV _ _ = Nothing
instance AccessibleEntry AnEntry Color where getV _ = color
setV e a = Just $ e {color = Just a}
clearV _ e = Just $ e {color = Nothing}
instance AccessibleEntry AnEntry Nacion where getV _ = nacion
setV e a = Just $ e {nacion = Just a}
clearV _ e = Just $ e {color = Nothing}
instance AccessibleEntry AnEntry Animal where getV _ = animal
setV e a = Just $ e {animal = Just a}
clearV _ e = Just $ e {color = Nothing}
instance AccessibleEntry AnEntry Bebida where getV _ = bebida
setV e a = Just $ e {bebida = Just a}
clearV _ e = Just $ e {color = Nothing}
instance AccessibleEntry AnEntry Musica where getV _ = musica
setV e a = Just $ e {musica = Just a}
clearV _ e = Just $ e {color = Nothing}
instance Accessible ID where modifiable _ = False
varDescriptor _ = AccessibleDescriptor "ID"
instance Accessible Color where modifiable _ = True
varDescriptor _ = AccessibleDescriptor "Color"
instance Accessible Nacion where modifiable _ = True
varDescriptor _ = AccessibleDescriptor "Nacion"
instance Accessible Animal where modifiable _ = True
varDescriptor _ = AccessibleDescriptor "Animal"
instance Accessible Bebida where modifiable _ = True
varDescriptor _ = AccessibleDescriptor "Bebida"
instance Accessible Musica where modifiable _ = True
varDescriptor _ = AccessibleDescriptor "Musica"
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
newEntry i = AnEntry i Nothing Nothing Nothing Nothing Nothing
table = newETable (Id &&& newEntry) (enumFrom A)
ctx :: ExecContext AtomicRule AnEntry
ctx = newExecContext table
res2 = solveProblem ctx facts (Just 100)
main :: IO()
main = do putStrLn "facts:"
putStrLn $ intercalate "\n" (map show facts)
putStrLn "== run solveProblem =="
let (c', r', a') = res2
putStrLn "-- history:"
putStrLn $ showHistory a'
putStrLn "-- context:"
print c'
putStrLn "-- result:"
print r'
|
fehu/h-logic-einstein
|
example-1/Example1.hs
|
mit
| 6,625 | 0 | 14 | 2,647 | 1,552 | 812 | 740 | -1 | -1 |
module Internal.API where
import Internal.FFI
import Internal.Type
import Data.JSString.Text (textFromJSString, textToJSString)
import GHCJS.Marshal (FromJSVal (..))
import GHCJS.Perch (Elem)
import Control.Arrow ((***))
import Data.Text (Text)
-- | Get @2d@ context of Canvas element. Throws run-time error if given element
-- is not a @CANVAS@.
canvas2dCtx :: Elem -> IO CanvasCtx
canvas2dCtx = canvasCtx ctxName2d
-- * Drawing Rectangles
-- | Set all pixels in the rectangle to transparent black, erasing old contents.
clearRect :: Coords -> Pixels -> Pixels -> CanvasRender ()
clearRect cs w h = CanvasRender $ withCtx $ \c ->
do let (rx, ry) = fromCoords cs
w' = pix w
h' = pix h
js_canvasContext2dClearRect c rx ry w' h'
-- | Paint a rectangle using current stroke style.
strokeRect :: Coords -- ^ rectangle starting point
-> Pixels -- ^ rectangle width
-> Pixels -- ^ rectangle height
-> CanvasRender ()
strokeRect cs w h = CanvasRender $ withCtx $ \c ->
do let (rx, ry) = fromCoords cs
w' = pix w
h' = pix h
js_canvasContext2dStrokeRect c rx ry w' h'
-- * Drawing Text
-- | Fill text.
fillText :: Text -- ^ text to render
-> Pixels -- ^ x
-> Pixels -- ^ y
-> Maybe Pixels -- ^ optional max width
-> CanvasRender ()
fillText txt x y mw = CanvasRender $ withCtx $ \c ->
do let t = textToJSString txt
case mw of
Nothing -> js_canvasContext2dFillTextA3 c t (pix x) (pix y)
Just w -> js_canvasContext2dFillTextA4 c t (pix x) (pix y) (pix w)
-- | Set 2d context font.
--
-- Given text should be in format @\<SIZE\>px \<NAME\>@, example:
-- @"48px Iosevka Slab"@.
setContextFont :: Text -> CanvasRender ()
setContextFont fontSpec =
CanvasRender $ withCtx $
flip js_canvasContext2dSetFont (textToJSString fontSpec)
-- | Get 2d context font.
getContextFont :: CanvasCtx -> IO Text
getContextFont c = textFromJSString <$> js_canvasContext2dGetFont (getContext c)
-- * Paths
-- The following methods can be used to manipulate paths of objects.
-- | Starts a new path by emptying the list of sub-paths. Call this method when
-- you want to create a new path.
beginPath :: CanvasRender ()
beginPath = CanvasRender $ withCtx $ js_canvasContext2dBeginPath
-- | Causes the point of the pen to move back to the start of the current
-- sub-path. It tries to draw a straight line from the current point to the
-- start. If the shape has already been closed or has only one point, this
-- function does nothing.
closePath :: CanvasRender ()
closePath = CanvasRender $ withCtx $ js_canvasContext2dClosePath
-- * Drawing Images
-- | Set context image smoothing. Note that this is an experimental feature and may
-- not work properly in some cases.
setImageSmoothing :: Bool -> CanvasRender ()
setImageSmoothing v = CanvasRender $ withCtx $
flip js_canvasContext2dSetImageSmoothing v
-- | Handy shortcut for enabling image smoothing.
enableImageSmoothing :: CanvasRender ()
enableImageSmoothing = setImageSmoothing True
-- | Handy shortcut for disabling image smoothing.
disableImageSmoothing :: CanvasRender ()
disableImageSmoothing = setImageSmoothing False
-- | Draw an image.
drawImage :: CanvasDrawImageSettings -> CanvasRender ()
drawImage (DrawImageSimple (CanvasImageSource is) dx dy) =
CanvasRender $ withCtx $ \c ->
js_canvasContext2dDrawImageA3 c is (pix dx) (pix dy)
drawImage (DrawImageSized (CanvasImageSource is) dx dy w h) =
CanvasRender $ withCtx $ \c ->
js_canvasContext2dDrawImageA5 c is (pix dx) (pix dy) (pix w) (pix h)
drawImage (DrawImageCropped
(CanvasImageSource is) sx sy sWidth sHeight dx dy dWidth dHeight) =
CanvasRender $ withCtx $ \c ->
let d = pix sx
e = pix sy
f = pix sWidth
g = pix sHeight
h = pix dx
i = pix dy
j = pix dWidth
k = pix dHeight
in js_canvasContext2dDrawImageA9 c is d e f g h i j k
-- | Convert integral value to 'Pixels'.
pixels :: Int -> Pixels
pixels = Px
-- | Convert 'Pixels' to integral value.
pix :: Pixels -> Int
pix (Px n) = fromIntegral n
-- * Internal Stuff
-- | Canvas two-dimensional context name.
ctxName2d :: Text
ctxName2d = "2d"
-- | Get context of @CANVAS@ element. __Unsafe__ code, throws run-time
-- exception if context is not available (for example when element is not a
-- @CANVAS@).
canvasCtx :: Text -- ^ context name
-> Elem -- ^ element, supposed to be @CANVAS@
-> IO CanvasCtx
canvasCtx n e =
do isCanvas <- js_elementIsCanvas e
     -- FIXME Check Bool return type, maybe conversion JSBool -> Bool needed
if not isCanvas
then error "Context is not available - not a CANVAS."
else do j <- js_canvasGetContext e (textToJSString n)
v <- fromJSVal j
case v of
Nothing -> error "Error converting context to Elem"
Just e -> return $ CanvasCtx e
-- | Do action with 'CanvasCtx' and return it.
withCtx :: (Elem -> IO ()) -> CanvasCtx -> IO CanvasCtx
withCtx io c =
do io (getContext c)
return c
-- | Unwrap canvas element from 'CanvasCtx'.
getContext :: CanvasCtx -> Elem
getContext (CanvasCtx e) = e
-- | Convert 'Elem' to 'CanvasImageSource'. Valid elements are:
-- @IMAGE@, @VIDEO@, @CANVAS@, @CanvasRenderingContext2D@. @ImageBitmap@ is
-- currently unsupported. __Unsafe__ code, throws run-time exception if element
-- is not a valid image source.
makeImageSource :: Elem -> IO CanvasImageSource
makeImageSource e =
do isSource <- js_elementIsImageSource e
-- FIXME Check if Bool is proper return type
if not isSource
then error "Invalid image source."
else return (CanvasImageSource e)
-- | Unwrap image source.
getImageSource :: CanvasImageSource -> Elem
getImageSource (CanvasImageSource s) = s
-- | Make 'Coords' from X and Y 'Pixels'.
makeCoords :: Pixels -> Pixels -> Coords
makeCoords px py = Coords (px, py)
-- | Unwrap 'Coords' to (X, Y) 'Pixels' tuple.
takeCoords :: Coords -> (Pixels, Pixels)
takeCoords (Coords (px, py)) = (px, py)
-- | Unwrap 'Coords' to (X, Y) 'Int' tuple.
fromCoords :: Coords -> (Int, Int)
fromCoords = (pix *** pix) . takeCoords
|
geraldus/ghcjs-perch-canvas
|
src/Internal/API.hs
|
mit
| 6,332 | 0 | 14 | 1,500 | 1,388 | 725 | 663 | 109 | 3 |
module Service.Interface (
get_server_info,
get_task_types,
get_task_description,
verify_task_config,
get_task_instance,
get_task_instance_or_fail,
grade_task_solution,
get_task_description_localized,
verify_task_config_localized,
get_task_instance_localized,
grade_task_solution_localized,
get_task_instance_or_fail_localized,
Server
) where
import Types.TT
import Types.Basic
import Types.Signed as S
import Types.Documented as D
import Types.ServerInfo
import Types.TaskTree
import Types.TaskDescription
import Types.Config
import Types.Instance as I
import Types.Solution
import Autolib.Multilingual (Language (..))
import Network.XmlRpc.Client
import Control.Applicative
type Server = String
get_server_info :: Server -> IO ServerInfo
get_server_info srv =
unTT <$> remote srv "get_server_info"
get_task_types :: Server -> IO [TaskTree]
get_task_types srv =
unTT <$> remote srv "get_task_types"
get_task_description :: Server -> Task -> IO TaskDescription
get_task_description srv a =
unTT <$> remote srv "get_task_description" (TT a)
verify_task_config :: Server
-> Task -> Config
-> IO (Either Description (Signed (Task, Config)))
verify_task_config srv a b =
unTT <$> remote srv "verify_task_config" (TT a) (TT b)
get_task_instance :: Server
-> Signed (Task, Config) -> Seed
-> IO (Signed (Task, Instance), Description, Documented Solution)
get_task_instance srv a b =
unTT <$> remote srv "get_task_instance" (TT a) (TT b)
get_task_instance_or_fail :: Server
-> Signed (Task, Config) -> Seed
-> IO (Either Description (Signed (Task, Instance), Description, Documented Solution))
get_task_instance_or_fail srv a b =
unTT <$> remote srv "get_task_instance_or_fail" (TT a) (TT b)
grade_task_solution :: Server
-> Signed (Task, Instance) -> Solution
-> IO (Either Description (Documented Double))
grade_task_solution srv a b =
unTT <$> remote srv "grade_task_solution" (TT a) (TT b)
get_task_description_localized :: Server -> Task -> Language -> IO TaskDescription
get_task_description_localized srv a lang =
unTT <$> remote srv "get_task_description_localized" (TT a) (TT lang)
verify_task_config_localized :: Server
-> Task -> Config
-> Language -> IO (Either Description (Signed (Task, Config)))
verify_task_config_localized srv a b lang =
unTT <$> remote srv "verify_task_config_localized" (TT a) (TT b) (TT lang)
get_task_instance_localized :: Server
-> Signed (Task, Config) -> Seed
-> Language -> IO (Signed (Task, Instance), Description, Documented Solution)
get_task_instance_localized srv a b lang =
unTT <$> remote srv "get_task_instance_localized" (TT a) (TT b) (TT lang)
get_task_instance_or_fail_localized :: Server
-> Signed (Task, Config) -> Seed
-> Language -> IO (Either Description (Signed (Task, Instance), Description, Documented Solution))
get_task_instance_or_fail_localized srv a b lang =
unTT <$> remote srv "get_task_instance_or_fail_localized" (TT a) (TT b) (TT lang)
grade_task_solution_localized :: Server
-> Signed (Task, Instance) -> Solution
-> Language -> IO (Either Description (Documented Double))
grade_task_solution_localized srv a b lang =
unTT <$> remote srv "grade_task_solution_localized" (TT a) (TT b) (TT lang)
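-- A usage sketch; only the endpoint URL below is made up, the calls are the
-- wrappers defined above:
--
-- > demo :: IO ()
-- > demo = do
-- >   types <- get_task_types "http://localhost:8000"
-- >   print (length types)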
|
marcellussiegburg/autotool
|
server-interface/src/Service/Interface.hs
|
gpl-2.0
| 3,345 | 0 | 14 | 575 | 1,029 | 540 | 489 | 80 | 1 |
{-# OPTIONS_HADDOCK ignore-exports #-}
{-# LANGUAGE ViewPatterns #-}
module Algebra.Vector where
import Control.Applicative
import Control.Arrow ((***))
import Data.List (sortBy)
import Diagrams.Coordinates
import Diagrams.TwoD.Types
import GHC.Float
import MyPrelude
data Alignment = CW
| CCW
| CL
deriving (Eq)
-- |Convert two dimensions such as (xmin, xmax) and (ymin, ymax)
-- to proper square coordinates, as in:
-- ((xmin, ymin), (xmax, ymax))
dimToSquare :: (Double, Double) -- ^ x dimension
-> (Double, Double) -- ^ y dimension
-> ((Double, Double), (Double, Double)) -- ^ square describing those dimensions
dimToSquare (x1, x2) (y1, y2) = ((x1, y1), (x2, y2))
-- |Checks whether the Point is in a given Square.
inRange :: ((Double, Double), (Double, Double)) -- ^ the square: ((xmin, ymin), (xmax, ymax))
-> P2 Double -- ^ Coordinate
-> Bool -- ^ result
inRange ((xmin, ymin), (xmax, ymax)) (coords -> x :& y)
= x >= min xmin xmax
&& x <= max xmin xmax
&& y >= min ymin ymax
&& y <= max ymin ymax
-- |Get the angle between two vectors.
getAngle :: V2 Double -> V2 Double -> Double
getAngle a b =
acos
. flip (/) (vecLength a * vecLength b)
. scalarProd a
$ b
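-- A worked example: the angle between the unit vectors along x and y is a
-- right angle.
--
-- > getAngle (V2 1 0) (V2 0 1)  -- pi / 2 (~ 1.5708)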
-- |Get the length of a vector.
vecLength :: V2 Double -> Double
vecLength v = sqrt (x^(2 :: Int) + y^(2 :: Int))
where
(x, y) = unr2 v
-- |Compute the scalar product of two vectors.
scalarProd :: V2 Double -> V2 Double -> Double
scalarProd (V2 a1 a2) (V2 b1 b2) = a1 * b1 + a2 * b2
-- |Multiply a scalar with a vector.
scalarMul :: Double -> V2 Double -> V2 Double
scalarMul d (V2 a b) = V2 (a * d) (b * d)
-- |Construct a vector that points to a point from the origin.
pt2Vec :: P2 Double -> V2 Double
pt2Vec = r2 . unp2
-- |Give the point which is at the coordinates the vector
-- points to from the origin.
vec2Pt :: V2 Double -> P2 Double
vec2Pt = p2 . unr2
-- |Construct a vector between two points.
vp2 :: P2 Double -- ^ vector origin
-> P2 Double -- ^ vector points here
-> V2 Double
vp2 a b = pt2Vec b - pt2Vec a
-- |Computes the determinant of 3 points.
det :: P2 Double -> P2 Double -> P2 Double -> Double
det (coords -> ax :& ay) (coords -> bx :& by) (coords -> cx :& cy) =
(bx - ax) * (cy - ay) - (by - ay) * (cx - ax)
-- |Get the point where two lines intersect, if any. Excludes the
-- case of end-points intersecting.
intersectSeg :: (P2 Double, P2 Double) -> (P2 Double, P2 Double) -> Maybe (P2 Double)
intersectSeg (a, b) (c, d) = case intersectSegSeg a b c d of
  Just x  -> if x `notElem` [a,b,c,d] then Just x else Nothing
  Nothing -> Nothing
-- |Get the orientation of 3 points which can either be
-- * clock-wise
-- * counter-clock-wise
-- * collinear
getOrient :: P2 Double -> P2 Double -> P2 Double -> Alignment
getOrient a b c = case compare (det a b c) 0 of
LT -> CW
GT -> CCW
EQ -> CL
--- |Checks if 3 points a,b,c do not build a clockwise triangle by
--- connecting a-b-c. This is done by computing the determinant and
--- checking the algebraic sign.
notcw :: P2 Double -> P2 Double -> P2 Double -> Bool
notcw a b c = case getOrient a b c of
CW -> False
_ -> True
--- |Checks if 3 points a,b,c do build a clockwise triangle by
--- connecting a-b-c. This is done by computing the determinant and
--- checking the algebraic sign.
cw :: P2 Double -> P2 Double -> P2 Double -> Bool
cw a b c = not . notcw a b $ c
-- |Sort X and Y coordinates lexicographically.
sortedXY :: [P2 Double] -> [P2 Double]
sortedXY = fmap p2 . sortLex . fmap unp2
-- |Sort Y and X coordinates lexicographically.
sortedYX :: [P2 Double] -> [P2 Double]
sortedYX = fmap p2 . sortLexSwapped . fmap unp2
-- |Sort all points according to their X-coordinates only.
sortedX :: [P2 Double] -> [P2 Double]
sortedX xs =
fmap p2
. sortBy (\(a1, _) (a2, _) -> compare a1 a2)
$ fmap unp2 xs
-- |Sort all points according to their Y-coordinates only.
sortedY :: [P2 Double] -> [P2 Double]
sortedY xs =
fmap p2
. sortBy (\(_, b1) (_, b2) -> compare b1 b2)
$ fmap unp2 xs
-- |Apply a function on the coordinates of a point.
onPT :: ((Double, Double) -> (Double, Double)) -> P2 Double -> P2 Double
onPT f = p2 . f . unp2
-- |Compare the y-coordinate of two points.
ptCmpY :: P2 Double -> P2 Double -> Ordering
ptCmpY (coords -> _ :& y1) (coords -> _ :& y2) =
compare y1 y2
-- |Compare the x-coordinate of two points.
ptCmpX :: P2 Double -> P2 Double -> Ordering
ptCmpX (coords -> x1 :& _) (coords -> x2 :& _) =
compare x1 x2
posInfPT :: P2 Double
posInfPT = p2 (read "Infinity", read "Infinity")
negInfPT :: P2 Double
negInfPT = p2 (negate . read $ "Infinity", negate . read $ "Infinity")
-- | Given an infinite line which intersects P1 and P2,
-- let P4 be the point on the line that is closest to P3.
--
-- Return an indication of where on the line P4 is relative to P1 and P2.
--
-- @
-- if P4 == P1 then 0
-- if P4 == P2 then 1
-- if P4 is halfway between P1 and P2 then 0.5
-- @
--
-- @
-- |
-- P1
-- |
-- P4 +---- P3
-- |
-- P2
-- |
-- @
--
{-# INLINE closestPointOnLineParam #-}
closestPointOnLineParam
:: P2 Double -- ^ `P1`
-> P2 Double -- ^ `P2`
-> P2 Double -- ^ `P3`
-> Double
closestPointOnLineParam p1 p2 p3
= pt2Vec (p3 - p1) `scalarProd` pt2Vec (p2 - p1)
/ pt2Vec (p2 - p1) `scalarProd` pt2Vec (p2 - p1)
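-- A worked example of the parameter above: for the segment from (0,0) to
-- (0,2) and the point (5,1), the closest point on the line is (0,1),
-- halfway between P1 and P2.
--
-- > closestPointOnLineParam (p2 (0, 0)) (p2 (0, 2)) (p2 (5, 1))  -- 0.5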
-- | Given four points specifying two lines, get the point where the two lines
-- cross, if any. Note that the lines extend off to infinity, so the
-- intersection point might not lie between either of the two pairs of points.
--
-- @
-- \\ /
-- P1 P4
-- \\ /
-- +
-- / \\
-- P3 P2
-- / \\
-- @
--
intersectLineLine
:: P2 Double -- ^ `P1`
-> P2 Double -- ^ `P2`
-> P2 Double -- ^ `P3`
-> P2 Double -- ^ `P4`
-> Maybe (P2 Double)
intersectLineLine (coords -> x1 :& y1)
(coords -> x2 :& y2)
(coords -> x3 :& y3)
(coords -> x4 :& y4)
= let dx12 = x1 - x2
dx34 = x3 - x4
dy12 = y1 - y2
dy34 = y3 - y4
den = dx12 * dy34 - dy12 * dx34
in if den == 0
then Nothing
else let
det12 = x1*y2 - y1*x2
det34 = x3*y4 - y3*x4
numx = det12 * dx34 - dx12 * det34
numy = det12 * dy34 - dy12 * det34
in Just $ p2 (numx / den, numy / den)
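-- A worked example: the line through (0,0) and (2,2) meets the line through
-- (0,2) and (2,0) in the middle.
--
-- > intersectLineLine (p2 (0, 0)) (p2 (2, 2)) (p2 (0, 2)) (p2 (2, 0))
-- >   -- Just the point (1, 1)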
-- | Get the point where a segment @P1-P2@ crosses another segement @P3-P4@,
-- if any.
intersectSegSeg
:: P2 Double -- ^ `P1`
-> P2 Double -- ^ `P2`
-> P2 Double -- ^ `P3`
-> P2 Double -- ^ `P4`
-> Maybe (P2 Double)
intersectSegSeg p1 p2 p3 p4
-- TODO: merge closest point checks with intersection, reuse subterms.
| Just p0 <- intersectLineLine p1 p2 p3 p4
, t12 <- closestPointOnLineParam p1 p2 p0
, t23 <- closestPointOnLineParam p3 p4 p0
, t12 >= 0 && t12 <= 1
, t23 >= 0 && t23 <= 1
= Just p0
| otherwise
= Nothing
|
hasufell/CGA
|
Algebra/Vector.hs
|
gpl-2.0
| 7,367 | 0 | 14 | 2,195 | 2,099 | 1,126 | 973 | 138 | 3 |
{-| Implementation of command-line functions.
This module holds the common command-line related functions for the
binaries, separated into this module since "Ganeti.Utils" is
used in many other places and this is more IO oriented.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.HTools.CLI
( Options(..)
, OptType
, defaultOptions
, Ganeti.HTools.CLI.parseOpts
, parseOptsInner
, parseYesNo
, parseISpecString
, shTemplate
, maybePrintNodes
, maybePrintInsts
, maybeShowWarnings
, printKeys
, printFinal
, setNodeStatus
-- * The options
, oDataFile
, oDiskMoves
, oDiskTemplate
, oSpindleUse
, oDynuFile
, oEvacMode
, oExInst
, oExTags
, oExecJobs
, oForce
, oFullEvacuation
, oGroup
, oIAllocSrc
, oIgnoreNonRedundant
, oInstMoves
, oJobDelay
, genOLuxiSocket
, oLuxiSocket
, oMachineReadable
, oMaxCpu
, oMaxSolLength
, oMinDisk
, oMinGain
, oMinGainLim
, oMinScore
, oNoHeaders
, oNoSimulation
, oNodeSim
, oNodeTags
, oOfflineMaintenance
, oOfflineNode
, oOneStepOnly
, oOutputDir
, oPrintCommands
, oPrintInsts
, oPrintMoves
, oPrintNodes
, oQuiet
, oRapiMaster
, oSaveCluster
, oSelInst
, oShowHelp
, oShowVer
, oShowComp
, oSkipNonRedundant
, oStdSpec
, oTieredSpec
, oVerbose
, oPriority
, genericOpts
) where
import Control.Monad
import Data.Char (toUpper)
import Data.Maybe (fromMaybe)
import System.Console.GetOpt
import System.IO
import Text.Printf (printf)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.Path as Path
import Ganeti.HTools.Types
import Ganeti.BasicTypes
import Ganeti.Common as Common
import Ganeti.Types
import Ganeti.Utils
-- * Data types
-- | Command line options structure.
data Options = Options
{ optDataFile :: Maybe FilePath -- ^ Path to the cluster data file
, optDiskMoves :: Bool -- ^ Allow disk moves
, optInstMoves :: Bool -- ^ Allow instance moves
, optDiskTemplate :: Maybe DiskTemplate -- ^ Override for the disk template
, optSpindleUse :: Maybe Int -- ^ Override for the spindle usage
, optDynuFile :: Maybe FilePath -- ^ Optional file with dynamic use data
, optEvacMode :: Bool -- ^ Enable evacuation mode
, optExInst :: [String] -- ^ Instances to be excluded
, optExTags :: Maybe [String] -- ^ Tags to use for exclusion
, optExecJobs :: Bool -- ^ Execute the commands via Luxi
, optForce :: Bool -- ^ Force the execution
, optFullEvacuation :: Bool -- ^ Fully evacuate nodes to be rebooted
, optGroup :: Maybe GroupID -- ^ The UUID of the group to process
, optIAllocSrc :: Maybe FilePath -- ^ The iallocation spec
, optIgnoreNonRedundant :: Bool -- ^ Ignore non-redundant instances
, optSelInst :: [String] -- ^ Instances to be excluded
, optLuxi :: Maybe FilePath -- ^ Collect data from Luxi
, optJobDelay :: Double -- ^ Delay before executing first job
, optMachineReadable :: Bool -- ^ Output machine-readable format
, optMaster :: String -- ^ Collect data from RAPI
, optMaxLength :: Int -- ^ Stop after this many steps
, optMcpu :: Maybe Double -- ^ Override max cpu ratio for nodes
, optMdsk :: Double -- ^ Max disk usage ratio for nodes
, optMinGain :: Score -- ^ Min gain we aim for in a step
, optMinGainLim :: Score -- ^ Limit below which we apply mingain
, optMinScore :: Score -- ^ The minimum score we aim for
, optNoHeaders :: Bool -- ^ Do not show a header line
, optNoSimulation :: Bool -- ^ Skip the rebalancing dry-run
, optNodeSim :: [String] -- ^ Cluster simulation mode
, optNodeTags :: Maybe [String] -- ^ List of node tags to restrict to
, optOffline :: [String] -- ^ Names of offline nodes
, optOfflineMaintenance :: Bool -- ^ Pretend all instances are offline
, optOneStepOnly :: Bool -- ^ Only do the first step
, optOutPath :: FilePath -- ^ Path to the output directory
, optPrintMoves :: Bool -- ^ Whether to show the instance moves
, optSaveCluster :: Maybe FilePath -- ^ Save cluster state to this file
, optShowCmds :: Maybe FilePath -- ^ Whether to show the command list
, optShowHelp :: Bool -- ^ Just show the help
, optShowComp :: Bool -- ^ Just show the completion info
, optShowInsts :: Bool -- ^ Whether to show the instance map
, optShowNodes :: Maybe [String] -- ^ Whether to show node status
, optShowVer :: Bool -- ^ Just show the program version
, optSkipNonRedundant :: Bool -- ^ Skip nodes with non-redundant instance
, optStdSpec :: Maybe RSpec -- ^ Requested standard specs
, optTestCount :: Maybe Int -- ^ Optional test count override
, optTieredSpec :: Maybe RSpec -- ^ Requested specs for tiered mode
, optReplay :: Maybe String -- ^ Unittests: RNG state
, optVerbose :: Int -- ^ Verbosity level
, optPriority :: Maybe OpSubmitPriority -- ^ OpCode submit priority
} deriving Show
-- | Default values for the command line options.
defaultOptions :: Options
defaultOptions = Options
{ optDataFile = Nothing
, optDiskMoves = True
, optInstMoves = True
, optDiskTemplate = Nothing
, optSpindleUse = Nothing
, optDynuFile = Nothing
, optEvacMode = False
, optExInst = []
, optExTags = Nothing
, optExecJobs = False
, optForce = False
, optFullEvacuation = False
, optGroup = Nothing
, optIAllocSrc = Nothing
, optIgnoreNonRedundant = False
, optSelInst = []
, optLuxi = Nothing
, optJobDelay = 10
, optMachineReadable = False
, optMaster = ""
, optMaxLength = -1
, optMcpu = Nothing
, optMdsk = defReservedDiskRatio
, optMinGain = 1e-2
, optMinGainLim = 1e-1
, optMinScore = 1e-9
, optNoHeaders = False
, optNoSimulation = False
, optNodeSim = []
, optNodeTags = Nothing
, optSkipNonRedundant = False
, optOffline = []
, optOfflineMaintenance = False
, optOneStepOnly = False
, optOutPath = "."
, optPrintMoves = False
, optSaveCluster = Nothing
, optShowCmds = Nothing
, optShowHelp = False
, optShowComp = False
, optShowInsts = False
, optShowNodes = Nothing
, optShowVer = False
, optStdSpec = Nothing
, optTestCount = Nothing
, optTieredSpec = Nothing
, optReplay = Nothing
, optVerbose = 1
, optPriority = Nothing
}
-- | Abbreviation for the option type.
type OptType = GenericOptType Options
instance StandardOptions Options where
helpRequested = optShowHelp
verRequested = optShowVer
compRequested = optShowComp
requestHelp o = o { optShowHelp = True }
requestVer o = o { optShowVer = True }
requestComp o = o { optShowComp = True }
-- * Helper functions
parseISpecString :: String -> String -> Result RSpec
parseISpecString descr inp = do
let sp = sepSplit ',' inp
err = Bad ("Invalid " ++ descr ++ " specification: '" ++ inp ++
"', expected disk,ram,cpu")
when (length sp < 3 || length sp > 4) err
prs <- mapM (\(fn, val) -> fn val) $
zip [ annotateResult (descr ++ " specs disk") . parseUnit
, annotateResult (descr ++ " specs memory") . parseUnit
, tryRead (descr ++ " specs cpus")
, tryRead (descr ++ " specs spindles")
] sp
case prs of
{- Spindles are optional, so that they are not needed when exclusive storage
is disabled. When exclusive storage is disabled, spindles are ignored,
so the actual value doesn't matter. We use 1 as a default so that in
case someone forgets and exclusive storage is enabled, we don't run into
weird situations. -}
[dsk, ram, cpu] -> return $ RSpec cpu ram dsk 1
[dsk, ram, cpu, spn] -> return $ RSpec cpu ram dsk spn
_ -> err
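-- A usage sketch (the spec string below is made up): a three-field spec is
-- parsed as disk, memory and cpus, with the spindle count defaulting to 1;
-- a fourth field, when present, sets the spindles explicitly.
--
-- > case parseISpecString "tiered" "512g,8g,4" of
-- >   Ok (RSpec cpu _mem _dsk spn) -> (cpu, spn)  -- (4, 1); sizes go through parseUnit
-- >   Bad msg                      -> error msg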
-- | Disk template choices.
optComplDiskTemplate :: OptCompletion
optComplDiskTemplate = OptComplChoices $
map diskTemplateToRaw [minBound..maxBound]
-- * Command line options
oDataFile :: OptType
oDataFile =
(Option "t" ["text-data"]
(ReqArg (\ f o -> Ok o { optDataFile = Just f }) "FILE")
"the cluster data FILE",
OptComplFile)
oDiskMoves :: OptType
oDiskMoves =
(Option "" ["no-disk-moves"]
(NoArg (\ opts -> Ok opts { optDiskMoves = False}))
"disallow disk moves from the list of allowed instance changes,\
\ thus allowing only the 'cheap' failover/migrate operations",
OptComplNone)
oDiskTemplate :: OptType
oDiskTemplate =
(Option "" ["disk-template"]
(reqWithConversion diskTemplateFromRaw
(\dt opts -> Ok opts { optDiskTemplate = Just dt })
"TEMPLATE") "select the desired disk template",
optComplDiskTemplate)
oSpindleUse :: OptType
oSpindleUse =
(Option "" ["spindle-use"]
(reqWithConversion (tryRead "parsing spindle-use")
(\su opts -> do
when (su < 0) $
fail "Invalid value of the spindle-use (expected >= 0)"
return $ opts { optSpindleUse = Just su })
"SPINDLES") "select how many virtual spindle instances use\
\ [default read from cluster]",
OptComplFloat)
oSelInst :: OptType
oSelInst =
(Option "" ["select-instances"]
(ReqArg (\ f opts -> Ok opts { optSelInst = sepSplit ',' f }) "INSTS")
"only select given instances for any moves",
OptComplManyInstances)
oInstMoves :: OptType
oInstMoves =
(Option "" ["no-instance-moves"]
(NoArg (\ opts -> Ok opts { optInstMoves = False}))
"disallow instance (primary node) moves from the list of allowed,\
\ instance changes, thus allowing only slower, but sometimes\
\ safer, drbd secondary changes",
OptComplNone)
oDynuFile :: OptType
oDynuFile =
(Option "U" ["dynu-file"]
(ReqArg (\ f opts -> Ok opts { optDynuFile = Just f }) "FILE")
"Import dynamic utilisation data from the given FILE",
OptComplFile)
oEvacMode :: OptType
oEvacMode =
(Option "E" ["evac-mode"]
(NoArg (\opts -> Ok opts { optEvacMode = True }))
"enable evacuation mode, where the algorithm only moves\
\ instances away from offline and drained nodes",
OptComplNone)
oExInst :: OptType
oExInst =
(Option "" ["exclude-instances"]
(ReqArg (\ f opts -> Ok opts { optExInst = sepSplit ',' f }) "INSTS")
"exclude given instances from any moves",
OptComplManyInstances)
oExTags :: OptType
oExTags =
(Option "" ["exclusion-tags"]
(ReqArg (\ f opts -> Ok opts { optExTags = Just $ sepSplit ',' f })
"TAG,...") "Enable instance exclusion based on given tag prefix",
OptComplString)
oExecJobs :: OptType
oExecJobs =
(Option "X" ["exec"]
(NoArg (\ opts -> Ok opts { optExecJobs = True}))
"execute the suggested moves via Luxi (only available when using\
\ it for data gathering)",
OptComplNone)
oForce :: OptType
oForce =
(Option "f" ["force"]
(NoArg (\ opts -> Ok opts {optForce = True}))
"force the execution of this program, even if warnings would\
\ otherwise prevent it",
OptComplNone)
oFullEvacuation :: OptType
oFullEvacuation =
(Option "" ["full-evacuation"]
(NoArg (\ opts -> Ok opts { optFullEvacuation = True}))
"fully evacuate the nodes to be rebooted",
OptComplNone)
oGroup :: OptType
oGroup =
(Option "G" ["group"]
(ReqArg (\ f o -> Ok o { optGroup = Just f }) "ID")
"the target node group (name or UUID)",
OptComplOneGroup)
oIAllocSrc :: OptType
oIAllocSrc =
(Option "I" ["ialloc-src"]
(ReqArg (\ f opts -> Ok opts { optIAllocSrc = Just f }) "FILE")
"Specify an iallocator spec as the cluster data source",
OptComplFile)
oIgnoreNonRedundant :: OptType
oIgnoreNonRedundant =
(Option "" ["ignore-non-redundant"]
(NoArg (\ opts -> Ok opts { optIgnoreNonRedundant = True }))
"Pretend that there are no non-redundant instances in the cluster",
OptComplNone)
oJobDelay :: OptType
oJobDelay =
(Option "" ["job-delay"]
(reqWithConversion (tryRead "job delay")
(\d opts -> Ok opts { optJobDelay = d }) "SECONDS")
"insert this much delay before the execution of repair jobs\
\ to allow the tool to continue processing instances",
OptComplFloat)
genOLuxiSocket :: String -> OptType
genOLuxiSocket defSocket =
(Option "L" ["luxi"]
(OptArg ((\ f opts -> Ok opts { optLuxi = Just f }) .
fromMaybe defSocket) "SOCKET")
("collect data via Luxi, optionally using the given SOCKET path [" ++
defSocket ++ "]"),
OptComplFile)
oLuxiSocket :: IO OptType
oLuxiSocket = liftM genOLuxiSocket Path.defaultLuxiSocket
oMachineReadable :: OptType
oMachineReadable =
(Option "" ["machine-readable"]
(OptArg (\ f opts -> do
flag <- parseYesNo True f
return $ opts { optMachineReadable = flag }) "CHOICE")
"enable machine readable output (pass either 'yes' or 'no' to\
\ explicitly control the flag, or without an argument defaults to\
\ yes)",
optComplYesNo)
oMaxCpu :: OptType
oMaxCpu =
(Option "" ["max-cpu"]
(reqWithConversion (tryRead "parsing max-cpu")
(\mcpu opts -> do
when (mcpu <= 0) $
fail "Invalid value of the max-cpu ratio, expected >0"
return $ opts { optMcpu = Just mcpu }) "RATIO")
"maximum virtual-to-physical cpu ratio for nodes (from 0\
\ upwards) [default read from cluster]",
OptComplFloat)
oMaxSolLength :: OptType
oMaxSolLength =
(Option "l" ["max-length"]
(reqWithConversion (tryRead "max solution length")
(\i opts -> Ok opts { optMaxLength = i }) "N")
"cap the solution at this many balancing or allocation\
\ rounds (useful for very unbalanced clusters or empty\
\ clusters)",
OptComplInteger)
oMinDisk :: OptType
oMinDisk =
(Option "" ["min-disk"]
(reqWithConversion (tryRead "min free disk space")
(\n opts -> Ok opts { optMdsk = n }) "RATIO")
"minimum free disk space for nodes (between 0 and 1) [0]",
OptComplFloat)
oMinGain :: OptType
oMinGain =
(Option "g" ["min-gain"]
(reqWithConversion (tryRead "min gain")
(\g opts -> Ok opts { optMinGain = g }) "DELTA")
"minimum gain to aim for in a balancing step before giving up",
OptComplFloat)
oMinGainLim :: OptType
oMinGainLim =
(Option "" ["min-gain-limit"]
(reqWithConversion (tryRead "min gain limit")
(\g opts -> Ok opts { optMinGainLim = g }) "SCORE")
"minimum cluster score for which we start checking the min-gain",
OptComplFloat)
oMinScore :: OptType
oMinScore =
(Option "e" ["min-score"]
(reqWithConversion (tryRead "min score")
(\e opts -> Ok opts { optMinScore = e }) "EPSILON")
"mininum score to aim for",
OptComplFloat)
oNoHeaders :: OptType
oNoHeaders =
(Option "" ["no-headers"]
(NoArg (\ opts -> Ok opts { optNoHeaders = True }))
"do not show a header line",
OptComplNone)
oNoSimulation :: OptType
oNoSimulation =
(Option "" ["no-simulation"]
(NoArg (\opts -> Ok opts {optNoSimulation = True}))
"do not perform rebalancing simulation",
OptComplNone)
oNodeSim :: OptType
oNodeSim =
(Option "" ["simulate"]
(ReqArg (\ f o -> Ok o { optNodeSim = f:optNodeSim o }) "SPEC")
"simulate an empty cluster, given as\
\ 'alloc_policy,num_nodes,disk,ram,cpu'",
OptComplString)
oNodeTags :: OptType
oNodeTags =
(Option "" ["node-tags"]
(ReqArg (\ f opts -> Ok opts { optNodeTags = Just $ sepSplit ',' f })
"TAG,...") "Restrict to nodes with the given tags",
OptComplString)
oOfflineMaintenance :: OptType
oOfflineMaintenance =
(Option "" ["offline-maintenance"]
(NoArg (\ opts -> Ok opts {optOfflineMaintenance = True}))
"Schedule offline maintenance, i.e., pretend that all instance are\
\ offline.",
OptComplNone)
oOfflineNode :: OptType
oOfflineNode =
(Option "O" ["offline"]
(ReqArg (\ n o -> Ok o { optOffline = n:optOffline o }) "NODE")
"set node as offline",
OptComplOneNode)
oOneStepOnly :: OptType
oOneStepOnly =
(Option "" ["one-step-only"]
(NoArg (\ opts -> Ok opts {optOneStepOnly = True}))
"Only do the first step",
OptComplNone)
oOutputDir :: OptType
oOutputDir =
(Option "d" ["output-dir"]
(ReqArg (\ d opts -> Ok opts { optOutPath = d }) "PATH")
"directory in which to write output files",
OptComplDir)
oPrintCommands :: OptType
oPrintCommands =
(Option "C" ["print-commands"]
(OptArg ((\ f opts -> Ok opts { optShowCmds = Just f }) .
fromMaybe "-")
"FILE")
"print the ganeti command list for reaching the solution,\
\ if an argument is passed then write the commands to a\
\ file named as such",
OptComplNone)
oPrintInsts :: OptType
oPrintInsts =
(Option "" ["print-instances"]
(NoArg (\ opts -> Ok opts { optShowInsts = True }))
"print the final instance map",
OptComplNone)
oPrintMoves :: OptType
oPrintMoves =
(Option "" ["print-moves"]
(NoArg (\ opts -> Ok opts { optPrintMoves = True }))
"print the moves of the instances",
OptComplNone)
oPrintNodes :: OptType
oPrintNodes =
(Option "p" ["print-nodes"]
(OptArg ((\ f opts ->
let (prefix, realf) = case f of
'+':rest -> (["+"], rest)
_ -> ([], f)
splitted = prefix ++ sepSplit ',' realf
in Ok opts { optShowNodes = Just splitted }) .
fromMaybe []) "FIELDS")
"print the final node list",
OptComplNone)
oQuiet :: OptType
oQuiet =
(Option "q" ["quiet"]
(NoArg (\ opts -> Ok opts { optVerbose = optVerbose opts - 1 }))
"decrease the verbosity level",
OptComplNone)
oRapiMaster :: OptType
oRapiMaster =
(Option "m" ["master"]
(ReqArg (\ m opts -> Ok opts { optMaster = m }) "ADDRESS")
"collect data via RAPI at the given ADDRESS",
OptComplHost)
oSaveCluster :: OptType
oSaveCluster =
(Option "S" ["save"]
(ReqArg (\ f opts -> Ok opts { optSaveCluster = Just f }) "FILE")
"Save cluster state at the end of the processing to FILE",
OptComplNone)
oSkipNonRedundant :: OptType
oSkipNonRedundant =
(Option "" ["skip-non-redundant"]
(NoArg (\ opts -> Ok opts { optSkipNonRedundant = True }))
"Skip nodes that host a non-redundant instance",
OptComplNone)
oStdSpec :: OptType
oStdSpec =
(Option "" ["standard-alloc"]
(ReqArg (\ inp opts -> do
tspec <- parseISpecString "standard" inp
return $ opts { optStdSpec = Just tspec } )
"STDSPEC")
"enable standard specs allocation, given as 'disk,ram,cpu'",
OptComplString)
oTieredSpec :: OptType
oTieredSpec =
(Option "" ["tiered-alloc"]
(ReqArg (\ inp opts -> do
tspec <- parseISpecString "tiered" inp
return $ opts { optTieredSpec = Just tspec } )
"TSPEC")
"enable tiered specs allocation, given as 'disk,ram,cpu'",
OptComplString)
oVerbose :: OptType
oVerbose =
(Option "v" ["verbose"]
(NoArg (\ opts -> Ok opts { optVerbose = optVerbose opts + 1 }))
"increase the verbosity level",
OptComplNone)
oPriority :: OptType
oPriority =
(Option "" ["priority"]
(ReqArg (\ inp opts -> do
prio <- parseSubmitPriority inp
Ok opts { optPriority = Just prio }) "PRIO")
"set the priority of submitted jobs",
OptComplChoices (map fmtSubmitPriority [minBound..maxBound]))
-- | Generic options.
genericOpts :: [GenericOptType Options]
genericOpts = [ oShowVer
, oShowHelp
, oShowComp
]
-- * Functions
-- | Wrapper over 'Common.parseOpts' with our custom options.
parseOpts :: [String] -- ^ The command line arguments
-> String -- ^ The program name
-> [OptType] -- ^ The supported command line options
-> [ArgCompletion] -- ^ The supported command line arguments
-> IO (Options, [String]) -- ^ The resulting options and leftover
-- arguments
parseOpts = Common.parseOpts defaultOptions
-- | A shell script template for autogenerated scripts.
shTemplate :: String
shTemplate =
printf "#!/bin/sh\n\n\
\# Auto-generated script for executing cluster rebalancing\n\n\
\# To stop, touch the file /tmp/stop-htools\n\n\
\set -e\n\n\
\check() {\n\
\ if [ -f /tmp/stop-htools ]; then\n\
\ echo 'Stop requested, exiting'\n\
\ exit 0\n\
\ fi\n\
\}\n\n"
-- | Optionally print the node list.
maybePrintNodes :: Maybe [String] -- ^ The field list
-> String -- ^ Informational message
-> ([String] -> String) -- ^ Function to generate the listing
-> IO ()
maybePrintNodes Nothing _ _ = return ()
maybePrintNodes (Just fields) msg fn = do
hPutStrLn stderr ""
hPutStrLn stderr (msg ++ " status:")
hPutStrLn stderr $ fn fields
-- | Optionally print the instance list.
maybePrintInsts :: Bool -- ^ Whether to print the instance list
-> String -- ^ Type of the instance map (e.g. initial)
-> String -- ^ The instance data
-> IO ()
maybePrintInsts do_print msg instdata =
when do_print $ do
hPutStrLn stderr ""
hPutStrLn stderr $ msg ++ " instance map:"
hPutStr stderr instdata
-- | Function to display warning messages from parsing the cluster
-- state.
maybeShowWarnings :: [String] -- ^ The warning messages
-> IO ()
maybeShowWarnings fix_msgs =
unless (null fix_msgs) $ do
hPutStrLn stderr "Warning: cluster has inconsistent data:"
hPutStrLn stderr . unlines . map (printf " - %s") $ fix_msgs
-- | Format a list of key, value as a shell fragment.
printKeys :: String -- ^ Prefix to printed variables
-> [(String, String)] -- ^ List of (key, value) pairs to be printed
-> IO ()
printKeys prefix =
mapM_ (\(k, v) ->
printf "%s_%s=%s\n" prefix (map toUpper k) (ensureQuoted v))
-- | Prints the final @OK@ marker in machine readable output.
printFinal :: String -- ^ Prefix to printed variable
-> Bool -- ^ Whether output should be machine readable;
-- note: if not, there is nothing to print
-> IO ()
printFinal prefix True =
-- this should be the final entry
printKeys prefix [("OK", "1")]
printFinal _ False = return ()
-- | Potentially set the node as offline based on passed offline list.
setNodeOffline :: [Ndx] -> Node.Node -> Node.Node
setNodeOffline offline_indices n =
if Node.idx n `elem` offline_indices
then Node.setOffline n True
else n
-- | Set node properties based on command line options.
setNodeStatus :: Options -> Node.List -> IO Node.List
setNodeStatus opts fixed_nl = do
let offline_passed = optOffline opts
all_nodes = Container.elems fixed_nl
offline_lkp = map (lookupName (map Node.name all_nodes)) offline_passed
offline_wrong = filter (not . goodLookupResult) offline_lkp
offline_names = map lrContent offline_lkp
offline_indices = map Node.idx $
filter (\n -> Node.name n `elem` offline_names)
all_nodes
m_cpu = optMcpu opts
m_dsk = optMdsk opts
unless (null offline_wrong) .
exitErr $ printf "wrong node name(s) set as offline: %s\n"
(commaJoin (map lrContent offline_wrong))
let setMCpuFn = case m_cpu of
Nothing -> id
Just new_mcpu -> flip Node.setMcpu new_mcpu
let nm = Container.map (setNodeOffline offline_indices .
flip Node.setMdsk m_dsk .
setMCpuFn) fixed_nl
return nm
|
narurien/ganeti-ceph
|
src/Ganeti/HTools/CLI.hs
|
gpl-2.0
| 24,674 | 0 | 21 | 6,358 | 5,201 | 2,940 | 2,261 | 591 | 3 |
{-# LANGUAGE DeriveDataTypeable, RecordWildCards #-}
module Tasks.Cli.Cli(
--
) where
import Data.Binary
import qualified Data.ByteString as B
import Data.Maybe
import System.Console.CmdArgs
import Tasks.Project
import Tasks.Types
import Tasks.Cli.Types
(>>>) :: a -> (a -> b) -> b
val >>> f = f val
dbPath :: FilePath
dbPath = "/home/eric/.tasks.db"
loadDB :: FilePath -> IO DBFormat
loadDB = decodeFile
--loadDB pth = readFile pth >>= (return . decode . bs)
projectStr :: Project -> Int -> String
projectStr prj idx =
"Project " ++ (show idx) ++ ": " ++ (bsToString $ projectName prj) ++ " " ++
"(" ++ (show . length . projectTasks $ prj) ++ " tasks, " ++
(show . projectPriority $ prj) ++ " priority)"
data Opts = Opts { listProjects :: Bool
, editProject :: Int
, newProject :: Bool } deriving (Data, Typeable, Show, Eq)
myOpts = Opts { listProjects = True &= name "l" &= help "List all projects"
, editProject = -1 &= name "e" &= help "Edit a project, given its index"
, newProject = False &= name "n" &= help "Create a project" }
type Choice = (String, String)
choiceStr :: Choice -> String
choiceStr (keys, message) = "[" ++ keys ++ "] - " ++ message
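-- A small illustration of the produced format:
-- choiceStr ("n", "Create a project") == "[n] - Create a project"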
-- Format is message, keys
editProjectChoices :: Project -> [(String, String)]
editProjectChoices prj@Project{..} =
[("n", "Project Name: " ++ bsToString projectName),
("N", "Project Notes: " ++ noteLine ++ "...")]
where
newl = convertCharToWord8 '\n'
noteLines = B.split newl (fromMaybe (bs "") (projectNotes prj))
noteLine = if length noteLines >= 1 then
bsToString $ head noteLines
else
"No notes"
progEditProject :: Int -> IO ([Project])
progEditProject idx = do
   (DBFormat tasks projects) <- loadDB dbPath
   return projects
progListProjects :: IO ()
progListProjects = do
(DBFormat tasks projects) <- loadDB dbPath
mapM_ (\(prj, idx) -> putStrLn $ projectStr prj idx) (zip projects [1..])
handleOpts :: Opts -> IO ()
handleOpts opts
   | listProjects opts = progListProjects
| otherwise = putStrLn "Unhandled option"
main = cmdArgs myOpts >>= handleOpts
|
BigEndian/tasks
|
tasks-cli/Tasks/Cli/Cli.hs
|
gpl-2.0
| 2,260 | 0 | 15 | 577 | 701 | 375 | 326 | 51 | 2 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Language.ArrayForth.HLL.AST where
import Prelude hiding (Ord (..), Eq (..), not)
import qualified Prelude
import Control.Monad.Free
import Data.String
import Language.ArrayForth.Opcode (F18Word)
data Expr = Num F18Word
| ArrayRef String
| Array String [F18Word]
| Nil
| Op Operator AST AST
| UOp UOperator AST
| If AST AST AST
| For AST AST AST
| While AST AST
| Map AST AST AST
| Fold AST AST AST AST
deriving Show
data Forth next = Forth Expr next deriving (Functor, Show)
type AST = Free Forth ()
data Operator = Add | Sub | Mul | Lt | Gt | LtE | GtE | Eq | NEq | Set | Index deriving (Show, Prelude.Eq)
data UOperator = Neg | Not deriving Show
liftExpr :: Expr -> AST
liftExpr expr = liftF $ Forth expr ()
op :: Operator -> AST -> AST -> AST
op opr e₁ e₂ = liftExpr $ Op opr e₁ e₂
instance Num AST where
fromInteger = liftExpr . Num . fromInteger
(+) = op Add
(-) = op Sub
(*) = op Mul
negate (Free (Forth (Num n) (Pure ()))) = Free $ Forth (Num $ negate n) (Pure ())
negate expr = liftExpr $ UOp Neg expr
abs = undefined
signum = undefined
instance IsString AST where
fromString = ref
(<), (>), (<=), (≤), (>=), (≥), (==), (/=), (≠), (!), (=:) :: AST -> AST -> AST
(<) = op Lt
(>) = op Gt
(<=) = op LtE
(≤) = (<=)
(>=) = op GtE
(≥) = (>=)
(==) = op Eq
(/=) = op NEq
(≠) = (/=)
(!) = op Index
(=:) = op Set
infix 3 =:
infix 4 <, >, <=, >=, ≤, ≥, ==, /=, ≠
infix 8 !
not :: AST -> AST
not = liftExpr . UOp Not
ifThenElse :: AST -> AST -> AST -> AST
ifThenElse cond e₁ e₂ = liftExpr $ If cond e₁ e₂
array :: String -> [F18Word] -> AST
array name values = liftExpr $ Array name values
nil :: AST
nil = liftExpr Nil
for :: AST -> AST -> AST -> AST
for var range body = liftExpr $ For var range body
while :: AST -> AST -> AST
while cond body = liftExpr $ While cond body
map :: AST -> AST -> AST -> AST
map var arr body = liftExpr $ Map var arr body
fold :: AST -> AST -> AST -> AST -> AST
fold var₁ var₂ arr body = liftExpr $ Fold var₁ var₂ arr body
ref :: String -> AST
ref name = liftExpr $ ArrayRef name
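-- A small usage sketch of the embedded language (the program itself is made
-- up; a user module would need OverloadedStrings for the string literals and
-- would hide the clashing Prelude operators, just as this module does):
--
-- > countUp :: AST
-- > countUp = do
-- >   "x" =: 0
-- >   while ("x" < 10) ("x" =: "x" + 1)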
|
TikhonJelvis/array-forth-hll
|
src/Language/ArrayForth/HLL/AST.hs
|
gpl-3.0
| 2,510 | 24 | 14 | 741 | 991 | 573 | 418 | 75 | 1 |
{-# LANGUAGE
OverloadedStrings
, ConstraintKinds
, FlexibleContexts
, MultiParamTypeClasses
, DeriveGeneric
, GeneralizedNewtypeDeriving
#-}
module Application.Types where
import Cabal.Types
import Server.Types
import Schema
import Path.Extended
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.ByteString as BS
import qualified Data.ByteString.Base16 as BS16
import Network.HTTP.Client (Manager, newManager)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Data.Aeson
import Data.Monoid
import Data.Url
import Data.TimeMap as TM
import Data.Hashable
import Data.Data
import Data.Acid
import Data.Acid.Memory
import Data.STRef
import Control.Monad.Logger
import Control.Monad.Trans.Control
import Control.Monad.Reader
import Control.Monad.Catch
import Control.Monad.ST
import Control.Concurrent.STM (atomically)
import Control.Concurrent.QSem
import Crypto.Saltine.Core.Sign as NaCl
import Crypto.Saltine.Class as NaCl
import GHC.Generics
-- * Global Variables
-- ** Sessions
newtype ClientPublicKey = ClientPublicKey
{ getClientPublicKey :: BS.ByteString
} deriving (Show, Eq, Hashable)
instance FromJSON ClientPublicKey where
parseJSON (String s) =
case BS16.decode $ T.encodeUtf8 s of
(decoded, rest) | rest /= "" -> fail "Not base-16 encoded"
| otherwise -> pure $ ClientPublicKey decoded
parseJSON _ = fail "Not a string"
toNaClPublicKey :: ClientPublicKey -> Maybe NaCl.PublicKey
toNaClPublicKey (ClientPublicKey k) = NaCl.decode k
type SessionId = ClientPublicKey
-- A session cache is a time-indexed mapping from session ids (client public keys) to user ids
type SessionCache = TimeMap SessionId UserId
-- ** The Execution Environment
data Queues = Queues
{ queueFetch :: QSem
, queueHtml :: QSem
, queueJson :: QSem
}
-- The environment accessible from our application
data Env = Env
{ envAuthority :: UrlAuthority
, envCwd :: FilePath -- ^ for File Processing
, envStatic :: FilePath
, envProduction :: Bool
, envVerbose :: Bool
, envSession :: SessionCache
, envPublicKey :: PublicKey
, envSecretKey :: SecretKey
, envManager :: Manager
, envDatabase :: AcidState Database
, envFetched :: STRef RealWorld Fetched
, envQueues :: Queues
, envTLS :: Bool
}
instance Show Env where
show (Env a c s p v _ _ _ _ _ _ _ t) =
"Env {envAuthority = " ++ show a ++ ", envCwd = "
++ show c ++ ", envStatic = "
++ show s ++ ", envProduction = "
++ show p ++ ", envVerbose = "
    ++ show v ++ ", envSession = <session>,\
\ envPublicKey = <#>,\
\ envSecretKey = <#>,\
\ envManager = <manager>,\
\ envDatabase = <database>,\
\ envFetched = <fetched>,\
\ envQueues = <queues>,\
\ envTLS = "
++ show t ++ "}"
instance Eq Env where
(Env a1 c1 s1 p1 v1 _ _ _ _ _ _ _ t1) == (Env a2 c2 s2 p2 v2 _ _ _ _ _ _ _ t2) =
a1 == a2 && c1 == c2 && s1 == s2 && p1 == p2 && v1 == v2 && t1 == t2
-- | A really terrible environment value that should only be used with testing
emptyEnv :: IO Env
emptyEnv = do
t <- atomically TM.newTimeMap
(sk,pk) <- NaCl.newKeypair
m <- newManager tlsManagerSettings
db <- openMemoryState initDB
f <- stToIO $ newSTRef emptyFetched
lf <- newQSem 100
lh <- newQSem 100
lj <- newQSem 1000
let auth = UrlAuthority "http" True Nothing "localhost" Nothing
pure Env { envAuthority = auth
, envCwd = "/"
, envStatic = "/"
, envProduction = False
, envVerbose = False
, envSession = t
, envPublicKey = pk
, envSecretKey = sk
, envManager = m
, envDatabase = db
, envFetched = f
, envQueues = Queues lf lh lj
, envTLS = False
}
-- * Application Effects Stack
type AppM = LoggingT (ReaderT Env IO)
runAppM :: AppM a -> Env -> IO a
runAppM = runReaderT . runStderrLoggingT
type AppTemplateT m = AbsoluteUrlT m
runAppTemplateT :: AppTemplateT m a -> UrlAuthority -> m a
runAppTemplateT = runAbsoluteUrlT
type MonadApp m =
( MonadIO m
, MonadThrow m
, MonadCatch m
, MonadMask m
, MonadLogger m
, MonadReader Env m
, MonadUrl Abs File m
, MonadBaseControl IO m
, Typeable m
)
-- * Inter-App Hrefs
-- | Data type representing top navigation bar
data AppLinks
= AppHome
| AppAbout
| AppContact
deriving (Show, Eq)
instance ToPath AppLinks Abs File where
toPath AppHome = parseAbsFile "/index"
toPath AppAbout = parseAbsFile "/about"
toPath AppContact = parseAbsFile "/contact"
instance ToLocation AppLinks Abs File where
toLocation x = fromPath <$> toPath x
data AppResources
= JQuery
| JQueryCdn
| SemanticJs
| SemanticJsCdn
| SemanticCss
| SemanticCssCdn
| JsSHA
| JsSHACdn
| JsNaCl
| JsNaClCdn
| LessStyles
| AppFrontend
deriving (Show, Eq)
instance ToPath AppResources Abs File where
toPath JQuery = parseAbsFile "/vendor/jquery/dist/jquery"
toPath SemanticJs = parseAbsFile "/vendor/semantic-ui/dist/semantic"
toPath SemanticCss = parseAbsFile "/vendor/semantic-ui/dist/semantic"
toPath JQueryCdn = parseAbsFile "/ajax/libs/jquery/3.0.0-beta1/jquery"
toPath SemanticJsCdn = parseAbsFile "/ajax/libs/semantic-ui/2.1.8/semantic"
toPath SemanticCssCdn = parseAbsFile "/ajax/libs/semantic-ui/2.1.8/semantic"
toPath JsSHACdn = parseAbsFile "/ajax/libs/jsSHA/2.1.0/sha"
toPath JsSHA = parseAbsFile "/vendor/jsSHA/src/sha"
toPath JsNaCl = parseAbsFile "/vendor/js-nacl/lib/nacl_factory"
toPath JsNaClCdn = parseAbsFile "/ajax/libs/js-nacl/1.2.0/nacl_factory"
toPath LessStyles = parseAbsFile "/style"
toPath AppFrontend = parseAbsFile "/Main"
instance ToLocation AppResources Abs File where
toLocation JQuery = (addFileExt "min.js" . fromPath) <$> toPath JQuery
toLocation SemanticJs = (addFileExt "min.js" . fromPath) <$> toPath SemanticJs
toLocation SemanticCss = (addFileExt "min.css" . fromPath) <$> toPath SemanticCss
toLocation JQueryCdn = (addFileExt "min.js" . fromPath) <$> toPath JQueryCdn
toLocation SemanticJsCdn = (addFileExt "min.js" . fromPath) <$> toPath SemanticJsCdn
toLocation SemanticCssCdn = (addFileExt "min.css" . fromPath) <$> toPath SemanticCssCdn
toLocation JsSHACdn = (addFileExt "js" . fromPath) <$> toPath JsSHACdn
toLocation JsSHA = (addFileExt "js" . fromPath) <$> toPath JsSHA
toLocation JsNaCl = (addFileExt "js" . fromPath) <$> toPath JsNaCl
toLocation JsNaClCdn = (addFileExt "min.js" . fromPath) <$> toPath JsNaClCdn
toLocation LessStyles = (addFileExt "css" . fromPath) <$> toPath LessStyles
toLocation AppFrontend = (addFileExt "min.js" . fromPath) <$> toPath AppFrontend
appendActiveWhen :: AppLinks -> Maybe AppLinks -> T.Text -> T.Text
appendActiveWhen x (Just y) c | x == y = c <> " active"
appendActiveWhen _ _ c = c
-- * Exceptions
data SessionException
= InvalidSignedRequest String
| BadSessionFormat
| NonexistentSessionId SessionId
deriving (Generic, Show)
instance Exception SessionException
data LoginException
= BadLoginFormat
deriving (Generic, Show)
instance Exception LoginException
|
athanclark/happ-store
|
src/Application/Types.hs
|
gpl-3.0
| 7,828 | 0 | 18 | 2,147 | 1,838 | 977 | 861 | 183 | 1 |
-- Data.Fasta.ByteString.Lazy module.
-- By G.W. Schwartz
--
{- | Collects all application specific functions and types. Used here for
   ByteString.Lazy
-}
module Data.Fasta.ByteString.Lazy ( module Data.Fasta.ByteString.Lazy.Types
, module Data.Fasta.ByteString.Lazy.Parse
, module Data.Fasta.ByteString.Lazy.Translation
, module Data.Fasta.ByteString.Lazy.Utility ) where
-- Local
import Data.Fasta.ByteString.Lazy.Types
import Data.Fasta.ByteString.Lazy.Parse
import Data.Fasta.ByteString.Lazy.Translation
import Data.Fasta.ByteString.Lazy.Utility
|
GregorySchwartz/fasta
|
src/Data/Fasta/ByteString/Lazy.hs
|
gpl-3.0
| 647 | 0 | 5 | 152 | 83 | 64 | 19 | 8 | 0 |
{-
Author: Ary Pablo Batista <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module LISRepresentation (
Program (Program),
Block,
Command (
Skip,
Assign,
If,
While,
),
BExp (
BCte,
And,
Cmp,
Not,
Or,
),
ROp (
Equal,
NotEqual,
Greater,
GreaterEqual,
Lower,
LowerEqual,
),
VarName,
NExp (
Variable,
NCte,
Add,
Sub,
Mul,
Div,
Mod,
),
) where
-- LIS Representation
data Program = Program Block
type Block = [Command]
data Command = Skip
| Assign VarName NExp
| If BExp Block Block
| While BExp Block
data BExp = BCte Bool
| And BExp BExp
| Cmp ROp NExp NExp
| Not BExp
| Or BExp BExp
data ROp = Equal
| Greater
| GreaterEqual
| NotEqual
| Lower
| LowerEqual
type VarName = String
data NExp = Variable VarName
| NCte Int
| Add NExp NExp
| Sub NExp NExp
| Mul NExp NExp
| Div NExp NExp
| Mod NExp NExp
-- LIS Show
instance Show ROp where
show Equal = "Equal"
show NotEqual = "NotEqual"
show Greater = "Greater"
show GreaterEqual = "GreaterEqual"
show Lower = "Lower"
show LowerEqual = "LowerEqual"
instance Show BExp where
show (BCte b) = wNJ ["BCte", show b]
show (And b1 b2) = wNJ ["And", show b1, show b2]
show (Or b1 b2) = wNJ ["Or", show b1, show b2]
show (Cmp rop e1 e2) = wNJ ["Cmp", show rop, show e1, show e2]
show (Not b) = wNJ ["Not", show b]
instance Show NExp where
show (Variable name) = wNJ ["Variable", name]
show (NCte n) = wNJ ["NCte", show n]
show (Add e1 e2) = wNJ ["Add", show e1, show e2]
show (Sub e1 e2) = wNJ ["Sub", show e1, show e2]
show (Mul e1 e2) = wNJ ["Mul", show e1, show e2]
show (Div e1 e2) = wNJ ["Div", show e1, show e2]
show (Mod e1 e2) = wNJ ["Mod", show e1, show e2]
instance Show Command where
show Skip = "Skip"
show (Assign v ne) = wNJ ["Assign ", show v, show ne]
show (If b tb fb) = wNJ ["If ", show b, show tb, show fb]
show (While b block) = wNJ ["While ", show b, show block]
instance Show Program where
show (Program cs) = "Program \n" ++ show cs
-- Auxiliary functions
join :: String -> [String] -> String
join sep []     = ""
join sep [s]    = s
join sep (s:ss) = s ++ sep ++ join sep ss
wrap s = "(" ++ s ++ ")"
wNJ = wrap . (join " ")
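-- For example, with the separator handling above:
-- show (Add (NCte 1) (NCte 2)) == "(Add (NCte 1) (NCte 2))"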
|
arypbatista/LIS-Parser
|
LISRepresentation.hs
|
gpl-3.0
| 3,512 | 0 | 8 | 1,374 | 956 | 527 | 429 | -1 | -1 |
-- Examples from
-- http://www.csc.liv.ac.uk/~lad/research/challenges
module IWC where
import Hip.Prelude hiding (choose)
import Prelude(Bool(..))
data Nat = S Nat | Z
otherwise = True
len [] = Z
len (x:xs) = S (len xs)
even Z = True
even (S Z) = False
even (S (S x)) = even x
[] `app` ys = ys
(x:xs) `app` ys = x : (xs `app` ys)
prop_evenLengthAppend :: [a] -> [a] -> Prop Bool
prop_evenLengthAppend xs ys = even (len (xs `app` ys)) =:=
even (len (ys `app` xs))
rotate Z xs = xs
rotate (S n) [] = []
rotate (S n) (x:xs) = rotate n (xs `app` [x])
prop_rotateLength xs = rotate (len xs) xs =:= xs
prop_rotateLength2 xs ys = rotate (len xs) (xs `app` ys) =:= (ys `app` xs)
plus x Z = x
plus x (S y) = S (plus x y)
times x Z = Z
times x (S y) = (x `times` y) `plus` x
choose x Z = S Z
choose Z (S y) = Z
choose (S x) (S y) = choose x (S y) `plus` choose x y
exp x Z     = S Z
exp x (S y) = exp x y `times` x
lt x Z = False
lt Z y = True
lt (S x) (S y) = lt x y
sum' :: Nat -> Nat -> (Nat -> Nat) -> Nat
sum' x Z f = f Z
sum' x (S y) f | S y `lt` x = Z
| otherwise = f (S y) `plus` sum' x y f
prop_binomialTheorems :: Nat -> Nat -> Prop Nat
prop_binomialTheorems x n = exp (S x) n =:= sum' Z n (\i -> choose n i `times` exp x i)
minus n Z = n
minus (S n) (S k) = minus n k
minus Z k = Z
{-
prop_chooseLemma :: Nat -> Nat -> Prop Nat
prop_chooseLemma n k = givenBool (k `lt` n)
$ choose n k =:= choose n (n `minus` k)
-}
prop_sumLemma :: Nat -> Nat -> (Nat -> Nat) -> (Nat -> Nat) -> Prop Nat
prop_sumLemma n m f g = sum' n m (\i -> f i `plus` g i) =:= sum' n m f `plus` sum' n m g
prop_sumLemma2 :: Nat -> Nat -> Nat -> (Nat -> Nat) -> Prop Nat
prop_sumLemma2 n m t f = sum' n m (\i -> t `times` f i) =:= t `times` sum' n m f
evenm Z = True
evenm (S n) = oddm n
oddm Z = False
oddm (S n) = evenm n
evenr Z = True
evenr (S Z) = False
evenr (S (S x)) = evenr x
prop_evenEq :: Nat -> Prop Bool
prop_evenEq n = evenm n =:= evenr n
is6 (S (S (S (S (S (S Z)))))) = True
is6 _ = False
splitList :: [a] -> [a] -> [a]
splitList [] w = w
splitList (a:x) w | is6 (len w) = w `app` splitList (a:x) []
| otherwise = splitList x (w `app` [a])
newSplit :: [a] -> [a] -> Nat -> [a]
newSplit [] w d = w
newSplit (a:x) w d | is6 d = w `app` newSplit (a:x) [] Z
| otherwise = newSplit x (w `app` [a]) (S d)
prop_split :: [a] -> [a] -> Prop [a]
prop_split x w = newSplit x w (len w) =:= splitList x w
|
danr/hipspec
|
examples/old-examples/hip/IWC.hs
|
gpl-3.0
| 2,607 | 0 | 17 | 808 | 1,498 | 774 | 724 | 66 | 1 |
{-
The data structures used for NFA-DFA conversion.
This was created as Project 1 for EECS 665 at the University of Kansas.
Author: Ryan Scott
-}
{-# LANGUAGE RankNTypes #-}
module NFA2DFA.Data where
import Control.Applicative
import Control.Monad
import qualified Data.Map as M
import Data.Map (Map)
import Data.Maybe
import qualified Data.Set as S
import Data.Set (Set)
import Data.Word
-------------------------------------------------------------------------------
-- | Magical data accessor/setter type.
type Lens s t a b = forall f. Functor f => (a -> f b) -> s -> f t
-- | The simpler Lens type used for most operations.
type Lens' s a = Lens s s a a
-- | The trivial functor.
newtype Identity a = Identity { runIdentity :: a }
instance Functor Identity where
fmap f = Identity . f . runIdentity
-- | View a data structure element via a 'Lens'.
viewL :: Lens' s a -> s -> a
viewL lens = getConst . lens Const
{-# INLINE viewL #-}
-- | Replace a data structure element via a 'Lens'.
setL :: Lens' s a -> a -> s -> s
setL lens = overL lens . const
{-# INLINE setL #-}
-- | Modify a data structure element via a 'Lens'.
overL :: Lens' s a -> (a -> a) -> s -> s
overL lens f = runIdentity . lens (Identity . f)
{-# INLINE overL #-}
-------------------------------------------------------------------------------
-- | A stack is simply a singly linked list.
type Stack = []
-- Pushing is cons
push :: a -> Stack a -> Stack a
push = (:)
-- Popping is cdr
pop :: Stack a -> Stack a
pop = tail
-- Peeking is car
peek :: Stack a -> a
peek = head
-------------------------------------------------------------------------------
-- | A nondeterminstic finite automaton data structure.
data NFA = NFA {
nfaAStates :: Set AState
, nfaInputSymbols :: Set InputSymbol
, nfaTransitionFun :: NFATran
, nfaInitialAState :: AState
, nfaFinalAStates :: Set AState
} deriving (Eq, Ord, Read, Show)
-- | An NFA transition table.
type NFATran = Map (AState, Transition) (Set AState)
-- | A state data structure.
newtype AState = AState { runAState :: Word } deriving (Bounded, Eq, Ord, Read)
instance Num AState where
abs = error "AState arithmetic not permitted"
signum = error "AState arithmetic not permitted"
(+) = error "AState arithmetic not permitted"
(-) = error "AState arithmetic not permitted"
(*) = error "AState arithmetic not permitted"
fromInteger = AState . fromInteger
instance Show AState where
showsPrec d (AState w) = showsPrec d w
-- After an NFA is converted to a DFA, the DFA's states will actually consist
-- of marked sets of NFA states. Nifty!
data MarkedAStateSet = MarkedAStateSet {
markedNum :: Word
, marked :: Bool
, markedSet :: Set AState
} deriving (Eq, Read)
-- Only compare 'MarkedAStateSet's by their indexes.
instance Ord MarkedAStateSet where
compare ms1 ms2 = compare (markedNum ms1) (markedNum ms2)
instance Show MarkedAStateSet where
showsPrec d (MarkedAStateSet n _ _) = showsPrec d n
-- A 'Lens' that views a 'MarkedAStateSet`'s marked status.
_marked :: Lens' MarkedAStateSet Bool
_marked inj (MarkedAStateSet mn m ms) = (\m' -> MarkedAStateSet mn m' ms) <$> inj m
{-# INLINE _marked #-}
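-- A small usage sketch (the helper 'markSet' is illustrative and not part of
-- the original module): the subset-construction loop can mark a state set by
-- flipping its flag through the '_marked' lens.
markSet :: MarkedAStateSet -> MarkedAStateSet
markSet = setL _marked True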
type InputSymbol = Char
-- An input symbol, or epsilon
type Transition = Maybe InputSymbol
epsilon :: Transition
epsilon = Nothing
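-- An illustrative example (not used elsewhere in this module): a two-state
-- NFA over the alphabet {'a'} in which state 0 reaches the accepting state 1
-- either on input 'a' or via an epsilon move.
exampleNFA :: NFA
exampleNFA = NFA
    { nfaAStates       = S.fromList [0, 1]
    , nfaInputSymbols  = S.singleton 'a'
    , nfaTransitionFun = M.fromList
        [ ((0, Just 'a'), S.singleton 1)
        , ((0, epsilon),  S.singleton 1)
        ]
    , nfaInitialAState = 0
    , nfaFinalAStates  = S.singleton 1
    }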
-- A deterministic finite automaton data structure (as converted from an NFA).
data DFA = DFA {
dfaAStates :: Set MarkedAStateSet
, dfaInputSymbols :: Set InputSymbol
, dfaTransitionFun :: DFATran
, dfaInitialAState :: MarkedAStateSet
, dfaFinalStates :: Set MarkedAStateSet
}
-- | A DFA transition table.
type DFATran = Map (MarkedAStateSet, InputSymbol) MarkedAStateSet
-- Prettily prints a DFA to the screen.
printDFA :: DFA -> IO ()
printDFA (DFA states inputs trans initial finals) = do
putStr "Initial State: "
printSet $ S.singleton initial
putStrLn ""
putStr "Final States: "
printSet finals
putStrLn ""
putStr "State\t"
let inputsList = S.toList inputs
forM_ inputsList $ \input -> do
putChar input
putStr "\t"
putStrLn ""
forM_ (S.toList states) $ \st -> do
putStr $ show st
putStr "\t"
forM_ inputsList $ \input -> do
if (M.member (st, input) trans)
then do
putChar '{'
putStr . show . fromJust $ M.lookup (st, input) trans
putStr "}\t"
else putStr "{}\t"
putStrLn ""
-- Prettily prints a set's contents surrounded by braces.
printSet :: Show a => Set a -> IO ()
printSet set = do
putChar '{'
unless (S.null set) $ do
let setList = S.toList set
putStr . show $ head setList
forM_ (tail setList) $ \e -> do
putChar ','
putStr $ show e
putChar '}'
|
RyanGlScott/nfa2dfa
|
src/NFA2DFA/Data.hs
|
gpl-3.0
| 4,995 | 0 | 20 | 1,259 | 1,238 | 650 | 588 | 107 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Manufacturers.Accounts.Products.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the product from a Manufacturer Center account.
--
-- /See:/ <https://developers.google.com/manufacturers/ Manufacturer Center API Reference> for @manufacturers.accounts.products.delete@.
module Network.Google.Resource.Manufacturers.Accounts.Products.Delete
(
-- * REST Resource
AccountsProductsDeleteResource
-- * Creating a Request
, accountsProductsDelete
, AccountsProductsDelete
-- * Request Lenses
, apdParent
, apdXgafv
, apdUploadProtocol
, apdAccessToken
, apdUploadType
, apdName
, apdCallback
) where
import Network.Google.Manufacturers.Types
import Network.Google.Prelude
-- | A resource alias for @manufacturers.accounts.products.delete@ method which the
-- 'AccountsProductsDelete' request conforms to.
type AccountsProductsDeleteResource =
"v1" :>
Capture "parent" Text :>
"products" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Deletes the product from a Manufacturer Center account.
--
-- /See:/ 'accountsProductsDelete' smart constructor.
data AccountsProductsDelete =
AccountsProductsDelete'
{ _apdParent :: !Text
, _apdXgafv :: !(Maybe Xgafv)
, _apdUploadProtocol :: !(Maybe Text)
, _apdAccessToken :: !(Maybe Text)
, _apdUploadType :: !(Maybe Text)
, _apdName :: !Text
, _apdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsProductsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'apdParent'
--
-- * 'apdXgafv'
--
-- * 'apdUploadProtocol'
--
-- * 'apdAccessToken'
--
-- * 'apdUploadType'
--
-- * 'apdName'
--
-- * 'apdCallback'
accountsProductsDelete
:: Text -- ^ 'apdParent'
-> Text -- ^ 'apdName'
-> AccountsProductsDelete
accountsProductsDelete pApdParent_ pApdName_ =
AccountsProductsDelete'
{ _apdParent = pApdParent_
, _apdXgafv = Nothing
, _apdUploadProtocol = Nothing
, _apdAccessToken = Nothing
, _apdUploadType = Nothing
, _apdName = pApdName_
, _apdCallback = Nothing
}
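-- An illustrative request value (sketch; the parent and product name below
-- are placeholder strings, not real identifiers):
exampleDelete :: AccountsProductsDelete
exampleDelete =
  accountsProductsDelete "accounts/123456" "US:en:example-product"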
-- | Parent ID in the format \`accounts\/{account_id}\`. \`account_id\` - The
-- ID of the Manufacturer Center account.
apdParent :: Lens' AccountsProductsDelete Text
apdParent
= lens _apdParent (\ s a -> s{_apdParent = a})
-- | V1 error format.
apdXgafv :: Lens' AccountsProductsDelete (Maybe Xgafv)
apdXgafv = lens _apdXgafv (\ s a -> s{_apdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
apdUploadProtocol :: Lens' AccountsProductsDelete (Maybe Text)
apdUploadProtocol
= lens _apdUploadProtocol
(\ s a -> s{_apdUploadProtocol = a})
-- | OAuth access token.
apdAccessToken :: Lens' AccountsProductsDelete (Maybe Text)
apdAccessToken
= lens _apdAccessToken
(\ s a -> s{_apdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
apdUploadType :: Lens' AccountsProductsDelete (Maybe Text)
apdUploadType
= lens _apdUploadType
(\ s a -> s{_apdUploadType = a})
-- | Name in the format \`{target_country}:{content_language}:{product_id}\`.
-- \`target_country\` - The target country of the product as a CLDR
-- territory code (for example, US). \`content_language\` - The content
-- language of the product as a two-letter ISO 639-1 language code (for
-- example, en). \`product_id\` - The ID of the product. For more
-- information, see
-- https:\/\/support.google.com\/manufacturers\/answer\/6124116#id.
apdName :: Lens' AccountsProductsDelete Text
apdName = lens _apdName (\ s a -> s{_apdName = a})
-- | JSONP
apdCallback :: Lens' AccountsProductsDelete (Maybe Text)
apdCallback
= lens _apdCallback (\ s a -> s{_apdCallback = a})
instance GoogleRequest AccountsProductsDelete where
type Rs AccountsProductsDelete = Empty
type Scopes AccountsProductsDelete =
'["https://www.googleapis.com/auth/manufacturercenter"]
requestClient AccountsProductsDelete'{..}
= go _apdParent _apdName _apdXgafv _apdUploadProtocol
_apdAccessToken
_apdUploadType
_apdCallback
(Just AltJSON)
manufacturersService
where go
= buildClient
(Proxy :: Proxy AccountsProductsDeleteResource)
mempty
|
brendanhay/gogol
|
gogol-manufacturers/gen/Network/Google/Resource/Manufacturers/Accounts/Products/Delete.hs
|
mpl-2.0
| 5,488 | 0 | 17 | 1,203 | 781 | 458 | 323 | 111 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
module Main where
-- aeson ---------------------------------------------------------------------
import Data.Aeson
( FromJSON (parseJSON)
, ToJSON (toJSON)
, Result (Error, Success)
, Value (Object)
, (.=)
, (.:)
, (.:?)
, decode
, encode
, fromJSON
, object
)
-- base ----------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Concurrent
( forkIO
, killThread
, modifyMVar_
, myThreadId
, newEmptyMVar
, putMVar
, readMVar
, takeMVar
, threadDelay
)
import Control.Exception
( Exception
, SomeException
, catch
, finally
, throwIO
)
import Control.Monad (msum, mzero)
import Data.Foldable (for_, toList)
import Data.Int (Int64)
import Data.List (foldl')
import Data.Maybe (mapMaybe)
import Data.Monoid ((<>), mconcat, mempty)
import Data.Traversable (for, traverse)
import Data.Typeable (Typeable)
import Data.Unique (newUnique, hashUnique)
import Prelude hiding (log)
import System.Environment (getArgs)
import System.IO (stderr)
-- bytestring ----------------------------------------------------------------
import Data.ByteString.Lazy (fromStrict)
-- ConfigFile ----------------------------------------------------------------
import Data.ConfigFile
( CPErrorData
, emptyCP
, get
, readfile
, sections
)
-- directory -----------------------------------------------------------------
import System.Directory
( createDirectoryIfMissing
, doesFileExist
, removeDirectory
, removeFile
)
-- hslogger ------------------------------------------------------------------
import System.Log.Logger
( Priority (DEBUG, WARNING)
, debugM
, errorM
, getLogger
, rootLoggerName
, setLevel
, setHandlers
, warningM
, updateGlobalLogger
)
import System.Log.Handler.Simple (verboseStreamHandler)
-- http-client ---------------------------------------------------------------
import Network.HTTP.Client
( HttpException
, applyBasicAuth
, httpLbs
, parseUrl
, withManager
)
import Network.HTTP.Client.Internal
( RequestBody (RequestBodyBS)
, method
, requestBody
, requestHeaders
)
import qualified Network.HTTP.Client.Internal as H
( responseBody
)
-- http-client-tls -----------------------------------------------------------
import Network.HTTP.Client.TLS
( tlsManagerSettings
)
-- iCalendar -----------------------------------------------------------------
import Text.ICalendar
( Date (Date)
, DateTime (FloatingDateTime, UTCDateTime, ZonedDateTime)
, DTStart (DTStartDate, DTStartDateTime)
, DTEnd (DTEndDate, DTEndDateTime)
, DurationProp (DurationProp)
, Duration (DurationDate, DurationTime, DurationWeek)
, Sign (Positive)
, descriptionValue
, locationValue
, parseICalendar
, summaryValue
, uidValue
, urlValue
, vcEvents
, veDescription
, veDTEndDuration
, veDTStart
, veLocation
, veSummary
, veUID
, veUrl
)
-- IntervalMap ---------------------------------------------------------------
import Data.IntervalMap.Interval (Interval (IntervalCO))
import qualified Data.IntervalMap.Generic.Interval as I (Interval)
import Data.IntervalMap.Generic.Strict
( IntervalMap
, alter
, intersecting
, leftClosed
, lowerBound
, rightClosed
, upperBound
)
-- lens ----------------------------------------------------------------------
import Control.Lens ((.~), (?~), (^?), (&), (^..))
-- lens-aeson ----------------------------------------------------------------
import Data.Aeson.Lens (key, _String)
-- network -------------------------------------------------------------------
import Network.Socket
( Family (AF_UNIX)
, SockAddr (SockAddrUnix)
, Socket
, SocketType (Stream)
, accept
, bind
, listen
, socket
, sClose
)
import Network.Socket.ByteString.Lazy (recv, sendAll)
-- network-uri ---------------------------------------------------------------
import Network.URI (uriToString)
-- old-locale ----------------------------------------------------------------
import System.Locale (defaultTimeLocale)
-- text ----------------------------------------------------------------------
import Data.Text
( Text
, intercalate
, isPrefixOf
, pack
, replace
, splitOn
, unpack
)
import Data.Text.Lazy (toStrict)
import Data.Text.Encoding (encodeUtf8)
-- time ----------------------------------------------------------------------
import Data.Time
( Day
, UTCTime (UTCTime)
, addDays
, addUTCTime
, formatTime
, fromGregorian
, getCurrentTime
, getCurrentTimeZone
, localTimeToUTC
, parseTime
, secondsToDiffTime
, showGregorian
, toGregorian
, utctDay
)
-- unix ----------------------------------------------------------------------
import System.Posix.Files (setFileMode)
-- wreq ----------------------------------------------------------------------
import Network.Wreq
( Response
, FormParam ((:=))
, asValue
, auth
, defaults
, getWith
, header
, oauth2Bearer
, params
, post
, responseBody
)
-- xml-conduit ---------------------------------------------------------------
import Text.XML (def, parseLBS_)
-- xml-lens ------------------------------------------------------------------
import Text.XML.Lens (named, nodes, root, _Element, _Content)
------------------------------------------------------------------------------
programName :: String
programName = "events-fetcher"
------------------------------------------------------------------------------
type Events = IntervalMap (Interval UTCTime) [Event]
------------------------------------------------------------------------------
data Event = Event
{ start :: !(Either Day UTCTime)
, end :: !(Either Day UTCTime)
, eventId :: !Text
, summary :: !Text
, htmlLink :: !(Maybe Text)
, location :: !(Maybe Text)
, description :: !(Maybe Text)
}
deriving (Read, Show, Eq, Ord, Typeable)
------------------------------------------------------------------------------
instance I.Interval Event UTCTime where
lowerBound = either (flip UTCTime (secondsToDiffTime 0)) id . start
upperBound = either (flip UTCTime (secondsToDiffTime 0)) id . end
leftClosed = const True
rightClosed = const False
------------------------------------------------------------------------------
instance FromJSON Event where
parseJSON (Object o) = do
s <- o .: "start"
start_ <- parseDateTime s
e <- o .: "end"
end_ <- either (Left . addDays 1) Right <$> parseDateTime e
eventId_ <- o .: "id"
summary_ <- o .: "summary"
htmlLink_ <- o .:? "htmlLink"
location_ <- o .:? "location"
description_ <- o .:? "description"
return $ Event
{ start = start_
, end = end_
, eventId = eventId_
, summary = summary_
, htmlLink = htmlLink_
, location = location_
, description = description_
}
where
parseDateTime (Object d) = msum $
[ Left <$> do
date <- d .: "date"
case parseTime defaultTimeLocale "%F" date of
Just day -> return day
Nothing -> mzero
, Right <$> do
dateTime <- d .: "dateTime"
case parseTime defaultTimeLocale "%FT%T%Q%Z" dateTime of
Just time -> return time
Nothing -> mzero
]
parseDateTime _ = mzero
parseJSON _ = mzero
------------------------------------------------------------------------------
instance ToJSON Event where
toJSON e = object $ mconcat
[
[ "start" .= dateToJSON (start e)
, "end" .= dateToJSON (end e)
, "id" .= eventId e
, "summary" .= summary e
]
, maybe [] (\l -> ["htmlLink" .= l]) (htmlLink e)
, maybe [] (\l -> ["location" .= l]) (location e)
, maybe [] (\d -> ["description" .= d]) (description e)
]
where
dateToJSON (Left day) = object ["date" .= pack (showGregorian day)]
dateToJSON (Right time) = object ["dateTime" .=
formatTime defaultTimeLocale "%FT%T%Q%z" time]
------------------------------------------------------------------------------
data GoogleCalendar = GoogleCalendar
{ clientSecret :: !Text
, refreshToken :: !Text
, clientId :: !Text
, apiKey :: !Text
, userAgent :: !Text
, email :: !Text
}
deriving (Show)
------------------------------------------------------------------------------
data CaldavCalendar = CaldavCalendar
{ url :: !Text
, username :: !Text
, password :: !Text
}
deriving (Show)
------------------------------------------------------------------------------
data Calendar = Google GoogleCalendar | Caldav CaldavCalendar
deriving (Show)
------------------------------------------------------------------------------
name :: Calendar -> String
name (Google calendar) = unpack (email calendar)
name (Caldav calendar) = map sanitize $ unpack (url calendar)
where
sanitize '/' = '_'
sanitize ':' = '_'
sanitize '%' = '_'
sanitize c = c
------------------------------------------------------------------------------
logger :: Calendar -> String
logger calendar = programName ++ "." ++ name calendar
------------------------------------------------------------------------------
data Config = Config
{ calendars :: [Calendar]
, logLevel :: Priority
}
deriving (Show)
------------------------------------------------------------------------------
configFileName :: FilePath
configFileName = programName ++ ".conf"
------------------------------------------------------------------------------
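-- | Like 'threadDelay', but for 'Int64' delays that may exceed @'maxBound' ::
-- 'Int'@: sleep in 'maxBound'-sized chunks until the remainder fits. The
-- @const x (threadDelay x)@ expression only pins @x@ to the 'Int' argument
-- type of 'threadDelay'; that inner call is never executed.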
threadDelay' :: Int64 -> IO ()
threadDelay' t = do
let t' = t - fromIntegral (let x = maxBound in const x (threadDelay x))
if t' > 0
then threadDelay maxBound >> threadDelay' t'
else threadDelay $ fromIntegral t
------------------------------------------------------------------------------
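-- | Run the action once, then re-run it every @time@ microseconds on a
-- background thread, keeping the most recent result in an 'MVar'. The
-- returned 'IO' action reads the latest cached value (blocking until the
-- first run has completed).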
cache :: String -> Int64 -> IO a -> IO (IO a)
cache loggerName time action = do
resultMVar <- newEmptyMVar
_ <- forkIO $ logErrors loggerName $ do
result <- action'
putMVar resultMVar result
let loop = do
threadDelay' time
result' <- action'
modifyMVar_ resultMVar (const (return result'))
loop
loop
return (readMVar resultMVar)
where
action' = logErrors loggerName action
------------------------------------------------------------------------------
data ResponseError = ResponseError (Response Value)
deriving (Show, Typeable)
------------------------------------------------------------------------------
instance Exception ResponseError
------------------------------------------------------------------------------
data ConfigFileError
= ConfigFileParseError FilePath CPErrorData
| ConfigFileNotFound
deriving (Typeable)
------------------------------------------------------------------------------
instance Show ConfigFileError where
show (ConfigFileParseError f e) = mconcat
[ "Could not parse config file "
, f
, ": "
, show e
]
show ConfigFileNotFound = mconcat
[ "Could not find config file "
, show configFileName
, " in either "
, show ("." :: String)
, " or "
, show ("/etc" :: String)
]
------------------------------------------------------------------------------
instance Exception ConfigFileError
------------------------------------------------------------------------------
twoYearsFromNow :: IO UTCTime
twoYearsFromNow = do
today <- utctDay <$> getCurrentTime
let (year, _, _) = toGregorian today
return $ UTCTime (fromGregorian (year + 2) 1 1) (secondsToDiffTime 0)
------------------------------------------------------------------------------
fetchGoogleEvents :: GoogleCalendar -> IO Events
fetchGoogleEvents calendar = do
debugM log "fetching events list"
token <- fetchToken
(events, next) <- fetchPage Nothing token
result <- go token (buildMap mempty events) next
debugM log "got events list"
return result
where
log = logger (Google calendar)
go _ !imap Nothing = return imap
go token !imap next = do
(events, next') <- fetchPage next token
go token (buildMap imap events) next'
fetchToken = retryOnHttpException log $ do
debugM log "fetching token"
response <- asValue =<< post "https://accounts.google.com/o/oauth2/token"
[ "client_secret" := clientSecret calendar
, "grant_type" := pack "refresh_token"
, "refresh_token" := refreshToken calendar
, "client_id" := clientId calendar
]
debugM log "got token"
case response ^? responseBody . key "access_token" . _String of
Just token' -> return $! encodeUtf8 token'
Nothing -> throwIO (ResponseError response)
fetchPage pageToken oathToken = retryOnHttpException log $ do
debugM log "fetching events page"
timeMax <- twoYearsFromNow
let opts = defaults
& params .~
[ ("timeMax", pack $ formatTime defaultTimeLocale "%FT%T%Q%z" timeMax)
, ("maxResults", "2500")
, ("singleEvents", "true")
, ("orderBy", "starttime")
, ("fields", "items(description,end,htmlLink,id,location,start,summary),nextPageToken")
, ("key", apiKey calendar)
] ++ maybe [] (\t -> [("pageToken", t)]) pageToken
& auth ?~ oauth2Bearer oathToken
& header "X-Javascript-User-Agent" .~ [encodeUtf8 $ userAgent calendar]
response <- asValue =<< getWith opts (mconcat
[ "https://www.googleapis.com/calendar/v3/calendars/"
, unpack $ replace "@" "%40" $ email calendar
, "/events"
])
events <- case response ^? responseBody . key "items" of
Just items -> case fromJSON items of
Error _ -> throwIO (ResponseError response)
Success items' -> return $! items'
Nothing -> throwIO (ResponseError response)
debugM log "got events page"
case response ^? responseBody . key "nextPageToken" . _String of
nextPageToken -> return $! (events, nextPageToken)
------------------------------------------------------------------------------
fetchCaldavEvents :: CaldavCalendar -> IO Events
fetchCaldavEvents calendar = do
debugM log "fetching events list"
time <- twoYearsFromNow
let timeBS = encodeUtf8 $ pack $ formatTime defaultTimeLocale "%Y%m%dT%H%M%SZ" time
let contentType = "application/xml; charset=\"utf-8\""
let body = RequestBodyBS $ mconcat
[ "<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n\
\<C:calendar-query xmlns:D=\"DAV:\" xmlns:C=\"urn:ietf:params:xml:ns:caldav\">\n\
\ <D:prop>\n\
\ <D:getetag/>\n\
\ <C:calendar-data>\n\
\ <C:expand start=\"10000101T000000Z\" end=\""
, timeBS
, "\"/>\n\
\ </C:calendar-data>\n\
\ </D:prop>\n\
\ <C:filter>\n\
\ <C:comp-filter name=\"VCALENDAR\">\n\
\ <C:comp-filter name=\"VEVENT\">\n\
\ <C:time-range start=\"10000101T000000Z\" end=\""
, timeBS
, "\"/>\n\
\ </C:comp-filter>\n\
\ </C:comp-filter>\n\
\ </C:filter>\n\
\</C:calendar-query>"
]
let user = encodeUtf8 $ username calendar
let pass = encodeUtf8 $ password calendar
req <- parseUrl (unpack $ url calendar)
let req' = applyBasicAuth user pass req
let req'' = req'
{ method = "REPORT"
, requestHeaders = requestHeaders (req') ++
[ ("Depth", "1")
, ("Content-Type", contentType)
]
, requestBody = body
}
rsp <- withManager tlsManagerSettings $ httpLbs req''
debugM log "got response; parsing XML..."
let xml = parseLBS_ def (H.responseBody rsp)
let icals = xml ^.. root
. nodes . traverse . _Element
. nodes . traverse . _Element
. nodes . traverse . _Element
. nodes . traverse . _Element
. named "calendar-data"
. nodes . traverse . _Content
icals' <- mapM sanitize icals
debugM log "parsing iCal..."
let vevents = toList $ vcEvents $ mconcat $ map parse icals'
zone <- getCurrentTimeZone
debugM log "building index..."
let events = buildMap mempty $ mapMaybe (v2e zone) vevents
debugM log "got events list"
return events
where
log = logger (Caldav calendar)
parse = toVCal . parseICalendar def programName . toLBS
toVCal (Right (icals, _)) = mconcat icals
toVCal (Left s) = error s
toLBS = fromStrict . encodeUtf8
sanitize = fmap (intercalate "\r\n")
. mapM randomiseUid
. filter (not . isPrefixOf "RECURRENCE-ID:")
. splitOn "\r\n"
randomiseUid l | "UID:" `isPrefixOf` l = do
("UID:" <>) . pack . show . hashUnique <$> newUnique
randomiseUid l = return l
v2e z v = do
start_ <- veDTStart v
end_ <- veDTEndDuration v
let start_' = dtStart z start_
let end_' = dtEnd z start_' end_
return $ Event
{ start = start_'
, end = end_'
, eventId = toStrict $ uidValue $ veUID v
, summary = toStrict $ maybe "[untitled]" summaryValue $ veSummary v
, htmlLink = (\x -> pack $ uriToString id x "") . urlValue <$> veUrl v
, location = toStrict . locationValue <$> veLocation v
, description = toStrict . descriptionValue <$> veDescription v
}
dtStart z (DTStartDateTime x _) = Right $ dt2utc z x
dtStart _ (DTStartDate (Date x) _) = Left x
dtEnd z _ (Left (DTEndDateTime x _)) = Right $ dt2utc z x
dtEnd _ _ (Left (DTEndDate (Date x) _)) = Left x
dtEnd _ s (Right (DurationProp d _)) = Right $ addUTCTime (d2dt d) (edu2utc s)
dt2utc _ (UTCDateTime x) = x
dt2utc z (FloatingDateTime x) = localTimeToUTC z x
dt2utc z (ZonedDateTime x _) = localTimeToUTC z x
edu2utc (Left x) = UTCTime x (secondsToDiffTime 0)
edu2utc (Right x) = x
d2dt (DurationDate p d h m s) = fromIntegral $ s2m p * (((((d * 24) + h) * 60) + m) * 60) + s
d2dt (DurationTime p h m s) = fromIntegral $ s2m p * (((h * 60) + m) * 60) + s
d2dt (DurationWeek p w) = fromIntegral $ s2m p + (w * 7 * 24 * 60 * 60)
s2m Positive = 1
s2m _ = negate 1
{-
data Event = Event
{ start :: !(Either Day UTCTime)
, end :: !(Either Day UTCTime)
, eventId :: !Text
, summary :: !Text
, htmlLink :: !(Maybe Text)
, location :: !(Maybe Text)
, description :: !(Maybe Text)
}
-}
------------------------------------------------------------------------------
buildMap :: Events -> [Event] -> Events
buildMap = foldl' f
where
f !imap !event = do
let k = IntervalCO (lowerBound event) (upperBound event)
alter (\v -> Just (event : maybe [] id v)) k imap
------------------------------------------------------------------------------
fetchEvents :: Calendar -> IO Events
fetchEvents (Google calendar) = fetchGoogleEvents calendar
fetchEvents (Caldav calendar) = fetchCaldavEvents calendar
------------------------------------------------------------------------------
retryOnHttpException :: String -> IO a -> IO a
retryOnHttpException loggerName m = catch m $ \(e :: HttpException) -> do
warningM loggerName (show e)
threadDelay 30000000
retryOnHttpException loggerName m
------------------------------------------------------------------------------
data Query = Query !UTCTime !UTCTime
deriving (Eq, Ord, Read, Show, Typeable)
------------------------------------------------------------------------------
instance FromJSON Query where
parseJSON v = do
(s, e) <- parseJSON v
s' <- maybe mzero return (parseTime defaultTimeLocale "%FT%T%Q%Z" s)
e' <- maybe mzero return (parseTime defaultTimeLocale "%FT%T%Q%Z" e)
return $ Query s' e'
------------------------------------------------------------------------------
query :: Events -> Query -> [Event]
query imap (Query a b) = intersecting imap (IntervalCO a b) >>= snd
------------------------------------------------------------------------------
serve :: Socket -> Calendar -> IO ()
serve sock calendar = do
events <- cache (logger calendar) 100000000 $ fetchEvents calendar
let loop = do
(connection, _) <- accept sock
_ <- forkIO $ flip finally (sClose connection) $ logErrors (logger calendar) $ do
debugM (logger calendar) $ "accepted connection"
bytes <- recv connection 128
let Just q = decode bytes
es <- events
debugM (logger calendar) $ "received request: " ++ show q
let result = query es q
debugM (logger calendar) $ "sending response: " ++ show result
sendAll connection $ encode result
debugM (logger calendar) $ "closing connection"
loop
loop
------------------------------------------------------------------------------
parseConfigFile :: FilePath -> IO Config
parseConfigFile file = do
ecp <- readfile emptyCP file
either (throwIO . ConfigFileParseError file . fst) return $ do
cp <- ecp
calendars_ <- for (sections cp) $ \email_ -> msum
[ do
clientSecret_ <- get cp email_ "secret"
refreshToken_ <- get cp email_ "token"
clientId_ <- get cp email_ "id"
apiKey_ <- get cp email_ "key"
userAgent_ <- get cp email_ "agent"
return $ Google $ GoogleCalendar
{ clientSecret = pack clientSecret_
, refreshToken = pack refreshToken_
, clientId = pack clientId_
, apiKey = pack apiKey_
, userAgent = pack userAgent_
, email = pack email_
}
, do
username_ <- get cp email_ "username"
password_ <- get cp email_ "password"
return $ Caldav $ CaldavCalendar
{ url = pack email_
, username = pack username_
, password = pack password_
}
]
logLevel_ <- either (const (return WARNING)) return $
get cp "DEFAULT" "loglevel"
return $ Config calendars_ logLevel_
------------------------------------------------------------------------------
main :: IO ()
main = logErrors programName $ do
handler <- verboseStreamHandler stderr DEBUG
_ <- getLogger programName
updateGlobalLogger programName (setHandlers [handler])
updateGlobalLogger rootLoggerName (setHandlers $ [] `asTypeOf` [handler])
args <- getArgs
file <- case args of
file : _ -> return file
_ -> do
exists <- doesFileExist configFileName
if exists then return configFileName else do
etcExists <- doesFileExist $ "/etc/" ++ configFileName
if etcExists then return ("/etc/" ++ configFileName) else do
throwIO ConfigFileNotFound
config <- parseConfigFile file
for_ (calendars config) (getLogger . logger)
updateGlobalLogger programName (setLevel (logLevel config))
let dir = "/tmp/" ++ programName
createDirectoryIfMissing False dir
cleanups <- for (calendars config) $ \calendar -> do
mvar <- newEmptyMVar
let file' = dir ++ "/" ++ name calendar ++ ".sock"
sock <- socket AF_UNIX Stream 0
tid <- forkIO $ logErrors (logger calendar) $ do
bind sock (SockAddrUnix file')
setFileMode file' 0o666
listen sock 5
serve sock calendar
putMVar mvar ()
return (mvar, (cleanup file' sock, tid))
flip finally (cleanupAll dir cleanups) $ do
for_ cleanups $ takeMVar . fst
where
cleanup file sock = do
sClose sock
removeFile file
cleanupAll dir cleanups = do
for_ cleanups $ \(_, (cleanup_, tid)) -> do
killThread tid
cleanup_
removeDirectory dir
------------------------------------------------------------------------------
logErrors :: String -> IO a -> IO a
logErrors loggerName m = do
catch m $ \(e :: SomeException) -> do
errorM loggerName (show e)
myThreadId >>= killThread
m
|
duairc/events-fetcher
|
events-fetcher.hs
|
agpl-3.0
| 28,303 | 0 | 26 | 9,442 | 6,154 | 3,196 | 2,958 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable, LambdaCase #-}
module ViperVM.VirtualPlatform.MetaObject (
MetaObject(..), allInstances,
allocate, attachInstance, detachInstance, exchangeInstance,
instanceMemories, instancesInMemory, allocateInstance
) where
import ViperVM.VirtualPlatform.Object
import ViperVM.VirtualPlatform.Descriptor
import ViperVM.STM.TSet as TSet
import ViperVM.Platform.Memory
import Control.Concurrent.STM
import Control.Applicative ((<$>), (<*>))
import Data.Typeable
import Data.Traversable (forM)
import Data.Word
import Data.Set as Set
import System.Clock
-- | Meta object
-- Set of identical objects stored in different memories
data MetaObject = MetaObject {
metaObjectID :: Word64, -- ^ Unique identifier
descriptor :: Descriptor, -- ^ High-level data description
objects :: TSet Object, -- ^ Raw objects
subObjects :: TSet SubObject -- ^ Objects coming from other meta objects
} deriving (Typeable)
instance Eq MetaObject where
(==) a b = metaObjectID a == metaObjectID b
instance Ord MetaObject where
compare a b = compare (metaObjectID a) (metaObjectID b)
instance Show MetaObject where
show so = "ShObj(" ++ show (descriptor so) ++ ")"
type ObjectFilter = Object -> Object
data SubObject = SubObject MetaObject ObjectFilter
-- | Allocate a meta object
allocate :: Descriptor -> IO MetaObject
allocate desc = do
tim <- getTime Monotonic -- FIXME: use an additional random generator per thread
let oid = fromIntegral (nsec tim) + 1000000000 * fromIntegral (sec tim)
atomically (MetaObject oid desc <$> TSet.empty <*> TSet.empty)
-- | Retrieve all object instances
allInstances :: MetaObject -> STM (Set Object)
allInstances mo = Set.union <$> instances mo <*> indirectInstances mo
-- | Direct object instances
instances :: MetaObject -> STM (Set Object)
instances = readTVar . objects
-- | Indirect object instances
indirectInstances :: MetaObject -> STM (Set Object)
indirectInstances mo = do
sos <- TSet.elems (subObjects mo)
Set.unions <$> forM sos link2Instances
where
link2Instances (SubObject smo f) = Set.map f <$> allInstances smo
-- | Attach an object to a meta object
-- Attached object must be compatible with shared object descriptor
attachInstance :: MetaObject -> Object -> STM ()
attachInstance mo o = do
if checkObject (descriptor mo) (objectPeer o)
then TSet.insert o (objects mo)
      else error "attachInstance: object does not match the meta object descriptor"
-- | Detach an object from a shared object
detachInstance :: MetaObject -> Object -> STM ()
detachInstance mo o = do
TSet.delete o (objects mo)
-- | Exchange an object instance between two shared objects
exchangeInstance :: Object -> MetaObject -> MetaObject -> STM ()
exchangeInstance o src dst = do
detachInstance src o
attachInstance dst o
-- | Retrieve memories into which an instance of the object exists
instanceMemories :: MetaObject -> STM (Set Memory)
instanceMemories so = Set.map objectMemory <$> instances so
-- | Retrieve object instances in the given memory
instancesInMemory :: MetaObject -> Memory -> STM (Set Object)
instancesInMemory mo mem = Set.filter f <$> instances mo
where
f = (==) mem . objectMemory
-- | Allocate a compatible instance of the shared object, DO NOT attach it
allocateInstance :: MetaObject -> Memory -> IO Object
allocateInstance o mem = allocateFromDescriptor mem (descriptor o)
|
hsyl20/HViperVM
|
lib/ViperVM/VirtualPlatform/MetaObject.hs
|
lgpl-3.0
| 3,388 | 0 | 14 | 608 | 854 | 449 | 405 | 63 | 2 |
-- prmRng 5 11 -> [5,7,11]
module Pr39 where
import Pr31
prmRng :: Int -> Int -> [Int]
prmRng x y =
if y <= x then []
else
[z | z <- [x..y], isprime z]
|
ekalosak/haskell-practice
|
Pr39.hs
|
lgpl-3.0
| 170 | 0 | 9 | 54 | 71 | 40 | 31 | 6 | 2 |
{-
Why Functional Programming Matters
John Hughes, Mary Sheeran (Lambda Days 2017)
-}
{-# LANGUAGE RankNTypes #-}
module Church where
-- Booleans
true x y = x
false x y = y
ifte bool t e = bool t e
-- Positive integers
zero f x = x
one f x = f x
two f x = f (f x)
add m n f x = m f (n f x)
mul m n f x = m (n f) x
exp m n = n m
-- Factorial
fact :: (forall a. (a -> a) -> a -> a) -> (a -> a) -> a -> a
fact n =
ifte (iszero n)
one
(mul n (fact (decr n)))
iszero n =
n (\_ -> false) true
decr n =
n (\m f x -> f (m incr zero))
zero
(\x -> x)
zero
incr n f x =
f (n f x)
-- Predecessor, as given on Wikipedia
decr' n f x =
n (\g h -> h (g f)) (\_ -> x) (\u -> u)
minus m n = (n decr) m
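-- A small sketch for experimenting in GHCi (the helper 'toInt' is not part of
-- the original file): interpret a Church numeral as an ordinary Int,
-- e.g. toInt (add two two) == 4 and toInt (mul two (add one two)) == 6.
toInt :: ((Int -> Int) -> Int -> Int) -> Int
toInt n = n (+ 1) 0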
|
jmgimeno/haskell-playground
|
src/Church.hs
|
unlicense
| 735 | 0 | 11 | 245 | 407 | 209 | 198 | 28 | 1 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings, UnicodeSyntax, DeriveGeneric #-}
module Sweetroll.Microsub.Response where
import Sweetroll.Prelude
data Channel = Channel { uid ∷ Text
, name ∷ Text
, unread ∷ Int }
deriving (Generic)
instance ToJSON Channel where
toEncoding = genericToEncoding defaultOptions
compactChannel ∷ Value → Maybe Channel
compactChannel x = do
url ← firstStr x (key "properties" . key "url")
name ← firstStr x (key "properties" . key "name")
return $ Channel url name 0
data Subscription = Subscription { url ∷ Text }
instance ToJSON Subscription where
toJSON (Subscription url) = toJSON $ object [ "type" .= asText "feed", "url" .= url ]
compactSubscription ∷ Value → Maybe Subscription
compactSubscription x = do
url ← firstStr x (key "feed")
return $ Subscription url
data Paging = Paging { after ∷ Maybe Text
, before ∷ Maybe Text }
deriving (Generic)
instance ToJSON Paging where
toEncoding = genericToEncoding defaultOptions
data MicrosubResponse = Created Channel
| Channels [Channel]
| Subscriptions [Subscription]
| Subscribed Subscription
| Entries [Value] (Maybe Paging)
instance ToJSON MicrosubResponse where
toJSON (Created chan) = toJSON chan
toJSON (Channels chans) = toJSON $ object [ "channels" .= chans ]
toJSON (Subscriptions subs) = toJSON $ object [ "items" .= subs ]
toJSON (Subscribed sub) = toJSON sub
toJSON (Entries ents (Just pg)) = toJSON $ object [ "items" .= ents, "paging" .= pg ]
toJSON (Entries ents _) = toJSON $ object [ "items" .= ents ]
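-- An example of the JSON shape produced for a channel list (sketch; the
-- generic encoding uses the record field names):
--
-- > toJSON (Channels [Channel "notifications" "Notifications" 2])
-- > -- ≈ {"channels":[{"uid":"notifications","name":"Notifications","unread":2}]}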
|
myfreeweb/sweetroll
|
sweetroll-be/library/Sweetroll/Microsub/Response.hs
|
unlicense
| 1,778 | 0 | 11 | 485 | 523 | 269 | 254 | 38 | 1 |
module Opts
( parseActions
) where
import Control.Error
import Options.Applicative
import Types
parseActions :: Script Actions
parseActions = scriptIO $ execParser actions
actions :: ParserInfo Actions
actions = info (helper <*> actions')
( fullDesc
<> progDesc "A program for working with topic models."
<> header "topical - A program for working with topic models.")
actions' :: Parser Actions
actions' = subparser
( command "load"
(info (helper <*> loadAction)
( fullDesc
<> progDesc "Load a corpus into a vector space model."))
)
loadAction :: Parser Actions
loadAction = LoadCorpus
<$> strOption ( short 'i' <> long "input" <> metavar "DIRECTORY"
<> help "The directory to process into a file.")
<*> optional (strOption ( short 's' <> long "stopwords"
<> metavar "FILENAME"
<> help "A file to read for stop words."))
<*> strOption ( short 'o' <> long "output" <> metavar "FILENAME"
<> help "A file to write the corpus into.")
|
erochest/topical
|
app/Opts.hs
|
apache-2.0
| 1,265 | 0 | 14 | 483 | 254 | 125 | 129 | 27 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Tests for lock allocation.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Locking.Allocation
( testLocking_Allocation
, TestLock
, TestOwner
, requestSucceeded
) where
import Control.Applicative
import qualified Data.Foldable as F
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
import qualified Text.JSON as J
import Test.QuickCheck
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHelper
import Ganeti.BasicTypes
import Ganeti.Locking.Allocation
import Ganeti.Locking.Types
{-
Ganeti.Locking.Allocation is polymorphic in the types of locks
and lock owners. So we can use much simpler types here than Ganeti's
real locks and lock owners, knowing that polymorphic functions cannot
exploit the simplicity of the types they are dealing with.
-}
data TestOwner = TestOwner Int deriving (Ord, Eq, Show)
instance Arbitrary TestOwner where
arbitrary = TestOwner <$> choose (0, 2)
data TestLock = TestBigLock
| TestCollectionLockA
| TestLockA Int
| TestCollectionLockB
| TestLockB Int
deriving (Ord, Eq, Show, Read)
instance Arbitrary TestLock where
arbitrary = frequency [ (1, elements [ TestBigLock
, TestCollectionLockA
, TestCollectionLockB
])
, (2, TestLockA <$> choose (0, 2))
, (2, TestLockB <$> choose (0, 2))
]
instance Lock TestLock where
lockImplications (TestLockA _) = [TestCollectionLockA, TestBigLock]
lockImplications (TestLockB _) = [TestCollectionLockB, TestBigLock]
lockImplications TestBigLock = []
lockImplications _ = [TestBigLock]
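-- To summarise, the implied-lock hierarchy used throughout these tests is
-- (sketch):
--
-- > TestBigLock
-- >   +- TestCollectionLockA   (implied by every TestLockA n)
-- >   +- TestCollectionLockB   (implied by every TestLockB n)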
{-
All states of a LockAllocation ever available outside the
Ganeti.Locking.Allocation module must be constructed by starting
with emptyAllocation and applying the exported functions.
-}
instance Arbitrary OwnerState where
arbitrary = elements [OwnShared, OwnExclusive]
instance Arbitrary a => Arbitrary (LockRequest a) where
arbitrary = LockRequest <$> arbitrary <*> genMaybe arbitrary
data UpdateRequest b a = UpdateRequest b [LockRequest a]
| FreeLockRequest b
deriving Show
instance (Arbitrary a, Arbitrary b) => Arbitrary (UpdateRequest a b) where
arbitrary =
frequency [ (4, UpdateRequest <$> arbitrary <*> (choose (1, 4) >>= vector))
, (1, FreeLockRequest <$> arbitrary)
]
-- | Transform an UpdateRequest into the corresponding state transformer.
asAllocTrans :: (Lock a, Ord b, Show b)
=> LockAllocation a b -> UpdateRequest b a -> LockAllocation a b
asAllocTrans state (UpdateRequest owner updates) =
fst $ updateLocks owner updates state
asAllocTrans state (FreeLockRequest owner) = freeLocks state owner
-- | Fold a sequence of requests to transform a lock allocation onto the empty
-- allocation. As we consider all exported LockAllocation transformers, any
-- LockAllocation definable is obtained in this way.
foldUpdates :: (Lock a, Ord b, Show b)
=> [UpdateRequest b a] -> LockAllocation a b
foldUpdates = foldl asAllocTrans emptyAllocation
instance (Arbitrary a, Lock a, Arbitrary b, Ord b, Show b)
=> Arbitrary (LockAllocation a b) where
arbitrary = foldUpdates <$> (choose (0, 8) >>= vector)
-- | Basic property of locking: the exclusive locks of one user
-- are disjoint from any locks of any other user.
prop_LocksDisjoint :: Property
prop_LocksDisjoint =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
let aExclusive = M.keysSet . M.filter (== OwnExclusive) $ listLocks a state
bAll = M.keysSet $ listLocks b state
in printTestCase
(show a ++ "'s exclusive lock" ++ " is not respected by " ++ show b)
(S.null $ S.intersection aExclusive bAll)
-- | Verify that the list of active locks indeed contains all locks that
-- are owned by someone.
prop_LockslistComplete :: Property
prop_LockslistComplete =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . M.null . listLocks a)) $ \state ->
printTestCase "All owned locks must be mentioned in the all-locks list" $
let allLocks = listAllLocks state in
all (`elem` allLocks) (M.keys $ listLocks a state)
-- | Verify that the list of all locks with states is contained in the list
-- of all locks.
prop_LocksAllOwnersSubsetLockslist :: Property
prop_LocksAllOwnersSubsetLockslist =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
printTestCase "The list of all active locks must contain all locks mentioned\
\ in the locks state" $
S.isSubsetOf (S.fromList . map fst $ listAllLocksOwners state)
(S.fromList $ listAllLocks state)
-- | Verify that all locks of all owners are mentioned in the list of all locks'
-- owner's state.
prop_LocksAllOwnersComplete :: Property
prop_LocksAllOwnersComplete =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . M.null . listLocks a)) $ \state ->
printTestCase "Owned locks must be mentioned in list of all locks' state" $
let allLocksState = listAllLocksOwners state
in flip all (M.toList $ listLocks a state) $ \(lock, ownership) ->
elem (a, ownership) . fromMaybe [] $ lookup lock allLocksState
-- | Verify that all lock owners mentioned in the list of all locks' owner's
-- state actually own their lock.
prop_LocksAllOwnersSound :: Property
prop_LocksAllOwnersSound =
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . null . listAllLocksOwners)) $ \state ->
printTestCase "All locks mentioned in listAllLocksOwners must be owned by the\
\ mentioned owner" .
flip all (listAllLocksOwners state) $ \(lock, owners) ->
flip all owners $ \(owner, ownership) -> holdsLock owner lock ownership state
-- | Verify that exclusive group locks are honored, i.e., verify that if someone
-- holds a lock, then no one else can hold a lock on an exclusive lock on an
-- implied lock.
prop_LockImplicationX :: Property
prop_LockImplicationX =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
let bExclusive = M.keysSet . M.filter (== OwnExclusive) $ listLocks b state
in printTestCase "Others cannot have an exclusive lock on an implied lock" .
flip all (M.keys $ listLocks a state) $ \lock ->
flip all (lockImplications lock) $ \impliedlock ->
not $ S.member impliedlock bExclusive
-- | Verify that shared group locks are honored, i.e., verify that if someone
-- holds an exclusive lock, then no one else can hold any form on lock on an
-- implied lock.
prop_LockImplicationS :: Property
prop_LockImplicationS =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
let aExclusive = M.keys . M.filter (== OwnExclusive) $ listLocks a state
bAll = M.keysSet $ listLocks b state
in printTestCase "Others cannot hold locks implied by an exclusive lock" .
flip all aExclusive $ \lock ->
flip all (lockImplications lock) $ \impliedlock ->
not $ S.member impliedlock bAll
-- | Verify that locks can only be modified by updates of the owner.
prop_LocksStable :: Property
prop_LocksStable =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
forAll (arbitrary :: Gen [LockRequest TestLock]) $ \request ->
let (state', _) = updateLocks b request state
in (listLocks a state ==? listLocks a state')
-- | Verify that a given request is satisfied in a list of owned locks
requestSucceeded :: Ord a => M.Map a OwnerState -> LockRequest a -> Bool
requestSucceeded owned (LockRequest lock status) = M.lookup lock owned == status
-- | Verify that lock updates are atomic, i.e., either we get all the required
-- locks, or the state is completely unchanged.
prop_LockupdateAtomic :: Property
prop_LockupdateAtomic =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen [LockRequest TestLock]) $ \request ->
let (state', result) = updateLocks a request state
in if result == Ok S.empty
then printTestCase
("Update succeeded, but in final state " ++ show state'
++ "not all locks are as requested")
$ let owned = listLocks a state'
in all (requestSucceeded owned) request
else printTestCase
("Update failed, but state changed to " ++ show state')
(state == state')
-- | Verify that releasing a lock always succeeds.
prop_LockReleaseSucceeds :: Property
prop_LockReleaseSucceeds =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen TestLock) $ \lock ->
let (_, result) = updateLocks a [requestRelease lock] state
in printTestCase
("Releasing a lock has to suceed uncondiationally, but got "
++ show result)
(isOk result)
-- | Verify the property that only the blocking owners prevent
-- lock allocation. We deliberately go for the expensive variant of
-- restraining by suchThat, as otherwise the number of cases actually
-- covered is too small.
prop_BlockSufficient :: Property
prop_BlockSufficient =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen TestLock) $ \lock ->
forAll (elements [ [requestShared lock]
, [requestExclusive lock]]) $ \request ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (genericResult (const False) (not . S.null)
. snd . updateLocks a request)) $ \state ->
let (_, result) = updateLocks a request state
blockedOn = genericResult (const S.empty) id result
in printTestCase "After all blockers release, a request must succeed"
. isOk . snd . updateLocks a request $ F.foldl freeLocks state blockedOn
-- | Verify the property that every blocking owner is necessary, i.e., even
-- if we only keep the locks of one of the blocking owners, the request still
-- will be blocked. We deliberately use the expensive variant of restraining
-- to ensure good coverage. To make sure the request can always be blocked
-- by two owners, for a shared request we request two different locks.
prop_BlockNecessary :: Property
prop_BlockNecessary =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen TestLock) $ \lock ->
forAll (arbitrary `suchThat` (/= lock)) $ \lock' ->
forAll (elements [ [requestShared lock, requestShared lock']
, [requestExclusive lock]]) $ \request ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (genericResult (const False) ((>= 2) . S.size)
. snd . updateLocks a request)) $ \state ->
let (_, result) = updateLocks a request state
blockers = genericResult (const S.empty) id result
in printTestCase "Each blocker alone must block the request"
. flip all (S.elems blockers) $ \blocker ->
(==) (Ok $ S.singleton blocker) . snd . updateLocks a request
. F.foldl freeLocks state
$ S.filter (/= blocker) blockers
instance J.JSON TestOwner where
showJSON (TestOwner x) = J.showJSON x
readJSON = (>>= return . TestOwner) . J.readJSON
instance J.JSON TestLock where
showJSON = J.showJSON . show
readJSON = (>>= return . read) . J.readJSON
-- | Verify that for LockAllocation we have readJSON . showJSON = Ok.
prop_ReadShow :: Property
prop_ReadShow =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
J.readJSON (J.showJSON state) ==? J.Ok state
-- | Verify that the list of lock owners is complete.
prop_OwnerComplete :: Property
prop_OwnerComplete =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
foldl freeLocks state (lockOwners state) ==? emptyAllocation
-- | Verify that each owner actually owns a lock.
prop_OwnerSound :: Property
prop_OwnerSound =
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . null . lockOwners)) $ \state ->
printTestCase "All subjects listed as owners must own at least one lock"
. flip all (lockOwners state) $ \owner ->
not . M.null $ listLocks owner state
-- | Verify that for LockRequest we have readJSON . showJSON = Ok.
prop_ReadShowRequest :: Property
prop_ReadShowRequest =
forAll (arbitrary :: Gen (LockRequest TestLock)) $ \state ->
J.readJSON (J.showJSON state) ==? J.Ok state
testSuite "Locking/Allocation"
[ 'prop_LocksDisjoint
, 'prop_LockslistComplete
, 'prop_LocksAllOwnersSubsetLockslist
, 'prop_LocksAllOwnersComplete
, 'prop_LocksAllOwnersSound
, 'prop_LockImplicationX
, 'prop_LockImplicationS
, 'prop_LocksStable
, 'prop_LockupdateAtomic
, 'prop_LockReleaseSucceeds
, 'prop_BlockSufficient
, 'prop_BlockNecessary
, 'prop_ReadShow
, 'prop_OwnerComplete
, 'prop_OwnerSound
, 'prop_ReadShowRequest
]
|
ganeti-github-testing/ganeti-test-1
|
test/hs/Test/Ganeti/Locking/Allocation.hs
|
bsd-2-clause
| 14,937 | 0 | 27 | 3,125 | 3,390 | 1,815 | 1,575 | 231 | 2 |
-- | This module contains some useful combinators I have come across as I built a large
-- react-flux application. None of these are required to use React.Flux, they just reduce somewhat
-- the typing needed to create rendering functions.
module React.Flux.Combinators (
clbutton_
, cldiv_
, faIcon_
, foreign_
, labeledInput_
) where
import Data.Monoid ((<>))
import React.Flux.DOM
import React.Flux.Internal
import React.Flux.PropertiesAndEvents
import React.Flux.Views
#ifdef __GHCJS__
import GHCJS.Types (JSString, JSVal)
foreign import javascript unsafe
"$r = window[$1]"
js_lookupWindow :: JSString -> JSVal
#else
js_lookupWindow :: a -> ()
js_lookupWindow _ = ()
#endif
-- | A wrapper around 'foreignClass' that looks up the class on the `window`. I use it for several
-- third-party react components. For example, with <https://github.com/rackt/react-modal
-- react-modal>, assuming `window.Modal` contains the definition of the classes,
--
-- >foreign_ "Modal" [ "isOpen" @= isModelOpen myProps
-- > , callback "onRequestClose" $ dispatch closeModel
-- > , "style" @= Aeson.object [ "overlay" @= Aeson.object ["left" $= "50%", "right" $= "50%"]]
-- > ] $ do
-- > h1_ "Hello, World!"
-- > p_ "...."
--
-- Here is another example using <https://github.com/JedWatson/react-select react-select>:
--
-- >reactSelect_ :: [PropertyOrHandler eventHandler] -> ReactElementM eventHandler ()
-- >reactSelect_ props = foreign_ "Select" props mempty
-- >
-- >someView :: ReactView ()
-- >someView = defineView "some view" $ \() ->
-- > reactSelect_
-- > [ "name" $= "form-field-name"
-- > , "value" $= "one"
-- > , "options" @= [ object [ "value" .= "one", "label" .= "One" ]
-- > , object [ "value" .= "two", "label" .= "Two" ]
-- > ]
-- > , callback "onChange" $ \(i :: String) -> dispatch $ ItemChangedTo i
-- > ]
foreign_ :: String -- ^ this should be the name of a property on `window` which contains a react class.
-> [PropertyOrHandler handler] -- ^ properties
-> ReactElementM handler a -- ^ children
-> ReactElementM handler a
foreign_ x = foreignClass (js_lookupWindow $ toJSString x)
-- | A 'div_' with the given class name (multiple classes can be separated by spaces). This is
-- useful for defining rows and columns in your CSS framework of choice. I use
-- <http://purecss.io/forms/ Pure CSS> so I use it something like:
--
-- >cldiv_ "pure-g" $ do
-- > cldiv_ "pure-u-1-3" $ p_ "First Third"
-- > cldiv_ "pure-u-1-3" $ p_ "Middle Third"
-- > cldiv_ "pure-u-1-3" $ p_ "Last Third"
--
-- You should consider writing something like the following for the various components in your frontend
-- of choice. In PureCSS, I use:
--
-- >prow_ :: ReactElementM handler a -> ReactElementM handler a
-- >prow_ = cldiv_ "pure-g"
-- >
-- >pcol_ :: String -> ReactElementM handler a -> ReactElementM handler a
-- >pcol_ cl = cldiv_ (unwords $ map ("pure-u-"++) $ words cl)
cldiv_ :: String -> ReactElementM handler a -> ReactElementM handler a
cldiv_ cl = div_ ["className" @= cl]
-- | A 'button_' with the given class names and `onClick` handler.
--
-- >clbutton_ ["pure-button button-success"] (dispatch LaunchMissiles) $ do
-- > faIcon_ "rocket"
-- > "Launch the missiles!"
clbutton_ :: String -- ^ class names separated by spaces
-> handler -- ^ the onClick handler for the button
-> ReactElementM handler a -- ^ the children
-> ReactElementM handler a
clbutton_ cl h = button_ ["className" @= cl, onClick (\_ _ -> h)]
-- | A 'label_' and an 'input_' together. Useful for laying out forms. For example, a
-- stacked <http://purecss.io/forms/ Pure CSS Form> could be
--
-- >form_ ["className" $= "pure-form pure-form-stacked"] $
-- > fieldset_ $ do
-- > legend_ "A stacked form"
-- > labeledInput_ "email" "Email" ["type" $= "email"]
-- > labeledInput_ "password"
-- > ($(message "password-label" "Your password") [])
-- > ["type" $= "password"]
--
-- The second 'labeledInput_' shows an example using "React.Flux.Addons.Intl".
labeledInput_ :: String -- ^ the ID for the input element
-> ReactElementM handler () -- ^ the label content. This is wrapped in a 'label_' with a `htmlFor` property
-- equal to the given ID.
-> [PropertyOrHandler handler] -- ^ the properties to pass to 'input_'. A property with key `id` is added to this list of properties.
-> ReactElementM handler ()
labeledInput_ ident lbl props = label_ ["htmlFor" @= ident] lbl <> input_ (("id" @= ident):props)
-- | A <http://fortawesome.github.io/Font-Awesome/ Font Awesome> icon. The given string is prefixed
-- by `fa fa-` and then used as the class for an `i` element. This allows you to icons such as
--
-- >faIcon_ "fighter-jet" -- produces <i class="fa fa-fighter-jet">
-- >faIcon_ "refresh fa-spin" -- produces <i class="fa fa-refresh fa-spin">
faIcon_ :: String -> ReactElementM handler ()
faIcon_ cl = i_ ["className" @= ("fa fa-" ++ cl)] mempty
|
nrolland/react-flux
|
src/React/Flux/Combinators.hs
|
bsd-3-clause
| 5,203 | 7 | 10 | 1,164 | 455 | 280 | 175 | 32 | 1 |
module Problem112 where
import Data.Function
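-- Project Euler 112: a number is "bouncy" when its digits are neither
-- monotonically increasing nor decreasing (e.g. 155349). No number below 100
-- is bouncy, so the scan starts from (0 bouncy, 100 total) and walks upward
-- until the proportion of bouncy numbers first reaches 99%.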
main :: IO ()
main =
print
. snd
. head
. dropWhile ((< 0.99) . uncurry ((/) `on` fromIntegral))
. scanl
(\(bouncy, total) num ->
if isBouncy num then (bouncy + 1, total + 1) else (bouncy, total + 1)
)
(0, 100)
$ [101 ..]
isBouncy :: Int -> Bool
isBouncy n = not (isIncreasing n') && not (isDecreasing n')
where
n' = show n
isIncreasing x = and $ zipWith (<=) x (tail x)
isDecreasing x = and $ zipWith (>=) x (tail x)
|
adityagupta1089/Project-Euler-Haskell
|
src/problems/Problem112.hs
|
bsd-3-clause
| 576 | 0 | 13 | 205 | 241 | 132 | 109 | 18 | 2 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
module Stanag.EoIrConfigurationState where
import Ivory.Language
import Stanag.Packing
import Util.Logger
eoIrConfigurationStateInstance :: MemArea (Stored Uint32)
eoIrConfigurationStateInstance = area "eoIrConfigurationStateInstance" (Just (ival 0))
[ivoryFile|Stanag/EoIrConfigurationState.ivory|]
|
GaloisInc/loi
|
Stanag/EoIrConfigurationState.hs
|
bsd-3-clause
| 460 | 0 | 9 | 46 | 69 | 41 | 28 | 12 | 1 |
-- | Media data box
module Data.ByteString.IsoBaseFileFormat.Boxes.MediaData where
import qualified Data.ByteString as B
import Data.ByteString.IsoBaseFileFormat.Box
import Data.ByteString.IsoBaseFileFormat.Util.BoxContent
-- | Media data box phantom type
newtype MediaData = MediaData B.ByteString
deriving (Show, IsBoxContent)
instance IsBox MediaData where
type BoxContent MediaData = MediaData
type instance BoxTypeSymbol MediaData = "mdat"
-- | Create a 'MediaDataBox' from a strict 'ByteString'
mediaData :: MediaData -> Box MediaData
mediaData = Box
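-- Usage sketch (the payload bytes below are placeholders):
--
-- > mediaData (MediaData (B.pack [0xde, 0xad, 0xbe, 0xef]))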
|
sheyll/isobmff-builder
|
src/Data/ByteString/IsoBaseFileFormat/Boxes/MediaData.hs
|
bsd-3-clause
| 566 | 0 | 6 | 74 | 100 | 63 | 37 | -1 | -1 |
{-# LANGUAGE FlexibleContexts, RankNTypes, RecordWildCards #-}
module Cloud.AWS.CloudWatch.Metric
( listMetrics
, getMetricStatistics
, putMetricData
) where
import Data.Text (Text)
import Data.Time (UTCTime)
import Control.Applicative
import Control.Monad.Trans.Resource (MonadThrow, MonadResource, MonadBaseControl)
import Cloud.AWS.Lib.Parser.Unordered (XmlElement, (.<))
import Cloud.AWS.CloudWatch.Internal
import Cloud.AWS.Lib.Query
import Cloud.AWS.Lib.Parser (members, nodata)
import Cloud.AWS.CloudWatch.Types
dimensionFiltersParam :: [DimensionFilter] -> QueryParam
dimensionFiltersParam =
("Dimensions" |.+) . ("member" |.#.) . map filterParams
where
filterParams (k, v) =
[ "Name" |= k
, "Value" |= v
]
listMetrics
:: (MonadBaseControl IO m, MonadResource m)
=> [DimensionFilter] -- ^ Dimensions
-> Maybe Text -- ^ MetricName
-> Maybe Text -- ^ Namespace
-> Maybe Text -- ^ NextToken
-> CloudWatch m ([Metric], Maybe Text)
listMetrics ds mn ns nt = cloudWatchQuery "ListMetrics" params $ \xml ->
(,) <$> members "Metrics" sinkMetric xml <*> xml .< "NextToken"
where
params =
[ dimensionFiltersParam ds
, "MetricName" |=? mn
, "Namespace" |=? ns
, "NextToken" |=? nt
]
sinkMetric :: (MonadThrow m, Applicative m)
=> XmlElement -> m Metric
sinkMetric xml =
Metric
<$> members "Dimensions" sinkDimension xml
<*> xml .< "MetricName"
<*> xml .< "Namespace"
getMetricStatistics
:: (MonadBaseControl IO m, MonadResource m)
=> [DimensionFilter]
-> UTCTime -- ^ StartTime
-> UTCTime -- ^ EndTime
-> Text -- ^ MetricName
-> Text -- ^ Namespace
-> Int -- ^ Period
-> [Statistic] -- ^ Statistics
-> Maybe Text -- ^ Unit
-> CloudWatch m ([Datapoint], Text) -- ^ Datapoints and Label
getMetricStatistics ds start end mn ns pe sts unit =
cloudWatchQuery "GetMetricStatistics" params $ \xml -> (,)
<$> members "Datapoints" (\xml' -> Datapoint
<$> xml' .< "Timestamp"
<*> xml' .< "SampleCount"
<*> xml' .< "Unit"
<*> xml' .< "Minimum"
<*> xml' .< "Maximum"
<*> xml' .< "Sum"
<*> xml' .< "Average"
) xml
<*> xml .< "Label"
where
params =
[ dimensionFiltersParam ds
, "StartTime" |= start
, "EndTime" |= end
, "MetricName" |= mn
, "Namespace" |= ns
, "Period" |= pe
, "Statistics" |.+ "member" |.#= sts
, "Unit" |=? unit
]
putMetricData
:: (MonadBaseControl IO m, MonadResource m)
=> [MetricDatum] -- ^ A list of data describing the metric.
-> Text -- ^ The namespace for the metric data.
-> CloudWatch m ()
putMetricData dats ns =
cloudWatchQuery "PutMetricData" params nodata
where
params =
[ "MetricData.member" |.#. map fromMetricDatum dats
, "Namespace" |= ns
]
fromMetricDatum :: MetricDatum -> [QueryParam]
fromMetricDatum MetricDatum{..} =
[ "Dimensions.member" |.#. map fromDimension metricDatumDimensions
, "MetricName" |= metricDatumMetricName
, metricDatumValueParam metricDatumValue
, "Timestamp" |=? metricDatumTimestamp
, "Unit" |=? metricDatumUnit
]
metricDatumValueParam :: MetricDatumValue -> QueryParam
metricDatumValueParam (MetricDatumValue v) = "Value" |= v
metricDatumValueParam (MetricDatumStatisticValues s) = "StatisticValues" |. fromStatisticSet s
fromStatisticSet :: StatisticSet -> [QueryParam]
fromStatisticSet StatisticSet{..} =
[ "Maximum" |= statisticSetMaximum
, "Minimum" |= statisticSetMinimum
, "SampleCount" |= statisticSetSampleCount
, "Sum" |= statisticSetSum
]
|
worksap-ate/aws-sdk
|
Cloud/AWS/CloudWatch/Metric.hs
|
bsd-3-clause
| 3,778 | 0 | 26 | 938 | 925 | 512 | 413 | 99 | 1 |
{-# LANGUAGE Rank2Types, ExistentialQuantification #-}
module Data.Aeson.Error.JsonError where
import Data.Aeson.Types (Value, ToJSON, toJSON, (.=), object )
import qualified Data.Text as StrictText
import qualified Data.Text.Lazy as LazyText
import qualified Data.Text.Encoding as StrictTextEncoding
import qualified Data.Text.Lazy.Encoding as LazyTextEncoding
import qualified Data.ByteString as StrictByteString
import qualified Data.ByteString.Char8 as StrictChar8
import qualified Data.ByteString.Lazy as LazyByteString
import qualified Data.ByteString.Lazy.Char8 as LazyChar8
import Data.Textual
-- + -- + -- + -- + -- + -- + -- + -- + -- + -- + --
-- Descriptive Error Messages
-- + -- + -- + -- + -- + -- + -- + -- + -- + -- + --
{- we want error messages to be consistent across an application.
we define a JsonError datatype as an envelope for three params:
* Brief explanation of the failure, should be presentable to end users in a UI
* A Developer-level explanation of the failure, and how to correct it.
* A JSON payload for adding any additional information.
-}
data JsonError = forall summary detail. (Textual summary, Textual detail) => JsonError summary detail (Maybe Value)
-- Aeson Value for our descriptive error messages
instance ToJSON JsonError where
toJSON (JsonError summary detail additionalInfo) =
let pairs = [ (toText "summary") .= (toText summary)
, (toText "detail") .= (toText detail)
]
in object $ pairs ++ (maybe [] (\ ai -> [(toText "additional_info") .= ai]) additionalInfo)
-- + -- + -- + -- + -- + -- + -- + -- + -- + -- + --
|
necrobious/aeson-descriptive-errors
|
src/Data/Aeson/Error/JsonError.hs
|
bsd-3-clause
| 1,700 | 0 | 16 | 384 | 273 | 171 | 102 | 18 | 0 |
-- Skip Chp10 - Functionally Solving Problems
{-- Functor
Many times the box analogy is used to help you get some intuition for how functors work, and later, we'll probably use the same analogy for applicative functors and monads.
A more correct term for what a functor is would be "computational context". The context might be that the computation can have a value or it might have failed (Maybe and Either a) or that there might be more values (lists), stuff like that.
When we first learned about curried functions, we said that all Haskell functions actually take one parameter. A function a -> b -> c actually takes just one parameter of type a and then returns a function b -> c
So a -> b -> c can be written as a -> (b -> c), to make the currying more apparent.
In the same vein, if we write fmap :: (a -> b) -> (f a -> f b), we can think of it as a function that takes a function and returns another function, one that takes a functor value and returns a functor value as its result.
It takes an a -> b function and returns a function f a -> f b. This is called lifting a function.
:t fmap (*2)
fmap (*2) :: (Num a, Functor f) => f a -> f a
The expression fmap (*2) is a function that takes a functor f over numbers and returns a functor over numbers.
This is even more apparent if we partially apply, say, fmap (++"!") and then bind it to a name in GHCI.
--}
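-- Illustrative sketch (not from the original notes, standard Prelude only): lifting
-- (*2) with fmap and applying the lifted function to two ordinary functors.
liftedDouble :: (Functor f, Num a) => f a -> f a
liftedDouble = fmap (*2)

liftedDoubleDemo :: ([Int], Maybe Int)
liftedDoubleDemo = (liftedDouble [1,2,3], liftedDouble (Just 10))
-- => ([2,4,6], Just 20)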
{--
You can think of fmap as either a function that takes a function and a functor and then maps that function over the functor, or you can think of it as a function that takes a function and lifts that function so that it operates on functors(lifting).
--}
{--
Another instance of Functor that we've been dealing with all along but didn't know was a Functor is (->) r. You're probably slightly confused now, since what the heck does (->) r mean?
The function type r -> a can be rewritten as (->) r a, much like we can write 2 + 3 as (+) 2 3. When we look at it as (->) r a, we can see (->) in a slightly different light, because we see that it's just a type constructor that takes two type parameters, just like Either.
But remember, we said that a type constructor has to take exactly one type parameter so that it can be made an instance of Functor.
That's why we can't make (->) an instance of Functor, but if we partially apply it to (->) r, it doesn't pose any problems.
If the syntax allowed for type constructors to be partially applied with sections (like we can partially apply + by doing (2+), which is the same as (+) 2), you could write (->) r as (r ->). How are functions functors? Well, let's take a look at the implementation, which lies in Control.Monad.Instances
We usually mark functions that take anything and return anything as a -> b. r -> a is the same thing, we just used different letters for the type variables.
instance Functor ((->) r) where
fmap f g = (\x -> f (g x))
If the syntax allowed for it, it could have been written as
instance Functor (r ->) where
fmap f g = (\x -> f (g x))
But it doesn't, so we have to write it in the former fashion.
First of all, let's think about fmap's type. It's fmap :: (a -> b) -> f a -> f b. Now what we'll do is mentally replace all the f's, which are the role that our functor instance plays, with (->) r's.
We'll do that to see how fmap should behave for this particular instance.
We get fmap :: (a -> b) -> ((->) r a) -> ((->) r b).
Now what we can do is write the ((->) r a) and ((->) r b) types as infix r -> a and r -> b, like we normally do with functions.
What we get now is fmap :: (a -> b) -> (r -> a) -> (r -> b).
We pipe the output of r -> a into the input of a -> b to get a function r -> b, which is exactly what function composition is about.
If you look at how the instance is defined above, you'll see that it's just function composition. Another way to write this instance would be:
instance Functor ((->) r) where
fmap = (.)
ghci> :t fmap (*3) (+100)
fmap (*3) (+100) :: (Num a) => a -> a
ghci> fmap (*3) (+100) 1
303
ghci> (*3) `fmap` (+100) $ 1
303
ghci> (*3) . (+100) $ 1
303
ghci> fmap (show . (*3)) (*100) 1
"300"
Now we can see how fmap acts just like . for functions.
The fact that fmap is function composition when used on functions isn't so terribly useful right now, but at least it's very interesting.
It also bends our minds a bit and lets us see how things that act more like computations than boxes (IO and (->) r) can be functors.
The function being mapped over a computation results in the same computation but the result of that computation is modified with the function.
https://stackoverflow.com/questions/9136421/where-can-i-read-up-on-the-haskell-operator
(->) is often called the "function arrow" or "function type constructor", and while it does have some special syntax, there's not that much special about it.
It's essentially an infix "type" constructor. Give it two types, and it gives you the type of functions between those types.
The infix style of this type constructor is not part of the Haskell standard, that's why (r ->) is not allowed but ((->) r) is fine in above instance definition.
we can say (->) is a higher-kinded type. Maybe is a higher-kinded type, but (Maybe a) is a concrete type. Maybe is a type constructor, which works like a function at the type level.
A type is a set of values, and a typeclass is a set of types. The Functor typeclass is a set of higher-kinded types whose kind signature is * -> *. So a partially applied higher-kinded type can be in the Functor typeclass, as long as it has the kind signature * -> *.
--}
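-- A sketch (standard Prelude only, names are made up for illustration): fmap on
-- functions is just function composition, as argued above.
composedViaFmap :: Int -> Int
composedViaFmap = fmap (*3) (+100)

composedViaDot :: Int -> Int
composedViaDot = (*3) . (+100)
-- composedViaFmap 1 == composedViaDot 1 == 303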
{-- Functor laws
we're going to look at the functor laws. In order for something to be a functor, it should satisfy some laws. All functors are expected to exhibit certain kinds of functor-like properties and behaviors:
1. If we map the id function over a functor, the functor that we get back should be the same as the original functor. Remember, id is the identity function, which just returns its parameter unmodified.
fmap id = id
2. Composing two functions and then mapping the resulting function over a functor should be the same as first mapping one function over the functor and then mapping the other one.
fmap f . fmap g = fmap (f . g)
or for any functor F
fmap f (fmap g F) = fmap (f . g) F
--}
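-- Quick sanity checks of the two functor laws for lists and Maybe (a sketch on
-- specific values, not a proof):
functorLawChecks :: (Bool, Bool)
functorLawChecks =
  ( fmap id [1,2,3 :: Int] == id [1,2,3]                                        -- identity law
  , fmap (negate . (+1)) (Just (5 :: Int)) == (fmap negate . fmap (+1)) (Just 5) -- composition law
  )
-- => (True, True)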
{--
Let's take a look at a pathological example of a type constructor being an instance of the Functor typeclass but not really being a functor, because it doesn't satisfy the laws. Let's say that we have a type:
--}
data CMaybe a = CNothing | CJust Int a deriving (Show)
-- The C here stands for counter.
instance Functor CMaybe where
fmap f CNothing = CNothing
fmap f (CJust counter x) = CJust (counter+1) (f x)
{--
we can even play with this a bit:
ghci> fmap (++"ha") (CJust 0 "ho")
CJust 1 "hoha"
ghci> fmap (++"he") (fmap (++"ha") (CJust 0 "ho"))
CJust 2 "hohahe"
ghci> fmap (++"blah") CNothing
CNothing
Does this obey the functor laws? In order to see that something doesn't obey a law, it's enough to find just one counter-example:
ghci> fmap id (CJust 0 "haha")
CJust 1 "haha"
ghci> id (CJust 0 "haha")
CJust 0 "haha"
When we use a functor, it shouldn't matter if we first compose a few functions and then map them over the functor or if we just map each function over a functor in succession. But with CMaybe, it matters, because it keeps track of how many times it's been mapped over.
If we wanted CMaybe to obey the functor laws, we'd have to make it so that the Int field stays the same when we use fmap.
--}
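-- Demonstration using the CMaybe defined above: the identity law fails because the
-- counter changes even though id should leave the value untouched.
cMaybeIdLawDemo :: (CMaybe String, CMaybe String)
cMaybeIdLawDemo = (fmap id (CJust 0 "haha"), id (CJust 0 "haha"))
-- => (CJust 1 "haha", CJust 0 "haha"), so fmap id /= id for CMaybe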
{--
At first, the functor laws might seem a bit confusing and unnecessary, but then we see that if we know that a type obeys both laws, we can make certain assumptions about how it will act.
If a type obeys the functor laws, we know that calling fmap on a value of that type will only map the function over it, nothing more.
This leads to code that is more abstract and extensible, because we can use laws to reason about behaviors that any functor should have and make functions that operate reliably on any functor.
--}
{-- Applicative Functor
But what if we have a functor value of Just (3 *) and a functor value of Just 5 and we want to take out the function from Just (3 *) and map it over Just 5? With normal functors, we're out of luck, because all they support is just mapping normal functions over existing functors.
But we can't map a function that's inside a functor over another functor with what fmap offers us. We could pattern-match against the Just constructor to get the function out of it and then map it over Just 5, but we're looking for a more general and abstract way of doing that, which works across functors.
Meet the Applicative typeclass. It lies in the Control.Applicative module and it defines two methods, pure and <*>.
It doesn't provide a default implementation for any of them, so we have to define them both if we want something to be an applicative functor. The class is defined like so:
class (Functor f) => Applicative f where
pure :: a -> f a
(<*>) :: f (a -> b) -> f a -> f b
A better way of thinking about pure would be to say that it takes a value and puts it in some sort of default (or pure) context—a minimal context that still yields that value.
Whereas fmap takes a function and a functor and applies the function inside the functor, <*> takes a functor that has a function in it and another functor and sort of extracts that function from the first functor and then maps it over the second one.
When I say extract, I actually sort of mean run and then extract, maybe even sequence. We'll see why soon.
instance Applicative Maybe where
pure = Just
Nothing <*> _ = Nothing
(Just f) <*> something = fmap f something
ghci> Just (++"hahah") <*> Nothing
Nothing
ghci> Nothing <*> Just "woot"
Nothing
The first line demonstrates how the function is extracted and then mapped, though in that case the same result could have been achieved by just mapping an unwrapped function over a functor.
The second line is interesting, because we try to extract a function from a Nothing and then map it over something, which of course results in a Nothing.
--}
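-- Sketch of the Maybe Applicative behaviour described above, using only the
-- Prelude instances (the name 'maybeApDemo' is just for illustration):
maybeApDemo :: (Maybe String, Maybe String, Maybe Int)
maybeApDemo =
  ( Just (++ " world") <*> Just "hello" -- Just "hello world"
  , Just (++ " world") <*> Nothing      -- Nothing
  , pure 5                              -- Just 5, since pure = Just for Maybe
  )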
{--
With normal functors, you can just map a function over a functor and then you can't get the result out in any general way, even if the result is a partially applied function.
Applicative functors, on the other hand, allow you to operate on several functors with a single function. Check out this piece of code:
ghci> pure (+) <*> Just 3 <*> Just 5
Just 8
ghci> pure (+) <*> Just 3 <*> Nothing
Nothing
ghci> pure (+) <*> Nothing <*> Just 5
Nothing
<*> is left-associative, which means that pure (+) <*> Just 3 <*> Just 5 is the same as (pure (+) <*> Just 3) <*> Just 5
(pure (+) <*> Just 3) <*> Just 5
=> (Just (+) <*> Just 3) <*> Just 5
=> Just (3+) <*> Just 5
=> fmap (3+) Just 5
=> Just (3+5)
=> Just 8
Applicative functors and the applicative style of doing pure f <*> x <*> y <*> ... allow us to take a function that expects parameters that aren't necessarily wrapped in functors and use that function to operate on several values that are in functor contexts.
The function can take as many parameters as we want, because it's always partially applied step by step between occurrences of <*>
This becomes even more handy and apparent if we consider the fact that pure f <*> x equals fmap f x. This is one of the applicative laws.
Instead of writing pure f <*> x <*> y <*> ..., we can write fmap f x <*> y <*> ....
This is why Control.Applicative exports a function called <$>, which is just fmap as an infix operator. Here's how it's defined:
(<$>) :: (Functor f) => (a -> b) -> f a -> f b
f <$> x = fmap f x
Quick reminder:
type variables are independent of parameter names or other value names.
The f in the function declaration here is a type variable with a class constraint saying that any type constructor that replaces f should be in the Functor typeclass.
The f in the function body denotes a function that we map over x.
The fact that we used f to represent both of those doesn't mean that they somehow represent the same thing.
--}
{--
By using <$>, the applicative style really shines, because now if we want to apply a function f between three applicative functors, we can write f <$> x <*> y <*> z.
If the parameters weren't applicative functors but normal values, we'd write f x y z.
--}
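-- Applicative-style sketch: the same function applied to plain values and to values
-- inside Maybe (hypothetical helper names, standard Prelude operators only).
plainSum3 :: Int
plainSum3 = (\x y z -> x + y + z) 1 2 3                              -- 6

liftedSum3 :: Maybe Int
liftedSum3 = (\x y z -> x + y + z) <$> Just 1 <*> Just 2 <*> Just 3  -- Just 6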
{--
Lists (actually the list type constructor, []) are applicative functors.
instance Applicative [] where
pure x = [x]
fs <*> xs = [f x | f <- fs, x <- xs]
maps each function f over each value x in the same context (here, the list type [])
Earlier, we said that pure takes a value and puts it in a default context. Or in other words, a minimal context that still yields that value.
The minimal context for lists would be the empty list, [], but the empty list represents the lack of a value, so it can't hold in itself the value that we used pure on. That's why pure takes a value and puts it in a singleton list.
Similarly, the minimal context for the Maybe applicative functor would be a Nothing, but it represents the lack of a value instead of a value, so pure is implemented as Just in the instance implementation for Maybe.
ghci> [(*0),(+100),(^2)] <*> [1,2,3]
[0,0,0,101,102,103,1,4,9]
[(+),(*)] <*> [1,2] <*> [3,4]
[(1+),(2+),(1*),(2*)] <*> [3,4]
-- because every function on the left gets applied to every value on the right.
[4,5,5,6,3,4,6,8]
--}
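-- The list Applicative in action: every function on the left is applied to every
-- value on the right, matching the worked example above.
listApDemo :: [Int]
listApDemo = [(+), (*)] <*> [1,2] <*> [3,4]
-- => [4,5,5,6,3,4,6,8]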
{--
Another instance of Applicative that we've already encountered is IO. This is how the instance is implemented:
instance Applicative IO where
pure = return
a <*> b = do
f <- a
x <- b
return (f x)
Since pure is all about putting a value in a minimal context that still holds it as its result, it makes sense that pure is just return, because return does exactly that; it makes an I/O action that doesn't do anything, it just yields some value as its result, but it doesn't really do any I/O operations like printing to the terminal or reading from a file.
--}
myAction :: IO String
myAction = (++) <$> getLine <*> getLine
main1 = do
a <- (++) <$> getLine <*> getLine
putStrLn $ "The two lines concatenated turn out to be: " ++ a
|
jamesyang124/haskell-playground
|
src/Chp110.hs
|
bsd-3-clause
| 14,141 | 0 | 9 | 2,867 | 147 | 84 | 63 | 9 | 1 |
{-|
Module : Idris.Elab.Term
Description : Code to elaborate terms.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE LambdaCase, PatternGuards, ViewPatterns #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Elab.Term where
import Idris.AbsSyntax
import Idris.AbsSyntaxTree
import Idris.Core.CaseTree (SC, SC'(STerm), findCalls, findUsedArgs)
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.Evaluate
import Idris.Core.ProofTerm (getProofTerm)
import Idris.Core.TT
import Idris.Core.Typecheck (check, converts, isType, recheck)
import Idris.Core.Unify
import Idris.Core.WHNF (whnf, whnfArgs)
import Idris.Coverage (genClauses, recoverableCoverage, validCoverageCase)
import Idris.Delaborate
import Idris.DSL
import Idris.Elab.Quasiquote (extractUnquotes)
import Idris.Elab.Rewrite
import Idris.Elab.Utils
import Idris.Error
import Idris.ErrReverse (errReverse)
import Idris.Output (pshow)
import Idris.ProofSearch
import Idris.Reflection
import Idris.Termination (buildSCG, checkDeclTotality, checkPositive)
import qualified Util.Pretty as U
import Control.Applicative ((<$>))
import Control.Monad
import Control.Monad.State.Strict
import Data.Foldable (for_)
import Data.List
import qualified Data.Map as M
import Data.Maybe (catMaybes, fromMaybe, mapMaybe, maybeToList)
import qualified Data.Set as S
import qualified Data.Text as T
import Debug.Trace
data ElabMode = ETyDecl | ETransLHS | ELHS | EImpossible | ERHS
deriving Eq
data ElabResult = ElabResult {
-- | The term resulting from elaboration
resultTerm :: Term
-- | Information about new metavariables
, resultMetavars :: [(Name, (Int, Maybe Name, Type, [Name]))]
-- | Deferred declarations as the meaning of case blocks
, resultCaseDecls :: [PDecl]
-- | The potentially extended context from new definitions
, resultContext :: Context
-- | Meta-info about the new type declarations
, resultTyDecls :: [RDeclInstructions]
-- | Saved highlights from elaboration
, resultHighlighting :: [(FC, OutputAnnotation)]
-- | The new global name counter
, resultName :: Int
}
-- | Using the elaborator, convert a term in raw syntax to a fully
-- elaborated, typechecked term.
--
-- If building a pattern match, we convert undeclared variables from
-- holes to pattern bindings.
--
-- Also find deferred names in the term and their types
build :: IState
-> ElabInfo
-> ElabMode
-> FnOpts
-> Name
-> PTerm
-> ElabD ElabResult
build ist info emode opts fn tm
= do elab ist info emode opts fn tm
let tmIn = tm
let inf = case lookupCtxt fn (idris_tyinfodata ist) of
[TIPartial] -> True
_ -> False
hs <- get_holes
ivs <- get_implementations
ptm <- get_term
-- Resolve remaining interfaces. Two passes - first to get the
-- default Num implementations, second to clean up the rest
when (not pattern) $
mapM_ (\n -> when (n `elem` hs) $
do focus n
g <- goal
try (resolveTC' True True 10 g fn ist)
(movelast n)) ivs
ivs <- get_implementations
hs <- get_holes
when (not pattern) $
mapM_ (\n -> when (n `elem` hs) $
do focus n
g <- goal
ptm <- get_term
resolveTC' True True 10 g fn ist) ivs
when (not pattern) $ solveAutos ist fn False
tm <- get_term
ctxt <- get_context
probs <- get_probs
u <- getUnifyLog
hs <- get_holes
when (not pattern) $
traceWhen u ("Remaining holes:\n" ++ show hs ++ "\n" ++
"Remaining problems:\n" ++ qshow probs) $
do unify_all; matchProblems True; unifyProblems
when (not pattern) $ solveAutos ist fn True
probs <- get_probs
case probs of
[] -> return ()
((_,_,_,_,e,_,_):es) -> traceWhen u ("Final problems:\n" ++ qshow probs ++ "\nin\n" ++ show tm) $
if inf then return ()
else lift (Error e)
when tydecl (do mkPat
update_term liftPats
update_term orderPats)
EState is _ impls highlights _ _ <- getAux
tt <- get_term
ctxt <- get_context
let (tm, ds) = runState (collectDeferred (Just fn) (map fst is) ctxt tt) []
log <- getLog
g_nextname <- get_global_nextname
if log /= ""
then trace log $ return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
else return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
where pattern = emode == ELHS || emode == EImpossible
tydecl = emode == ETyDecl
mkPat = do hs <- get_holes
tm <- get_term
case hs of
(h: hs) -> do patvar h; mkPat
[] -> return ()
-- | Build a term autogenerated as an interface method definition.
--
-- (Separate, so we don't go overboard resolving things that we don't
-- know about yet on the LHS of a pattern def)
buildTC :: IState -> ElabInfo -> ElabMode -> FnOpts -> Name ->
[Name] -> -- Cached names in the PTerm, before adding PAlternatives
PTerm ->
ElabD ElabResult
buildTC ist info emode opts fn ns tm
= do let tmIn = tm
let inf = case lookupCtxt fn (idris_tyinfodata ist) of
[TIPartial] -> True
_ -> False
-- set name supply to begin after highest index in tm
initNextNameFrom ns
elab ist info emode opts fn tm
probs <- get_probs
tm <- get_term
case probs of
[] -> return ()
((_,_,_,_,e,_,_):es) -> if inf then return ()
else lift (Error e)
dots <- get_dotterm
-- 'dots' are the PHidden things which have not been solved by
-- unification
when (not (null dots)) $
lift (Error (CantMatch (getInferTerm tm)))
EState is _ impls highlights _ _ <- getAux
tt <- get_term
ctxt <- get_context
let (tm, ds) = runState (collectDeferred (Just fn) (map fst is) ctxt tt) []
log <- getLog
g_nextname <- get_global_nextname
if (log /= "")
then trace log $ return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
else return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
where pattern = emode == ELHS || emode == EImpossible
-- | return whether arguments of the given constructor name can be
-- matched on. If they're polymorphic, no, unless the type has been
-- made concrete by the time we get around to elaborating the
-- argument.
getUnmatchable :: Context -> Name -> [Bool]
getUnmatchable ctxt n | isDConName n ctxt && n /= inferCon
= case lookupTyExact n ctxt of
Nothing -> []
Just ty -> checkArgs [] [] ty
where checkArgs :: [Name] -> [[Name]] -> Type -> [Bool]
checkArgs env ns (Bind n (Pi _ _ t _) sc)
= let env' = case t of
TType _ -> n : env
_ -> env in
checkArgs env' (intersect env (refsIn t) : ns)
(instantiate (P Bound n t) sc)
checkArgs env ns t
= map (not . null) (reverse ns)
getUnmatchable ctxt n = []
data ElabCtxt = ElabCtxt { e_inarg :: Bool,
e_isfn :: Bool, -- ^ Function part of application
e_guarded :: Bool,
e_intype :: Bool,
e_qq :: Bool,
e_nomatching :: Bool -- ^ can't pattern match
}
initElabCtxt = ElabCtxt False False False False False False
goal_polymorphic :: ElabD Bool
goal_polymorphic =
do ty <- goal
case ty of
P _ n _ -> do env <- get_env
case lookupBinder n env of
Nothing -> return False
_ -> return True
_ -> return False
-- | Returns the set of declarations we need to add to complete the
-- definition (most likely case blocks to elaborate) as well as
-- declarations resulting from user tactic scripts (%runElab)
elab :: IState
-> ElabInfo
-> ElabMode
-> FnOpts
-> Name
-> PTerm
-> ElabD ()
elab ist info emode opts fn tm
= do let loglvl = opt_logLevel (idris_options ist)
when (loglvl > 5) $ unifyLog True
compute -- expand type synonyms, etc
let fc = maybe "(unknown)"
elabE initElabCtxt (elabFC info) tm -- (in argument, guarded, in type, in qquote)
est <- getAux
sequence_ (get_delayed_elab est)
end_unify
when (pattern || intransform) $
-- convert remaining holes to pattern vars
do unify_all
matchProblems False -- only the ones we matched earlier
unifyProblems
mkPat
update_term liftPats
ptm <- get_term
when pattern $
-- Look for Rig1 (linear) pattern bindings
do let pnms = findLinear ist [] ptm
update_term (setLinear pnms)
where
pattern = emode == ELHS || emode == EImpossible
eimpossible = emode == EImpossible
intransform = emode == ETransLHS
bindfree = emode == ETyDecl || emode == ELHS || emode == ETransLHS
|| emode == EImpossible
autoimpls = opt_autoimpls (idris_options ist)
get_delayed_elab est =
let ds = delayed_elab est in
map snd $ sortBy (\(p1, _) (p2, _) -> compare p1 p2) ds
tcgen = Dictionary `elem` opts
reflection = Reflection `elem` opts
isph arg = case getTm arg of
Placeholder -> (True, priority arg)
tm -> (False, priority arg)
toElab ina arg = case getTm arg of
Placeholder -> Nothing
v -> Just (priority arg, elabE ina (elabFC info) v)
toElab' ina arg = case getTm arg of
Placeholder -> Nothing
v -> Just (elabE ina (elabFC info) v)
mkPat = do hs <- get_holes
tm <- get_term
case hs of
(h: hs) -> do patvar h; mkPat
[] -> return ()
elabRec = elabE initElabCtxt Nothing
-- | elabE elaborates an expression, possibly wrapping implicit coercions
-- and forces/delays. If you make a recursive call in elab', it is
-- normally correct to call elabE - the ones that don't are desugarings,
-- typically
elabE :: ElabCtxt -> Maybe FC -> PTerm -> ElabD ()
elabE ina fc' t =
do solved <- get_recents
as <- get_autos
hs <- get_holes
-- If any of the autos use variables which have recently been solved,
-- have another go at solving them now.
mapM_ (\(a, (failc, ns)) ->
if any (\n -> n `elem` solved) ns && head hs /= a
then solveAuto ist fn False (a, failc)
else return ()) as
apt <- expandToArity t
itm <- if not pattern then insertImpLam ina apt else return apt
ct <- insertCoerce ina itm
t' <- insertLazy ina ct
g <- goal
tm <- get_term
ps <- get_probs
hs <- get_holes
--trace ("Elaborating " ++ show t' ++ " in " ++ show g
-- ++ "\n" ++ show tm
-- ++ "\nholes " ++ show hs
-- ++ "\nproblems " ++ show ps
-- ++ "\n-----------\n") $
--trace ("ELAB " ++ show t') $
env <- get_env
let fc = fileFC "Force"
handleError (forceErr t' env)
(elab' ina fc' t')
(elab' ina fc' (PApp fc (PRef fc [] (sUN "Force"))
[pimp (sUN "t") Placeholder True,
pimp (sUN "a") Placeholder True,
pexp ct]))
forceErr orig env (CantUnify _ (t,_) (t',_) _ _ _)
| (P _ (UN ht) _, _) <- unApply (normalise (tt_ctxt ist) env t),
ht == txt "Delayed" = notDelay orig
forceErr orig env (CantUnify _ (t,_) (t',_) _ _ _)
| (P _ (UN ht) _, _) <- unApply (normalise (tt_ctxt ist) env t'),
ht == txt "Delayed" = notDelay orig
forceErr orig env (InfiniteUnify _ t _)
| (P _ (UN ht) _, _) <- unApply (normalise (tt_ctxt ist) env t),
ht == txt "Delayed" = notDelay orig
forceErr orig env (Elaborating _ _ _ t) = forceErr orig env t
forceErr orig env (ElaboratingArg _ _ _ t) = forceErr orig env t
forceErr orig env (At _ t) = forceErr orig env t
forceErr orig env t = False
notDelay t@(PApp _ (PRef _ _ (UN l)) _) | l == txt "Delay" = False
notDelay _ = True
local f = do e <- get_env
return (f `elem` map fstEnv e)
-- | Is a constant a type?
constType :: Const -> Bool
constType (AType _) = True
constType StrType = True
constType VoidType = True
constType _ = False
-- "guarded" means immediately under a constructor, to help find patvars
elab' :: ElabCtxt -- ^ (in an argument, guarded, in a type, in a quasiquote)
-> Maybe FC -- ^ The closest FC in the syntax tree, if applicable
-> PTerm -- ^ The term to elaborate
-> ElabD ()
elab' ina fc (PNoImplicits t) = elab' ina fc t -- skip elabE step
elab' ina fc (PType fc') =
do apply RType []
solve
highlightSource fc' (AnnType "Type" "The type of types")
elab' ina fc (PUniverse fc' u) =
do unless (UniquenessTypes `elem` idris_language_extensions ist
|| e_qq ina) $
lift $ tfail $ At fc' (Msg "You must turn on the UniquenessTypes extension to use UniqueType or AnyType")
apply (RUType u) []
solve
highlightSource fc' (AnnType (show u) "The type of unique types")
-- elab' (_,_,inty) (PConstant c)
-- | constType c && pattern && not reflection && not inty
-- = lift $ tfail (Msg "Typecase is not allowed")
elab' ina fc tm@(PConstant fc' c)
| pattern && not reflection && not (e_qq ina) && not (e_intype ina)
&& isTypeConst c
= lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
| pattern && not reflection && not (e_qq ina) && e_nomatching ina
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| otherwise = do apply (RConstant c) []
solve
highlightSource fc' (AnnConst c)
elab' ina fc (PQuote r) = do fill r; solve
elab' ina _ (PTrue fc _) =
do compute
g <- goal
case g of
TType _ -> elab' ina (Just fc) (PRef fc [] unitTy)
UType _ -> elab' ina (Just fc) (PRef fc [] unitTy)
_ -> elab' ina (Just fc) (PRef fc [] unitCon)
elab' ina fc (PResolveTC (FC "HACK" _ _)) -- for chasing parent interfaces
= do g <- goal; resolveTC False False 5 g fn elabRec ist
elab' ina fc (PResolveTC fc')
= do c <- getNameFrom (sMN 0 "__interface")
implementationArg c
-- Elaborate the equality type first homogeneously, then
-- heterogeneously as a fallback
elab' ina _ (PApp fc (PRef _ _ n) args)
| n == eqTy, [Placeholder, Placeholder, l, r] <- map getTm args
= try (do tyn <- getNameFrom (sMN 0 "aqty")
claim tyn RType
movelast tyn
elab' ina (Just fc) (PApp fc (PRef fc [] eqTy)
[pimp (sUN "A") (PRef NoFC [] tyn) True,
pimp (sUN "B") (PRef NoFC [] tyn) False,
pexp l, pexp r]))
(do atyn <- getNameFrom (sMN 0 "aqty")
btyn <- getNameFrom (sMN 0 "bqty")
claim atyn RType
movelast atyn
claim btyn RType
movelast btyn
elab' ina (Just fc) (PApp fc (PRef fc [] eqTy)
[pimp (sUN "A") (PRef NoFC [] atyn) True,
pimp (sUN "B") (PRef NoFC [] btyn) False,
pexp l, pexp r]))
elab' ina _ (PPair fc hls _ l r)
= do compute
g <- goal
let (tc, _) = unApply g
case g of
TType _ -> elab' ina (Just fc) (PApp fc (PRef fc hls pairTy)
[pexp l,pexp r])
UType _ -> elab' ina (Just fc) (PApp fc (PRef fc hls upairTy)
[pexp l,pexp r])
_ -> case tc of
P _ n _ | n == upairTy
-> elab' ina (Just fc) (PApp fc (PRef fc hls upairCon)
[pimp (sUN "A") Placeholder False,
pimp (sUN "B") Placeholder False,
pexp l, pexp r])
_ -> elab' ina (Just fc) (PApp fc (PRef fc hls pairCon)
[pimp (sUN "A") Placeholder False,
pimp (sUN "B") Placeholder False,
pexp l, pexp r])
elab' ina _ (PDPair fc hls p l@(PRef nfc hl n) t r)
= case p of
IsType -> asType
IsTerm -> asValue
TypeOrTerm ->
do compute
g <- goal
case g of
TType _ -> asType
_ -> asValue
where asType = elab' ina (Just fc) (PApp fc (PRef NoFC hls sigmaTy)
[pexp t,
pexp (PLam fc n nfc Placeholder r)])
asValue = elab' ina (Just fc) (PApp fc (PRef fc hls sigmaCon)
[pimp (sMN 0 "a") t False,
pimp (sMN 0 "P") Placeholder True,
pexp l, pexp r])
elab' ina _ (PDPair fc hls p l t r) = elab' ina (Just fc) (PApp fc (PRef fc hls sigmaCon)
[pimp (sMN 0 "a") t False,
pimp (sMN 0 "P") Placeholder True,
pexp l, pexp r])
elab' ina fc (PAlternative ms (ExactlyOne delayok) as)
= do as_pruned <- doPrune as
-- Finish the mkUniqueNames job with the pruned set, rather than
-- the full set.
uns <- get_usedns
let as' = map (mkUniqueNames (uns ++ map snd ms) ms) as_pruned
(h : hs) <- get_holes
ty <- goal
case as' of
[] -> do hds <- mapM showHd as
lift $ tfail $ NoValidAlts hds
[x] -> elab' ina fc x
-- If there's options, try now, and if that fails, postpone
-- to later.
_ -> handleError isAmbiguous
(do hds <- mapM showHd as'
tryAll (zip (map (elab' ina fc) as')
hds))
(do movelast h
delayElab 5 $ do
hs <- get_holes
when (h `elem` hs) $ do
focus h
as'' <- doPrune as'
case as'' of
[x] -> elab' ina fc x
_ -> do hds <- mapM showHd as''
tryAll' False (zip (map (elab' ina fc) as'')
hds))
where showHd (PApp _ (PRef _ _ (UN l)) [_, _, arg])
| l == txt "Delay" = showHd (getTm arg)
showHd (PApp _ (PRef _ _ n) _) = return n
showHd (PRef _ _ n) = return n
showHd (PApp _ h _) = showHd h
showHd (PHidden h) = showHd h
showHd x = getNameFrom (sMN 0 "_") -- We probably should do something better than this here
doPrune as =
do compute -- to get 'Delayed' if it's there
ty <- goal
ctxt <- get_context
env <- get_env
let ty' = unDelay ty
let (tc, _) = unApply ty'
return $ pruneByType eimpossible env tc ty' ist as
unDelay t | (P _ (UN l) _, [_, arg]) <- unApply t,
l == txt "Delayed" = unDelay arg
| otherwise = t
isAmbiguous (CantResolveAlts _) = delayok
isAmbiguous (Elaborating _ _ _ e) = isAmbiguous e
isAmbiguous (ElaboratingArg _ _ _ e) = isAmbiguous e
isAmbiguous (At _ e) = isAmbiguous e
isAmbiguous _ = False
elab' ina fc (PAlternative ms FirstSuccess as_in)
= do -- finish the mkUniqueNames job
uns <- get_usedns
let as = map (mkUniqueNames (uns ++ map snd ms) ms) as_in
trySeq as
where -- if none work, take the error from the first
trySeq (x : xs) = let e1 = elab' ina fc x in
try' e1 (trySeq' e1 xs) True
trySeq [] = fail "Nothing to try in sequence"
trySeq' deferr [] = do deferr; unifyProblems
trySeq' deferr (x : xs)
= try' (tryCatch (do elab' ina fc x
solveAutos ist fn False
unifyProblems)
(\_ -> trySeq' deferr []))
(trySeq' deferr xs) True
elab' ina fc (PAlternative ms TryImplicit (orig : alts)) = do
env <- get_env
compute
ty <- goal
let doelab = elab' ina fc orig
tryCatch doelab
(\err ->
if recoverableErr err
then -- trace ("NEED IMPLICIT! " ++ show orig ++ "\n" ++
-- show alts ++ "\n" ++
-- showQuick err) $
-- Prune the coercions so that only the ones
-- with the right type to fix the error will be tried!
case pruneAlts err alts env of
[] -> lift $ tfail err
alts' -> do
try' (elab' ina fc (PAlternative ms (ExactlyOne False) alts'))
(lift $ tfail err) -- take error from original if all fail
True
else lift $ tfail err)
where
recoverableErr (CantUnify _ _ _ _ _ _) = True
recoverableErr (TooManyArguments _) = False
recoverableErr (CantSolveGoal _ _) = False
recoverableErr (CantResolveAlts _) = False
recoverableErr (NoValidAlts _) = True
recoverableErr (ProofSearchFail (Msg _)) = True
recoverableErr (ProofSearchFail _) = False
recoverableErr (ElaboratingArg _ _ _ e) = recoverableErr e
recoverableErr (At _ e) = recoverableErr e
recoverableErr (ElabScriptDebug _ _ _) = False
recoverableErr _ = True
pruneAlts (CantUnify _ (inc, _) (outc, _) _ _ _) alts env
= case unApply (normalise (tt_ctxt ist) env inc) of
(P (TCon _ _) n _, _) -> filter (hasArg n env) alts
(Constant _, _) -> alts
_ -> filter isLend alts -- special case hack for 'Borrowed'
pruneAlts (ElaboratingArg _ _ _ e) alts env = pruneAlts e alts env
pruneAlts (At _ e) alts env = pruneAlts e alts env
pruneAlts (NoValidAlts as) alts env = alts
pruneAlts err alts _ = filter isLend alts
hasArg n env ap | isLend ap = True -- special case hack for 'Borrowed'
hasArg n env (PApp _ (PRef _ _ a) _)
= case lookupTyExact a (tt_ctxt ist) of
Just ty -> let args = map snd (getArgTys (normalise (tt_ctxt ist) env ty)) in
any (fnIs n) args
Nothing -> False
hasArg n env (PAlternative _ _ as) = any (hasArg n env) as
hasArg n _ tm = False
isLend (PApp _ (PRef _ _ l) _) = l == sNS (sUN "lend") ["Ownership"]
isLend _ = False
fnIs n ty = case unApply ty of
(P _ n' _, _) -> n == n'
_ -> False
showQuick (CantUnify _ (l, _) (r, _) _ _ _)
= show (l, r)
showQuick (ElaboratingArg _ _ _ e) = showQuick e
showQuick (At _ e) = showQuick e
showQuick (ProofSearchFail (Msg _)) = "search fail"
showQuick _ = "No chance"
elab' ina _ (PPatvar fc n) | bindfree
= do patvar n
update_term liftPats
highlightSource fc (AnnBoundName n False)
-- elab' (_, _, inty) (PRef fc f)
-- | isTConName f (tt_ctxt ist) && pattern && not reflection && not inty
-- = lift $ tfail (Msg "Typecase is not allowed")
elab' ec fc' tm@(PRef fc hls n)
| pattern && not reflection && not (e_qq ec) && not (e_intype ec)
&& isTConName n (tt_ctxt ist)
= lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
| pattern && not reflection && not (e_qq ec) && e_nomatching ec
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| (pattern || intransform || (bindfree && bindable n)) && not (inparamBlock n) && not (e_qq ec)
= do ty <- goal
testImplicitWarning fc n ty
let ina = e_inarg ec
guarded = e_guarded ec
inty = e_intype ec
ctxt <- get_context
env <- get_env
-- If the name is defined, globally or locally, elaborate it
-- as a reference, otherwise it might end up as a pattern var.
let defined = case lookupTy n ctxt of
[] -> case lookupTyEnv n env of
Just _ -> True
_ -> False
_ -> True
-- this is to stop us resolving interfaces recursively
if (tcname n && ina && not intransform)
then erun fc $
do patvar n
update_term liftPats
highlightSource fc (AnnBoundName n False)
else if defined -- finally, ordinary PRef elaboration
then elabRef ec fc' fc hls n tm
else try (do apply (Var n) []
annot <- findHighlight n
solve
highlightSource fc annot)
(do patvar n
update_term liftPats
highlightSource fc (AnnBoundName n False))
where inparamBlock n = case lookupCtxtName n (inblock info) of
[] -> False
_ -> True
bindable (NS _ _) = False
bindable (MN _ _) = True
bindable n = implicitable n && autoimpls
elab' ina _ f@(PInferRef fc hls n) = elab' ina (Just fc) (PApp NoFC f [])
elab' ina fc' tm@(PRef fc hls n)
| pattern && not reflection && not (e_qq ina) && not (e_intype ina)
&& isTConName n (tt_ctxt ist)
= lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
| pattern && not reflection && not (e_qq ina) && e_nomatching ina
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| otherwise = elabRef ina fc' fc hls n tm
elab' ina _ (PLam _ _ _ _ PImpossible) = lift . tfail . Msg $ "Only pattern-matching lambdas can be impossible"
elab' ina _ (PLam fc n nfc Placeholder sc)
= do -- if n is a type constructor name, this makes no sense...
ctxt <- get_context
when (isTConName n ctxt) $
lift $ tfail (Msg $ "Can't use type constructor " ++ show n ++ " here")
checkPiGoal n
attack; intro (Just n);
addPSname n -- okay for proof search
-- trace ("------ intro " ++ show n ++ " ---- \n" ++ show ptm)
elabE (ina { e_inarg = True } ) (Just fc) sc; solve
highlightSource nfc (AnnBoundName n False)
elab' ec _ (PLam fc n nfc ty sc)
= do tyn <- getNameFrom (sMN 0 "lamty")
-- if n is a type constructor name, this makes no sense...
ctxt <- get_context
when (isTConName n ctxt) $
lift $ tfail (Msg $ "Can't use type constructor " ++ show n ++ " here")
checkPiGoal n
claim tyn RType
explicit tyn
attack
ptm <- get_term
hs <- get_holes
introTy (Var tyn) (Just n)
addPSname n -- okay for proof search
focus tyn
elabE (ec { e_inarg = True, e_intype = True }) (Just fc) ty
elabE (ec { e_inarg = True }) (Just fc) sc
solve
highlightSource nfc (AnnBoundName n False)
elab' ina fc (PPi p n nfc Placeholder sc)
= do attack;
case pcount p of
RigW -> return ()
_ -> unless (LinearTypes `elem` idris_language_extensions ist
|| e_qq ina) $
lift $ tfail $ At nfc (Msg "You must turn on the LinearTypes extension to use a count")
arg n (pcount p) (is_scoped p) (sMN 0 "phTy")
addAutoBind p n
addPSname n -- okay for proof search
elabE (ina { e_inarg = True, e_intype = True }) fc sc
solve
highlightSource nfc (AnnBoundName n False)
elab' ina fc (PPi p n nfc ty sc)
= do attack; tyn <- getNameFrom (sMN 0 "piTy")
claim tyn RType
n' <- case n of
MN _ _ -> unique_hole n
_ -> return n
case pcount p of
RigW -> return ()
_ -> unless (LinearTypes `elem` idris_language_extensions ist
|| e_qq ina) $
lift $ tfail $ At nfc (Msg "You must turn on the LinearTypes extension to use a linear argument")
forall n' (pcount p) (is_scoped p) (Var tyn)
addAutoBind p n'
addPSname n' -- okay for proof search
focus tyn
let ec' = ina { e_inarg = True, e_intype = True }
elabE ec' fc ty
elabE ec' fc sc
solve
highlightSource nfc (AnnBoundName n False)
elab' ina _ tm@(PLet fc n nfc ty val sc)
= do attack
ivs <- get_implementations
tyn <- getNameFrom (sMN 0 "letty")
claim tyn RType
valn <- getNameFrom (sMN 0 "letval")
claim valn (Var tyn)
explicit valn
letbind n (Var tyn) (Var valn)
addPSname n
case ty of
Placeholder -> return ()
_ -> do focus tyn
explicit tyn
elabE (ina { e_inarg = True, e_intype = True })
(Just fc) ty
focus valn
elabE (ina { e_inarg = True, e_intype = True })
(Just fc) val
ivs' <- get_implementations
env <- get_env
elabE (ina { e_inarg = True }) (Just fc) sc
when (not (pattern || intransform)) $
mapM_ (\n -> do focus n
g <- goal
hs <- get_holes
if all (\n -> n == tyn || not (n `elem` hs)) (freeNames g)
then handleError (tcRecoverable emode)
(resolveTC True False 10 g fn elabRec ist)
(movelast n)
else movelast n)
(ivs' \\ ivs)
-- HACK: If the name leaks into its type, it may leak out of
-- scope outside, so substitute in the outer scope.
expandLet n (case lookupBinder n env of
Just (Let t v) -> v
other -> error ("Value not a let binding: " ++ show other))
solve
highlightSource nfc (AnnBoundName n False)
elab' ina _ (PGoal fc r n sc) = do
rty <- goal
attack
tyn <- getNameFrom (sMN 0 "letty")
claim tyn RType
valn <- getNameFrom (sMN 0 "letval")
claim valn (Var tyn)
letbind n (Var tyn) (Var valn)
focus valn
elabE (ina { e_inarg = True, e_intype = True }) (Just fc) (PApp fc r [pexp (delab ist rty)])
env <- get_env
computeLet n
elabE (ina { e_inarg = True }) (Just fc) sc
solve
-- elab' ina fc (PLet n Placeholder
-- (PApp fc r [pexp (delab ist rty)]) sc)
elab' ina _ tm@(PApp fc (PInferRef _ _ f) args) = do
rty <- goal
ds <- get_deferred
ctxt <- get_context
-- make a function type a -> b -> c -> ... -> rty for the
-- new function name
env <- get_env
argTys <- claimArgTys env args
fn <- getNameFrom (sMN 0 "inf_fn")
let fty = fnTy argTys rty
-- trace (show (ptm, map fst argTys)) $ focus fn
-- build and defer the function application
attack; deferType (mkN f) fty (map fst argTys); solve
-- elaborate the arguments, to unify their types. They all have to
-- be explicit.
mapM_ elabIArg (zip argTys args)
where claimArgTys env [] = return []
claimArgTys env (arg : xs) | Just n <- localVar env (getTm arg)
= do nty <- get_type (Var n)
ans <- claimArgTys env xs
return ((n, (False, forget nty)) : ans)
claimArgTys env (_ : xs)
= do an <- getNameFrom (sMN 0 "inf_argTy")
aval <- getNameFrom (sMN 0 "inf_arg")
claim an RType
claim aval (Var an)
ans <- claimArgTys env xs
return ((aval, (True, (Var an))) : ans)
fnTy [] ret = forget ret
fnTy ((x, (_, xt)) : xs) ret = RBind x (Pi RigW Nothing xt RType) (fnTy xs ret)
localVar env (PRef _ _ x)
= case lookupBinder x env of
Just _ -> Just x
_ -> Nothing
localVar env _ = Nothing
elabIArg ((n, (True, ty)), def) =
do focus n; elabE ina (Just fc) (getTm def)
elabIArg _ = return () -- already done, just a name
mkN n@(NS _ _) = n
mkN n@(SN _) = n
mkN n = case namespace info of
xs@(_:_) -> sNS n xs
_ -> n
elab' ina _ (PMatchApp fc fn)
= do (fn', imps) <- case lookupCtxtName fn (idris_implicits ist) of
[(n, args)] -> return (n, map (const True) args)
_ -> lift $ tfail (NoSuchVariable fn)
ns <- match_apply (Var fn') (map (\x -> (x,0)) imps)
solve
-- if f is local, just do a simple_app
-- FIXME: Anyone feel like refactoring this mess? - EB
elab' ina topfc tm@(PApp fc (PRef ffc hls f) args_in)
| pattern && not reflection && not (e_qq ina) && e_nomatching ina
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| otherwise = implicitApp $
do env <- get_env
ty <- goal
fty <- get_type (Var f)
ctxt <- get_context
let dataCon = isDConName f ctxt
annot <- findHighlight f
knowns_m <- mapM getKnownImplicit args_in
let knowns = mapMaybe id knowns_m
args <- insertScopedImps fc f knowns (normalise ctxt env fty) args_in
let unmatchableArgs = if pattern
then getUnmatchable (tt_ctxt ist) f
else []
-- trace ("BEFORE " ++ show f ++ ": " ++ show ty) $
when (pattern && not reflection && not (e_qq ina) && not (e_intype ina)
&& isTConName f (tt_ctxt ist)) $
lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
-- trace (show (f, args_in, args)) $
if (f `elem` map fstEnv env && length args == 1 && length args_in == 1)
then -- simple app, as below
do simple_app False
(elabE (ina { e_isfn = True }) (Just fc) (PRef ffc hls f))
(elabE (ina { e_inarg = True,
e_guarded = dataCon }) (Just fc) (getTm (head args)))
(show tm)
solve
mapM (uncurry highlightSource) $
(ffc, annot) : map (\f -> (f, annot)) hls
return []
else
do ivs <- get_implementations
ps <- get_probs
-- HACK: we shouldn't resolve interfaces if we're defining an implementation
-- function or default definition.
let isinf = f == inferCon || tcname f
-- if f is an interface, we need to know its arguments so that
-- we can unify with them
case lookupCtxt f (idris_interfaces ist) of
[] -> return ()
_ -> do mapM_ setInjective (map getTm args)
-- maybe more things are solvable now
unifyProblems
let guarded = isConName f ctxt
-- trace ("args is " ++ show args) $ return ()
ns <- apply (Var f) (map isph args)
-- trace ("ns is " ++ show ns) $ return ()
-- mark any interface arguments as injective
-- when (not pattern) $
mapM_ checkIfInjective (map snd ns)
unifyProblems -- try again with the new information,
-- to help with disambiguation
ulog <- getUnifyLog
annot <- findHighlight f
mapM (uncurry highlightSource) $
(ffc, annot) : map (\f -> (f, annot)) hls
elabArgs ist (ina { e_inarg = e_inarg ina || not isinf,
e_guarded = dataCon })
[] fc False f
(zip ns (unmatchableArgs ++ repeat False))
(f == sUN "Force")
(map (\x -> getTm x) args) -- TODO: remove this False arg
imp <- if (e_isfn ina) then
do guess <- get_guess
env <- get_env
case safeForgetEnv (map fstEnv env) guess of
Nothing ->
return []
Just rguess -> do
gty <- get_type rguess
let ty_n = normalise ctxt env gty
return $ getReqImps ty_n
else return []
-- Now we find out how many implicits we needed at the
-- end of the application by looking at the goal again
-- - Have another go, but this time add the
-- implicits (can't think of a better way than this...)
case imp of
rs@(_:_) | not pattern -> return rs -- quit, try again
_ -> do solve
hs <- get_holes
ivs' <- get_implementations
-- Attempt to resolve any interfaces which have 'complete' types,
-- i.e. no holes in them
when (not pattern || (e_inarg ina && not tcgen)) $
mapM_ (\n -> do focus n
g <- goal
env <- get_env
hs <- get_holes
if all (\n -> not (n `elem` hs)) (freeNames g)
then handleError (tcRecoverable emode)
(resolveTC False False 10 g fn elabRec ist)
(movelast n)
else movelast n)
(ivs' \\ ivs)
return []
where
-- Run the elaborator, which returns how many implicit
-- args were needed, then run it again with those args. We need
-- this because we have to elaborate the whole application to
-- find out whether any computations have caused more implicits
-- to be needed.
implicitApp :: ElabD [ImplicitInfo] -> ElabD ()
implicitApp elab
| pattern || intransform = do elab; return ()
| otherwise
= do s <- get
imps <- elab
case imps of
[] -> return ()
es -> do put s
elab' ina topfc (PAppImpl tm es)
getKnownImplicit imp
| UnknownImp `elem` argopts imp
= return Nothing -- lift $ tfail $ UnknownImplicit (pname imp) f
| otherwise = return (Just (pname imp))
getReqImps (Bind x (Pi _ (Just i) ty _) sc)
= i : getReqImps sc
getReqImps _ = []
checkIfInjective n = do
env <- get_env
case lookupBinder n env of
Nothing -> return ()
Just b ->
case unApply (normalise (tt_ctxt ist) env (binderTy b)) of
(P _ c _, args) ->
case lookupCtxtExact c (idris_interfaces ist) of
Nothing -> return ()
Just ci -> -- interface, set as injective
do mapM_ setinjArg (getDets 0 (interface_determiners ci) args)
-- maybe we can solve more things now...
ulog <- getUnifyLog
probs <- get_probs
inj <- get_inj
traceWhen ulog ("Injective now " ++ show args ++ "\nAll: " ++ show inj
++ "\nProblems: " ++ qshow probs) $
unifyProblems
probs <- get_probs
traceWhen ulog (qshow probs) $ return ()
_ -> return ()
setinjArg (P _ n _) = setinj n
setinjArg _ = return ()
getDets i ds [] = []
getDets i ds (a : as) | i `elem` ds = a : getDets (i + 1) ds as
| otherwise = getDets (i + 1) ds as
tacTm (PTactics _) = True
tacTm (PProof _) = True
tacTm _ = False
setInjective (PRef _ _ n) = setinj n
setInjective (PApp _ (PRef _ _ n) _) = setinj n
setInjective _ = return ()
elab' ina _ tm@(PApp fc f [arg]) =
erun fc $
do simple_app (not $ headRef f)
(elabE (ina { e_isfn = True }) (Just fc) f)
(elabE (ina { e_inarg = True }) (Just fc) (getTm arg))
(show tm)
solve
where headRef (PRef _ _ _) = True
headRef (PApp _ f _) = headRef f
headRef (PAlternative _ _ as) = all headRef as
headRef _ = False
elab' ina fc (PAppImpl f es) = do appImpl (reverse es) -- not that we look...
solve
where appImpl [] = elab' (ina { e_isfn = False }) fc f -- e_isfn not set, so no recursive expansion of implicits
appImpl (e : es) = simple_app False
(appImpl es)
(elab' ina fc Placeholder)
(show f)
elab' ina fc Placeholder
= do (h : hs) <- get_holes
movelast h
elab' ina fc (PMetavar nfc n) =
do ptm <- get_term
-- When building the metavar application, leave out the unique
-- names which have been used elsewhere in the term, since we
-- won't be able to use them in the resulting application.
let unique_used = getUniqueUsed (tt_ctxt ist) ptm
let n' = metavarName (namespace info) n
attack
psns <- getPSnames
n' <- defer unique_used n'
solve
highlightSource nfc (AnnName n' (Just MetavarOutput) Nothing Nothing)
elab' ina fc (PProof ts) = do compute; mapM_ (runTac True ist (elabFC info) fn) ts
elab' ina fc (PTactics ts)
| not pattern = do mapM_ (runTac False ist fc fn) ts
| otherwise = elab' ina fc Placeholder
elab' ina fc (PElabError e) = lift $ tfail e
elab' ina mfc (PRewrite fc substfn rule sc newg)
= elabRewrite (elab' ina mfc) ist fc substfn rule sc newg
-- A common error case if trying to typecheck an autogenerated case block
elab' ina _ c@(PCase fc Placeholder opts)
= lift $ tfail (Msg "No expression for the case to inspect.\nYou need to replace the _ with an expression.")
elab' ina _ c@(PCase fc scr opts)
= do attack
tyn <- getNameFrom (sMN 0 "scty")
claim tyn RType
valn <- getNameFrom (sMN 0 "scval")
scvn <- getNameFrom (sMN 0 "scvar")
claim valn (Var tyn)
letbind scvn (Var tyn) (Var valn)
-- Start filling in the scrutinee type, if we can work one
-- out from the case options
let scrTy = getScrType (map fst opts)
case scrTy of
Nothing -> return ()
Just ty -> do focus tyn
elabE ina (Just fc) ty
focus valn
elabE (ina { e_inarg = True }) (Just fc) scr
-- Solve any remaining implicits - we need to solve as many
-- as possible before making the 'case' type
unifyProblems
matchProblems True
args <- get_env
envU <- mapM (getKind args) args
let namesUsedInRHS = nub $ scvn : concatMap (\(_,rhs) -> allNamesIn rhs) opts
-- Drop the unique arguments used in the term already
-- and in the scrutinee (since it's
-- not valid to use them again anyway)
--
-- Also drop unique arguments which don't appear explicitly
-- in either case branch so they don't count as used
-- unnecessarily (can only do this for unique things, since we
-- assume they don't appear implicitly in types)
ptm <- get_term
let inOpts = (filter (/= scvn) (map fstEnv args)) \\ (concatMap (\x -> allNamesIn (snd x)) opts)
let argsDropped = filter (\t -> isUnique envU t || isNotLift args t)
(nub $ allNamesIn scr ++ inApp ptm ++
inOpts)
let args' = filter (\(n, _, _) -> n `notElem` argsDropped) args
attack
cname' <- defer argsDropped (mkN (mkCaseName fc fn))
solve
-- if the scrutinee is one of the 'args' in env, we should
-- inspect it directly, rather than adding it as a new argument
let newdef = PClauses fc [] cname'
(caseBlock fc cname' scr
(map (isScr scr) (reverse args')) opts)
-- elaborate case
updateAux (\e -> e { case_decls = (cname', newdef) : case_decls e } )
-- if we haven't got the type yet, hopefully we'll get it later!
movelast tyn
solve
where mkCaseName fc (NS n ns) = NS (mkCaseName fc n) ns
mkCaseName fc n = SN (CaseN (FC' fc) n)
-- mkCaseName (UN x) = UN (x ++ "_case")
-- mkCaseName (MN i x) = MN i (x ++ "_case")
mkN n@(NS _ _) = n
mkN n = case namespace info of
xs@(_:_) -> sNS n xs
_ -> n
getScrType [] = Nothing
getScrType (f : os) = maybe (getScrType os) Just (getAppType f)
getAppType (PRef _ _ n) =
case lookupTyName n (tt_ctxt ist) of
[(n', ty)] | isDConName n' (tt_ctxt ist) ->
case unApply (getRetTy ty) of
(P _ tyn _, args) ->
Just (PApp fc (PRef fc [] tyn)
(map pexp (map (const Placeholder) args)))
_ -> Nothing
_ -> Nothing -- ambiguity is no help to us!
getAppType (PApp _ t as) = getAppType t
getAppType _ = Nothing
inApp (P _ n _) = [n]
inApp (App _ f a) = inApp f ++ inApp a
inApp (Bind n (Let _ v) sc) = inApp v ++ inApp sc
inApp (Bind n (Guess _ v) sc) = inApp v ++ inApp sc
inApp (Bind n b sc) = inApp sc
inApp _ = []
isUnique envk n = case lookup n envk of
Just u -> u
_ -> False
getKind env (n, _, _)
= case lookupBinder n env of
Nothing -> return (n, False) -- can't happen, actually...
Just b ->
do ty <- get_type (forget (binderTy b))
case ty of
UType UniqueType -> return (n, True)
UType AllTypes -> return (n, True)
_ -> return (n, False)
tcName tm | (P _ n _, _) <- unApply tm
= case lookupCtxt n (idris_interfaces ist) of
[_] -> True
_ -> False
tcName _ = False
isNotLift env n
= case lookupBinder n env of
Just ty ->
case unApply (binderTy ty) of
(P _ n _, _) -> n `elem` noCaseLift info
_ -> False
_ -> False
usedIn ns (n, b)
= n `elem` ns
|| any (\x -> x `elem` ns) (allTTNames (binderTy b))
elab' ina fc (PUnifyLog t) = do unifyLog True
elab' ina fc t
unifyLog False
elab' ina fc (PQuasiquote t goalt)
= do -- First extract the unquoted subterms, replacing them with fresh
-- names in the quasiquoted term. Claim their reflections to be
-- an inferred type (to support polytypic quasiquotes).
finalTy <- goal
(t, unq) <- extractUnquotes 0 t
let unquoteNames = map fst unq
mapM_ (\uqn -> claim uqn (forget finalTy)) unquoteNames
-- Save the old state - we need a fresh proof state to avoid
-- capturing lexically available variables in the quoted term.
ctxt <- get_context
datatypes <- get_datatypes
g_nextname <- get_global_nextname
saveState
updatePS (const .
newProof (sMN 0 "q") (constraintNS info) ctxt datatypes g_nextname $
P Ref (reflm "TT") Erased)
-- Re-add the unquotes, letting Idris infer the (fictional)
-- types. Here, they represent the real type rather than the type
-- of their reflection.
mapM_ (\n -> do ty <- getNameFrom (sMN 0 "unqTy")
claim ty RType
movelast ty
claim n (Var ty)
movelast n)
unquoteNames
-- Determine whether there's an explicit goal type, and act accordingly
-- Establish holes for the type and value of the term to be
-- quasiquoted
qTy <- getNameFrom (sMN 0 "qquoteTy")
claim qTy RType
movelast qTy
qTm <- getNameFrom (sMN 0 "qquoteTm")
claim qTm (Var qTy)
-- Let-bind the result of elaborating the contained term, so that
-- the hole doesn't disappear
nTm <- getNameFrom (sMN 0 "quotedTerm")
letbind nTm (Var qTy) (Var qTm)
-- Fill out the goal type, if relevant
case goalt of
Nothing -> return ()
Just gTy -> do focus qTy
elabE (ina { e_qq = True }) fc gTy
-- Elaborate the quasiquoted term into the hole
focus qTm
elabE (ina { e_qq = True }) fc t
end_unify
-- We now have an elaborated term. Reflect it and solve the
-- original goal in the original proof state, preserving highlighting
env <- get_env
EState _ _ _ hs _ _ <- getAux
loadState
updateAux (\aux -> aux { highlighting = hs })
let quoted = fmap (explicitNames . binderVal) $ lookupBinder nTm env
isRaw = case unApply (normaliseAll ctxt env finalTy) of
(P _ n _, []) | n == reflm "Raw" -> True
_ -> False
case quoted of
Just q -> do ctxt <- get_context
(q', _, _) <- lift $ recheck (constraintNS info) ctxt [(uq, RigW, Lam RigW Erased) | uq <- unquoteNames] (forget q) q
if pattern
then if isRaw
then reflectRawQuotePattern unquoteNames (forget q')
else reflectTTQuotePattern unquoteNames q'
else do if isRaw
then -- we forget q' instead of using q to ensure rechecking
fill $ reflectRawQuote unquoteNames (forget q')
else fill $ reflectTTQuote unquoteNames q'
solve
Nothing -> lift . tfail . Msg $ "Broken elaboration of quasiquote"
-- Finally fill in the terms or patterns from the unquotes. This
-- happens last so that their holes still exist while elaborating
-- the main quotation.
mapM_ elabUnquote unq
where elabUnquote (n, tm)
= do focus n
elabE (ina { e_qq = False }) fc tm
elab' ina fc (PUnquote t) = fail "Found unquote outside of quasiquote"
elab' ina fc (PQuoteName n False nfc) =
do fill $ reflectName n
solve
elab' ina fc (PQuoteName n True nfc) =
do ctxt <- get_context
env <- get_env
case lookupBinder n env of
Just _ -> do fill $ reflectName n
solve
highlightSource nfc (AnnBoundName n False)
Nothing ->
case lookupNameDef n ctxt of
[(n', _)] -> do fill $ reflectName n'
solve
highlightSource nfc (AnnName n' Nothing Nothing Nothing)
[] -> lift . tfail . NoSuchVariable $ n
more -> lift . tfail . CantResolveAlts $ map fst more
elab' ina fc (PAs _ n t) = lift . tfail . Msg $ "@-pattern not allowed here"
elab' ina fc (PHidden t)
| reflection = elab' ina fc t
| otherwise
= do (h : hs) <- get_holes
-- Dotting a hole means that either the hole or any outer
-- hole (a hole outside any occurrence of it)
-- must be solvable by unification as well as being filled
-- in directly.
-- Delay dotted things to the end, then when we elaborate them
-- we can check the result against what was inferred
movelast h
(h' : hs) <- get_holes
-- If we're at the end anyway, do it now
if h == h' then elabHidden h
else delayElab 10 $ elabHidden h
where
elabHidden h = do hs <- get_holes
when (h `elem` hs) $ do
focus h
dotterm
elab' ina fc t
elab' ina fc (PRunElab fc' tm ns) =
do unless (ElabReflection `elem` idris_language_extensions ist) $
lift $ tfail $ At fc' (Msg "You must turn on the ElabReflection extension to use %runElab")
attack
let elabName = sNS (sUN "Elab") ["Elab", "Reflection", "Language"]
n <- getNameFrom (sMN 0 "tacticScript")
let scriptTy = RApp (Var elabName) (Var unitTy)
claim n scriptTy
focus n
elabUnit <- goal
attack -- to get an extra hole
elab' ina (Just fc') tm
script <- get_guess
fullyElaborated script
solve -- eliminate the hole. Because there are no references, the script is only in the binding
ctxt <- get_context
env <- get_env
(scriptTm, scriptTy) <- lift $ check ctxt [] (forget script)
lift $ converts ctxt env elabUnit scriptTy
env <- get_env
runElabAction info ist (maybe fc' id fc) env script ns
solve
elab' ina fc (PConstSugar constFC tm) =
-- Here we elaborate the contained term, then calculate
-- highlighting for constFC. The highlighting is the
-- highlighting for the outermost constructor of the result of
-- evaluating the elaborated term, if one exists (it always
-- should, but better to fail gracefully for something silly
-- like highlighting info). This is how implicit applications of
-- fromInteger get highlighted.
do saveState -- so we don't pollute the elaborated term
n <- getNameFrom (sMN 0 "cstI")
n' <- getNameFrom (sMN 0 "cstIhole")
g <- forget <$> goal
claim n' g
movelast n'
-- In order to intercept the elaborated value, we need to
-- let-bind it.
attack
letbind n g (Var n')
focus n'
elab' ina fc tm
env <- get_env
ctxt <- get_context
let v = fmap (normaliseAll ctxt env . finalise . binderVal)
(lookupBinder n env)
loadState -- we have the highlighting - re-elaborate the value
elab' ina fc tm
case v of
Just val -> highlightConst constFC val
Nothing -> return ()
where highlightConst fc (P _ n _) =
highlightSource fc (AnnName n Nothing Nothing Nothing)
highlightConst fc (App _ f _) =
highlightConst fc f
highlightConst fc (Constant c) =
highlightSource fc (AnnConst c)
highlightConst _ _ = return ()
elab' ina fc x = fail $ "Unelaboratable syntactic form " ++ showTmImpls x
-- delay elaboration of 't', with priority 'pri' until after everything
-- else is done.
-- The delayed things with lower numbered priority will be elaborated
-- first. (In practice, this means delayed alternatives, then PHidden
-- things.)
delayElab pri t
= updateAux (\e -> e { delayed_elab = delayed_elab e ++ [(pri, t)] })
isScr :: PTerm -> (Name, RigCount, Binder Term) -> (Name, (Bool, Binder Term))
isScr (PRef _ _ n) (n', _, b) = (n', (n == n', b))
isScr _ (n', _, b) = (n', (False, b))
caseBlock :: FC -> Name
-> PTerm -- original scrutinee
-> [(Name, (Bool, Binder Term))] -> [(PTerm, PTerm)] -> [PClause]
caseBlock fc n scr env opts
= let args' = findScr env
args = map mkarg (map getNmScr args') in
map (mkClause args) opts
where -- Find the variable we want as the scrutinee and mark it as
-- 'True'. If the scrutinee is in the environment, match on that
-- otherwise match on the new argument we're adding.
findScr ((n, (True, t)) : xs)
= (n, (True, t)) : scrName n xs
findScr [(n, (_, t))] = [(n, (True, t))]
findScr (x : xs) = x : findScr xs
-- [] can't happen since scrutinee is in the environment!
findScr [] = error "The impossible happened - the scrutinee was not in the environment"
-- To make sure top level pattern name remains in scope, put
-- it at the end of the environment
scrName n [] = []
scrName n [(_, t)] = [(n, t)]
scrName n (x : xs) = x : scrName n xs
getNmScr (n, (s, _)) = (n, s)
mkarg (n, s) = (PRef fc [] n, s)
-- may be shadowed names in the new pattern - so replace the
-- old ones with an _
-- Also, names which don't appear on the rhs should not be
-- fixed on the lhs, or this restricts the kind of matching
-- we can do to non-dependent types.
mkClause args (l, r)
= let args' = map (shadowed (allNamesIn l)) args
args'' = map (implicitable (allNamesIn r ++
keepscrName scr)) args'
lhs = PApp (getFC fc l) (PRef NoFC [] n)
(map (mkLHSarg l) args'') in
PClause (getFC fc l) n lhs [] r []
-- Keep scrutinee available if it's just a name (this makes
-- the names in scope look better when looking at a hole on
-- the rhs of a case)
keepscrName (PRef _ _ n) = [n]
keepscrName _ = []
mkLHSarg l (tm, True) = pexp l
mkLHSarg l (tm, False) = pexp tm
shadowed new (PRef _ _ n, s) | n `elem` new = (Placeholder, s)
shadowed new t = t
implicitable rhs (PRef _ _ n, s) | n `notElem` rhs = (Placeholder, s)
implicitable rhs t = t
getFC d (PApp fc _ _) = fc
getFC d (PRef fc _ _) = fc
getFC d (PAlternative _ _ (x:_)) = getFC d x
getFC d x = d
-- Fail if a term is not yet fully elaborated (e.g. if it contains
-- case block functions that don't yet exist)
fullyElaborated :: Term -> ElabD ()
fullyElaborated (P _ n _) =
do estate <- getAux
case lookup n (case_decls estate) of
Nothing -> return ()
Just _ -> lift . tfail $ ElabScriptStaging n
fullyElaborated (Bind n b body) = fullyElaborated body >> for_ b fullyElaborated
fullyElaborated (App _ l r) = fullyElaborated l >> fullyElaborated r
fullyElaborated (Proj t _) = fullyElaborated t
fullyElaborated _ = return ()
-- If the goal type is a "Lazy", then try elaborating via 'Delay'
-- first. We need to do this brute force approach, rather than anything
-- more precise, since there may be various other ambiguities to resolve
-- first.
insertLazy :: ElabCtxt -> PTerm -> ElabD PTerm
insertLazy ina t@(PApp _ (PRef _ _ (UN l)) _) | l == txt "Delay" = return t
insertLazy ina t@(PApp _ (PRef _ _ (UN l)) _) | l == txt "Force" = return t
insertLazy ina (PCoerced t) = return t
-- Don't add a delay to top level pattern variables, since they
-- can be forced on the rhs if needed
insertLazy ina t@(PPatvar _ _) | pattern && not (e_guarded ina) = return t
insertLazy ina t =
do ty <- goal
env <- get_env
let (tyh, _) = unApply (normalise (tt_ctxt ist) env ty)
let tries = [mkDelay env t, t]
case tyh of
P _ (UN l) _ | l == txt "Delayed"
-> return (PAlternative [] FirstSuccess tries)
_ -> return t
where
mkDelay env (PAlternative ms b xs) = PAlternative ms b (map (mkDelay env) xs)
mkDelay env t
= let fc = fileFC "Delay" in
addImplBound ist (map fstEnv env) (PApp fc (PRef fc [] (sUN "Delay"))
[pexp t])
-- Don't put implicit coercions around applications which are marked
-- as '%noImplicit', or around case blocks, otherwise we get exponential
-- blowup especially where there are errors deep in large expressions.
notImplicitable (PApp _ f _) = notImplicitable f
-- TMP HACK no coercing on bind (make this configurable)
notImplicitable (PRef _ _ n)
| [opts] <- lookupCtxt n (idris_flags ist)
= NoImplicit `elem` opts
notImplicitable (PAlternative _ _ as) = any notImplicitable as
-- case is tricky enough without implicit coercions! If they are needed,
-- they can go in the branches separately.
notImplicitable (PCase _ _ _) = True
notImplicitable _ = False
-- Elaboration works more smoothly if we expand function applications
-- to their full arity and elaborate it all at once (better error messages
-- in particular)
expandToArity tm@(PApp fc f a) = do
env <- get_env
case fullApp tm of
-- if f is global, leave it alone because we've already
-- expanded it to the right arity
PApp fc ftm@(PRef _ _ f) args | Just aty <- lookupBinder f env ->
do let a = length (getArgTys (normalise (tt_ctxt ist) env (binderTy aty)))
return (mkPApp fc a ftm args)
_ -> return tm
expandToArity t = return t
fullApp (PApp _ (PApp fc f args) xs) = fullApp (PApp fc f (args ++ xs))
fullApp x = x
-- See if the name is listed as an implicit. If it is, return it, and
-- drop it from the rest of the list
findImplicit :: Name -> [PArg] -> (Maybe PArg, [PArg])
findImplicit n [] = (Nothing, [])
findImplicit n (i@(PImp _ _ _ n' _) : args)
| n == n' = (Just i, args)
findImplicit n (i@(PTacImplicit _ _ n' _ _) : args)
| n == n' = (Just i, args)
findImplicit n (x : xs) = let (arg, rest) = findImplicit n xs in
(arg, x : rest)
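    -- The helper above follows a common "find the first match and drop it"
    -- pattern. A minimal sketch of the same pattern on a plain list, assuming
    -- nothing beyond the Prelude (illustrative only, not part of the original
    -- source):
    --
    --   findFirst :: (a -> Bool) -> [a] -> (Maybe a, [a])
    --   findFirst _ [] = (Nothing, [])
    --   findFirst p (x : xs)
    --     | p x       = (Just x, xs)
    --     | otherwise = let (found, rest) = findFirst p xs in (found, x : rest)
    --
    -- e.g. findFirst even [1,3,4,5] == (Just 4, [1,3,5])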
insertScopedImps :: FC -> Name -> [Name] -> Type -> [PArg] -> ElabD [PArg]
insertScopedImps fc f knowns ty xs =
do mapM_ (checkKnownImplicit (map fst (getArgTys ty) ++ knowns)) xs
doInsert ty xs
where
doInsert ty@(Bind n (Pi _ im@(Just i) _ _) sc) xs
| (Just arg, xs') <- findImplicit n xs,
not (toplevel_imp i)
= liftM (arg :) (doInsert sc xs')
| tcimplementation i && not (toplevel_imp i)
= liftM (pimp n (PResolveTC fc) True :) (doInsert sc xs)
| not (toplevel_imp i)
= liftM (pimp n Placeholder True :) (doInsert sc xs)
doInsert (Bind n (Pi _ _ _ _) sc) (x : xs)
= liftM (x :) (doInsert sc xs)
doInsert ty xs = return xs
-- Any implicit in the application needs to have the name of a
-- scoped implicit or a top level implicit, otherwise report an error
checkKnownImplicit ns imp@(PImp{})
| pname imp `elem` ns = return ()
| otherwise = lift $ tfail $ At fc $ UnknownImplicit (pname imp) f
checkKnownImplicit ns _ = return ()
insertImpLam ina t =
do ty <- goal
env <- get_env
let ty' = normalise (tt_ctxt ist) env ty
addLam ty' t
where
-- just one level at a time
addLam goal@(Bind n (Pi _ (Just _) _ _) sc) t =
do impn <- unique_hole n -- (sMN 0 "scoped_imp")
return (PLam emptyFC impn NoFC Placeholder t)
addLam _ t = return t
insertCoerce ina t@(PCase _ _ _) = return t
insertCoerce ina t | notImplicitable t = return t
insertCoerce ina t =
do ty <- goal
-- Check for possible coercions to get to the goal
-- and add them as 'alternatives'
env <- get_env
let ty' = normalise (tt_ctxt ist) env ty
let cs = getCoercionsTo ist ty'
let t' = case (t, cs) of
(PCoerced tm, _) -> tm
(_, []) -> t
(_, cs) -> PAlternative [] TryImplicit
(t : map (mkCoerce env t) cs)
return t'
where
mkCoerce env (PAlternative ms aty tms) n
= PAlternative ms aty (map (\t -> mkCoerce env t n) tms)
mkCoerce env t n = let fc = maybe (fileFC "Coercion") id (highestFC t) in
addImplBound ist (map fstEnv env)
(PApp fc (PRef fc [] n) [pexp (PCoerced t)])
elabRef :: ElabCtxt -> Maybe FC -> FC -> [FC] -> Name -> PTerm -> ElabD ()
elabRef ina fc' fc hls n tm =
do fty <- get_type (Var n) -- check for implicits
ctxt <- get_context
env <- get_env
a' <- insertScopedImps fc n [] (normalise ctxt env fty) []
if null a'
then erun fc $
do apply (Var n) []
hilite <- findHighlight n
solve
mapM_ (uncurry highlightSource) $
(fc, hilite) : map (\f -> (f, hilite)) hls
else elab' ina fc' (PApp fc tm [])
-- | Elaborate the arguments to a function
elabArgs :: IState -- ^ The current Idris state
-> ElabCtxt -- ^ (in an argument, guarded, in a type, in a qquote)
-> [Bool]
-> FC -- ^ Source location
-> Bool
-> Name -- ^ Name of the function being applied
-> [((Name, Name), Bool)] -- ^ (Argument Name, Hole Name, unmatchable)
-> Bool -- ^ under a 'force'
-> [PTerm] -- ^ argument
-> ElabD ()
elabArgs ist ina failed fc retry f [] force _ = return ()
elabArgs ist ina failed fc r f (((argName, holeName), unm):ns) force (t : args)
= do hs <- get_holes
if holeName `elem` hs then
do focus holeName
case t of
Placeholder -> do movelast holeName
elabArgs ist ina failed fc r f ns force args
_ -> elabArg t
else elabArgs ist ina failed fc r f ns force args
where elabArg t =
do -- solveAutos ist fn False
now_elaborating fc f argName
wrapErr f argName $ do
hs <- get_holes
tm <- get_term
-- No coercing under an explicit Force (or it can Force/Delay
-- recursively!)
let elab = if force then elab' else elabE
failed' <- -- trace (show (n, t, hs, tm)) $
-- traceWhen (not (null cs)) (show ty ++ "\n" ++ showImp True t) $
do focus holeName;
g <- goal
-- Can't pattern match on polymorphic goals
poly <- goal_polymorphic
ulog <- getUnifyLog
traceWhen ulog ("Elaborating argument " ++ show (argName, holeName, g)) $
elab (ina { e_nomatching = unm && poly }) (Just fc) t
return failed
done_elaborating_arg f argName
elabArgs ist ina failed fc r f ns force args
wrapErr f argName action =
do elabState <- get
while <- elaborating_app
let while' = map (\(x, y, z)-> (y, z)) while
(result, newState) <- case runStateT action elabState of
OK (res, newState) -> return (res, newState)
Error e -> do done_elaborating_arg f argName
lift (tfail (elaboratingArgErr while' e))
put newState
return result
elabArgs _ _ _ _ _ _ (((arg, hole), _) : _) _ [] =
fail $ "Can't elaborate these args: " ++ show arg ++ " " ++ show hole
addAutoBind :: Plicity -> Name -> ElabD ()
addAutoBind (Imp _ _ _ _ False _) n
= updateAux (\est -> est { auto_binds = n : auto_binds est })
addAutoBind _ _ = return ()
testImplicitWarning :: FC -> Name -> Type -> ElabD ()
testImplicitWarning fc n goal
| implicitable n && emode == ETyDecl
= do env <- get_env
est <- getAux
when (n `elem` auto_binds est) $
tryUnify env (lookupTyName n (tt_ctxt ist))
| otherwise = return ()
where
tryUnify env [] = return ()
tryUnify env ((nm, ty) : ts)
= do inj <- get_inj
hs <- get_holes
case unify (tt_ctxt ist) env (ty, Nothing) (goal, Nothing)
inj hs [] [] of
OK _ ->
updateAux (\est -> est { implicit_warnings =
(fc, nm) : implicit_warnings est })
_ -> tryUnify env ts
-- For every alternative, look at the function at the head. Automatically resolve
-- any nested alternatives where that function is also at the head
pruneAlt :: [PTerm] -> [PTerm]
pruneAlt xs = map prune xs
where
prune (PApp fc1 (PRef fc2 hls f) as)
= PApp fc1 (PRef fc2 hls f) (fmap (fmap (choose f)) as)
prune t = t
choose f (PAlternative ms a as)
= let as' = fmap (choose f) as
fs = filter (headIs f) as' in
case fs of
[a] -> a
_ -> PAlternative ms a as'
choose f (PApp fc f' as) = PApp fc (choose f f') (fmap (fmap (choose f)) as)
choose f t = t
headIs f (PApp _ (PRef _ _ f') _) = f == f'
headIs f (PApp _ f' _) = headIs f f'
headIs f _ = True -- keep if it's not an application
-- | Use the local elab context to work out the highlighting for a name
findHighlight :: Name -> ElabD OutputAnnotation
findHighlight n = do ctxt <- get_context
env <- get_env
case lookupBinder n env of
Just _ -> return $ AnnBoundName n False
Nothing -> case lookupTyExact n ctxt of
Just _ -> return $ AnnName n Nothing Nothing Nothing
Nothing -> lift . tfail . InternalMsg $
"Can't find name " ++ show n
-- Try again to solve auto implicits
solveAuto :: IState -> Name -> Bool -> (Name, [FailContext]) -> ElabD ()
solveAuto ist fn ambigok (n, failc)
= do hs <- get_holes
when (not (null hs)) $ do
env <- get_env
g <- goal
handleError cantsolve (when (n `elem` hs) $ do
focus n
isg <- is_guess -- if it's a guess, we're working on it recursively, so stop
when (not isg) $
proofSearch' ist True ambigok 100 True Nothing fn [] [])
(lift $ Error (addLoc failc
(CantSolveGoal g (map (\(n, _, b) -> (n, binderTy b)) env))))
return ()
where addLoc (FailContext fc f x : prev) err
= At fc (ElaboratingArg f x
(map (\(FailContext _ f' x') -> (f', x')) prev) err)
addLoc _ err = err
cantsolve (CantSolveGoal _ _) = True
cantsolve (InternalMsg _) = True
cantsolve (At _ e) = cantsolve e
cantsolve (Elaborating _ _ _ e) = cantsolve e
cantsolve (ElaboratingArg _ _ _ e) = cantsolve e
cantsolve _ = False
solveAutos :: IState -> Name -> Bool -> ElabD ()
solveAutos ist fn ambigok
= do autos <- get_autos
mapM_ (solveAuto ist fn ambigok) (map (\(n, (fc, _)) -> (n, fc)) autos)
-- Return true if the given error suggests an interface failure is
-- recoverable
tcRecoverable :: ElabMode -> Err -> Bool
tcRecoverable ERHS (CantResolve f g _) = f
tcRecoverable ETyDecl (CantResolve f g _) = f
tcRecoverable e (ElaboratingArg _ _ _ err) = tcRecoverable e err
tcRecoverable e (At _ err) = tcRecoverable e err
tcRecoverable _ _ = True
trivial' ist
= trivial (elab ist toplevel ERHS [] (sMN 0 "tac")) ist
trivialHoles' psn h ist
= trivialHoles psn h (elab ist toplevel ERHS [] (sMN 0 "tac")) ist
proofSearch' ist rec ambigok depth prv top n psns hints
= do unifyProblems
proofSearch rec prv ambigok (not prv) depth
(elab ist toplevel ERHS [] (sMN 0 "tac")) top n psns hints ist
resolveTC' di mv depth tm n ist
= resolveTC di mv depth tm n (elab ist toplevel ERHS [] (sMN 0 "tac")) ist
collectDeferred :: Maybe Name -> [Name] -> Context ->
Term -> State [(Name, (Int, Maybe Name, Type, [Name]))] Term
collectDeferred top casenames ctxt tm = cd [] tm
where
cd env (Bind n (GHole i psns t) app) =
do ds <- get
t' <- collectDeferred top casenames ctxt t
when (not (n `elem` map fst ds)) $ put (ds ++ [(n, (i, top, t', psns))])
cd env app
cd env (Bind n b t)
= do b' <- cdb b
t' <- cd ((n, b) : env) t
return (Bind n b' t')
where
cdb (Let t v) = liftM2 Let (cd env t) (cd env v)
cdb (Guess t v) = liftM2 Guess (cd env t) (cd env v)
cdb b = do ty' <- cd env (binderTy b)
return (b { binderTy = ty' })
cd env (App s f a) = liftM2 (App s) (cd env f)
(cd env a)
cd env t = return t
case_ :: Bool -> Bool -> IState -> Name -> PTerm -> ElabD ()
case_ ind autoSolve ist fn tm = do
attack
tyn <- getNameFrom (sMN 0 "ity")
claim tyn RType
valn <- getNameFrom (sMN 0 "ival")
claim valn (Var tyn)
letn <- getNameFrom (sMN 0 "irule")
letbind letn (Var tyn) (Var valn)
focus valn
elab ist toplevel ERHS [] (sMN 0 "tac") tm
env <- get_env
let (Just binding) = lookupBinder letn env
let val = binderVal binding
if ind then induction (forget val)
else casetac (forget val)
when autoSolve solveAll
-- | Compute the appropriate name for a top-level metavariable
metavarName :: [String] -> Name -> Name
metavarName _ n@(NS _ _) = n
metavarName (ns@(_:_)) n = sNS n ns
metavarName _ n = n
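    -- Illustrative examples (not from the original source): a bare name such
    -- as sUN "foo", built with namespace ["Main"], becomes the qualified
    -- sNS (sUN "foo") ["Main"]; a name that is already namespaced (NS) and a
    -- name built with an empty namespace list are both returned unchanged.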
runElabAction :: ElabInfo -> IState -> FC -> Env -> Term -> [String] -> ElabD Term
runElabAction info ist fc env tm ns = do tm' <- eval tm
runTacTm tm'
where
eval tm = do ctxt <- get_context
return $ normaliseAll ctxt env (finalise tm)
returnUnit = return $ P (DCon 0 0 False) unitCon (P (TCon 0 0) unitTy Erased)
patvars :: [(Name, Term)] -> Term -> ([(Name, Term)], Term)
patvars ns (Bind n (PVar _ t) sc) = patvars ((n, t) : ns) (instantiate (P Bound n t) sc)
patvars ns tm = (ns, tm)
pullVars :: (Term, Term) -> ([(Name, Term)], Term, Term)
pullVars (lhs, rhs) = (fst (patvars [] lhs), snd (patvars [] lhs), snd (patvars [] rhs)) -- TODO alpha-convert rhs
requireError :: Err -> ElabD a -> ElabD ()
requireError orErr elab =
do state <- get
case runStateT elab state of
OK (_, state') -> lift (tfail orErr)
Error e -> return ()
-- create a fake TT term for the LHS of an impossible case
fakeTT :: Raw -> Term
fakeTT (Var n) =
case lookupNameDef n (tt_ctxt ist) of
[(n', TyDecl nt _)] -> P nt n' Erased
_ -> P Ref n Erased
fakeTT (RBind n b body) = Bind n (fmap fakeTT b) (fakeTT body)
fakeTT (RApp f a) = App Complete (fakeTT f) (fakeTT a)
fakeTT RType = TType (UVar [] (-1))
fakeTT (RUType u) = UType u
fakeTT (RConstant c) = Constant c
defineFunction :: RFunDefn Raw -> ElabD ()
defineFunction (RDefineFun n clauses) =
do ctxt <- get_context
ty <- maybe (fail "no type decl") return $ lookupTyExact n ctxt
let info = CaseInfo True True False -- TODO document and figure out
clauses' <- forM clauses (\case
RMkFunClause lhs rhs ->
do (lhs', lty) <- lift $ check ctxt [] lhs
(rhs', rty) <- lift $ check ctxt [] rhs
lift $ converts ctxt [] lty rty
return $ Right (lhs', rhs')
RMkImpossibleClause lhs ->
do requireError (Msg "Not an impossible case") . lift $
check ctxt [] lhs
return $ Left (fakeTT lhs))
let clauses'' = map (\case Right c -> pullVars c
Left lhs -> let (ns, lhs') = patvars [] lhs
in (ns, lhs', Impossible))
clauses'
let clauses''' = map (\(ns, lhs, rhs) -> (map fst ns, lhs, rhs)) clauses''
let argtys = map (\x -> (x, isCanonical x ctxt))
(map snd (getArgTys (normalise ctxt [] ty)))
ctxt'<- lift $
addCasedef n (const [])
info False (STerm Erased)
True False -- TODO what are these?
argtys [] -- TODO inaccessible types
clauses'
clauses'''
clauses'''
ty
ctxt
set_context ctxt'
updateAux $ \e -> e { new_tyDecls = RClausesInstrs n clauses'' : new_tyDecls e}
return ()
checkClosed :: Raw -> Elab' aux (Term, Type)
checkClosed tm = do ctxt <- get_context
(val, ty) <- lift $ check ctxt [] tm
return $! (finalise val, finalise ty)
-- | Add another argument to a Pi
mkPi :: RFunArg -> Raw -> Raw
mkPi arg rTy = RBind (argName arg) (Pi RigW Nothing (argTy arg) (RUType AllTypes)) rTy
mustBeType ctxt tm ty =
case normaliseAll ctxt [] (finalise ty) of
UType _ -> return ()
TType _ -> return ()
ty' -> lift . tfail . InternalMsg $
show tm ++ " is not a type: it's " ++ show ty'
mustNotBeDefined ctxt n =
case lookupDefExact n ctxt of
Just _ -> lift . tfail . InternalMsg $
show n ++ " is already defined."
Nothing -> return ()
-- | Prepare a constructor to be added to a datatype being defined here
prepareConstructor :: Name -> RConstructorDefn -> ElabD (Name, [PArg], Type)
prepareConstructor tyn (RConstructor cn args resTy) =
do ctxt <- get_context
-- ensure the constructor name is not qualified, and
-- construct a qualified one
notQualified cn
let qcn = qualify cn
-- ensure that the constructor name is not defined already
mustNotBeDefined ctxt qcn
-- construct the actual type for the constructor
let cty = foldr mkPi resTy args
(checkedTy, ctyTy) <- lift $ check ctxt [] cty
mustBeType ctxt checkedTy ctyTy
-- ensure that the constructor builds the right family
case unApply (getRetTy (normaliseAll ctxt [] (finalise checkedTy))) of
(P _ n _, _) | n == tyn -> return ()
t -> lift . tfail . Msg $ "The constructor " ++ show cn ++
" doesn't construct " ++ show tyn ++
" (return type is " ++ show t ++ ")"
-- add temporary type declaration for constructor (so it can
-- occur in later constructor types)
set_context (addTyDecl qcn (DCon 0 0 False) checkedTy ctxt)
-- Save the implicits for high-level Idris
let impls = map rFunArgToPArg args
return (qcn, impls, checkedTy)
where
notQualified (NS _ _) = lift . tfail . Msg $ "Constructor names may not be qualified"
notQualified _ = return ()
qualify n = case tyn of
(NS _ ns) -> NS n ns
_ -> n
getRetTy :: Type -> Type
getRetTy (Bind _ (Pi _ _ _ _) sc) = getRetTy sc
getRetTy ty = ty
elabScriptStuck :: Term -> ElabD a
elabScriptStuck x = lift . tfail $ ElabScriptStuck x
-- Should be dependent
tacTmArgs :: Int -> Term -> [Term] -> ElabD [Term]
tacTmArgs l t args | length args == l = return args
| otherwise = elabScriptStuck t -- Probably should be an argument size mismatch internal error
-- | Do a step in the reflected elaborator monad. The input is the
-- step, the output is the (reflected) term returned.
runTacTm :: Term -> ElabD Term
runTacTm tac@(unApply -> (P _ n _, args))
| n == tacN "Prim__Solve"
= do ~[] <- tacTmArgs 0 tac args -- patterns are irrefutable because `tacTmArgs` returns lists of exactly the size given to it as first argument
solve
returnUnit
| n == tacN "Prim__Goal"
= do ~[] <- tacTmArgs 0 tac args
hs <- get_holes
case hs of
(h : _) -> do t <- goal
fmap fst . checkClosed $
rawPair (Var (reflm "TTName"), Var (reflm "TT"))
(reflectName h, reflect t)
[] -> lift . tfail . Msg $
"Elaboration is complete. There are no goals."
| n == tacN "Prim__Holes"
= do ~[] <- tacTmArgs 0 tac args
hs <- get_holes
fmap fst . checkClosed $
mkList (Var $ reflm "TTName") (map reflectName hs)
| n == tacN "Prim__Guess"
= do ~[] <- tacTmArgs 0 tac args
g <- get_guess
fmap fst . checkClosed $ reflect g
| n == tacN "Prim__LookupTy"
= do ~[name] <- tacTmArgs 1 tac args
n' <- reifyTTName name
ctxt <- get_context
let getNameTypeAndType = \case Function ty _ -> (Ref, ty)
TyDecl nt ty -> (nt, ty)
Operator ty _ _ -> (Ref, ty)
CaseOp _ ty _ _ _ _ -> (Ref, ty)
-- Idris tuples nest to the right
reflectTriple (x, y, z) =
raw_apply (Var pairCon) [ Var (reflm "TTName")
, raw_apply (Var pairTy) [Var (reflm "NameType"), Var (reflm "TT")]
, x
, raw_apply (Var pairCon) [ Var (reflm "NameType"), Var (reflm "TT")
, y, z]]
let defs = [ reflectTriple (reflectName n, reflectNameType nt, reflect ty)
| (n, def) <- lookupNameDef n' ctxt
, let (nt, ty) = getNameTypeAndType def ]
fmap fst . checkClosed $
rawList (raw_apply (Var pairTy) [ Var (reflm "TTName")
, raw_apply (Var pairTy) [ Var (reflm "NameType")
, Var (reflm "TT")]])
defs
| n == tacN "Prim__LookupDatatype"
= do ~[name] <- tacTmArgs 1 tac args
n' <- reifyTTName name
datatypes <- get_datatypes
ctxt <- get_context
fmap fst . checkClosed $
rawList (Var (tacN "Datatype"))
(map reflectDatatype (buildDatatypes ist n'))
| n == tacN "Prim__LookupFunDefn"
= do ~[name] <- tacTmArgs 1 tac args
n' <- reifyTTName name
fmap fst . checkClosed $
rawList (RApp (Var $ tacN "FunDefn") (Var $ reflm "TT"))
(map reflectFunDefn (buildFunDefns ist n'))
| n == tacN "Prim__LookupArgs"
= do ~[name] <- tacTmArgs 1 tac args
n' <- reifyTTName name
let listTy = Var (sNS (sUN "List") ["List", "Prelude"])
listFunArg = RApp listTy (Var (tacN "FunArg"))
-- Idris tuples nest to the right
let reflectTriple (x, y, z) =
raw_apply (Var pairCon) [ Var (reflm "TTName")
, raw_apply (Var pairTy) [listFunArg, Var (reflm "Raw")]
, x
, raw_apply (Var pairCon) [listFunArg, Var (reflm "Raw")
, y, z]]
let out =
[ reflectTriple (reflectName fn, reflectList (Var (tacN "FunArg")) (map reflectArg args), reflectRaw res)
| (fn, pargs) <- lookupCtxtName n' (idris_implicits ist)
, (args, res) <- getArgs pargs . forget <$>
maybeToList (lookupTyExact fn (tt_ctxt ist))
]
fmap fst . checkClosed $
rawList (raw_apply (Var pairTy) [Var (reflm "TTName")
, raw_apply (Var pairTy) [ RApp listTy
(Var (tacN "FunArg"))
, Var (reflm "Raw")]])
out
| n == tacN "Prim__SourceLocation"
= do ~[] <- tacTmArgs 0 tac args
fmap fst . checkClosed $
reflectFC fc
| n == tacN "Prim__Namespace"
= do ~[] <- tacTmArgs 0 tac args
fmap fst . checkClosed $
rawList (RConstant StrType) (map (RConstant . Str) ns)
| n == tacN "Prim__Env"
= do ~[] <- tacTmArgs 0 tac args
env <- get_env
fmap fst . checkClosed $ reflectEnv env
| n == tacN "Prim__Fail"
= do ~[_a, errs] <- tacTmArgs 2 tac args
errs' <- eval errs
parts <- reifyReportParts errs'
lift . tfail $ ReflectionError [parts] (Msg "")
| n == tacN "Prim__PureElab"
= do ~[_a, tm] <- tacTmArgs 2 tac args
return tm
| n == tacN "Prim__BindElab"
= do ~[_a, _b, first, andThen] <- tacTmArgs 4 tac args
first' <- eval first
res <- eval =<< runTacTm first'
next <- eval (App Complete andThen res)
runTacTm next
| n == tacN "Prim__Try"
= do ~[_a, first, alt] <- tacTmArgs 3 tac args
first' <- eval first
alt' <- eval alt
try' (runTacTm first') (runTacTm alt') True
| n == tacN "Prim__Fill"
= do ~[raw] <- tacTmArgs 1 tac args
raw' <- reifyRaw =<< eval raw
apply raw' []
returnUnit
| n == tacN "Prim__Apply" || n == tacN "Prim__MatchApply"
= do ~[raw, argSpec] <- tacTmArgs 2 tac args
raw' <- reifyRaw =<< eval raw
argSpec' <- map (\b -> (b, 0)) <$> reifyList reifyBool argSpec
let op = if n == tacN "Prim__Apply"
then apply
else match_apply
ns <- op raw' argSpec'
fmap fst . checkClosed $
rawList (rawPairTy (Var $ reflm "TTName") (Var $ reflm "TTName"))
[ rawPair (Var $ reflm "TTName", Var $ reflm "TTName")
(reflectName n1, reflectName n2)
| (n1, n2) <- ns
]
| n == tacN "Prim__Gensym"
= do ~[hint] <- tacTmArgs 1 tac args
hintStr <- eval hint
case hintStr of
Constant (Str h) -> do
n <- getNameFrom (sMN 0 h)
fmap fst $ get_type_val (reflectName n)
_ -> fail "no hint"
| n == tacN "Prim__Claim"
= do ~[n, ty] <- tacTmArgs 2 tac args
n' <- reifyTTName n
ty' <- reifyRaw ty
claim n' ty'
returnUnit
| n == tacN "Prim__Check"
= do ~[env', raw] <- tacTmArgs 2 tac args
env <- reifyEnv env'
raw' <- reifyRaw =<< eval raw
ctxt <- get_context
(tm, ty) <- lift $ check ctxt env raw'
fmap fst . checkClosed $
rawPair (Var (reflm "TT"), Var (reflm "TT"))
(reflect tm, reflect ty)
| n == tacN "Prim__Attack"
= do ~[] <- tacTmArgs 0 tac args
attack
returnUnit
| n == tacN "Prim__Rewrite"
= do ~[rule] <- tacTmArgs 1 tac args
r <- reifyRaw rule
rewrite r
returnUnit
| n == tacN "Prim__Focus"
= do ~[what] <- tacTmArgs 1 tac args
n' <- reifyTTName what
hs <- get_holes
if elem n' hs
then focus n' >> returnUnit
else lift . tfail . Msg $ "The name " ++ show n' ++ " does not denote a hole"
| n == tacN "Prim__Unfocus"
= do ~[what] <- tacTmArgs 1 tac args
n' <- reifyTTName what
movelast n'
returnUnit
| n == tacN "Prim__Intro"
= do ~[mn] <- tacTmArgs 1 tac args
n <- case fromTTMaybe mn of
Nothing -> return Nothing
Just name -> fmap Just $ reifyTTName name
intro n
returnUnit
| n == tacN "Prim__Forall"
= do ~[n, ty] <- tacTmArgs 2 tac args
n' <- reifyTTName n
ty' <- reifyRaw ty
forall n' RigW Nothing ty'
returnUnit
| n == tacN "Prim__PatVar"
= do ~[n] <- tacTmArgs 1 tac args
n' <- reifyTTName n
patvar' n'
returnUnit
| n == tacN "Prim__PatBind"
= do ~[n] <- tacTmArgs 1 tac args
n' <- reifyTTName n
patbind n' RigW
returnUnit
| n == tacN "Prim__LetBind"
= do ~[n, ty, tm] <- tacTmArgs 3 tac args
n' <- reifyTTName n
ty' <- reifyRaw ty
tm' <- reifyRaw tm
letbind n' ty' tm'
returnUnit
| n == tacN "Prim__Compute"
= do ~[] <- tacTmArgs 0 tac args; compute ; returnUnit
| n == tacN "Prim__Normalise"
= do ~[env, tm] <- tacTmArgs 2 tac args
env' <- reifyEnv env
tm' <- reifyTT tm
ctxt <- get_context
let out = normaliseAll ctxt env' (finalise tm')
fmap fst . checkClosed $ reflect out
| n == tacN "Prim__Whnf"
= do ~[tm] <- tacTmArgs 1 tac args
tm' <- reifyTT tm
ctxt <- get_context
fmap fst . checkClosed . reflect $ whnf ctxt [] tm'
| n == tacN "Prim__Converts"
= do ~[env, tm1, tm2] <- tacTmArgs 3 tac args
env' <- reifyEnv env
tm1' <- reifyTT tm1
tm2' <- reifyTT tm2
ctxt <- get_context
lift $ converts ctxt env' tm1' tm2'
returnUnit
| n == tacN "Prim__DeclareType"
= do ~[decl] <- tacTmArgs 1 tac args
(RDeclare n args res) <- reifyTyDecl decl
ctxt <- get_context
let rty = foldr mkPi res args
(checked, ty') <- lift $ check ctxt [] rty
mustBeType ctxt checked ty'
mustNotBeDefined ctxt n
let decl = TyDecl Ref checked
ctxt' = addCtxtDef n decl ctxt
set_context ctxt'
updateAux $ \e -> e { new_tyDecls = (RTyDeclInstrs n fc (map rFunArgToPArg args) checked) :
new_tyDecls e }
returnUnit
| n == tacN "Prim__DefineFunction"
= do ~[decl] <- tacTmArgs 1 tac args
defn <- reifyFunDefn decl
defineFunction defn
returnUnit
| n == tacN "Prim__DeclareDatatype"
= do ~[decl] <- tacTmArgs 1 tac args
RDeclare n args resTy <- reifyTyDecl decl
ctxt <- get_context
let tcTy = foldr mkPi resTy args
(checked, ty') <- lift $ check ctxt [] tcTy
mustBeType ctxt checked ty'
mustNotBeDefined ctxt n
let ctxt' = addTyDecl n (TCon 0 0) checked ctxt
set_context ctxt'
updateAux $ \e -> e { new_tyDecls = RDatatypeDeclInstrs n (map rFunArgToPArg args) : new_tyDecls e }
returnUnit
| n == tacN "Prim__DefineDatatype"
= do ~[defn] <- tacTmArgs 1 tac args
RDefineDatatype n ctors <- reifyRDataDefn defn
ctxt <- get_context
tyconTy <- case lookupTyExact n ctxt of
Just t -> return t
Nothing -> lift . tfail . Msg $ "Type not previously declared"
datatypes <- get_datatypes
case lookupCtxtName n datatypes of
[] -> return ()
_ -> lift . tfail . Msg $ show n ++ " is already defined as a datatype."
-- Prepare the constructors
ctors' <- mapM (prepareConstructor n) ctors
ttag <- do ES (ps, aux) str prev <- get
let i = global_nextname ps
put $ ES (ps { global_nextname = global_nextname ps + 1 },
aux)
str
prev
return i
let ctxt' = addDatatype (Data n ttag tyconTy False (map (\(cn, _, cty) -> (cn, cty)) ctors')) ctxt
set_context ctxt'
-- the rest happens in a bit
updateAux $ \e -> e { new_tyDecls = RDatatypeDefnInstrs n tyconTy ctors' : new_tyDecls e }
returnUnit
| n == tacN "Prim__AddImplementation"
= do ~[cls, impl] <- tacTmArgs 2 tac args
interfaceName <- reifyTTName cls
implName <- reifyTTName impl
updateAux $ \e -> e { new_tyDecls = RAddImplementation interfaceName implName :
new_tyDecls e }
returnUnit
| n == tacN "Prim__IsTCName"
= do ~[n] <- tacTmArgs 1 tac args
n' <- reifyTTName n
case lookupCtxtExact n' (idris_interfaces ist) of
Just _ -> fmap fst . checkClosed $ Var (sNS (sUN "True") ["Bool", "Prelude"])
Nothing -> fmap fst . checkClosed $ Var (sNS (sUN "False") ["Bool", "Prelude"])
| n == tacN "Prim__ResolveTC"
= do ~[fn] <- tacTmArgs 1 tac args
g <- goal
fn <- reifyTTName fn
resolveTC' False True 100 g fn ist
returnUnit
| n == tacN "Prim__Search"
= do ~[depth, hints] <- tacTmArgs 2 tac args
d <- eval depth
hints' <- eval hints
case (d, unList hints') of
(Constant (I i), Just hs) ->
do actualHints <- mapM reifyTTName hs
unifyProblems
let psElab = elab ist toplevel ERHS [] (sMN 0 "tac")
proofSearch True True False False i psElab Nothing (sMN 0 "search ") [] actualHints ist
returnUnit
(Constant (I _), Nothing ) ->
lift . tfail . InternalMsg $ "Not a list: " ++ show hints'
(_, _) -> lift . tfail . InternalMsg $ "Can't reify int " ++ show d
| n == tacN "Prim__RecursiveElab"
= do ~[goal, script] <- tacTmArgs 2 tac args
goal' <- reifyRaw goal
ctxt <- get_context
script <- eval script
(goalTT, goalTy) <- lift $ check ctxt [] goal'
lift $ isType ctxt [] goalTy
recH <- getNameFrom (sMN 0 "recElabHole")
aux <- getAux
datatypes <- get_datatypes
env <- get_env
g_next <- get_global_nextname
(ctxt', ES (p, aux') _ _) <-
do (ES (current_p, _) _ _) <- get
lift $ runElab aux
(do runElabAction info ist fc [] script ns
ctxt' <- get_context
return ctxt')
((newProof recH (constraintNS info) ctxt datatypes g_next goalTT)
{ nextname = nextname current_p })
set_context ctxt'
let tm_out = getProofTerm (pterm p)
do (ES (prf, _) s e) <- get
let p' = prf { nextname = nextname p
, global_nextname = global_nextname p
}
put (ES (p', aux') s e)
env' <- get_env
(tm, ty, _) <- lift $ recheck (constraintNS info) ctxt' env (forget tm_out) tm_out
let (tm', ty') = (reflect tm, reflect ty)
fmap fst . checkClosed $
rawPair (Var $ reflm "TT", Var $ reflm "TT")
(tm', ty')
| n == tacN "Prim__Metavar"
= do ~[n] <- tacTmArgs 1 tac args
n' <- reifyTTName n
ctxt <- get_context
ptm <- get_term
-- See documentation above in the elab case for PMetavar
let unique_used = getUniqueUsed ctxt ptm
let mvn = metavarName ns n'
attack
defer unique_used mvn
solve
returnUnit
| n == tacN "Prim__Fixity"
= do ~[op'] <- tacTmArgs 1 tac args
opTm <- eval op'
case opTm of
Constant (Str op) ->
let opChars = ":!#$%&*+./<=>?@\\^|-~"
invalidOperators = [":", "=>", "->", "<-", "=", "?=", "|", "**", "==>", "\\", "%", "~", "?", "!"]
fixities = idris_infixes ist
in if not (all (flip elem opChars) op) || elem op invalidOperators
then lift . tfail . Msg $ "'" ++ op ++ "' is not a valid operator name."
else case nub [f | Fix f someOp <- fixities, someOp == op] of
[] -> lift . tfail . Msg $ "No fixity found for operator '" ++ op ++ "'."
[f] -> fmap fst . checkClosed $ reflectFixity f
many -> lift . tfail . InternalMsg $ "Ambiguous fixity for '" ++ op ++ "'! Found " ++ show many
_ -> lift . tfail . Msg $ "Not a constant string for an operator name: " ++ show opTm
| n == tacN "Prim__Debug"
= do ~[ty, msg] <- tacTmArgs 2 tac args
msg' <- eval msg
parts <- reifyReportParts msg
debugElaborator parts
runTacTm x = elabScriptStuck x
-- Running tactics directly
-- if a tactic adds unification problems, return an error
runTac :: Bool -> IState -> Maybe FC -> Name -> PTactic -> ElabD ()
runTac autoSolve ist perhapsFC fn tac
= do env <- get_env
g <- goal
let tac' = fmap (addImplBound ist (map fstEnv env)) tac
if autoSolve
then runT tac'
else no_errors (runT tac')
(Just (CantSolveGoal g (map (\(n, _, b) -> (n, binderTy b)) env)))
where
runT (Intro []) = do g <- goal
attack; intro (bname g)
where
bname (Bind n _ _) = Just n
bname _ = Nothing
runT (Intro xs) = mapM_ (\x -> do attack; intro (Just x)) xs
runT Intros = do g <- goal
attack;
intro (bname g)
try' (runT Intros)
(return ()) True
where
bname (Bind n _ _) = Just n
bname _ = Nothing
runT (Exact tm) = do elab ist toplevel ERHS [] (sMN 0 "tac") tm
when autoSolve solveAll
runT (MatchRefine fn)
= do fnimps <-
case lookupCtxtName fn (idris_implicits ist) of
[] -> do a <- envArgs fn
return [(fn, a)]
ns -> return (map (\ (n, a) -> (n, map (const True) a)) ns)
let tacs = map (\ (fn', imps) ->
(match_apply (Var fn') (map (\x -> (x, 0)) imps),
fn')) fnimps
tryAll tacs
when autoSolve solveAll
where envArgs n = do e <- get_env
case lookupBinder n e of
Just t -> return $ map (const False)
(getArgTys (binderTy t))
_ -> return []
runT (Refine fn [])
= do fnimps <-
case lookupCtxtName fn (idris_implicits ist) of
[] -> do a <- envArgs fn
return [(fn, a)]
ns -> return (map (\ (n, a) -> (n, map isImp a)) ns)
let tacs = map (\ (fn', imps) ->
(apply (Var fn') (map (\x -> (x, 0)) imps),
fn')) fnimps
tryAll tacs
when autoSolve solveAll
where isImp (PImp _ _ _ _ _) = True
isImp _ = False
envArgs n = do e <- get_env
case lookupBinder n e of
Just t -> return $ map (const False)
(getArgTys (binderTy t))
_ -> return []
runT (Refine fn imps) = do ns <- apply (Var fn) (map (\x -> (x,0)) imps)
when autoSolve solveAll
runT DoUnify = do unify_all
when autoSolve solveAll
runT (Claim n tm) = do tmHole <- getNameFrom (sMN 0 "newGoal")
claim tmHole RType
claim n (Var tmHole)
focus tmHole
elab ist toplevel ERHS [] (sMN 0 "tac") tm
focus n
runT (Equiv tm) -- let bind tm, then
= do attack
tyn <- getNameFrom (sMN 0 "ety")
claim tyn RType
valn <- getNameFrom (sMN 0 "eqval")
claim valn (Var tyn)
letn <- getNameFrom (sMN 0 "equiv_val")
letbind letn (Var tyn) (Var valn)
focus tyn
elab ist toplevel ERHS [] (sMN 0 "tac") tm
focus valn
when autoSolve solveAll
runT (Rewrite tm) -- to elaborate tm, let bind it, then rewrite by that
= do attack; -- (h:_) <- get_holes
tyn <- getNameFrom (sMN 0 "rty")
-- start_unify h
claim tyn RType
valn <- getNameFrom (sMN 0 "rval")
claim valn (Var tyn)
letn <- getNameFrom (sMN 0 "rewrite_rule")
letbind letn (Var tyn) (Var valn)
focus valn
elab ist toplevel ERHS [] (sMN 0 "tac") tm
rewrite (Var letn)
when autoSolve solveAll
runT (Induction tm) -- let bind tm, similar to the others
= case_ True autoSolve ist fn tm
runT (CaseTac tm)
= case_ False autoSolve ist fn tm
runT (LetTac n tm)
= do attack
tyn <- getNameFrom (sMN 0 "letty")
claim tyn RType
valn <- getNameFrom (sMN 0 "letval")
claim valn (Var tyn)
letn <- unique_hole n
letbind letn (Var tyn) (Var valn)
focus valn
elab ist toplevel ERHS [] (sMN 0 "tac") tm
when autoSolve solveAll
runT (LetTacTy n ty tm)
= do attack
tyn <- getNameFrom (sMN 0 "letty")
claim tyn RType
valn <- getNameFrom (sMN 0 "letval")
claim valn (Var tyn)
letn <- unique_hole n
letbind letn (Var tyn) (Var valn)
focus tyn
elab ist toplevel ERHS [] (sMN 0 "tac") ty
focus valn
elab ist toplevel ERHS [] (sMN 0 "tac") tm
when autoSolve solveAll
runT Compute = compute
runT Trivial = do trivial' ist; when autoSolve solveAll
runT TCImplementation = runT (Exact (PResolveTC emptyFC))
runT (ProofSearch rec prover depth top psns hints)
= do proofSearch' ist rec False depth prover top fn psns hints
when autoSolve solveAll
runT (Focus n) = focus n
runT Unfocus = do hs <- get_holes
case hs of
[] -> return ()
(h : _) -> movelast h
runT Solve = solve
runT (Try l r) = do try' (runT l) (runT r) True
runT (TSeq l r) = do runT l; runT r
runT (ApplyTactic tm) = do tenv <- get_env -- store the environment
tgoal <- goal -- store the goal
attack -- let f : List (TTName, Binder TT) -> TT -> Tactic = tm in ...
script <- getNameFrom (sMN 0 "script")
claim script scriptTy
scriptvar <- getNameFrom (sMN 0 "scriptvar" )
letbind scriptvar scriptTy (Var script)
focus script
elab ist toplevel ERHS [] (sMN 0 "tac") tm
(script', _) <- get_type_val (Var scriptvar)
-- now that we have the script apply
-- it to the reflected goal and context
restac <- getNameFrom (sMN 0 "restac")
claim restac tacticTy
focus restac
fill (raw_apply (forget script')
[reflectEnv tenv, reflect tgoal])
restac' <- get_guess
solve
-- normalise the result in order to
-- reify it
ctxt <- get_context
env <- get_env
let tactic = normalise ctxt env restac'
runReflected tactic
where tacticTy = Var (reflm "Tactic")
listTy = Var (sNS (sUN "List") ["List", "Prelude"])
scriptTy = (RBind (sMN 0 "__pi_arg")
(Pi RigW Nothing (RApp listTy envTupleType) RType)
(RBind (sMN 1 "__pi_arg")
(Pi RigW Nothing (Var $ reflm "TT") RType) tacticTy))
runT (ByReflection tm) -- run the reflection function 'tm' on the
-- goal, then apply the resulting reflected Tactic
= do tgoal <- goal
attack
script <- getNameFrom (sMN 0 "script")
claim script scriptTy
scriptvar <- getNameFrom (sMN 0 "scriptvar" )
letbind scriptvar scriptTy (Var script)
focus script
ptm <- get_term
env <- get_env
let denv = map (\(n, _, b) -> (n, binderTy b)) env
elab ist toplevel ERHS [] (sMN 0 "tac")
(PApp emptyFC tm [pexp (delabTy' ist [] denv tgoal True True True)])
(script', _) <- get_type_val (Var scriptvar)
-- now that we have the script apply
-- it to the reflected goal
restac <- getNameFrom (sMN 0 "restac")
claim restac tacticTy
focus restac
fill (forget script')
restac' <- get_guess
solve
-- normalise the result in order to
-- reify it
ctxt <- get_context
env <- get_env
let tactic = normalise ctxt env restac'
runReflected tactic
where tacticTy = Var (reflm "Tactic")
scriptTy = tacticTy
runT (Reflect v) = do attack -- let x = reflect v in ...
tyn <- getNameFrom (sMN 0 "letty")
claim tyn RType
valn <- getNameFrom (sMN 0 "letval")
claim valn (Var tyn)
letn <- getNameFrom (sMN 0 "letvar")
letbind letn (Var tyn) (Var valn)
focus valn
elab ist toplevel ERHS [] (sMN 0 "tac") v
(value, _) <- get_type_val (Var letn)
ctxt <- get_context
env <- get_env
let value' = normalise ctxt env value
runTac autoSolve ist perhapsFC fn (Exact $ PQuote (reflect value'))
runT (Fill v) = do attack -- let x = fill x in ...
tyn <- getNameFrom (sMN 0 "letty")
claim tyn RType
valn <- getNameFrom (sMN 0 "letval")
claim valn (Var tyn)
letn <- getNameFrom (sMN 0 "letvar")
letbind letn (Var tyn) (Var valn)
focus valn
elab ist toplevel ERHS [] (sMN 0 "tac") v
(value, _) <- get_type_val (Var letn)
ctxt <- get_context
env <- get_env
let value' = normalise ctxt env value
rawValue <- reifyRaw value'
runTac autoSolve ist perhapsFC fn (Exact $ PQuote rawValue)
runT (GoalType n tac) = do g <- goal
case unApply g of
(P _ n' _, _) ->
if nsroot n' == sUN n
then runT tac
else fail "Wrong goal type"
_ -> fail "Wrong goal type"
runT ProofState = do g <- goal
return ()
runT Skip = return ()
runT (TFail err) = lift . tfail $ ReflectionError [err] (Msg "")
runT SourceFC =
case perhapsFC of
Nothing -> lift . tfail $ Msg "There is no source location available."
Just fc ->
do fill $ reflectFC fc
solve
runT Qed = lift . tfail $ Msg "The qed command is only valid in the interactive prover"
runT x = fail $ "Not implemented " ++ show x
runReflected t = do t' <- reify ist t
runTac autoSolve ist perhapsFC fn t'
elaboratingArgErr :: [(Name, Name)] -> Err -> Err
elaboratingArgErr [] err = err
elaboratingArgErr ((f,x):during) err = fromMaybe err (rewrite err)
where rewrite (ElaboratingArg _ _ _ _) = Nothing
rewrite (ProofSearchFail e) = fmap ProofSearchFail (rewrite e)
rewrite (At fc e) = fmap (At fc) (rewrite e)
rewrite err = Just (ElaboratingArg f x during err)
withErrorReflection :: Idris a -> Idris a
withErrorReflection x = idrisCatch x (\ e -> handle e >>= ierror)
where handle :: Err -> Idris Err
handle e@(ReflectionError _ _) = do logElab 3 "Skipping reflection of error reflection result"
return e -- Don't do meta-reflection of errors
handle e@(ReflectionFailed _ _) = do logElab 3 "Skipping reflection of reflection failure"
return e
-- At and Elaborating are just plumbing - error reflection shouldn't rewrite them
handle e@(At fc err) = do logElab 3 "Reflecting body of At"
err' <- handle err
return (At fc err')
handle e@(Elaborating what n ty err) = do logElab 3 "Reflecting body of Elaborating"
err' <- handle err
return (Elaborating what n ty err')
handle e@(ElaboratingArg f a prev err) = do logElab 3 "Reflecting body of ElaboratingArg"
hs <- getFnHandlers f a
err' <- if null hs
then handle err
else applyHandlers err hs
return (ElaboratingArg f a prev err')
-- ProofSearchFail is an internal detail - so don't expose it
handle (ProofSearchFail e) = handle e
-- TODO: argument-specific error handlers go here for ElaboratingArg
handle e = do ist <- getIState
logElab 2 "Starting error reflection"
logElab 5 (show e)
let handlers = idris_errorhandlers ist
applyHandlers e handlers
getFnHandlers :: Name -> Name -> Idris [Name]
getFnHandlers f arg = do ist <- getIState
let funHandlers = maybe M.empty id .
lookupCtxtExact f .
idris_function_errorhandlers $ ist
return . maybe [] S.toList . M.lookup arg $ funHandlers
applyHandlers e handlers =
do ist <- getIState
let err = fmap (errReverse ist) e
logElab 3 $ "Using reflection handlers " ++
concat (intersperse ", " (map show handlers))
let reports = map (\n -> RApp (Var n) (reflectErr err)) handlers
-- Typecheck error handlers - if this fails, then something else was wrong earlier!
handlers <- case mapM (check (tt_ctxt ist) []) reports of
Error e -> ierror $ ReflectionFailed "Type error while constructing reflected error" e
OK hs -> return hs
-- Normalize error handler terms to produce the new messages
-- Need to use 'normaliseAll' since we have to reduce private
-- names in error handlers too
ctxt <- getContext
let results = map (normaliseAll ctxt []) (map fst handlers)
logElab 3 $ "New error message info: " ++ concat (intersperse " and " (map show results))
                        -- For each handler term output, either discard it if it is Nothing or reify it to the Haskell equivalent
let errorpartsTT = mapMaybe unList (mapMaybe fromTTMaybe results)
errorparts <- case mapM (mapM reifyReportPart) errorpartsTT of
Left err -> ierror err
Right ok -> return ok
return $ case errorparts of
[] -> e
parts -> ReflectionError errorparts e
solveAll = try (do solve; solveAll) (return ())
-- | Do the left-over work after creating declarations in reflected
-- elaborator scripts
processTacticDecls :: ElabInfo -> [RDeclInstructions] -> Idris ()
processTacticDecls info steps =
-- The order of steps is important: type declarations might
-- establish metavars that later function bodies resolve.
forM_ (reverse steps) $ \case
RTyDeclInstrs n fc impls ty ->
do logElab 3 $ "Declaration from tactics: " ++ show n ++ " : " ++ show ty
logElab 3 $ " It has impls " ++ show impls
updateIState $ \i -> i { idris_implicits =
addDef n impls (idris_implicits i) }
addIBC (IBCImp n)
ds <- checkDef info fc (\_ e -> e) True [(n, (-1, Nothing, ty, []))]
addIBC (IBCDef n)
ctxt <- getContext
case lookupDef n ctxt of
(TyDecl _ _ : _) ->
-- If the function isn't defined at the end of the elab script,
-- then it must be added as a metavariable. This needs guarding
-- to prevent overwriting case defs with a metavar, if the case
-- defs come after the type decl in the same script!
let ds' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, True, True))) ds
in addDeferred ds'
_ -> return ()
RDatatypeDeclInstrs n impls ->
do addIBC (IBCDef n)
updateIState $ \i -> i { idris_implicits = addDef n impls (idris_implicits i) }
addIBC (IBCImp n)
RDatatypeDefnInstrs tyn tyconTy ctors ->
do let cn (n, _, _) = n
cimpls (_, impls, _) = impls
cty (_, _, t) = t
addIBC (IBCDef tyn)
mapM_ (addIBC . IBCDef . cn) ctors
ctxt <- getContext
let params = findParams tyn (normalise ctxt [] tyconTy) (map cty ctors)
let typeInfo = TI (map cn ctors) False [] params [] False
-- implicit precondition to IBCData is that idris_datatypes on the IState is populated.
-- otherwise writing the IBC just fails silently!
updateIState $ \i -> i { idris_datatypes =
addDef tyn typeInfo (idris_datatypes i) }
addIBC (IBCData tyn)
ttag <- getName -- from AbsSyntax.hs, really returns a disambiguating Int
let metainf = DataMI params
addIBC (IBCMetaInformation tyn metainf)
updateContext (setMetaInformation tyn metainf)
for_ ctors $ \(cn, impls, _) ->
do updateIState $ \i -> i { idris_implicits = addDef cn impls (idris_implicits i) }
addIBC (IBCImp cn)
for_ ctors $ \(ctorN, _, _) ->
do totcheck (NoFC, ctorN)
ctxt <- tt_ctxt <$> getIState
case lookupTyExact ctorN ctxt of
Just cty -> do checkPositive (tyn : map cn ctors) (ctorN, cty)
return ()
Nothing -> return ()
case ctors of
[ctor] -> do setDetaggable (cn ctor); setDetaggable tyn
addIBC (IBCOpt (cn ctor)); addIBC (IBCOpt tyn)
_ -> return ()
-- TODO: inaccessible
RAddImplementation interfaceName implName ->
do -- The interface resolution machinery relies on a special
logElab 2 $ "Adding elab script implementation " ++ show implName ++
" for " ++ show interfaceName
addImplementation False True interfaceName implName
addIBC (IBCImplementation False True interfaceName implName)
RClausesInstrs n cs ->
do logElab 3 $ "Pattern-matching definition from tactics: " ++ show n
solveDeferred emptyFC n
let lhss = map (\(ns, lhs, _) -> (map fst ns, lhs)) cs
let fc = fileFC "elab_reflected"
pmissing <-
do ist <- getIState
possible <- genClauses fc n lhss
(map (\ (ns, lhs) ->
delab' ist lhs True True) lhss)
missing <- filterM (checkPossible n) possible
let undef = filter (noMatch ist (map snd lhss)) missing
return undef
let tot = if null pmissing
then Unchecked -- still need to check recursive calls
else Partial NotCovering -- missing cases implies not total
setTotality n tot
updateIState $ \i -> i { idris_patdefs =
addDef n (cs, pmissing) $ idris_patdefs i }
addIBC (IBCDef n)
ctxt <- getContext
case lookupDefExact n ctxt of
Just (CaseOp _ _ _ _ _ cd) ->
-- Here, we populate the call graph with a list of things
-- we refer to, so that if they aren't total, the whole
-- thing won't be.
let (scargs, sc) = cases_compiletime cd
calls = map fst $ findCalls sc scargs
in do logElab 2 $ "Called names in reflected elab: " ++ show calls
addCalls n calls
addIBC $ IBCCG n
Just _ -> return () -- TODO throw internal error
Nothing -> return ()
-- checkDeclTotality requires that the call graph be present
-- before calling it.
-- TODO: reduce code duplication with Idris.Elab.Clause
buildSCG (fc, n)
-- Actually run the totality checker. In the main clause
-- elaborator, this is deferred until after. Here, we run it
-- now to get totality information as early as possible.
tot' <- checkDeclTotality (fc, n)
setTotality n tot'
when (tot' /= Unchecked) $ addIBC (IBCTotal n tot')
where
-- TODO: see if the code duplication with Idris.Elab.Clause can be
-- reduced or eliminated.
-- These are always cases generated by genClauses
checkPossible :: Name -> PTerm -> Idris Bool
checkPossible fname lhs_in =
do ctxt <- getContext
ist <- getIState
let lhs = addImplPat ist lhs_in
let fc = fileFC "elab_reflected_totality"
case elaborate (constraintNS info) ctxt (idris_datatypes ist) (idris_name ist) (sMN 0 "refPatLHS") infP initEState
(erun fc (buildTC ist info EImpossible [] fname (allNamesIn lhs_in)
(infTerm lhs))) of
OK (ElabResult lhs' _ _ _ _ _ name', _) ->
do -- not recursively calling here, because we don't
-- want to run infinitely many times
let lhs_tm = orderPats (getInferTerm lhs')
updateIState $ \i -> i { idris_name = name' }
case recheck (constraintNS info) ctxt [] (forget lhs_tm) lhs_tm of
OK _ -> return True
err -> return False
-- if it's a recoverable error, the case may become possible
Error err -> return (recoverableCoverage ctxt err)
-- TODO: Attempt to reduce/eliminate code duplication with Idris.Elab.Clause
noMatch i cs tm = all (\x -> case matchClause i (delab' i x True True) tm of
Right _ -> False
Left _ -> True) cs
| FranklinChen/Idris-dev | src/Idris/Elab/Term.hs | bsd-3-clause | 130,616 | 1,309 | 29 | 56,381 | 25,722 | 14,944 | 10,778 | 2,315 | 249 |
module Main where
import Language.TECS.Jack.ToDeck.Layout
import Language.TECS.Jack.ToDeck.Compile
import Language.TECS.Jack.Parser
import System.Environment (getArgs)
import System.FilePath (replaceExtension)
import System.IO (hPutStrLn, stderr, withFile, IOMode(..))
import qualified Data.ByteString.Lazy as BS (readFile)
import Text.PrettyPrint.HughesPJClass (pPrint)
import Text.PrettyPrint.HughesPJ (render)
main =
do
filenames <- getArgs
mapM_ compileFile filenames
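-- A hypothetical invocation (the executable and file names are examples only,
-- not taken from this repository):
--
--   $ jackc Main.jack Square.jack
--
-- Each input is compiled next to itself with a ".deck" extension, because
-- replaceExtension "Main.jack" "deck" == "Main.deck".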
compileFile filename =
do
putStr $ unwords ["Compiling", filename, "... "]
bs <- BS.readFile filename
case parseJack bs filename of
Left err ->
do
putStrLn ""
hPutStrLn stderr err
Right jack ->
do
-- Use withFile/hPutStrLn to ensure terminating newline
withFile outfile WriteMode $ \h -> do
hPutStrLn h $ render . pPrint $ compile . layout $ jack
putStrLn $ unwords ["created", outfile]
where
outfile = replaceExtension filename "deck"
| gergoerdi/tecs | src/Language/TECS/Jack/jackc.hs | bsd-3-clause | 1,036 | 0 | 21 | 240 | 281 | 152 | 129 | 29 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
module V1.Projects.Actions.Create (Api, server) where
import Data.Aeson
import Servant
import Project (ProjectCreateParams(ProjectCreateParams), createProject)
import LibUtils (libToServant)
import V1.Projects.Schema (Response, buildResponse)
import DeployAppConfig (AppHandler)
type Api = ReqBody '[JSON] Body :> Post '[JSON] Response
newtype Body = Body
{ projectName :: String }
instance FromJSON Body where
parseJSON = withObject "project" parse'
where
parse' o = Body <$> o .: "name"
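-- A request body that this instance accepts looks like (the value is
-- illustrative only):
--
--   { "name": "my-project" }
--
-- so, using aeson directly, projectName <$> decode "{\"name\":\"my-project\"}"
-- should evaluate to Just "my-project".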
create :: Body -> AppHandler Response
create body = buildResponse <$> create' body
where
create' = createProject . buildParams
buildParams = ProjectCreateParams . projectName
server :: Server Api
server = enter libToServant create
| thiagorp/deployments-web | app/v1/projects/actions/Create.hs | bsd-3-clause | 851 | 0 | 9 | 143 | 216 | 124 | 92 | 22 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Database.TxtSushi.ExternalSort
-- Copyright : (c) Keith Sheppard 2009-2010
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- For sorting huge lists on disk
--
-----------------------------------------------------------------------------
module Database.TxtSushi.ExternalSort (
externalSort,
externalSortBy,
externalSortByConstrained,
defaultByteQuota,
defaultMaxOpenFiles) where
import Data.Binary
import Data.Binary.Get
import Data.Int
import qualified Data.ByteString.Lazy as BS
import Data.List
import System.IO
import System.IO.Unsafe
import System.Directory
-- | performs an external sort on the given list using the default resource
-- constraints
externalSort :: (Binary b, Ord b) => [b] -> [b]
externalSort = externalSortBy compare
-- | performs an external sort on the given list using the given comparison
-- function and the default resource constraints
externalSortBy :: (Binary b) => (b -> b -> Ordering) -> [b] -> [b]
externalSortBy = externalSortByConstrained defaultByteQuota defaultMaxOpenFiles
-- | Currently 16 MB. Don't rely on this value staying the same in future
-- releases!
defaultByteQuota :: Int
defaultByteQuota = 16 * 1024 * 1024
-- | Currently 17 files. Don't rely on this value staying the same in future
-- releases!
defaultMaxOpenFiles :: Int
defaultMaxOpenFiles = 17
-- | performs an external sort on the given list using the given resource
-- constraints
{-# NOINLINE externalSortByConstrained #-}
externalSortByConstrained :: (Binary b, Integral i) => i -> i -> (b -> b -> Ordering) -> [b] -> [b]
externalSortByConstrained byteQuota maxOpenFiles cmp xs = unsafePerformIO $ do
partialSortFiles <- bufferPartialSortsBy (fromIntegral byteQuota) cmp xs
-- now we must merge together the partial sorts
externalMergeAllBy (fromIntegral maxOpenFiles) cmp partialSortFiles
-- | merge a list of sorted lists into a single sorted list
mergeAllBy :: (a -> a -> Ordering) -> [[a]] -> [a]
mergeAllBy _ [] = []
mergeAllBy _ [singletonList] = singletonList
mergeAllBy cmp (fstList:sndList:[]) = mergeBy cmp fstList sndList
mergeAllBy cmp listList =
    -- recurse after breaking the list down by about 1/2 the size
mergeAllBy cmp (partitionAndMerge 2 cmp listList)
-- TODO add a smart adjustment so that the last partition will not ever
-- be more than 1 element different than the others
-- | partitions the given sorted lists into groupings containing `partitionSize`
-- or fewer lists then merges each of those partitions. So the returned
-- list should normally be shorter than the given list
partitionAndMerge :: Int -> (a -> a -> Ordering) -> [[a]] -> [[a]]
partitionAndMerge _ _ [] = []
partitionAndMerge partitionSize cmp listList =
map (mergeAllBy cmp) (regularPartitions partitionSize listList)
-- | chops up the given list at regular intervals
regularPartitions :: Int -> [a] -> [[a]]
regularPartitions _ [] = []
regularPartitions partitionSize xs =
let (currPartition, otherXs) = splitAt partitionSize xs
in currPartition : regularPartitions partitionSize otherXs
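-- Illustrative example (added comment):
--
-- > regularPartitions 2 [1,2,3,4,5] == [[1,2],[3,4],[5]]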
-- | merge two sorted lists into a single sorted list
mergeBy :: (a -> a -> Ordering) -> [a] -> [a] -> [a]
mergeBy _ [] list2 = list2
mergeBy _ list1 [] = list1
mergeBy comparisonFunction list1@(head1:tail1) list2@(head2:tail2) =
case head1 `comparisonFunction` head2 of
GT -> head2 : mergeBy comparisonFunction list1 tail2
_ -> head1 : mergeBy comparisonFunction tail1 list2
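-- Illustrative example (added comment): merging two already-sorted lists.
--
-- > mergeBy compare [1,3,5] [2,4,6] == [1,2,3,4,5,6]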
externalMergePass :: Binary b => Int -> (b -> b -> Ordering) -> [String] -> IO [String]
externalMergePass _ _ [] = return []
externalMergePass maxOpenFiles cmp files = do
-- we use (maxOpenFiles - 1) because we need to account for the file
-- handle that we're reading from
let (mergeNowFiles, mergeLaterFiles) = splitAt (maxOpenFiles - 1) files
mergeNowBinStrs <- readThenDelBinFiles mergeNowFiles
let mergeNowBinaries = map decodeAll mergeNowBinStrs
mergedNowFile <- bufferToTempFile $ mergeAllBy cmp mergeNowBinaries
mergedLaterFiles <- externalMergePass maxOpenFiles cmp mergeLaterFiles
return $ mergedNowFile : mergedLaterFiles
externalMergeAllBy :: Binary b => Int -> (b -> b -> Ordering) -> [String] -> IO [b]
externalMergeAllBy _ _ [] = return []
-- TODO do i need to write singleton lists to file in order to keep the max open file promise??
externalMergeAllBy _ _ [singletonFile] =
fmap decodeAll (readThenDelBinFile singletonFile)
externalMergeAllBy maxOpenFiles cmp files = do
partiallyMergedFiles <- externalMergePass maxOpenFiles cmp files
externalMergeAllBy maxOpenFiles cmp partiallyMergedFiles
-- | create a list of partial sorts
bufferPartialSortsBy :: (Binary b) => Int64 -> (b -> b -> Ordering) -> [b] -> IO [String]
bufferPartialSortsBy _ _ [] = return []
bufferPartialSortsBy byteQuota cmp xs = do
let (sortNowList, sortLaterList) = splitAfterQuota byteQuota xs
sortedRows = sortBy cmp sortNowList
sortBuffer <- bufferToTempFile sortedRows
otherSortBuffers <- bufferPartialSortsBy byteQuota cmp sortLaterList
return (sortBuffer:otherSortBuffers)
-- TODO not efficient! we're converting to binary twice so that we don't have
-- the bytestrings buffered to memory during the sort (that would about double
-- our mem usage). I think the right answer is to add a class extending binary
-- that has a sizeOf function
splitAfterQuota :: (Binary b) => Int64 -> [b] -> ([b], [b])
splitAfterQuota _ [] = ([], [])
splitAfterQuota quotaInBytes (binaryHead:binaryTail) =
let
quotaRemaining = quotaInBytes - BS.length (encode binaryHead)
(fstBinsTail, sndBins) = splitAfterQuota quotaRemaining binaryTail
in
if quotaRemaining <= 0
then ([binaryHead], binaryTail)
else (binaryHead:fstBinsTail, sndBins)
-- | lazily reads then deletes the given files
readThenDelBinFiles :: [String] -> IO [BS.ByteString]
readThenDelBinFiles = mapM readThenDelBinFile
-- | lazily reads then deletes the given file after the last byte is read
readThenDelBinFile :: String -> IO BS.ByteString
readThenDelBinFile fileName = do
binStr <- BS.readFile fileName
emptyStr <- unsafeInterleaveIO $ removeFile fileName >> return BS.empty
return $ binStr `BS.append` emptyStr
-- | buffer the binaries to a temporary file and return the path of that file
bufferToTempFile :: (Binary b) => [b] -> IO String
bufferToTempFile [] = return []
bufferToTempFile xs = do
tempDir <- getTemporaryDirectory
(tempFilePath, tempFileHandle) <- openBinaryTempFile tempDir "sort.txt"
BS.hPut tempFileHandle (encodeAll xs)
hClose tempFileHandle
return tempFilePath
encodeAll :: (Binary b) => [b] -> BS.ByteString
encodeAll = BS.concat . map encode
decodeAll :: (Binary b) => BS.ByteString -> [b]
decodeAll bs
| BS.null bs = []
| otherwise =
let (decodedBin, remainingBs, _) = runGetState get bs 0
in decodedBin : decodeAll remainingBs
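-- Illustrative round trip (added comment): encoding and then decoding a list
-- of serialisable values gives the original list back.
--
-- > decodeAll (encodeAll [1,2,3 :: Int]) == [1,2,3]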
| keithshep/txt-sushi | Database/TxtSushi/ExternalSort.hs | bsd-3-clause | 7,173 | 0 | 12 | 1,336 | 1,670 | 883 | 787 | 105 | 2 |
module LLVMPrim where
import LLVM
import LLVMKindle
genPrimitives = do
-- primitive struct definitions found in rts (name [(fieldname,fieldtype)])
addStruct "LIST" [("GCINFO",poly)]
addStruct "CONS" [("GCINFO",poly),
("a", poly),
("b", liststruct)]
addStruct "TUP2" [("GCINFO",poly),
("a", poly),
("b", poly)]
addStruct "TUP3" [("GCINFO",poly),
("a", poly),
("b", poly),
("c", poly)]
addStruct "TUP4" [("GCINFO",poly),
("a", poly),
("b", poly),
("c", poly),
("d", poly)]
addStruct "TUPLE" [("GCINFO",poly),
("size", int),
("elems",array 0 poly)]
addStruct "CLOS1" [("GCINFO",poly),
("Code", ptr (fun poly [clos1struct,
poly]))]
addStruct "CLOS2" [("GCINFO",poly),
("Code", ptr (fun poly [clos2struct,
poly,
poly]))]
addStruct "CLOS3" [("GCINFO",poly),
("Code", ptr (fun poly [clos3struct,
poly,
poly,
poly]))]
addStruct "CLOS" [("GCINFO",poly),
("Code", ptr (fun void []))]
-- the size of the struct depends on "__WORDSIZE"
-- 32 bit __WORDSIZE
addStruct "PTHREAD_MUTEX_T" [("a", array 24 bit8)]
-- 64 bit __WORDSIZE
--addStruct "PTHREAD_MUTEX_T" [("a", array 40 bit8)]
addStruct "Ref" [("GCINFO", poly),
("mut", struct "PTHREAD_MUTEX_T"),
("STATE", poly)]
addStruct "AbsTime" [("a", int),
("b", int)]
addStruct "Msg" [("GCINFO", poly),
("Code", ptr (fun int [msgstruct])),
("baseline", struct "AbsTime"),
("deadline", struct "AbsTime"),
("next", msgstruct)]
addStruct "Time" [("GCINFO", poly),
("sec", int),
("usec", int)]
addStruct "EITHER" [("GCINFO", poly),
("Tag", int)]
addStruct "LEFT" [("GCINFO", poly),
("Tag", int),
("a", poly)]
addStruct "RIGHT" [("GCINFO", poly),
("Tag", int),
("a", poly)]
addStruct "WORLD" [("", opaque)]
addStruct "Array" [("GCINFO", poly),
("size", int),
("elems", array 0 poly)]
addStruct "Timer" [("GCINFO", poly),
("reset", ptr (fun unit [timerstruct,int])),
("sample", ptr (fun timestruct [timerstruct,int]))]
-- External functions from rts, (name returntype [argumenttype])
addExternalFun "new" void [ptr (ptr int), int]
addExternalFun "LOCK" (ptr int) [ptr int]
addExternalFun "UNLOCK" bit8 [ptr int]
addExternalFun "INITREF" void [refstruct]
addExternalFun "ASYNC" bit8 [msgstruct,timestruct,timestruct]
addExternalFun "primTimeMin" timestruct [timestruct,timestruct];
addExternalFun "primTimePlus" timestruct [timestruct,timestruct];
addExternalFun "primTimeMinus" timestruct [timestruct,timestruct];
addExternalFun "primTimeEQ" bool [timestruct,timestruct];
addExternalFun "primTimeNE" bool [timestruct,timestruct];
addExternalFun "primTimeLT" bool [timestruct,timestruct];
addExternalFun "primTimeLE" bool [timestruct,timestruct];
addExternalFun "primTimeGT" bool [timestruct,timestruct];
addExternalFun "primTimeGE" bool [timestruct,timestruct];
addExternalFun "sec" timestruct [int]
addExternalFun "millisec" timestruct [int]
addExternalFun "microsec" timestruct [int]
addExternalFun "secOf" int [timestruct]
addExternalFun "microsecOf" int [timestruct]
addExternalFun "RAISE" void [int]
addExternalFun "Raise" poly [bit32,int]
addExternalFun "primRefl" poly [bit32,poly]
addExternalFun "getStr" liststruct [ptr char]
addExternalFun "strEq" int [liststruct,liststruct]
addExternalFun "primListArray" arraystruct [bit32,liststruct]
addExternalFun "primUniArray" arraystruct [bit32,int,poly]
addExternalFun "EmptyArray" arraystruct [bit32,int]
addExternalFun "CloneArray" arraystruct [bit32,arraystruct,int]
addExternalFun "UpdateArray" arraystruct [bit32,arraystruct,int,poly]
addExternalFun "primShowFloat" liststruct [float]
addExternalFun "ABORT" bit8 [bit32,msgstruct,refstruct]
addExternalFun "primTIMERTERM" timerstruct [int]
addExternalFun "CYCLIC_BEGIN" arraystruct [int,int]
addExternalFun "CYCLIC_UPDATE" void [arraystruct, int, addr]
addExternalFun "CYCLIC_END" void [arraystruct, addr]
-- mathematical functions
addExternalFun "sqrtf" float [float]
addExternalFun "logf" float [float]
addExternalFun "log10f" float [float]
addExternalFun "expf" float [float]
addExternalFun "sinf" float [float]
addExternalFun "cosf" float [float]
addExternalFun "tanf" float [float]
addExternalFun "asinf" float [float]
addExternalFun "acosf" float [float]
addExternalFun "atanf" float [float]
addExternalFun "sinhf" float [float]
addExternalFun "coshf" float [float]
-- The heap pointer, used by CYCLIC_*
addGlobalVar "hp" (LLVMRegister (ptr poly) "hp" (TagGlobal [External,Global] Nothing))
  -- References to external GC tags, all arrays of unknown length with int elements
addExternalGC "TUP2" (array 0 int)
addExternalGC "TUP3" (array 0 int)
addExternalGC "TUP4" (array 0 int)
addExternalGC "TUPLE" (array 0 int)
addExternalGC "CLOS1" (array 0 int)
addExternalGC "CLOS2" (array 0 int)
addExternalGC "CLOS3" (array 0 int)
addExternalGC "CLOS" (array 0 int)
addExternalGC "CONS" (array 0 int)
addExternalGC "EITHER" (array 0 int)
addExternalGC "LEFT" (array 0 int)
addExternalGC "RIGHT" (array 0 int)
addExternalGC "Msg" (array 0 int)
addExternalGC "Timer" (array 0 int)
addExternalGC "Ref" (array 0 int)
addExternalGC "Time" (array 0 int)
addExternalGC "Array" (array 0 int)
| mattias-lundell/timber-llvm | src/LLVMPrim.hs | bsd-3-clause | 6,346 | 0 | 13 | 1,874 | 1,885 | 990 | 895 | 131 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Network.PeyoTLS.Ecdsa (blSign, makeKs, ecdsaPubKey) where
import Control.Applicative ((<$>), (<*>))
import Data.Maybe (mapMaybe)
import Data.Bits (shiftR, xor)
import Crypto.Number.ModArithmetic (inverse)
import qualified Data.ByteString as BS
import qualified Data.ASN1.Types as ASN1
import qualified Data.ASN1.Encoding as ASN1
import qualified Data.ASN1.BinaryEncoding as ASN1
import qualified Codec.Bytable.BigEndian as B
import qualified Crypto.Types.PubKey.ECC as ECC
import qualified Crypto.PubKey.ECC.Prim as ECC
import qualified Crypto.PubKey.ECC.ECDSA as ECDSA
moduleName :: String
moduleName = "Newtork.PeyoTLS.Ecdsa"
type Hash = BS.ByteString -> BS.ByteString
blSign :: ECDSA.PrivateKey -> Hash -> [Integer] -> Integer ->
BS.ByteString -> ECDSA.Signature
blSign (ECDSA.PrivateKey crv d) hs ks bl m = head $ bs `mapMaybe` ks
where
bs k = do
r <- case bPointMul bl crv k g of
ECC.PointO -> Nothing
ECC.Point 0 _ -> Nothing
ECC.Point x _ -> return $ x `mod` n
ki <- inverse k n
case ki * (z + r * d) `mod` n of
0 -> Nothing
s -> Just $ ECDSA.Signature r s
ECC.CurveCommon _ _ g n _ = ECC.common_curve crv
z = if dl > 0 then e `shiftR` dl else e
e = either error id . B.decode $ hs m
dl = qlen e - qlen n
bPointMul :: Integer -> ECC.Curve -> Integer -> ECC.Point -> ECC.Point
bPointMul bl c@(ECC.CurveFP (ECC.CurvePrime _ cc)) k p =
ECC.pointMul c (bl * ECC.ecc_n cc + k) p
bPointMul _ _ _ _ = error $ moduleName ++ ".bPointMul: not implemented"
ecdsaPubKey :: ECC.CurveName -> BS.ByteString -> ECDSA.PublicKey
ecdsaPubKey c xy = ECDSA.PublicKey (ECC.getCurveByName c) $ pnt xy
where pnt s = case BS.uncons s of
Just (4, p) -> let (x, y) = BS.splitAt 32 p in ECC.Point
(either error id $ B.decode x)
(either error id $ B.decode y)
_ -> error $ moduleName ++ ".decodePoint: not implemented point fmt"
instance B.Bytable ECDSA.Signature where
encode (ECDSA.Signature r s) = ASN1.encodeASN1' ASN1.DER [
ASN1.Start ASN1.Sequence,
ASN1.IntVal r, ASN1.IntVal s, ASN1.End ASN1.Sequence ]
decode bs = case ASN1.decodeASN1' ASN1.DER bs of
Right [ASN1.Start ASN1.Sequence,
ASN1.IntVal r, ASN1.IntVal s, ASN1.End ASN1.Sequence] ->
Right $ ECDSA.Signature r s
Right _ -> Left $ moduleName ++ ": ECDSA.Signature.decode"
Left err -> Left $
moduleName ++ ": ECDSA.Signature.decode: " ++ show err
-- RFC 6979
makeKs :: (Hash, Int) -> Integer -> Integer -> BS.ByteString -> [Integer]
makeKs hb@(hs, _) q x = filter ((&&) <$> (> 0) <*> (< q))
. uncurry (createKs hb q) . initializeKV hb q x . hs
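-- Hedged usage sketch (added comment; the real callers live elsewhere in
-- peyotls): given some hash function 'sha256' with a 64-byte block size and
-- a blinding value 'bl', a deterministic signature could be produced roughly
-- like this:
--
-- > sign prv@(ECDSA.PrivateKey crv d) bl m =
-- >     blSign prv sha256 (makeKs (sha256, 64) n d m) bl m
-- >   where ECC.CurveCommon _ _ _ n _ = ECC.common_curve crv
--
-- 'sha256', the block size and 'bl' are assumptions for illustration, not
-- values taken from this module.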
createKs :: (Hash, Int) -> Integer -> BS.ByteString -> BS.ByteString -> [Integer]
createKs hb@(hs, bls) q k v = kk : createKs hb q k' v''
where
(t, v') = createT hb q k v ""
kk = bits2int q t
k' = hmac hs bls k $ v' `BS.append` "\x00"
v'' = hmac hs bls k' v'
createT :: (Hash, Int) -> Integer -> BS.ByteString -> BS.ByteString ->
BS.ByteString -> (BS.ByteString, BS.ByteString)
createT hb@(hs, bls) q k v t
| blen t < qlen q = createT hb q k v' $ t `BS.append` v'
| otherwise = (t, v)
where v' = hmac hs bls k v
initializeKV :: (Hash, Int) ->
Integer -> Integer -> BS.ByteString -> (BS.ByteString, BS.ByteString)
initializeKV (hs, bls) q x h = (k2, v2)
where
k0 = BS.replicate (BS.length h) 0
v0 = BS.replicate (BS.length h) 1
k1 = hmac hs bls k0 $
BS.concat [v0, "\x00", int2octets q x, bits2octets q h]
v1 = hmac hs bls k1 v0
k2 = hmac hs bls k1 $
BS.concat [v1, "\x01", int2octets q x, bits2octets q h]
v2 = hmac hs bls k2 v1
hmac :: (BS.ByteString -> BS.ByteString) -> Int ->
BS.ByteString -> BS.ByteString -> BS.ByteString
hmac hs bls sk =
hs . BS.append (BS.map (0x5c `xor`) k) .
hs . BS.append (BS.map (0x36 `xor`) k)
where
k = padd $ if BS.length sk > bls then hs sk else sk
padd bs = bs `BS.append` BS.replicate (bls - BS.length bs) 0
qlen :: Integer -> Int
qlen 0 = 0
qlen q = succ . qlen $ q `shiftR` 1
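-- Illustrative examples (added comment): 'qlen' is the bit length of a
-- non-negative integer.
--
-- > qlen 255 == 8
-- > qlen 256 == 9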
rlen :: Integer -> Int
rlen 0 = 0
rlen q = 8 + rlen (q `shiftR` 8)
blen :: BS.ByteString -> Int
blen = (8 *) . BS.length
bits2int :: Integer -> BS.ByteString -> Integer
bits2int q bs
| bl > ql = i `shiftR` (bl - ql)
| otherwise = i
where ql = qlen q; bl = blen bs; i = either error id $ B.decode bs
int2octets :: Integer -> Integer -> BS.ByteString
int2octets q i
| bl <= rl = BS.replicate (rl - bl) 0 `BS.append` bs
| otherwise = error $ moduleName ++ ".int2octets: too large integer"
where rl = rlen q `div` 8; bs = B.encode i; bl = BS.length bs
bits2octets :: Integer -> BS.ByteString -> BS.ByteString
bits2octets q bs = int2octets q $ bits2int q bs `mod` q
| YoshikuniJujo/peyotls | peyotls/src/Network/PeyoTLS/Ecdsa.hs | bsd-3-clause | 4,664 | 33 | 17 | 941 | 2,100 | 1,112 | 988 | 109 | 5 |
{-# LANGUAGE CPP, GADTs #-}
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
----------------------------------------------------------------
-- 2015.12.19
-- |
-- Module : Language.Hakaru.Syntax.AST.Sing
-- Copyright : Copyright (c) 2016 the Hakaru team
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC-only
--
-- Factored out from "Language.Hakaru.Syntax.AST".
--
-- TODO: if we're not going to have this in "Language.Hakaru.Syntax.AST", then we should rename it to @Language.Hakaru.Syntax.AST.Sing@ or the like.
----------------------------------------------------------------
module Language.Hakaru.Syntax.AST.Sing
( sing_NaryOp
, sing_PrimOp
, sing_ArrayOp
, sing_MeasureOp
, sing_Literal
) where
import Language.Hakaru.Syntax.IClasses
import Language.Hakaru.Types.HClasses
import Language.Hakaru.Types.Sing
import Language.Hakaru.Syntax.AST
----------------------------------------------------------------
----------------------------------------------------------------
-- N.B., we do case analysis so that we don't need the class constraint!
sing_Literal :: Literal a -> Sing a
sing_Literal (LNat _) = sing
sing_Literal (LInt _) = sing
sing_Literal (LProb _) = sing
sing_Literal (LReal _) = sing
-- TODO: we don't need to store the HOrd\/HSemiring values here,
-- we can recover them by typeclass, just like we use 'sing' to get
-- 'sBool' for the other ones...
sing_NaryOp :: NaryOp a -> Sing a
sing_NaryOp And = sing
sing_NaryOp Or = sing
sing_NaryOp Xor = sing
sing_NaryOp Iff = sing
sing_NaryOp (Min theOrd) = sing_HOrd theOrd
sing_NaryOp (Max theOrd) = sing_HOrd theOrd
sing_NaryOp (Sum theSemi) = sing_HSemiring theSemi
sing_NaryOp (Prod theSemi) = sing_HSemiring theSemi
-- TODO: is there any way to define a @sing_List1@ like @sing@ for automating all these monomorphic cases?
sing_PrimOp :: PrimOp typs a -> (List1 Sing typs, Sing a)
sing_PrimOp Not = (sing `Cons1` Nil1, sing)
sing_PrimOp Impl = (sing `Cons1` sing `Cons1` Nil1, sing)
sing_PrimOp Diff = (sing `Cons1` sing `Cons1` Nil1, sing)
sing_PrimOp Nand = (sing `Cons1` sing `Cons1` Nil1, sing)
sing_PrimOp Nor = (sing `Cons1` sing `Cons1` Nil1, sing)
sing_PrimOp Pi = (Nil1, sing)
sing_PrimOp Sin = (sing `Cons1` Nil1, sing)
sing_PrimOp Cos = (sing `Cons1` Nil1, sing)
sing_PrimOp Tan = (sing `Cons1` Nil1, sing)
sing_PrimOp Asin = (sing `Cons1` Nil1, sing)
sing_PrimOp Acos = (sing `Cons1` Nil1, sing)
sing_PrimOp Atan = (sing `Cons1` Nil1, sing)
sing_PrimOp Sinh = (sing `Cons1` Nil1, sing)
sing_PrimOp Cosh = (sing `Cons1` Nil1, sing)
sing_PrimOp Tanh = (sing `Cons1` Nil1, sing)
sing_PrimOp Asinh = (sing `Cons1` Nil1, sing)
sing_PrimOp Acosh = (sing `Cons1` Nil1, sing)
sing_PrimOp Atanh = (sing `Cons1` Nil1, sing)
sing_PrimOp RealPow = (sing `Cons1` sing `Cons1` Nil1, sing)
sing_PrimOp Exp = (sing `Cons1` Nil1, sing)
sing_PrimOp Log = (sing `Cons1` Nil1, sing)
sing_PrimOp (Infinity h) = (Nil1, sing_HIntegrable h)
sing_PrimOp GammaFunc = (sing `Cons1` Nil1, sing)
sing_PrimOp BetaFunc = (sing `Cons1` sing `Cons1` Nil1, sing)
-- Mere case analysis isn't enough for the rest of these, because
-- of the class constraints. We fix that with helper functions that do the
-- dictionary passing explicitly.
--
-- TODO: is there any way to automate building these from their
-- respective @a@ proofs?
sing_PrimOp (Equal theEq) =
let a = sing_HEq theEq
in (a `Cons1` a `Cons1` Nil1, sBool)
sing_PrimOp (Less theOrd) =
let a = sing_HOrd theOrd
in (a `Cons1` a `Cons1` Nil1, sBool)
sing_PrimOp (NatPow theSemi) =
let a = sing_HSemiring theSemi
in (a `Cons1` SNat `Cons1` Nil1, a)
sing_PrimOp (Negate theRing) =
let a = sing_HRing theRing
in (a `Cons1` Nil1, a)
sing_PrimOp (Abs theRing) =
let a = sing_HRing theRing
b = sing_NonNegative theRing
in (a `Cons1` Nil1, b)
sing_PrimOp (Signum theRing) =
let a = sing_HRing theRing
in (a `Cons1` Nil1, a)
sing_PrimOp (Recip theFrac) =
let a = sing_HFractional theFrac
in (a `Cons1` Nil1, a)
sing_PrimOp (NatRoot theRad) =
let a = sing_HRadical theRad
in (a `Cons1` SNat `Cons1` Nil1, a)
sing_PrimOp (Erf theCont) =
let a = sing_HContinuous theCont
in (a `Cons1` Nil1, a)
sing_ArrayOp :: ArrayOp typs a -> (List1 Sing typs, Sing a)
sing_ArrayOp (Index a) = (SArray a `Cons1` SNat `Cons1` Nil1, a)
sing_ArrayOp (Size a) = (SArray a `Cons1` Nil1, SNat)
sing_ArrayOp (Reduce a) =
((a `SFun` a `SFun` a) `Cons1` a `Cons1` SArray a `Cons1` Nil1, a)
sing_MeasureOp :: MeasureOp typs a -> (List1 Sing typs, Sing a)
sing_MeasureOp Lebesgue = (Nil1, sing)
sing_MeasureOp Counting = (Nil1, sing)
sing_MeasureOp Categorical = (sing `Cons1` Nil1, sing)
sing_MeasureOp Uniform = (sing `Cons1` sing `Cons1` Nil1, sing)
sing_MeasureOp Normal = (sing `Cons1` sing `Cons1` Nil1, sing)
sing_MeasureOp Poisson = (sing `Cons1` Nil1, sing)
sing_MeasureOp Gamma = (sing `Cons1` sing `Cons1` Nil1, sing)
sing_MeasureOp Beta = (sing `Cons1` sing `Cons1` Nil1, sing)
----------------------------------------------------------------
----------------------------------------------------------- fin.
| zaxtax/hakaru | haskell/Language/Hakaru/Syntax/AST/Sing.hs | bsd-3-clause | 5,509 | 0 | 11 | 1,203 | 1,552 | 892 | 660 | 93 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module: Text.Stencil.Types
-- Copyright: Tobias Florek 2014
-- License: BSD3
--
-- Maintainer: Tobias Florek <[email protected]>
-- Stability: experimental
-- Portability: unknown
--
-- Base types used throughout Stencil.
module Text.Stencil.Types
( Context(..)
, Dict
, List
, defaultAsBool
, defaultAsDict
, asEscapedNoEscape
, defaultAsList
, defaultAsText
, asEscapedWithEscaper
, Value(..)
, StencilError(..)
, ErrorHandler
, Loader
, JinjaSYM(..)
, JinjaVariableSYM(..)
, JinjaIncludeSYM(..)
, Variable(..)
, Key(..)
, Condition(..)
, Identifier(..)
, duplicate
, lookupVariable
) where
#ifndef MIN_VERSION_base
#define MIN_VERSION_base(x,y,z) 1
#endif
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
import Data.Monoid (Monoid, mappend, mempty)
#endif
import Data.HashMap.Strict (HashMap)
import Data.String (IsString, fromString)
import Data.Text (Text)
import Data.Text.Lazy.Builder (Builder)
import Data.Typeable (Typeable)
import Data.Vector (Vector, (!?))
import qualified Data.Aeson as A
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Vector as V
type List = Vector Value
type Dict = HashMap Text Value
data Context = Context { asEscaped :: Value -> Maybe Text
, asText :: Value -> Maybe Text
, asList :: Value -> Maybe List
, asDict :: Value -> Maybe Dict
, asBool :: Value -> Maybe Bool
, dict :: Dict
}
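-- Hedged sketch (added comment): a context built from the default coercions
-- and a starting dict could look like
--
-- > defaultContext :: Dict -> Context
-- > defaultContext d =
-- >     Context asEscapedNoEscape defaultAsText defaultAsList
-- >             defaultAsDict defaultAsBool d
--
-- 'defaultContext' is an illustrative name, not an export of this module.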
-- that should be more typesafe. see upcoming lambda definition.
-- but what to do with dicts and lists? Tags (a la Typeable)?
data Value = EscapedV Text
| TextV Text
| DictV Dict
| ListV List
| BoolV Bool
| IntV Int
-- Lambda :: (b -> a) -> Value
data StencilError = InvalidTemplate Text String
| InvalidTemplateFile FilePath
| NoSuchInclude FilePath
| LookupError Variable
| ManyErrors [StencilError]
deriving (Show, Read, Eq, Ord, Typeable)
instance Monoid StencilError where
mappend e e' = ManyErrors [e, e']
mempty = ManyErrors []
type ErrorHandler r = StencilError -> Either StencilError r
type Loader m = FilePath -> m (Maybe TL.Text)
data Variable = Variable Text
| ObjectKey Text Text
| ListIndex Text Int
deriving (Eq, Ord, Read, Show)
asEscapedWithEscaper :: (Text -> Text) -> Value -> Maybe Text
asEscapedWithEscaper _ (EscapedV t) = Just t
asEscapedWithEscaper escape v = escape <$> defaultAsText v
asEscapedNoEscape :: Value -> Maybe Text
asEscapedNoEscape = asEscapedWithEscaper id
defaultAsText :: Value -> Maybe Text
defaultAsText (TextV t) = Just t
defaultAsText (EscapedV t) = Just t
defaultAsText (IntV i) = Just . T.pack $ show i
defaultAsText (BoolV True) = Just "True"
defaultAsText (BoolV False) = Just "False"
defaultAsText _ = Nothing
defaultAsDict :: Value -> Maybe Dict
defaultAsDict (DictV d) = Just d
defaultAsDict (ListV l) =
Just . HM.fromList $ V.ifoldl' step mempty l
where
step xs i x = (T.pack (show i), x) : xs
defaultAsDict _ = Nothing
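-- Illustrative example (added comment): a list is exposed as a dict keyed by
-- its string-rendered indices, so
--
-- > defaultAsDict (ListV (V.fromList [TextV "a", TextV "b"]))
--
-- yields a dict mapping "0" to TextV "a" and "1" to TextV "b".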
defaultAsList :: Value -> Maybe List
defaultAsList (ListV l) = Just l
defaultAsList _ = Nothing
defaultAsBool :: Value -> Maybe Bool
defaultAsBool (BoolV b) = Just b
defaultAsBool (IntV i) = Just $ i > 0
defaultAsBool (TextV t) = Just . not $ T.null t
defaultAsBool (EscapedV t) = Just . not $ T.null t
defaultAsBool (ListV l) = Just . not $ V.null l
defaultAsBool (DictV d) = Just . not $ HM.null d
lookupKey :: Context -> Text -> Maybe Value
lookupKey ctx k = HM.lookup k (dict ctx)
lookupVariable :: Variable -> Context -> Maybe Value
lookupVariable (Variable v) ctx = lookupKey ctx v
lookupVariable (ObjectKey v k) ctx =
case lookupKey ctx v of
Just (DictV d) -> HM.lookup k d
_ -> Nothing
lookupVariable (ListIndex v i) ctx =
case lookupKey ctx v of
Just (ListV l) -> l !? i
_ -> Nothing
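-- Illustrative lookups (added comment): assuming a context 'ctx' whose dict
-- maps "user" to DictV (HM.fromList [("name", TextV "ada")]),
--
-- > lookupVariable (ObjectKey "user" "name") ctx -- Just (TextV "ada")
-- > lookupVariable (Variable "missing") ctx -- Nothing
--
-- 'ctx' and the example values are assumptions for illustration.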
newtype Key = Key Text
deriving (Eq, Ord, Read, Show)
-- | XXX should have other conditions
data Condition = VariableNotNull Variable -- ^ {% if name %}
-- | VariableDefined Variable -- ^ {% if name is defined %}
deriving (Eq, Ord, Read, Show)
newtype Identifier = Identifier Text
deriving (Eq, Ord, Read, Show)
class JinjaSYM repr where
tokens :: [repr] -> repr
literal :: Builder -> repr
class JinjaVariableSYM repr where
variable :: Variable -> repr
condition :: Condition -> [repr] -> [repr] -> repr
loop :: Variable -> Identifier -> [repr] -> repr
class JinjaIncludeSYM repr where
include :: FilePath -> repr
instance (JinjaSYM repr, JinjaSYM repr') => JinjaSYM (repr, repr') where
tokens terms = (tokens xs, tokens xs')
where (xs, xs') = unzip terms
literal x = (literal x, literal x)
instance (JinjaVariableSYM repr, JinjaVariableSYM repr') => JinjaVariableSYM (repr, repr') where
variable var = (variable var, variable var)
condition l t f = (condition l ifB elseB, condition l ifB' elseB')
where (ifB, ifB') = unzip t
(elseB, elseB') = unzip f
loop l ident b = (loop l ident body, loop l ident body')
where (body, body') = unzip b
instance (JinjaIncludeSYM repr, JinjaIncludeSYM repr') => JinjaIncludeSYM (repr, repr') where
include path = (include path, include path)
duplicate :: (repr, repr') -> (repr, repr')
duplicate = id
instance A.FromJSON Value where
parseJSON o@(A.Object _) = DictV <$> A.parseJSON o
parseJSON a@(A.Array _) = ListV <$> A.parseJSON a
parseJSON v = TextV . fromString <$> A.parseJSON v
{-# INLINE parseJSON #-}
instance IsString Value where
fromString = EscapedV . fromString
| ibotty/stencil | src/Text/Stencil/Types.hs | bsd-3-clause | 6,213 | 0 | 11 | 1,685 | 1,885 | 1,025 | 860 | 143 | 3 |
{-# LANGUAGE TemplateHaskell, QuasiQuotes #-}
{- | A grab bag of useful instances for Template Haskell types -}
module Language.Haskell.TH.LiftInstances where
import qualified Language.Haskell.TH.Lift as L
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
--import Language.Haskell.TH.LambdaConvert
import Language.Haskell.TH.KindInference
$(L.deriveLiftMany [''Match, ''Range, ''Stmt, ''Strict,
''InlineSpec,
''Safety,
''Callconv,
''Guard,
''Lit,
''Exp, ''FixityDirection, ''Clause, ''Pat,
''Body, ''FunDep, ''Foreign,
''Pragma, ''FamFlavour, ''TyVarBndr,
''Kind, ''Pred, ''Con,
''Dec, ''ClassInstance, ''Type, ''Fixity, ''Info])
un_ty_con (TyConI x) = x
explictly_kind name = do
Right kind <- inferKind name
TyConI data_dec <- reify name
return $ convert_ty_vars data_dec kind
convert_ty_vars :: Dec -> Kind -> Dec
convert_ty_vars (DataD x y ty_vars z w) kind = DataD x y (replace_ty_vars ty_vars kind) z w
convert_ty_vars (NewtypeD x y ty_vars z w) kind = NewtypeD x y (replace_ty_vars ty_vars kind) z w
replace_ty_vars ty_vars kind = result where
kinds = unfold_kinds kind (length ty_vars)
result = zipWith add_kind ty_vars kinds
unfold_kinds x y = unfold_kinds' x y []
unfold_kinds' _ 0 accum = accum
unfold_kinds' (ArrowK x y) i accum = unfold_kinds' y (i - 1) (x:accum)
add_kind (PlainTV n) k = KindedTV n k
add_kind x k = x
| jfischoff/th-instances | src/Language/Haskell/TH/LiftInstances.hs | bsd-3-clause | 1,609 | 0 | 9 | 451 | 490 | 263 | 227 | 33 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Ivory.Language.BoundedInteger where
import Text.Printf
import Ivory.Language.Proxy
import qualified Ivory.Language.Syntax as I
import Ivory.Language.Type
--------------------------------------------------------------------------------
-- | It is an error if a constant implicitly underflows/overflows.
boundedFromInteger :: forall a b . (Num a, IvoryType a, Bounded b, Integral b)
=> (I.Expr -> a) -> b -> Integer -> a
boundedFromInteger constr _ i
| i > snd bounds
= error $ printf "The constant %d is too large to cast to type %s." i tyStr
| i < fst bounds
= error $ printf "The constant %d is too small to cast to type %s." i tyStr
| otherwise
= constr (fromInteger i)
where
ty = ivoryType (Proxy :: Proxy a)
tyStr = show ty
bounds :: (Integer, Integer)
bounds = (fromIntegral (minBound :: b), fromIntegral (maxBound :: b))
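-- Hedged usage sketch (added comment; the concrete instances live elsewhere
-- in Ivory): a fixed-width numeric wrapper would typically use
-- 'boundedFromInteger' to get a range-checked 'fromInteger', e.g.
--
-- > newtype Uint8 = Uint8 I.Expr
-- > instance Num Uint8 where
-- >   fromInteger = boundedFromInteger Uint8 (0 :: Word8)
-- >   ...
--
-- 'Uint8' and its 'Word8' bound are illustrative assumptions, not
-- definitions from this module.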
| GaloisInc/ivory | ivory/src/Ivory/Language/BoundedInteger.hs | bsd-3-clause | 952 | 0 | 10 | 215 | 243 | 132 | 111 | 19 | 1 |