Dataset columns (name, dtype, value range or class count):

  code                 stringlengths   5 to 1.03M
  repo_name            stringlengths   5 to 90
  path                 stringlengths   4 to 158
  license              stringclasses   15 values
  size                 int64           5 to 1.03M
  n_ast_errors         int64           0 to 53.9k
  ast_max_depth        int64           2 to 4.17k
  n_whitespaces        int64           0 to 365k
  n_ast_nodes          int64           3 to 317k
  n_ast_terminals      int64           1 to 171k
  n_ast_nonterminals   int64           1 to 146k
  loc                  int64           -1 to 37.3k
  cycloplexity         int64           -1 to 1.31k
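The columns above describe one record per source file: the file contents plus repository metadata and parse statistics. As a minimal sketch of how such a dump could be inspected, assuming it is published as a Hugging Face datasets dataset (the dataset identifier below is a hypothetical placeholder):

    from datasets import load_dataset  # Hugging Face datasets library (assumed host of this dump)

    # Hypothetical dataset id; substitute the real repository name for this dump.
    ds = load_dataset("example/haskell-code-metrics", split="train")

    # Keep rows that parsed cleanly; -1 appears to mark missing loc/cycloplexity values.
    clean = ds.filter(lambda row: row["n_ast_errors"] == 0 and row["loc"] >= 0)

    # Print metadata for a few rows; every column name comes from the schema above.
    for row in clean.select(range(3)):
        print(row["repo_name"], row["path"], row["license"], row["size"], row["loc"], row["cycloplexity"])

Each record below lists the source file followed by its metadata fields in the column order above.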
{-|
Module      : Pipes.KeyValueCsv.IO
Copyright   : (c) Marcin Mrotek, 2015
License     : BSD3
Maintainer  : [email protected]
Stability   : experimental

File input and output.
-}

{-# LANGUAGE DataKinds
           , FlexibleContexts
           , PolyKinds
           , RankNTypes
           , TypeOperators
  #-}

module Pipes.KeyValueCsv.IO
  ( streamIn
  , streamInBS
    -- * Re-exports
  , SafeT
  ) where

import Prelude hiding (lines)

import Pipes.KeyValueCsv

import Data.Vinyl
import Data.Vinyl.Functor
import Pipes
import Pipes.ByteString (ByteString)
import qualified Pipes.ByteString as Pipes
import Pipes.Safe (SafeT, MonadMask)
import qualified Pipes.Safe as Pipes
import Pipes.Text.Encoding
import System.IO

streamInBS
  :: ( MonadIO m
     , MonadMask m
     )
  => FilePath
  -> Producer ByteString (SafeT m) ()
  -- ^Read a 'ByteString' stream from file.
streamInBS path = Pipes.bracket
  ( liftIO $ openFile path ReadMode )
  ( liftIO . hClose )
  Pipes.fromHandle

streamIn
  :: forall (m :: * -> *) (f :: k -> *) (g :: j -> *) (hs :: [k]) (rs :: [j])
   . ( MonadIO m
     , MonadMask m
     , Record hs
     )
  => Codec                       -- ^File encoding.
  -> FilePath                    -- ^Input file.
  -> Options (SafeT m) f g hs rs -- ^Parsing options.
  -> SafeT m
       ( Rec (WithKeyValueError :. f) hs
       , Producer (Rec (WithCsvError :. g) rs) (SafeT m)
           ( Producer ByteString (SafeT m) () )
       )
  -- ^Read a CSV file.
streamIn codec path options =
  parseKeyValueCsv options . decode codec $ streamInBS path
repo_name: marcinmrotek/pipes-key-value-csv
path: src/Pipes/KeyValueCsv/IO.hs
license: bsd-3-clause
size: 1,627 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 477 | n_ast_nodes: 391 | n_ast_terminals: 223 | n_ast_nonterminals: 168 | loc: 47 | cycloplexity: 1
{-# LANGUAGE BangPatterns , RecordWildCards , TransformListComp #-} -- | DNS Message builder. module Network.DNS.Encode.Builders ( putDNSMessage , putDNSFlags , putHeader , putDomain , putMailbox , putResourceRecord ) where import Control.Monad.State (State, modify, execState, gets) import qualified Control.Exception as E import qualified Data.ByteString.Builder as BB import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy.Char8 as LBS import qualified Data.IP import Data.IP (IP(..), fromIPv4, fromIPv6b, makeAddrRange) import GHC.Exts (the, groupWith) import Network.DNS.Imports import Network.DNS.StateBinary import Network.DNS.Types.Internal ---------------------------------------------------------------- putDNSMessage :: DNSMessage -> SPut putDNSMessage msg = putHeader hd <> putNums <> mconcat (map putQuestion qs) <> mconcat (map putResourceRecord an) <> mconcat (map putResourceRecord au) <> mconcat (map putResourceRecord ad) where putNums = mconcat $ fmap putInt16 [ length qs , length an , length au , length ad ] hm = header msg fl = flags hm eh = ednsHeader msg qs = question msg an = answer msg au = authority msg hd = ifEDNS eh hm $ hm { flags = fl { rcode = rc } } rc = ifEDNS eh <$> id <*> nonEDNSrcode $ rcode fl where nonEDNSrcode code | fromRCODE code < 16 = code | otherwise = FormatErr ad = prependOpt $ additional msg where prependOpt ads = mapEDNS eh (fromEDNS ads $ fromRCODE rc) ads where fromEDNS :: AdditionalRecords -> Word16 -> EDNS -> AdditionalRecords fromEDNS rrs rc' edns = ResourceRecord name' type' class' ttl' rdata' : rrs where name' = BS.singleton '.' type' = OPT class' = maxUdpSize `min` (minUdpSize `max` ednsUdpSize edns) ttl0' = fromIntegral (rc' .&. 0xff0) `shiftL` 20 vers' = fromIntegral (ednsVersion edns) `shiftL` 16 ttl' | ednsDnssecOk edns = ttl0' `setBit` 15 .|. vers' | otherwise = ttl0' .|. vers' rdata' = RD_OPT $ ednsOptions edns putHeader :: DNSHeader -> SPut putHeader hdr = putIdentifier (identifier hdr) <> putDNSFlags (flags hdr) where putIdentifier = put16 putDNSFlags :: DNSFlags -> SPut putDNSFlags DNSFlags{..} = put16 word where set :: Word16 -> State Word16 () set byte = modify (.|. byte) st :: State Word16 () st = sequence_ [ set (fromRCODE rcode .&. 0x0f) , when chkDisable $ set (bit 4) , when authenData $ set (bit 5) , when recAvailable $ set (bit 7) , when recDesired $ set (bit 8) , when trunCation $ set (bit 9) , when authAnswer $ set (bit 10) , set (fromOPCODE opcode `shiftL` 11) , when (qOrR==QR_Response) $ set (bit 15) ] word = execState st 0 -- XXX: Use question class when implemented -- putQuestion :: Question -> SPut putQuestion Question{..} = putDomain qname <> put16 (fromTYPE qtype) <> put16 classIN putResourceRecord :: ResourceRecord -> SPut putResourceRecord ResourceRecord{..} = mconcat [ putDomain rrname , put16 (fromTYPE rrtype) , put16 rrclass , put32 rrttl , putResourceRData rdata ] where putResourceRData :: RData -> SPut putResourceRData rd = do addPositionW 2 -- "simulate" putInt16 rDataBuilder <- putRData rd let rdataLength = fromIntegral . LBS.length . 
BB.toLazyByteString $ rDataBuilder let rlenBuilder = BB.int16BE rdataLength return $ rlenBuilder <> rDataBuilder putRData :: RData -> SPut putRData rd = case rd of RD_A address -> mconcat $ map putInt8 (fromIPv4 address) RD_NS nsdname -> putDomain nsdname RD_CNAME cname -> putDomain cname RD_SOA a b c d e f g -> putSOA a b c d e f g RD_NULL bytes -> putByteString bytes RD_PTR ptrdname -> putDomain ptrdname RD_MX pref exch -> mconcat [put16 pref, putDomain exch] RD_TXT textstring -> putTXT textstring RD_RP mbox dname -> putMailbox mbox <> putDomain dname RD_AAAA address -> mconcat $ map putInt8 (fromIPv6b address) RD_SRV pri wei prt tgt -> putSRV pri wei prt tgt RD_DNAME dname -> putDomain dname RD_OPT options -> mconcat $ fmap putOData options RD_DS kt ka dt d -> putDS kt ka dt d RD_CDS kt ka dt d -> putDS kt ka dt d RD_RRSIG rrsig -> putRRSIG rrsig RD_NSEC next types -> putDomain next <> putNsecTypes types RD_DNSKEY f p alg key -> putDNSKEY f p alg key RD_CDNSKEY f p alg key -> putDNSKEY f p alg key RD_NSEC3 a f i s h types -> putNSEC3 a f i s h types RD_NSEC3PARAM a f iter salt -> putNSEC3PARAM a f iter salt RD_TLSA u s m dgst -> putTLSA u s m dgst UnknownRData bytes -> putByteString bytes where putSOA mn mr serial refresh retry expire minttl = mconcat [ putDomain mn , putMailbox mr , put32 serial , put32 refresh , put32 retry , put32 expire , put32 minttl ] -- TXT record string fragments are at most 255 bytes putTXT textstring = let (!h, !t) = BS.splitAt 255 textstring in putByteStringWithLength h <> if BS.null t then mempty else putTXT t putSRV priority weight port target = mconcat [ put16 priority , put16 weight , put16 port , putDomain target ] putDS keytag keyalg digestType digest = mconcat [ put16 keytag , put8 keyalg , put8 digestType , putByteString digest ] putRRSIG RDREP_RRSIG{..} = mconcat [ put16 $ fromTYPE rrsigType , put8 rrsigKeyAlg , put8 rrsigNumLabels , put32 rrsigTTL , put32 $ fromIntegral rrsigExpiration , put32 $ fromIntegral rrsigInception , put16 rrsigKeyTag , putDomain rrsigZone , putByteString rrsigValue ] putDNSKEY flags protocol alg key = mconcat [ put16 flags , put8 protocol , put8 alg , putByteString key ] putNSEC3 alg flags iterations salt hash types = mconcat [ put8 alg , put8 flags , put16 iterations , putByteStringWithLength salt , putByteStringWithLength hash , putNsecTypes types ] putNSEC3PARAM alg flags iterations salt = mconcat [ put8 alg , put8 flags , put16 iterations , putByteStringWithLength salt ] putTLSA usage selector mtype assocData = mconcat [ put8 usage , put8 selector , put8 mtype , putByteString assocData ] -- | Encode DNSSEC NSEC type bits putNsecTypes :: [TYPE] -> SPut putNsecTypes types = putTypeList $ map fromTYPE types where putTypeList :: [Word16] -> SPut putTypeList ts = mconcat [ putWindow (the top8) bot8 | t <- ts, let top8 = fromIntegral t `shiftR` 8, let bot8 = fromIntegral t .&. 0xff, then group by top8 using groupWith ] putWindow :: Int -> [Int] -> SPut putWindow top8 bot8s = let blks = maximum bot8s `shiftR` 3 in putInt8 top8 <> put8 (1 + fromIntegral blks) <> putBits 0 [ (the block, foldl' mergeBits 0 bot8) | bot8 <- bot8s, let block = bot8 `shiftR` 3, then group by block using groupWith ] where -- | Combine type bits in network bit order, i.e. bit 0 first. mergeBits acc b = setBit acc (7 - b.&.0x07) putBits :: Int -> [(Int, Word8)] -> SPut putBits _ [] = pure mempty putBits n ((block, octet) : rest) = putReplicate (block-n) 0 <> put8 octet <> putBits (block + 1) rest -- | Encode EDNS OPTION consisting of a list of octets. 
putODWords :: Word16 -> [Word8] -> SPut putODWords code ws = mconcat [ put16 code , putInt16 $ length ws , mconcat $ map put8 ws ] -- | Encode an EDNS OPTION byte string. putODBytes :: Word16 -> ByteString -> SPut putODBytes code bs = mconcat [ put16 code , putInt16 $ BS.length bs , putByteString bs ] putOData :: OData -> SPut putOData (OD_NSID nsid) = putODBytes (fromOptCode NSID) nsid putOData (OD_DAU as) = putODWords (fromOptCode DAU) as putOData (OD_DHU hs) = putODWords (fromOptCode DHU) hs putOData (OD_N3U hs) = putODWords (fromOptCode N3U) hs putOData (OD_ClientSubnet srcBits scpBits ip) = -- https://tools.ietf.org/html/rfc7871#section-6 -- -- o ADDRESS, variable number of octets, contains either an IPv4 or -- IPv6 address, depending on FAMILY, which MUST be truncated to the -- number of bits indicated by the SOURCE PREFIX-LENGTH field, -- padding with 0 bits to pad to the end of the last octet needed. -- -- o A server receiving an ECS option that uses either too few or too -- many ADDRESS octets, or that has non-zero ADDRESS bits set beyond -- SOURCE PREFIX-LENGTH, SHOULD return FORMERR to reject the packet, -- as a signal to the software developer making the request to fix -- their implementation. -- let octets = fromIntegral $ (srcBits + 7) `div` 8 prefix addr = Data.IP.addr $ makeAddrRange addr $ fromIntegral srcBits (family, raw) = case ip of IPv4 ip4 -> (1, take octets $ fromIPv4 $ prefix ip4) IPv6 ip6 -> (2, take octets $ fromIPv6b $ prefix ip6) dataLen = 2 + 2 + octets in mconcat [ put16 $ fromOptCode ClientSubnet , putInt16 dataLen , put16 family , put8 srcBits , put8 scpBits , mconcat $ fmap putInt8 raw ] putOData (OD_ECSgeneric family srcBits scpBits addr) = mconcat [ put16 $ fromOptCode ClientSubnet , putInt16 $ 4 + BS.length addr , put16 family , put8 srcBits , put8 scpBits , putByteString addr ] putOData (UnknownOData code bs) = putODBytes code bs -- In the case of the TXT record, we need to put the string length -- fixme : What happens with the length > 256 ? putByteStringWithLength :: BS.ByteString -> SPut putByteStringWithLength bs = putInt8 (fromIntegral $ BS.length bs) -- put the length of the given string <> putByteString bs ---------------------------------------------------------------- rootDomain :: Domain rootDomain = BS.pack "." putDomain :: Domain -> SPut putDomain = putDomain' '.' putMailbox :: Mailbox -> SPut putMailbox = putDomain' '@' putDomain' :: Char -> ByteString -> SPut putDomain' sep dom | BS.null dom || dom == rootDomain = put8 0 | otherwise = do mpos <- wsPop dom cur <- gets wsPosition case mpos of Just pos -> putPointer pos Nothing -> do -- Pointers are limited to 14-bits! when (cur <= 0x3fff) $ wsPush dom cur mconcat [ putPartialDomain hd , putDomain' '.' tl ] where -- Try with the preferred separator if present, else fall back to '.'. (hd, tl) = loop (c2w sep) where loop w = case parseLabel w dom of Right p | w /= 0x2e && BS.null (snd p) -> loop 0x2e | otherwise -> p Left e -> E.throw e c2w = fromIntegral . fromEnum putPointer :: Int -> SPut putPointer pos = putInt16 (pos .|. 0xc000) putPartialDomain :: Domain -> SPut putPartialDomain = putByteStringWithLength
repo_name: kazu-yamamoto/dns
path: internal/Network/DNS/Encode/Builders.hs
license: bsd-3-clause
size: 12,654 | n_ast_errors: 0 | ast_max_depth: 18 | n_whitespaces: 4,512 | n_ast_nodes: 3,380 | n_ast_terminals: 1,694 | n_ast_nonterminals: 1,686 | loc: 268 | cycloplexity: 24
{-# LANGUAGE FlexibleContexts #-}
module Language.Mojito.Syntax.SExpr where

import Text.ParserCombinators.Parsec hiding (State)
import Control.Monad.Except

----------------------------------------------------------------------
-- S-Expressions
----------------------------------------------------------------------

-- An s-expression is either an atom or a list of s-expression.
-- An atom can be a floating or integral number, a string, or
-- a symbol (anything else).
data SExpr = Sym String
           | FltNum Double
           | IntNum Integer
           | Str String
           | List [SExpr]
  deriving (Eq, Show)

isSym :: SExpr -> Bool
isSym (Sym _) = True
isSym _ = False

----------------------------------------------------------------------
-- S-Expressions parsing
----------------------------------------------------------------------

-- Like parseSExpr but turns the Parsec ParseError into a string.
parseSExpr' :: MonadError String m => String -> m SExpr
parseSExpr' s = case parseSExpr s of
  Left err -> throwError $ show err
  Right r -> return r

-- Like parseSExprs but turns the Parsec ParseError into a string.
parseSExprs' :: MonadError String m => String -> m [SExpr]
parseSExprs' s = case parseSExprs s of
  Left err -> throwError $ show err
  Right r -> return r

-- Parse an s-expression and return either a parse error
-- or the parsed s-expression.
parseSExpr :: String -> Either ParseError SExpr
parseSExpr = parse (skipMany blank >> parseExpr) "s-expression"

parseSExprs :: String -> Either ParseError [SExpr]
parseSExprs = parse (many (skipMany blank >> parseExpr)) "s-expressions"

-- Parse a complete symbol.
parseSymbol :: Parser SExpr
parseSymbol = do
  a <- (noneOf " \t\n\"()0123456789")
  b <- many (noneOf " \t\n\"()")
  return $ Sym (a : b)

-- Parse a number, i.e. any symbol beginning with a digit.
parseNumber :: Parser SExpr
parseNumber =
  (intConstant >>= return . IntNum)
  <|> (floatingConstant >>= return . FltNum)

-- Parse a string in double quotes.
parseString :: Parser SExpr
parseString = do
  _ <- char '"'
  x <- many (noneOf "\t\n\"")
  _ <- char '"'
  return $ Str x

-- Parse an atom. The () atom is handled by parseList.
parseAtom :: Parser SExpr
parseAtom = parseSymbol <|> parseNumber <|> parseString

-- Parse a list, i.e. many expressions bracketed by parens
-- or the () atom.
parseList :: Parser SExpr
parseList = do
  _ <- char '('
  skipMany blank
  x <- parseExprs
  _ <- char ')'
  return $ if null x then Sym "()" else List x

-- Parse an expression (an atom or a list).
parseExpr :: Parser SExpr
parseExpr = do
  s <- (parseAtom <|> parseList)
  skipMany blank
  return s

-- Parse many expressions (parens not included).
parseExprs :: Parser [SExpr]
parseExprs = many parseExpr

----------------------------------------------------------------------
-- Number parsing (taken from language-glsl).
----------------------------------------------------------------------

-- TODO the size of the int should fit its type.
intConstant :: Parser Integer
intConstant = choice
  [ hexadecimal
  , octal
  , badOctal >> fail "Invalid octal number"
  , decimal
  ]

floatingConstant :: Parser Double
floatingConstant = choice
  [ floatExponent
  , floatPoint
  , pointFloat
  ]

----------------------------------------------------------------------
-- Lexical elements helpers
----------------------------------------------------------------------

comment :: Parser ()
comment = do
  _ <- string "--- "
  _ <- manyTill anyChar ((newline >> return ()) <|> eof)
  return ()

blank :: Parser ()
blank = try comment <|> (space >> return ())

hexadecimal :: Parser Integer
hexadecimal = try $ do
  _ <- char '0'
  _ <- oneOf "Xx"
  d <- many1 hexDigit
  _ <- optionMaybe $ oneOf "Uu" -- TODO
  return $ read ("0x" ++ d)

octal :: Parser Integer
octal = try $ do
  _ <- char '0'
  d <- many1 octDigit
  _ <- optionMaybe $ oneOf "Uu" -- TODO
  return $ read ("0o" ++ d)

badOctal :: Parser ()
badOctal = try $ char '0' >> many1 hexDigit >> return ()

decimal :: Parser Integer
decimal = try $ do
  d <- many1 digit
  notFollowedBy (char '.' <|> (expo >> return ' '))
  _ <- optionMaybe $ oneOf "Uu" -- TODO
  return $ read d

floatExponent :: Parser Double
floatExponent = try $ do
  d <- many1 digit
  e <- expo
  _ <- optionMaybe $ oneOf "Ff" -- TODO
  return $ read $ d ++ e

floatPoint :: Parser Double
floatPoint = try $ do
  d <- many1 digit
  _ <- char '.'
  d' <- many digit
  let d'' = if null d' then "0" else d'
  e <- optionMaybe expo
  _ <- optionMaybe $ oneOf "Ff" -- TODO
  return $ read $ d ++ "." ++ d'' ++ maybe "" id e

pointFloat :: Parser Double
pointFloat = try $ do
  _ <- char '.'
  d <- many1 digit
  e <- optionMaybe expo
  _ <- optionMaybe $ oneOf "Ff" -- TODO
  return $ read $ "0." ++ d ++ maybe "" id e

expo :: Parser String
expo = try $ do
  _ <- oneOf "Ee"
  s <- optionMaybe (oneOf "+-")
  d <- many1 digit
  return $ "e" ++ maybe "" (:[]) s ++ d
repo_name: noteed/mojito
path: Language/Mojito/Syntax/SExpr.hs
license: bsd-3-clause
size: 4,913 | n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 957 | n_ast_nodes: 1,420 | n_ast_terminals: 698 | n_ast_nonterminals: 722 | loc: 119 | cycloplexity: 2
{-# LANGUAGE BangPatterns #-} module Language.BCoPL.DataLevel.ReduceNatExp ( -- * Types Judge(OnNat,ReduceTo) -- * Deducers , deduceOne , deduceDetL , deduceDetR , deduceMulti -- * Sessions , sessionDetL , sessionDetR , sessionMultiL , sessionMultiR , sessionMulti , sessionOne , sessionDetL' , sessionDetR' , sessionMultiL' , sessionMultiR' , sessionMulti' , sessionOne' ) where import Control.Applicative ((<|>)) import Language.BCoPL.DataLevel.Nat (Nat(..)) import qualified Language.BCoPL.DataLevel.Nat as Nat (Judge(..),deduce) import Language.BCoPL.DataLevel.Exp (Exp(..),operator,loperand,roperand) import Language.BCoPL.DataLevel.Derivation (Tree(..),Derivation,Deducer,sessionGen,sessionGen') data Judge = OnNat Nat.Judge | ReduceTo Exp Exp | ReduceToOne Exp Exp | ReduceToDet Exp Exp toOne :: Judge -> Judge toOne (ReduceTo e1 e2) = ReduceToOne e1 e2 toOne j = j toDet :: Judge -> Judge toDet (ReduceTo e1 e2) = ReduceToDet e1 e2 toDet j = j instance Show Judge where show (OnNat jn) = show jn show (ReduceTo e1 e2) = unwords [show e1,"-*->",show e2] show (ReduceToOne e1 e2) = unwords [show e1,"--->",show e2] show (ReduceToDet e1 e2) = unwords [show e1,"-d->",show e2] instance Read Judge where readsPrec _ s = case break ("-*->"==) (words s) of (es1,_:es2) -> [(ReduceTo (read (concat es1)) (read (concat es2)),"")] (es1,[]) -> case break ("--->"==) es1 of (es1',_:es2') -> [(ReduceToOne (read (concat es1')) (read (concat es2')),"")] (es1',[]) -> case break ("-d->"==) es1' of (es1'',_:es2'') -> [(ReduceToDet (read (concat es1'')) (read (concat es2'')),"")] _ -> error ("Invalid syntax for 'ReduceNatExp' judge: "++s) toJudge :: Derivation Nat.Judge -> Derivation Judge toJudge (Node (s,nj) ts) = Node (s,OnNat nj) (map toJudge ts) deduceOne :: Deducer Judge deduceOne j = case j of OnNat nj -> map toJudge (Nat.deduce nj) ReduceTo _ _ -> deduceOne (toOne j) ReduceToOne exp1 exp2 -> case exp2 of Nat n3 -> case exp1 of Nat n1 :+: Nat n2 -> [ Node ("R-Plus",j) [toJudge j1] | j1 <- Nat.deduce (Nat.Plus n1 n2 n3) ] Nat n1 :*: Nat n2 -> [ Node ("R-Times",j) [toJudge j1] | j1 <- Nat.deduce (Nat.Times n1 n2 n3) ] _ -> [] e1' :+: e2' -> case exp1 of e1 :+: e2 | e2 == e2' -> [ Node ("R-PlusL",j) [j1] | j1 <- deduceOne (ReduceToOne e1 e1') ] | e1 == e1' -> [ Node ("R-PlusR",j) [j1] | j1 <- deduceOne (ReduceToOne e2 e2') ] _ -> [] e1' :*: e2' -> case exp1 of e1 :*: e2 | e2 == e2' -> [ Node ("R-TimesL",j) [j1] | j1 <- deduceOne (ReduceToOne e1 e1') ] | e1 == e1' -> [ Node ("R-TimesR",j) [j1] | j1 <- deduceOne (ReduceToOne e2 e2') ] _ -> [] deduceMulti :: Deducer Judge -> Deducer Judge deduceMulti deduce1 j@(ReduceTo exp1 exp2) = if exp1 == exp2 then [ Node ("MR-Zero",j) [] ] else case [ Node ("MR-One",j) [j'] | j' <- deduce1 j ] of d@(_:_) -> d [] -> case j of ReduceTo exp1 exp2 | exp1 == exp2 -> [ Node ("MR-Zero",j) [] ] | not (isNormalForm exp1) && not (isNormalForm exp2) && (operator exp1 z z /= operator exp2 z z) -> [] | otherwise -> [ Node ("MR-Multi",j) [j1,j2] | exp' <- genExps exp1 exp2 , j2 <- deduceMulti deduce1 $ ReduceTo exp' exp2 , j1 <- deduceMulti deduce1 $ ReduceTo exp1 exp' ] where z = Nat Z genExps :: Exp -> Exp -> [Exp] genExps exp1 exp2 | isNormalForm exp2 = case exp1 of Nat _ -> [] _ -> case exp2 of Nat Z -> case exp1 of _ :+: _ -> [ Nat Z :+: Nat Z ] _ :*: _ -> [ Nat Z :*: Nat n | n <- [Z ..] ] n -> case loperand exp1 of Nat n1 -> [ operator exp1 (Nat n1) (Nat n2) | n2 <- [S Z .. ] ] _ -> [ operator exp1 (Nat n1) (roperand exp1) | n1 <- [S Z .. 
] ] | isDeltaRedex exp2 = case exp1 of Nat _ -> [] _ -> case loperand exp1 of e1@(Nat _) | e1 == loperand exp2 -> [ operator exp1 e1 (roperand exp2) ] | otherwise -> [] e1 -> [ operator exp1 (loperand exp2) (roperand exp1) ] | otherwise = case exp1 of Nat _ -> [] _ -> case loperand exp1 of e1@(Nat _) | e1 == loperand exp2 -> [ operator exp1 e1 (roperand exp2) ] | otherwise -> [] e1 -> [ operator exp1 (loperand exp2) (roperand exp1) ] deduceDetL :: Deducer Judge deduceDetL j = case j of ReduceTo _ _ -> deduceDetL (toDet j) OnNat nj -> map toJudge (Nat.deduce nj) ReduceToDet exp1 exp2 -> case exp1 of e1 :+: e2 -> case e1 of Nat n1 -> case e2 of Nat n2 -> case exp2 of Nat n3 -> [Node ("DR-Plus",j) [toJudge j'] | j' <- Nat.deduce (Nat.Plus n1 n2 n3)] _ -> [] _ -> case exp2 of Nat n1' :+: e2' | n1 == n1' -> [Node ("DR-PlusR",j) [j'] | j' <- deduceDetL (ReduceTo e2 e2')] _ -> [] _ -> case exp2 of e1' :+: e2' | e2 == e2' -> [Node ("DR-PlusL",j) [j'] | j' <- deduceDetL (ReduceTo e1 e1')] _ -> [] e1 :*: e2 -> case e1 of Nat n1 -> case e2 of Nat n2 -> case exp2 of Nat n3 -> [Node ("DR-Times",j) [toJudge j'] | j' <- Nat.deduce (Nat.Times n1 n2 n3)] _ -> [] _ -> case exp2 of Nat n1' :*: e2' | n1 == n1' -> [Node ("DR-TimesR",j) [j'] | j' <- deduceDetL (ReduceTo e2 e2')] _ -> [] _ -> case exp2 of e1' :*: e2' | e2 == e2' -> [Node ("DR-TimesL",j) [j'] | j' <- deduceDetL (ReduceTo e1 e1')] _ -> [] _ -> [] _ -> [] deduceDetR :: Deducer Judge deduceDetR j = case j of ReduceTo _ _ -> deduceDetR (toDet j) OnNat nj -> map toJudge (Nat.deduce nj) ReduceToDet exp1 exp2 -> case exp1 of e1 :+: e2 -> case e2 of Nat n2 -> case e1 of Nat n1 -> case exp2 of Nat n3 -> [Node ("DR-Plus",j) [toJudge j'] | j' <- Nat.deduce (Nat.Plus n1 n2 n3)] _ -> [] _ -> case exp2 of e1' :+: Nat n2' | n2 == n2' -> [Node ("DR-PlusL",j) [j'] | j' <- deduceDetR (ReduceTo e1 e1')] _ -> [] _ -> case exp2 of e1' :+: e2' | e1 == e1' -> [Node ("DR-PlusL",j) [j'] | j' <- deduceDetR (ReduceTo e2 e2')] _ -> [] e1 :*: e2 -> case e2 of Nat n2 -> case e1 of Nat n1 -> case exp2 of Nat n3 -> [Node ("DR-Times",j) [toJudge j'] | j' <- Nat.deduce (Nat.Times n1 n2 n3)] _ -> [] _ -> case exp2 of e1' :*: Nat n2' | n2 == n2' -> [Node ("DR-TimesL",j) [j'] | j' <- deduceDetR (ReduceTo e1 e1')] _ -> [] _ -> case exp2 of e1' :*: e2' | e1 == e1' -> [Node ("DR-TimesL",j) [j'] | j' <- deduceDetR (ReduceTo e2 e2')] _ -> [] _ -> [] _ -> [] isNormalForm :: Exp -> Bool isNormalForm (Nat _) = True isNormalForm _ = False isDeltaRedex :: Exp -> Bool isDeltaRedex e = case e of (e1 :+: e2) -> isNormalForm e1 && isNormalForm e2 (e1 :*: e2) -> isNormalForm e1 && isNormalForm e2 _ -> False sessionDetL,sessionDetR,sessionMultiL,sessionMultiR,sessionMulti,sessionOne :: IO () sessionDetL = sessionGen ("ReduceDetL> ",deduceDetL) sessionDetR = sessionGen ("ReduceDetR> ",deduceDetR) sessionMultiL = sessionGen ("ReduceMultiL> ",deduceMulti deduceDetL) sessionMultiR = sessionGen ("ReduceMultiR> ",deduceMulti deduceDetR) sessionMulti = sessionGen ("ReduceMulti> ",deduceMulti deduceOne) sessionOne = sessionGen ("ReduceOne> ",deduceOne) sessionDetL',sessionDetR',sessionMultiL',sessionMultiR',sessionMulti',sessionOne' :: IO () sessionDetL' = sessionGen' ("ReduceDetL> ",deduceDetL) sessionDetR' = sessionGen' ("ReduceDetR> ",deduceDetR) sessionMultiL' = sessionGen' ("ReduceMultiL> ",deduceMulti deduceDetL) sessionMultiR' = sessionGen' ("ReduceMultiR> ",deduceMulti deduceDetR) sessionMulti' = sessionGen' ("ReduceMulti> ",deduceMulti deduceOne) sessionOne' = sessionGen' ("ReduceOne> ",deduceOne) 
{- S(Z) * S(Z) + S(Z) * S(Z) -*-> S(S(Z)) by MR-Multi { S(Z) * S(Z) + S(Z) * S(Z) -*-> S(Z) + S(Z) * S(Z) by MR-One { S(Z) * S(Z) + S(Z) * S(Z) ---> S(Z) + S(Z) * S(Z) by R-PlusL { S(Z) * S(Z) ---> S(Z) by R-Times { S(Z) times S(Z) is S(Z) by T-Succ { Z times S(Z) is Z by T-Zero { } ; S(Z) plus Z is S(Z) by P-Succ { Z plus Z is Z by P-Zero { } } } } } } ; S(Z) + S(Z) * S(Z) -*-> S(S(Z)) by MR-Multi { S(Z) + S(Z) * S(Z) -*-> S(Z) + S(Z) by MR-One { S(Z) + S(Z) * S(Z) ---> S(Z) + S(Z) by R-PlusR { S(Z) * S(Z) ---> S(Z) by R-Times { S(Z) times S(Z) is S(Z) by T-Succ { Z times S(Z) is Z by T-Zero { } ; S(Z) plus Z is S(Z) by P-Succ { Z plus Z is Z by P-Zero { } } } } } } ; S(Z) + S(Z) -*-> S(S(Z)) by MR-One { S(Z) + S(Z) ---> S(S(Z)) by R-Plus { S(Z) plus S(Z) is S(S(Z)) by P-Succ { Z plus S(Z) is S(Z) by P-Zero { } } } } } } -}
repo_name: nobsun/hs-bcopl
path: src/Language/BCoPL/DataLevel/ReduceNatExp.hs
license: bsd-3-clause
size: 10,418 | n_ast_errors: 0 | ast_max_depth: 26 | n_whitespaces: 4,012 | n_ast_nodes: 3,529 | n_ast_terminals: 1,808 | n_ast_nonterminals: 1,721 | loc: 211 | cycloplexity: 16
module Run where

import Data.Maybe (fromMaybe)
import Network.Mail.Mime
import Pipes
import Pipes.Prelude as P
import Pipes.Safe
import System.Environment
import System.Exit.Compat
import System.IO
import Systemd.Journal

import Options
import Process

run :: IO ()
run = do
  options <- getConfiguration
  case receivers options of
    [] -> do
      progName <- getProgName
      die ("usage: " ++ progName ++ " EMAIL_ADDRESS...")
    _ -> runEffect $ runSafeP $
      (journal >-> process options ioJournal >-> for cat (liftIO . notify options))

journal :: MonadSafe m => Producer JournalFields m ()
journal = openJournal [] FromEnd Nothing Nothing >-> P.map journalEntryFields

notify :: Configuration String -> Mail -> IO ()
notify options mail = do
  hPutStrLn stderr "sending mail notification"
  renderSendMailCustom sendmailPath' sendmailOpts' mail
 where
  sendmailPath' = fromMaybe "/usr/sbin/sendmail" $ sendmailPath options
  sendmailOpts' = fromMaybe [] $ sendmailOpts options
repo_name: zalora/journal-mailer
path: src/Run.hs
license: bsd-3-clause
size: 1,117 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 299 | n_ast_nodes: 300 | n_ast_terminals: 152 | n_ast_nonterminals: 148 | loc: 31 | cycloplexity: 2
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable, DeriveGeneric #-}
{-# OPTIONS_GHC -Wall #-}

import Control.Distributed.Process
import Control.Distributed.Process.Closure
import Control.Monad
import Text.Printf
import GHC.Generics (Generic)
import Data.Binary
import Data.Typeable

import DistribUtils

-- <<Message
data Message = Ping (SendPort ProcessId)
  deriving (Typeable, Generic)

instance Binary Message
-- >>

-- <<pingServer
pingServer :: Process ()
pingServer = do
  Ping chan <- expect
  say $ printf "ping received from %s" (show chan)
  mypid <- getSelfPid
  sendChan chan mypid
-- >>

-- <<remotable
remotable ['pingServer]
-- >>

-- <<master
master :: [NodeId] -> Process ()
master peers = do
  ps <- forM peers $ \nid -> do
    say $ printf "spawning on %s" (show nid)
    spawn nid $(mkStaticClosure 'pingServer)

  mapM_ monitor ps

  ports <- forM ps $ \pid -> do
    say $ printf "pinging %s" (show pid)
    (sendport,recvport) <- newChan      -- <1>
    send pid (Ping sendport)            -- <2>
    return recvport

  forM_ ports $ \port -> do             -- <3>
    _ <- receiveChan port
    return ()

  say "All pongs successfully received"
  terminate
-- >>

-- <<main
main :: IO ()
main = distribMain master Main.__remoteTable
-- >>
repo_name: mono0926/ParallelConcurrentHaskell
path: distrib-ping/ping-tc.hs
license: bsd-3-clause
size: 1,280 | n_ast_errors: 31 | ast_max_depth: 8 | n_whitespaces: 286 | n_ast_nodes: 347 | n_ast_terminals: 196 | n_ast_nonterminals: 151 | loc: 38 | cycloplexity: 1
{-# LANGUAGE OverloadedStrings #-}

module Auth where

import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.ByteString.Base64 (decode, encode)
import Data.Maybe (isJust)
import qualified Data.Text as T
import Snap.Core (Snap, writeBS, getRequest, getHeader, modifyResponse, setResponseStatus, getResponse, finishWith, writeText, logError)

type Username = ByteString
type Password = ByteString
type Credentials = (Username, Password)

-- TODO: comes from a DB
validCredentials = [ "b2xpdmVyLmR1bmtsQGdtYWlsLmNvbTpvZGk=" ]

-- 32 is the numeric literal for ' ' (space)
--   :m +Data.Char
--   fromIntegral (ord ' ') ⇒ 32
-- to parse: Basic lkasjdfla832jlkfzuo=
extractCredentials ∷ ByteString → Maybe ByteString
extractCredentials bs = case BS.split 32 bs of
  [] → Nothing
  hs → case hs of
    h | length h < 2       → Nothing  -- header is corrupt or not available
      | head h /= "Basic"  → Nothing  -- header does not start with "Basic"
      | otherwise          → (Just $ head . tail $ h)

-- | Check if the credentials are valid or not.
-- TODO: get credentials from DB?
credentialsValid ∷ ByteString → Bool
credentialsValid bs = bs `elem` validCredentials

throwDenied ∷ T.Text → ByteString → Snap ()
throwDenied msg bs = do
  modifyResponse $ setResponseStatus 403 "Access Denied"
  writeText $ T.append "Access Denied: " msg
  logError $ BS.append "Access Denied from: " bs
  getResponse >>= finishWith

-- | Basic authorization for requests.
-- TODO: logging ...
withAuth :: Snap () -> Snap ()
withAuth succ = do
  rq <- getRequest
  let mh = getHeader "Authorization" rq
  case mh of
    Just header -> case extractCredentials header of
      Just cred → if credentialsValid cred
                    then logError (BS.append "Request from: " cred) >> succ
                    else throwDenied "wrong username/password" cred
      Nothing   → throwDenied "error in parsing request-header"
                              "error in parsing request-header"
    Nothing -> throwDenied "could not find matching header for authentication"
                           "could not find matching header for authentication"
repo_name: odi/tcl-reservation
path: src/Auth.hs
license: bsd-3-clause
size: 2,350 | n_ast_errors: 0 | ast_max_depth: 18 | n_whitespaces: 647 | n_ast_nodes: 478 | n_ast_terminals: 254 | n_ast_nonterminals: 224 | loc: -1 | cycloplexity: -1
import System.Process (runProcess, waitForProcess)
import System.Environment (getArgs)
import System.Exit (ExitCode (ExitSuccess), exitWith)
import Control.Monad (unless, when, forM_)
import System.Directory
import Data.List (isSuffixOf, isPrefixOf)
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as TE
import Data.Monoid (mempty)
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import Control.Applicative ((<$>))
import Control.Exception (throw)
import System.FilePath ((</>))

rawSystem' :: String -> [String] -> FilePath -> IO ()
rawSystem' a b wdir = do
    ph <- runProcess a b (Just wdir) Nothing Nothing Nothing Nothing
    ec <- waitForProcess ph
    unless (ec == ExitSuccess) $ exitWith ec

main :: IO ()
main = do
    args <- getArgs
    let isSrcOnly = args == ["--src-only"]
    unless isSrcOnly $ rawSystem' "cabal" ("install" : args) "."
    hasSources <- doesFileExist "sources.txt"
    if hasSources
        then do
            ls <- fmap lines $ readFile "sources.txt"
            forM_ ls $ \l -> do
                exists <- doesDirectoryExist l
                when exists $ do
                    files <- getDirectoryContents l
                    when (any (".cabal" `isSuffixOf`) files) $ installSrc l
        else installSrc "."

installSrc :: FilePath -> IO ()
installSrc root = do
    putStrLn $ "Installing source package: " ++ root
    let dist = root </> "dist"
    distExists <- doesDirectoryExist dist
    when distExists $ getDirectoryContents dist >>= mapM_
        (\fp -> when (".tar.gz" `isSuffixOf` fp) $ removeFile $ dist </> fp)
    rawSystem' "cabal" ["sdist"] root
    files <- getDirectoryContents dist
    case filter (".tar.gz" `isSuffixOf`) files of
        [x] -> do
            let y = drop 1 $ dropWhile (/= '.') $ drop 1 $ dropWhile (/= '.') $ reverse x
            let (ver', name') = break (== '-') y
            let ver = reverse ver'
            let name = reverse $ drop 1 name'
            addToDB root dist name ver
        [] -> error "Missing tarball"
        _ -> error "Too many tarballs"

addToDB root dist name ver = do
    cabal <- getAppUserDataDirectory "cabal"
    let pd = cabal ++ "/packages/cabal-src/"
    createDirectoryIfMissing True pd
    let tb = pd ++ "00-index.tar"
    e <- doesFileExist tb
    entries <- if e
        then Tar.foldEntries (:) [] throw . Tar.read . L.fromChunks . return <$> S.readFile tb
        else return []
    cabalLBS <- L.readFile $ root </> name ++ ".cabal"
    Right tarPath <- return $ TE.toTarPath False $ concat [name, "/", ver, "/", name, "-", ver, ".cabal"]
    let entry = TE.fileEntry tarPath cabalLBS
    let entries' = entry : filter (\e -> TE.entryTarPath e /= tarPath) entries
    L.writeFile tb $ Tar.write entries'
    let dir = pd ++ concat [name, "/", ver, "/"]
    createDirectoryIfMissing True dir
    let filename = concat [name, "-", ver, ".tar.gz"]
    copyFile (dist </> filename) (dir ++ filename)
    fixConfig pd $ cabal ++ "/config"

fixConfig pd fn = do
    ls' <- lines <$> readFile fn
    let oldLines = [ "remote-repo: cabal-src:http://www.haskell.org/" ]
    let s = "local-repo: " ++ pd
    let ls = filter (not . flip elem oldLines) ls'
    unless (s `elem` ls) $ writeFile fn $ unlines $ addRepo s ls

addRepo s [] = [s]
addRepo s (x:xs)
    | "remote-repo:" `isPrefixOf` x = s : x : xs
    | otherwise = x : addRepo s xs
repo_name: yesodweb/cabal-src
path: cabal-src-install.hs
license: bsd-3-clause
size: 3,544 | n_ast_errors: 0 | ast_max_depth: 21 | n_whitespaces: 982 | n_ast_nodes: 1,259 | n_ast_terminals: 630 | n_ast_nonterminals: 629 | loc: 88 | cycloplexity: 3
-- Taken from GHC: compiler/utils/Encoding.hs
import Text.Encoding.Z (zDecodeString)
import System.Environment (getArgs)

main :: IO ()
main = do
  args <- getArgs
  putStrLn . unwords $ map zDecodeString args
repo_name: Peaker/ghczdecode
path: zdecode.hs
license: bsd-3-clause
size: 211 | n_ast_errors: 0 | ast_max_depth: 8 | n_whitespaces: 34 | n_ast_nodes: 61 | n_ast_terminals: 32 | n_ast_nonterminals: 29 | loc: 6 | cycloplexity: 1
{-# LANGUAGE Rank2Types, TypeSynonymInstances, DeriveDataTypeable #-} {- | Module : Data.FileStore.Types Copyright : Copyright (C) 2009 John MacFarlane License : BSD 3 Maintainer : John MacFarlane <[email protected]> Stability : alpha Portability : GHC 6.10 required Type definitions for "Data.FileStore". -} module Data.FileStore.Types ( RevisionId , Resource(..) , Author(..) , Change(..) , Description , Revision(..) , Contents(..) , TimeRange(..) , MergeInfo(..) , FileStoreError(..) , SearchMatch(..) , SearchQuery(..) , defaultSearchQuery , DateTime , FileStore (..) ) where import Data.ByteString.Lazy (ByteString) import Data.Typeable import Data.ByteString.Lazy.UTF8 (toString, fromString) import Data.DateTime (DateTime) import Control.Exception (Exception) import Prelude hiding (catch) type RevisionId = String data Resource = FSFile FilePath | FSDirectory FilePath deriving (Show, Read, Eq, Typeable, Ord) data Author = Author { authorName :: String , authorEmail :: String } deriving (Show, Read, Eq, Typeable) data Change = Added FilePath | Deleted FilePath | Modified FilePath deriving (Show, Read, Eq, Typeable) type Description = String data Revision = Revision { revId :: RevisionId , revDateTime :: DateTime , revAuthor :: Author , revDescription :: Description , revChanges :: [Change] } deriving (Show, Read, Eq, Typeable) class Contents a where fromByteString :: ByteString -> a toByteString :: a -> ByteString instance Contents ByteString where toByteString = id fromByteString = id instance Contents String where toByteString = fromString fromByteString = toString data TimeRange = TimeRange { timeFrom :: Maybe DateTime -- ^ @Nothing@ means no lower bound , timeTo :: Maybe DateTime -- ^ @Nothing@ means no upper bound } deriving (Show, Read, Eq, Typeable) data MergeInfo = MergeInfo { mergeRevision :: Revision -- ^ The revision w/ which changes were merged , mergeConflicts :: Bool -- ^ @True@ if there were merge conflicts , mergeText :: String -- ^ The merged text, w/ conflict markers } deriving (Show, Read, Eq, Typeable) data FileStoreError = RepositoryExists -- ^ Tried to initialize a repo that exists | ResourceExists -- ^ Tried to create a resource that exists | NotFound -- ^ Requested resource was not found | IllegalResourceName -- ^ The specified resource name is illegal | Unchanged -- ^ The resource was not modified, -- because the contents were unchanged | UnsupportedOperation | NoMaxCount -- ^ The darcs version used does not support -- --max-count | UnknownError String deriving (Read, Eq, Typeable) instance Show FileStoreError where show RepositoryExists = "RepositoryExists" show ResourceExists = "ResourceExists" show NotFound = "NotFound" show IllegalResourceName = "IllegalResourceName" show Unchanged = "Unchanged" show UnsupportedOperation = "UnsupportedOperation" show NoMaxCount = "NoMaxCount:\n" ++ "filestore was compiled with the maxcount flag, but your version of\n" ++ "darcs does not support the --max-count option. You should either\n" ++ "upgrade to darcs >= 2.3.0 (recommended) or compile filestore without\n" ++ "the maxcount flag (cabal install filestore -f-maxcount)." show (UnknownError s) = "UnknownError: " ++ s instance Exception FileStoreError data SearchQuery = SearchQuery { queryPatterns :: [String] -- ^ Patterns to match , queryWholeWords :: Bool -- ^ Match patterns only with whole words? , queryMatchAll :: Bool -- ^ Return matches only from files in which -- all patterns match? , queryIgnoreCase :: Bool -- ^ Make matches case-insensitive? 
} deriving (Show, Read, Eq, Typeable) defaultSearchQuery :: SearchQuery defaultSearchQuery = SearchQuery { queryPatterns = [] , queryWholeWords = True , queryMatchAll = True , queryIgnoreCase = True } data SearchMatch = SearchMatch { matchResourceName :: FilePath , matchLineNumber :: Integer , matchLine :: String } deriving (Show, Read, Eq, Typeable) -- | A versioning filestore, which can be implemented using the -- file system, a database, or revision-control software. data FileStore = FileStore { -- | Initialize a new filestore. initialize :: IO () -- | Save contents in the filestore. , save :: Contents a => FilePath -- Resource to save. -> Author -- Author of change. -> Description -- Description of change. -> a -- New contents of resource. -> IO () -- | Retrieve the contents of the named resource. , retrieve :: Contents a => FilePath -- Resource to retrieve. -> Maybe RevisionId -- @Just@ a particular revision ID, -- or @Nothing@ for latest -> IO a -- | Delete a named resource, providing author and log message. , delete :: FilePath -- Resource to delete. -> Author -- Author of change. -> Description -- Description of change. -> IO () -- | Rename a resource, providing author and log message. , rename :: FilePath -- Resource original name. -> FilePath -- Resource new name. -> Author -- Author of change. -> Description -- Description of change. -> IO () -- | Get history for a list of named resources in a (possibly openended) -- time range. If the list is empty, history for all resources will -- be returned. , history :: [FilePath] -- List of resources to get history for -- or @[]@ for all. -> TimeRange -- Time range in which to get history. -> IO [Revision] -- | Return the revision ID of the latest change for a resource. -- Raises 'NotFound' if the resource is not found. , latest :: FilePath -- Resource to get revision ID for. -> IO RevisionId -- | Return information about a revision, given the ID. -- Raises 'NotFound' if there is no such revision. , revision :: RevisionId -- Revision ID to get information for. -> IO Revision -- | Return a list of resources in the filestore. , index :: IO [FilePath] -- | Return a list of resources in a directory of the filestore. , directory :: FilePath -- Directory to list (empty for root) -> IO [Resource] -- | @True@ if the revision IDs match, in the sense that the -- can be treated as specifying the same revision. , idsMatch :: RevisionId -> RevisionId -> Bool -- | Search the filestore for patterns. , search :: SearchQuery -> IO [SearchMatch] }
repo_name: amplify-education/filestore
path: Data/FileStore/Types.hs
license: bsd-3-clause
size: 7,665 | n_ast_errors: 0 | ast_max_depth: 15 | n_whitespaces: 2,578 | n_ast_nodes: 1,087 | n_ast_terminals: 651 | n_ast_nonterminals: 436 | loc: 145 | cycloplexity: 1
module Main (main) where

import System.Process

import Language.Mecha
import Language.Mecha.Examples.CSG

main :: IO ()
main = do
  putStrLn "Writing file csg.scad. Opening with OpenSCAD ..."
  writeFile "csg.scad" $ openSCAD $ scaleAll 10 $ csg
  readProcess "OpenSCAD" ["csg.scad"] ""
  return ()
repo_name: tomahawkins/mecha
path: mains/MechaExamples.hs
license: bsd-3-clause
size: 302 | n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 53 | n_ast_nodes: 89 | n_ast_terminals: 45 | n_ast_nonterminals: 44 | loc: 10 | cycloplexity: 1
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module      :  Graphics.GL.APPLE.VertexArrayRange
-- Copyright   :  (c) Sven Panne 2019
-- License     :  BSD3
--
-- Maintainer  :  Sven Panne <[email protected]>
-- Stability   :  stable
-- Portability :  portable
--
--------------------------------------------------------------------------------

module Graphics.GL.APPLE.VertexArrayRange (
  -- * Extension Support
  glGetAPPLEVertexArrayRange,
  gl_APPLE_vertex_array_range,
  -- * Enums
  pattern GL_STORAGE_CACHED_APPLE,
  pattern GL_STORAGE_CLIENT_APPLE,
  pattern GL_STORAGE_SHARED_APPLE,
  pattern GL_VERTEX_ARRAY_RANGE_APPLE,
  pattern GL_VERTEX_ARRAY_RANGE_LENGTH_APPLE,
  pattern GL_VERTEX_ARRAY_RANGE_POINTER_APPLE,
  pattern GL_VERTEX_ARRAY_STORAGE_HINT_APPLE,
  -- * Functions
  glFlushVertexArrayRangeAPPLE,
  glVertexArrayParameteriAPPLE,
  glVertexArrayRangeAPPLE
) where

import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
repo_name: haskell-opengl/OpenGLRaw
path: src/Graphics/GL/APPLE/VertexArrayRange.hs
license: bsd-3-clause
size: 1,060 | n_ast_errors: 0 | ast_max_depth: 5 | n_whitespaces: 131 | n_ast_nodes: 93 | n_ast_terminals: 65 | n_ast_nonterminals: 28 | loc: 17 | cycloplexity: 0
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
module System.Physics where

import Control.Monad.State
import Data.ECS
import Data.Yaml
import GHC.Generics
import Control.Lens.Extra
import Physics.Bullet
import Linear.Extra
import Control.Monad.Reader

data PhysicsSystem = PhysicsSystem { _phyDynamicsWorld :: DynamicsWorld } deriving Show
makeLenses ''PhysicsSystem
defineSystemKey ''PhysicsSystem

data ShapeType = CubeShape | SphereShape
    deriving (Show, Generic, ToJSON, FromJSON)
defineComponentKey ''ShapeType

newtype Mass = Mass { unMass :: Float }
    deriving (Show, Generic, ToJSON, FromJSON)
defineComponentKey ''Mass

newtype Restitution = Restitution { unRestitution :: Float }
    deriving (Show, Generic, ToJSON, FromJSON)
defineComponentKey ''Restitution

defineComponentKey ''RigidBody

initSystemPhysics :: (MonadIO m, MonadState ECS m) => m ()
initSystemPhysics = do
    dynamicsWorld <- createDynamicsWorld mempty
    registerSystem sysPhysics (PhysicsSystem dynamicsWorld)

    registerComponent "RigidBody" myRigidBody $ (newComponentInterface myRigidBody)
        { ciDeriveComponent = Just $ do
            let bodyInfo = mempty
            mShapeType <- getComponent myShapeType
            forM_ mShapeType $ \shapeType -> do
                shape <- case shapeType of
                    CubeShape   -> createBoxShape (1 :: V3 Float)
                    SphereShape -> createSphereShape (1 :: Float)
                entityID <- ask
                rigidBody <- addRigidBody dynamicsWorld (CollisionObjectID entityID) shape bodyInfo
                setComponent myRigidBody rigidBody
        , ciRemoveComponent = withComponent_ myRigidBody $ \rigidBody -> do
            removeRigidBody dynamicsWorld rigidBody
            removeComponent myRigidBody
        }
    registerComponent "Mass" myMass (savedComponentInterface myMass)
    registerComponent "Restitution" myRestitution (savedComponentInterface myRestitution)
    registerComponent "ShapeType" myShapeType (savedComponentInterface myShapeType)

tickSystemPhysics :: (MonadState ECS m, MonadIO m) => m ()
tickSystemPhysics = do
    dynamicsWorld <- viewSystem sysPhysics phyDynamicsWorld
    stepSimulationSimple dynamicsWorld (1/60)
repo_name: lukexi/extensible-ecs
path: app/System/Physics.hs
license: bsd-3-clause
size: 2,350 | n_ast_errors: 0 | ast_max_depth: 23 | n_whitespaces: 512 | n_ast_nodes: 552 | n_ast_terminals: 278 | n_ast_nonterminals: 274 | loc: 49 | cycloplexity: 2
{- (c) The University of Glasgow 2006 (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 Pattern-matching bindings (HsBinds and MonoBinds) Handles @HsBinds@; those at the top level require different handling, in that the @Rec@/@NonRec@/etc structure is thrown away (whereas at lower levels it is preserved with @let@/@letrec@s). -} {-# LANGUAGE CPP #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE FlexibleContexts #-} {-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} module DsBinds ( dsTopLHsBinds, dsLHsBinds, decomposeRuleLhs, dsSpec, dsHsWrapper, dsTcEvBinds, dsTcEvBinds_s, dsEvBinds, dsMkUserRule ) where #include "HsVersions.h" import GhcPrelude import {-# SOURCE #-} DsExpr( dsLExpr ) import {-# SOURCE #-} Match( matchWrapper ) import DsMonad import DsGRHSs import DsUtils import GHC.HsToCore.PmCheck ( needToRunPmCheck, addTyCsDs, checkGuardMatches ) import GHC.Hs -- lots of things import CoreSyn -- lots of things import CoreOpt ( simpleOptExpr ) import OccurAnal ( occurAnalyseExpr ) import MkCore import CoreUtils import CoreArity ( etaExpand ) import CoreUnfold import CoreFVs import Digraph import Predicate import PrelNames import TyCon import TcEvidence import TcType import Type import Coercion import TysWiredIn ( typeNatKind, typeSymbolKind ) import Id import MkId(proxyHashId) import Name import VarSet import Rules import VarEnv import Var( EvVar ) import Outputable import Module import SrcLoc import Maybes import OrdList import Bag import BasicTypes import DynFlags import FastString import Util import UniqSet( nonDetEltsUniqSet ) import MonadUtils import qualified GHC.LanguageExtensions as LangExt import Control.Monad {-********************************************************************** * * Desugaring a MonoBinds * * **********************************************************************-} -- | Desugar top level binds, strict binds are treated like normal -- binds since there is no good time to force before first usage. dsTopLHsBinds :: LHsBinds GhcTc -> DsM (OrdList (Id,CoreExpr)) dsTopLHsBinds binds -- see Note [Strict binds checks] | not (isEmptyBag unlifted_binds) || not (isEmptyBag bang_binds) = do { mapBagM_ (top_level_err "bindings for unlifted types") unlifted_binds ; mapBagM_ (top_level_err "strict bindings") bang_binds ; return nilOL } | otherwise = do { (force_vars, prs) <- dsLHsBinds binds ; when debugIsOn $ do { xstrict <- xoptM LangExt.Strict ; MASSERT2( null force_vars || xstrict, ppr binds $$ ppr force_vars ) } -- with -XStrict, even top-level vars are listed as force vars. ; return (toOL prs) } where unlifted_binds = filterBag (isUnliftedHsBind . unLoc) binds bang_binds = filterBag (isBangedHsBind . unLoc) binds top_level_err desc (L loc bind) = putSrcSpanDs loc $ errDs (hang (text "Top-level" <+> text desc <+> text "aren't allowed:") 2 (ppr bind)) -- | Desugar all other kind of bindings, Ids of strict binds are returned to -- later be forced in the binding group body, see Note [Desugar Strict binds] dsLHsBinds :: LHsBinds GhcTc -> DsM ([Id], [(Id,CoreExpr)]) dsLHsBinds binds = do { ds_bs <- mapBagM dsLHsBind binds ; return (foldBag (\(a, a') (b, b') -> (a ++ b, a' ++ b')) id ([], []) ds_bs) } ------------------------ dsLHsBind :: LHsBind GhcTc -> DsM ([Id], [(Id,CoreExpr)]) dsLHsBind (L loc bind) = do dflags <- getDynFlags putSrcSpanDs loc $ dsHsBind dflags bind -- | Desugar a single binding (or group of recursive binds). 
dsHsBind :: DynFlags -> HsBind GhcTc -> DsM ([Id], [(Id,CoreExpr)]) -- ^ The Ids of strict binds, to be forced in the body of the -- binding group see Note [Desugar Strict binds] and all -- bindings and their desugared right hand sides. dsHsBind dflags (VarBind { var_id = var , var_rhs = expr , var_inline = inline_regardless }) = do { core_expr <- dsLExpr expr -- Dictionary bindings are always VarBinds, -- so we only need do this here ; let var' | inline_regardless = var `setIdUnfolding` mkCompulsoryUnfolding core_expr | otherwise = var ; let core_bind@(id,_) = makeCorePair dflags var' False 0 core_expr force_var = if xopt LangExt.Strict dflags then [id] else [] ; return (force_var, [core_bind]) } dsHsBind dflags b@(FunBind { fun_id = L _ fun , fun_matches = matches , fun_co_fn = co_fn , fun_tick = tick }) = do { (args, body) <- matchWrapper (mkPrefixFunRhs (noLoc $ idName fun)) Nothing matches ; core_wrap <- dsHsWrapper co_fn ; let body' = mkOptTickBox tick body rhs = core_wrap (mkLams args body') core_binds@(id,_) = makeCorePair dflags fun False 0 rhs force_var -- Bindings are strict when -XStrict is enabled | xopt LangExt.Strict dflags , matchGroupArity matches == 0 -- no need to force lambdas = [id] | isBangedHsBind b = [id] | otherwise = [] ; --pprTrace "dsHsBind" (vcat [ ppr fun <+> ppr (idInlinePragma fun) -- , ppr (mg_alts matches) -- , ppr args, ppr core_binds]) $ return (force_var, [core_binds]) } dsHsBind dflags (PatBind { pat_lhs = pat, pat_rhs = grhss , pat_ext = NPatBindTc _ ty , pat_ticks = (rhs_tick, var_ticks) }) = do { body_expr <- dsGuarded grhss ty ; checkGuardMatches PatBindGuards grhss ; let body' = mkOptTickBox rhs_tick body_expr pat' = decideBangHood dflags pat ; (force_var,sel_binds) <- mkSelectorBinds var_ticks pat body' -- We silently ignore inline pragmas; no makeCorePair -- Not so cool, but really doesn't matter ; let force_var' = if isBangedLPat pat' then [force_var] else [] ; return (force_var', sel_binds) } dsHsBind dflags (AbsBinds { abs_tvs = tyvars, abs_ev_vars = dicts , abs_exports = exports , abs_ev_binds = ev_binds , abs_binds = binds, abs_sig = has_sig }) = do { ds_binds <- applyWhen (needToRunPmCheck dflags FromSource) -- FromSource might not be accurate, but at worst -- we do superfluous calls to the pattern match -- oracle. 
-- addTyCsDs: push type constraints deeper -- for inner pattern match check -- See Check, Note [Type and Term Equality Propagation] (addTyCsDs (listToBag dicts)) (dsLHsBinds binds) ; ds_ev_binds <- dsTcEvBinds_s ev_binds -- dsAbsBinds does the hard work ; dsAbsBinds dflags tyvars dicts exports ds_ev_binds ds_binds has_sig } dsHsBind _ (PatSynBind{}) = panic "dsHsBind: PatSynBind" dsHsBind _ (XHsBindsLR nec) = noExtCon nec ----------------------- dsAbsBinds :: DynFlags -> [TyVar] -> [EvVar] -> [ABExport GhcTc] -> [CoreBind] -- Desugared evidence bindings -> ([Id], [(Id,CoreExpr)]) -- Desugared value bindings -> Bool -- Single binding with signature -> DsM ([Id], [(Id,CoreExpr)]) dsAbsBinds dflags tyvars dicts exports ds_ev_binds (force_vars, bind_prs) has_sig -- A very important common case: one exported variable -- Non-recursive bindings come through this way -- So do self-recursive bindings | [export] <- exports , ABE { abe_poly = global_id, abe_mono = local_id , abe_wrap = wrap, abe_prags = prags } <- export , Just force_vars' <- case force_vars of [] -> Just [] [v] | v == local_id -> Just [global_id] _ -> Nothing -- If there is a variable to force, it's just the -- single variable we are binding here = do { core_wrap <- dsHsWrapper wrap -- Usually the identity ; let rhs = core_wrap $ mkLams tyvars $ mkLams dicts $ mkCoreLets ds_ev_binds $ body body | has_sig , [(_, lrhs)] <- bind_prs = lrhs | otherwise = mkLetRec bind_prs (Var local_id) ; (spec_binds, rules) <- dsSpecs rhs prags ; let global_id' = addIdSpecialisations global_id rules main_bind = makeCorePair dflags global_id' (isDefaultMethod prags) (dictArity dicts) rhs ; return (force_vars', main_bind : fromOL spec_binds) } -- Another common case: no tyvars, no dicts -- In this case we can have a much simpler desugaring | null tyvars, null dicts = do { let mk_bind (ABE { abe_wrap = wrap , abe_poly = global , abe_mono = local , abe_prags = prags }) = do { core_wrap <- dsHsWrapper wrap ; return (makeCorePair dflags global (isDefaultMethod prags) 0 (core_wrap (Var local))) } mk_bind (XABExport nec) = noExtCon nec ; main_binds <- mapM mk_bind exports ; return (force_vars, flattenBinds ds_ev_binds ++ bind_prs ++ main_binds) } -- The general case -- See Note [Desugaring AbsBinds] | otherwise = do { let core_bind = Rec [ makeCorePair dflags (add_inline lcl_id) False 0 rhs | (lcl_id, rhs) <- bind_prs ] -- Monomorphic recursion possible, hence Rec new_force_vars = get_new_force_vars force_vars locals = map abe_mono exports all_locals = locals ++ new_force_vars tup_expr = mkBigCoreVarTup all_locals tup_ty = exprType tup_expr ; let poly_tup_rhs = mkLams tyvars $ mkLams dicts $ mkCoreLets ds_ev_binds $ mkLet core_bind $ tup_expr ; poly_tup_id <- newSysLocalDs (exprType poly_tup_rhs) -- Find corresponding global or make up a new one: sometimes -- we need to make new export to desugar strict binds, see -- Note [Desugar Strict binds] ; (exported_force_vars, extra_exports) <- get_exports force_vars ; let mk_bind (ABE { abe_wrap = wrap , abe_poly = global , abe_mono = local, abe_prags = spec_prags }) -- See Note [AbsBinds wrappers] in HsBinds = do { tup_id <- newSysLocalDs tup_ty ; core_wrap <- dsHsWrapper wrap ; let rhs = core_wrap $ mkLams tyvars $ mkLams dicts $ mkTupleSelector all_locals local tup_id $ mkVarApps (Var poly_tup_id) (tyvars ++ dicts) rhs_for_spec = Let (NonRec poly_tup_id poly_tup_rhs) rhs ; (spec_binds, rules) <- dsSpecs rhs_for_spec spec_prags ; let global' = (global `setInlinePragma` defaultInlinePragma) `addIdSpecialisations` 
rules -- Kill the INLINE pragma because it applies to -- the user written (local) function. The global -- Id is just the selector. Hmm. ; return ((global', rhs) : fromOL spec_binds) } mk_bind (XABExport nec) = noExtCon nec ; export_binds_s <- mapM mk_bind (exports ++ extra_exports) ; return ( exported_force_vars , (poly_tup_id, poly_tup_rhs) : concat export_binds_s) } where inline_env :: IdEnv Id -- Maps a monomorphic local Id to one with -- the inline pragma from the source -- The type checker put the inline pragma -- on the *global* Id, so we need to transfer it inline_env = mkVarEnv [ (lcl_id, setInlinePragma lcl_id prag) | ABE { abe_mono = lcl_id, abe_poly = gbl_id } <- exports , let prag = idInlinePragma gbl_id ] add_inline :: Id -> Id -- tran add_inline lcl_id = lookupVarEnv inline_env lcl_id `orElse` lcl_id global_env :: IdEnv Id -- Maps local Id to its global exported Id global_env = mkVarEnv [ (local, global) | ABE { abe_mono = local, abe_poly = global } <- exports ] -- find variables that are not exported get_new_force_vars lcls = foldr (\lcl acc -> case lookupVarEnv global_env lcl of Just _ -> acc Nothing -> lcl:acc) [] lcls -- find exports or make up new exports for force variables get_exports :: [Id] -> DsM ([Id], [ABExport GhcTc]) get_exports lcls = foldM (\(glbls, exports) lcl -> case lookupVarEnv global_env lcl of Just glbl -> return (glbl:glbls, exports) Nothing -> do export <- mk_export lcl let glbl = abe_poly export return (glbl:glbls, export:exports)) ([],[]) lcls mk_export local = do global <- newSysLocalDs (exprType (mkLams tyvars (mkLams dicts (Var local)))) return (ABE { abe_ext = noExtField , abe_poly = global , abe_mono = local , abe_wrap = WpHole , abe_prags = SpecPrags [] }) -- | This is where we apply INLINE and INLINABLE pragmas. All we need to -- do is to attach the unfolding information to the Id. -- -- Other decisions about whether to inline are made in -- `calcUnfoldingGuidance` but the decision about whether to then expose -- the unfolding in the interface file is made in `GHC.Iface.Tidy.addExternal` -- using this information. ------------------------ makeCorePair :: DynFlags -> Id -> Bool -> Arity -> CoreExpr -> (Id, CoreExpr) makeCorePair dflags gbl_id is_default_method dict_arity rhs | is_default_method -- Default methods are *always* inlined -- See Note [INLINE and default methods] in TcInstDcls = (gbl_id `setIdUnfolding` mkCompulsoryUnfolding rhs, rhs) | otherwise = case inlinePragmaSpec inline_prag of NoUserInline -> (gbl_id, rhs) NoInline -> (gbl_id, rhs) Inlinable -> (gbl_id `setIdUnfolding` inlinable_unf, rhs) Inline -> inline_pair where inline_prag = idInlinePragma gbl_id inlinable_unf = mkInlinableUnfolding dflags rhs inline_pair | Just arity <- inlinePragmaSat inline_prag -- Add an Unfolding for an INLINE (but not for NOINLINE) -- And eta-expand the RHS; see Note [Eta-expanding INLINE things] , let real_arity = dict_arity + arity -- NB: The arity in the InlineRule takes account of the dictionaries = ( gbl_id `setIdUnfolding` mkInlineUnfoldingWithArity real_arity rhs , etaExpand real_arity rhs) | otherwise = pprTrace "makeCorePair: arity missing" (ppr gbl_id) $ (gbl_id `setIdUnfolding` mkInlineUnfolding rhs, rhs) dictArity :: [Var] -> Arity -- Don't count coercion variables in arity dictArity dicts = count isId dicts {- Note [Desugaring AbsBinds] ~~~~~~~~~~~~~~~~~~~~~~~~~~ In the general AbsBinds case we desugar the binding to this: tup a (d:Num a) = let fm = ...gm... gm = ...fm... 
in (fm,gm) f a d = case tup a d of { (fm,gm) -> fm } g a d = case tup a d of { (fm,gm) -> fm } Note [Rules and inlining] ~~~~~~~~~~~~~~~~~~~~~~~~~ Common special case: no type or dictionary abstraction This is a bit less trivial than you might suppose The naive way would be to desugar to something like f_lcl = ...f_lcl... -- The "binds" from AbsBinds M.f = f_lcl -- Generated from "exports" But we don't want that, because if M.f isn't exported, it'll be inlined unconditionally at every call site (its rhs is trivial). That would be ok unless it has RULES, which would thereby be completely lost. Bad, bad, bad. Instead we want to generate M.f = ...f_lcl... f_lcl = M.f Now all is cool. The RULES are attached to M.f (by SimplCore), and f_lcl is rapidly inlined away. This does not happen in the same way to polymorphic binds, because they desugar to M.f = /\a. let f_lcl = ...f_lcl... in f_lcl Although I'm a bit worried about whether full laziness might float the f_lcl binding out and then inline M.f at its call site Note [Specialising in no-dict case] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Even if there are no tyvars or dicts, we may have specialisation pragmas. Class methods can generate AbsBinds [] [] [( ... spec-prag] { AbsBinds [tvs] [dicts] ...blah } So the overloading is in the nested AbsBinds. A good example is in GHC.Float: class (Real a, Fractional a) => RealFrac a where round :: (Integral b) => a -> b instance RealFrac Float where {-# SPECIALIZE round :: Float -> Int #-} The top-level AbsBinds for $cround has no tyvars or dicts (because the instance does not). But the method is locally overloaded! Note [Abstracting over tyvars only] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When abstracting over type variable only (not dictionaries), we don't really need to built a tuple and select from it, as we do in the general case. Instead we can take AbsBinds [a,b] [ ([a,b], fg, fl, _), ([b], gg, gl, _) ] { fl = e1 gl = e2 h = e3 } and desugar it to fg = /\ab. let B in e1 gg = /\b. let a = () in let B in S(e2) h = /\ab. let B in e3 where B is the *non-recursive* binding fl = fg a b gl = gg b h = h a b -- See (b); note shadowing! Notice (a) g has a different number of type variables to f, so we must use the mkArbitraryType thing to fill in the gaps. We use a type-let to do that. (b) The local variable h isn't in the exports, and rather than clone a fresh copy we simply replace h by (h a b), where the two h's have different types! Shadowing happens here, which looks confusing but works fine. (c) The result is *still* quadratic-sized if there are a lot of small bindings. So if there are more than some small number (10), we filter the binding set B by the free variables of the particular RHS. Tiresome. Why got to this trouble? It's a common case, and it removes the quadratic-sized tuple desugaring. Less clutter, hopefully faster compilation, especially in a case where there are a *lot* of bindings. Note [Eta-expanding INLINE things] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider foo :: Eq a => a -> a {-# INLINE foo #-} foo x = ... If (foo d) ever gets floated out as a common sub-expression (which can happen as a result of method sharing), there's a danger that we never get to do the inlining, which is a Terribly Bad thing given that the user said "inline"! To avoid this we pre-emptively eta-expand the definition, so that foo has the arity with which it is declared in the source code. In this example it has arity 2 (one for the Eq and one for x). Doing this should mean that (foo d) is a PAP and we don't share it. 
Note [Nested arities] ~~~~~~~~~~~~~~~~~~~~~ For reasons that are not entirely clear, method bindings come out looking like this: AbsBinds [] [] [$cfromT <= [] fromT] $cfromT [InlPrag=INLINE] :: T Bool -> Bool { AbsBinds [] [] [fromT <= [] fromT_1] fromT :: T Bool -> Bool { fromT_1 ((TBool b)) = not b } } } Note the nested AbsBind. The arity for the InlineRule on $cfromT should be gotten from the binding for fromT_1. It might be better to have just one level of AbsBinds, but that requires more thought! Note [Desugar Strict binds] ~~~~~~~~~~~~~~~~~~~~~~~~~~~ See https://gitlab.haskell.org/ghc/ghc/wikis/strict-pragma Desugaring strict variable bindings looks as follows (core below ==>) let !x = rhs in body ==> let x = rhs in x `seq` body -- seq the variable and if it is a pattern binding the desugaring looks like let !pat = rhs in body ==> let x = rhs -- bind the rhs to a new variable pat = x in x `seq` body -- seq the new variable if there is no variable in the pattern desugaring looks like let False = rhs in body ==> let x = case rhs of {False -> (); _ -> error "Match failed"} in x `seq` body In order to force the Ids in the binding group they are passed around in the dsHsBind family of functions, and later seq'ed in DsExpr.ds_val_bind. Consider a recursive group like this letrec f : g = rhs[f,g] in <body> Without `Strict`, we get a translation like this: let t = /\a. letrec tm = rhs[fm,gm] fm = case t of fm:_ -> fm gm = case t of _:gm -> gm in (fm,gm) in let f = /\a. case t a of (fm,_) -> fm in let g = /\a. case t a of (_,gm) -> gm in <body> Here `tm` is the monomorphic binding for `rhs`. With `Strict`, we want to force `tm`, but NOT `fm` or `gm`. Alas, `tm` isn't in scope in the `in <body>` part. The simplest thing is to return it in the polymorphic tuple `t`, thus: let t = /\a. letrec tm = rhs[fm,gm] fm = case t of fm:_ -> fm gm = case t of _:gm -> gm in (tm, fm, gm) in let f = /\a. case t a of (_,fm,_) -> fm in let g = /\a. case t a of (_,_,gm) -> gm in let tm = /\a. case t a of (tm,_,_) -> tm in tm `seq` <body> See https://gitlab.haskell.org/ghc/ghc/wikis/strict-pragma for a more detailed explanation of the desugaring of strict bindings. Note [Strict binds checks] ~~~~~~~~~~~~~~~~~~~~~~~~~~ There are several checks around properly formed strict bindings. They all link to this Note. These checks must be here in the desugarer because we cannot know whether or not a type is unlifted until after zonking, due to levity polymorphism. These checks all used to be handled in the typechecker in checkStrictBinds (before Jan '17). We define an "unlifted bind" to be any bind that binds an unlifted id. Note that x :: Char (# True, x #) = blah is *not* an unlifted bind. Unlifted binds are detected by GHC.Hs.Utils.isUnliftedHsBind. Define a "banged bind" to have a top-level bang. Detected by GHC.Hs.Pat.isBangedHsBind. Define a "strict bind" to be either an unlifted bind or a banged bind. The restrictions are: 1. Strict binds may not be top-level. Checked in dsTopLHsBinds. 2. Unlifted binds must also be banged. (There is no trouble to compile an unbanged unlifted bind, but an unbanged bind looks lazy, and we don't want users to be surprised by the strictness of an unlifted bind.) Checked in first clause of DsExpr.ds_val_bind. 3. Unlifted binds may not have polymorphism (#6078). (That is, no quantified type variables or constraints.) Checked in first clause of DsExpr.ds_val_bind. 4. Unlifted binds may not be recursive. Checked in second clause of ds_val_bind. 
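Two tiny made-up fragments, just to make the checks above concrete
(illustration only):

    x# = 4# +# n#                 -- top-level unlifted bind: rejected by (1)

    let x#  = expr in ...         -- unlifted but unbanged: rejected by (2)
    let !x# = expr in ...         -- fine (assuming x# has an unlifted type)
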
-} ------------------------ dsSpecs :: CoreExpr -- Its rhs -> TcSpecPrags -> DsM ( OrdList (Id,CoreExpr) -- Binding for specialised Ids , [CoreRule] ) -- Rules for the Global Ids -- See Note [Handling SPECIALISE pragmas] in TcBinds dsSpecs _ IsDefaultMethod = return (nilOL, []) dsSpecs poly_rhs (SpecPrags sps) = do { pairs <- mapMaybeM (dsSpec (Just poly_rhs)) sps ; let (spec_binds_s, rules) = unzip pairs ; return (concatOL spec_binds_s, rules) } dsSpec :: Maybe CoreExpr -- Just rhs => RULE is for a local binding -- Nothing => RULE is for an imported Id -- rhs is in the Id's unfolding -> Located TcSpecPrag -> DsM (Maybe (OrdList (Id,CoreExpr), CoreRule)) dsSpec mb_poly_rhs (L loc (SpecPrag poly_id spec_co spec_inl)) | isJust (isClassOpId_maybe poly_id) = putSrcSpanDs loc $ do { warnDs NoReason (text "Ignoring useless SPECIALISE pragma for class method selector" <+> quotes (ppr poly_id)) ; return Nothing } -- There is no point in trying to specialise a class op -- Moreover, classops don't (currently) have an inl_sat arity set -- (it would be Just 0) and that in turn makes makeCorePair bleat | no_act_spec && isNeverActive rule_act = putSrcSpanDs loc $ do { warnDs NoReason (text "Ignoring useless SPECIALISE pragma for NOINLINE function:" <+> quotes (ppr poly_id)) ; return Nothing } -- Function is NOINLINE, and the specialisation inherits that -- See Note [Activation pragmas for SPECIALISE] | otherwise = putSrcSpanDs loc $ do { uniq <- newUnique ; let poly_name = idName poly_id spec_occ = mkSpecOcc (getOccName poly_name) spec_name = mkInternalName uniq spec_occ (getSrcSpan poly_name) (spec_bndrs, spec_app) = collectHsWrapBinders spec_co -- spec_co looks like -- \spec_bndrs. [] spec_args -- perhaps with the body of the lambda wrapped in some WpLets -- E.g. /\a \(d:Eq a). let d2 = $df d in [] (Maybe a) d2 ; core_app <- dsHsWrapper spec_app ; let ds_lhs = core_app (Var poly_id) spec_ty = mkLamTypes spec_bndrs (exprType ds_lhs) ; -- pprTrace "dsRule" (vcat [ text "Id:" <+> ppr poly_id -- , text "spec_co:" <+> ppr spec_co -- , text "ds_rhs:" <+> ppr ds_lhs ]) $ dflags <- getDynFlags ; case decomposeRuleLhs dflags spec_bndrs ds_lhs of { Left msg -> do { warnDs NoReason msg; return Nothing } ; Right (rule_bndrs, _fn, args) -> do { this_mod <- getModule ; let fn_unf = realIdUnfolding poly_id spec_unf = specUnfolding dflags spec_bndrs core_app arity_decrease fn_unf spec_id = mkLocalId spec_name spec_ty `setInlinePragma` inl_prag `setIdUnfolding` spec_unf arity_decrease = count isValArg args - count isId spec_bndrs ; rule <- dsMkUserRule this_mod is_local_id (mkFastString ("SPEC " ++ showPpr dflags poly_name)) rule_act poly_name rule_bndrs args (mkVarApps (Var spec_id) spec_bndrs) ; let spec_rhs = mkLams spec_bndrs (core_app poly_rhs) -- Commented out: see Note [SPECIALISE on INLINE functions] -- ; when (isInlinePragma id_inl) -- (warnDs $ text "SPECIALISE pragma on INLINE function probably won't fire:" -- <+> quotes (ppr poly_name)) ; return (Just (unitOL (spec_id, spec_rhs), rule)) -- NB: do *not* use makeCorePair on (spec_id,spec_rhs), because -- makeCorePair overwrites the unfolding, which we have -- just created using specUnfolding } } } where is_local_id = isJust mb_poly_rhs poly_rhs | Just rhs <- mb_poly_rhs = rhs -- Local Id; this is its rhs | Just unfolding <- maybeUnfoldingTemplate (realIdUnfolding poly_id) = unfolding -- Imported Id; this is its unfolding -- Use realIdUnfolding so we get the unfolding -- even when it is a loop breaker. -- We want to specialise recursive functions! 
| otherwise = pprPanic "dsImpSpecs" (ppr poly_id) -- The type checker has checked that it *has* an unfolding id_inl = idInlinePragma poly_id -- See Note [Activation pragmas for SPECIALISE] inl_prag | not (isDefaultInlinePragma spec_inl) = spec_inl | not is_local_id -- See Note [Specialising imported functions] -- in OccurAnal , isStrongLoopBreaker (idOccInfo poly_id) = neverInlinePragma | otherwise = id_inl -- Get the INLINE pragma from SPECIALISE declaration, or, -- failing that, from the original Id spec_prag_act = inlinePragmaActivation spec_inl -- See Note [Activation pragmas for SPECIALISE] -- no_act_spec is True if the user didn't write an explicit -- phase specification in the SPECIALISE pragma no_act_spec = case inlinePragmaSpec spec_inl of NoInline -> isNeverActive spec_prag_act _ -> isAlwaysActive spec_prag_act rule_act | no_act_spec = inlinePragmaActivation id_inl -- Inherit | otherwise = spec_prag_act -- Specified by user dsMkUserRule :: Module -> Bool -> RuleName -> Activation -> Name -> [CoreBndr] -> [CoreExpr] -> CoreExpr -> DsM CoreRule dsMkUserRule this_mod is_local name act fn bndrs args rhs = do let rule = mkRule this_mod False is_local name act fn bndrs args rhs dflags <- getDynFlags when (isOrphan (ru_orphan rule) && wopt Opt_WarnOrphans dflags) $ warnDs (Reason Opt_WarnOrphans) (ruleOrphWarn rule) return rule ruleOrphWarn :: CoreRule -> SDoc ruleOrphWarn rule = text "Orphan rule:" <+> ppr rule {- Note [SPECIALISE on INLINE functions] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We used to warn that using SPECIALISE for a function marked INLINE would be a no-op; but it isn't! Especially with worker/wrapper split we might have {-# INLINE f #-} f :: Ord a => Int -> a -> ... f d x y = case x of I# x' -> $wf d x' y We might want to specialise 'f' so that we in turn specialise '$wf'. We can't even /name/ '$wf' in the source code, so we can't specialise it even if we wanted to. #10721 is a case in point. Note [Activation pragmas for SPECIALISE] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From a user SPECIALISE pragma for f, we generate a) A top-level binding spec_fn = rhs b) A RULE f dOrd = spec_fn We need two pragma-like things: * spec_fn's inline pragma: inherited from f's inline pragma (ignoring activation on SPEC), unless overridden by SPEC INLINE * Activation of RULE: from SPECIALISE pragma (if activation given) otherwise from f's inline pragma This is not obvious (see #5237)! 
Examples Rule activation Inline prag on spec'd fn --------------------------------------------------------------------- SPEC [n] f :: ty [n] Always, or NOINLINE [n] copy f's prag NOINLINE f SPEC [n] f :: ty [n] NOINLINE copy f's prag NOINLINE [k] f SPEC [n] f :: ty [n] NOINLINE [k] copy f's prag INLINE [k] f SPEC [n] f :: ty [n] INLINE [k] copy f's prag SPEC INLINE [n] f :: ty [n] INLINE [n] (ignore INLINE prag on f, same activation for rule and spec'd fn) NOINLINE [k] f SPEC f :: ty [n] INLINE [k] ************************************************************************ * * \subsection{Adding inline pragmas} * * ************************************************************************ -} decomposeRuleLhs :: DynFlags -> [Var] -> CoreExpr -> Either SDoc ([Var], Id, [CoreExpr]) -- (decomposeRuleLhs bndrs lhs) takes apart the LHS of a RULE, -- The 'bndrs' are the quantified binders of the rules, but decomposeRuleLhs -- may add some extra dictionary binders (see Note [Free dictionaries]) -- -- Returns an error message if the LHS isn't of the expected shape -- Note [Decomposing the left-hand side of a RULE] decomposeRuleLhs dflags orig_bndrs orig_lhs | not (null unbound) -- Check for things unbound on LHS -- See Note [Unused spec binders] = Left (vcat (map dead_msg unbound)) | Var funId <- fun2 , Just con <- isDataConId_maybe funId = Left (constructor_msg con) -- See Note [No RULES on datacons] | Just (fn_id, args) <- decompose fun2 args2 , let extra_bndrs = mk_extra_bndrs fn_id args = -- pprTrace "decmposeRuleLhs" (vcat [ text "orig_bndrs:" <+> ppr orig_bndrs -- , text "orig_lhs:" <+> ppr orig_lhs -- , text "lhs1:" <+> ppr lhs1 -- , text "extra_dict_bndrs:" <+> ppr extra_dict_bndrs -- , text "fn_id:" <+> ppr fn_id -- , text "args:" <+> ppr args]) $ Right (orig_bndrs ++ extra_bndrs, fn_id, args) | otherwise = Left bad_shape_msg where lhs1 = drop_dicts orig_lhs lhs2 = simpleOptExpr dflags lhs1 -- See Note [Simplify rule LHS] (fun2,args2) = collectArgs lhs2 lhs_fvs = exprFreeVars lhs2 unbound = filterOut (`elemVarSet` lhs_fvs) orig_bndrs orig_bndr_set = mkVarSet orig_bndrs -- Add extra tyvar binders: Note [Free tyvars in rule LHS] -- and extra dict binders: Note [Free dictionaries in rule LHS] mk_extra_bndrs fn_id args = scopedSort unbound_tvs ++ unbound_dicts where unbound_tvs = [ v | v <- unbound_vars, isTyVar v ] unbound_dicts = [ mkLocalId (localiseName (idName d)) (idType d) | d <- unbound_vars, isDictId d ] unbound_vars = [ v | v <- exprsFreeVarsList args , not (v `elemVarSet` orig_bndr_set) , not (v == fn_id) ] -- fn_id: do not quantify over the function itself, which may -- itself be a dictionary (in pathological cases, #10251) decompose (Var fn_id) args | not (fn_id `elemVarSet` orig_bndr_set) = Just (fn_id, args) decompose _ _ = Nothing bad_shape_msg = hang (text "RULE left-hand side too complicated to desugar") 2 (vcat [ text "Optimised lhs:" <+> ppr lhs2 , text "Orig lhs:" <+> ppr orig_lhs]) dead_msg bndr = hang (sep [ text "Forall'd" <+> pp_bndr bndr , text "is not bound in RULE lhs"]) 2 (vcat [ text "Orig bndrs:" <+> ppr orig_bndrs , text "Orig lhs:" <+> ppr orig_lhs , text "optimised lhs:" <+> ppr lhs2 ]) pp_bndr bndr | isTyVar bndr = text "type variable" <+> quotes (ppr bndr) | isEvVar bndr = text "constraint" <+> quotes (ppr (varType bndr)) | otherwise = text "variable" <+> quotes (ppr bndr) constructor_msg con = vcat [ text "A constructor," <+> ppr con <> text ", appears as outermost match in RULE lhs." , text "This rule will be ignored." 
] drop_dicts :: CoreExpr -> CoreExpr drop_dicts e = wrap_lets needed bnds body where needed = orig_bndr_set `minusVarSet` exprFreeVars body (bnds, body) = split_lets (occurAnalyseExpr e) -- The occurAnalyseExpr drops dead bindings which is -- crucial to ensure that every binding is used later; -- which in turn makes wrap_lets work right split_lets :: CoreExpr -> ([(DictId,CoreExpr)], CoreExpr) split_lets (Let (NonRec d r) body) | isDictId d = ((d,r):bs, body') where (bs, body') = split_lets body -- handle "unlifted lets" too, needed for "map/coerce" split_lets (Case r d _ [(DEFAULT, _, body)]) | isCoVar d = ((d,r):bs, body') where (bs, body') = split_lets body split_lets e = ([], e) wrap_lets :: VarSet -> [(DictId,CoreExpr)] -> CoreExpr -> CoreExpr wrap_lets _ [] body = body wrap_lets needed ((d, r) : bs) body | rhs_fvs `intersectsVarSet` needed = mkCoreLet (NonRec d r) (wrap_lets needed' bs body) | otherwise = wrap_lets needed bs body where rhs_fvs = exprFreeVars r needed' = (needed `minusVarSet` rhs_fvs) `extendVarSet` d {- Note [Decomposing the left-hand side of a RULE] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ There are several things going on here. * drop_dicts: see Note [Drop dictionary bindings on rule LHS] * simpleOptExpr: see Note [Simplify rule LHS] * extra_dict_bndrs: see Note [Free dictionaries] Note [Free tyvars on rule LHS] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider data T a = C foo :: T a -> Int foo C = 1 {-# RULES "myrule" foo C = 1 #-} After type checking the LHS becomes (foo alpha (C alpha)), where alpha is an unbound meta-tyvar. The zonker in TcHsSyn is careful not to turn the free alpha into Any (as it usually does). Instead it turns it into a TyVar 'a'. See TcHsSyn Note [Zonking the LHS of a RULE]. Now we must quantify over that 'a'. It's /really/ inconvenient to do that in the zonker, because the HsExpr data type is very large. But it's /easy/ to do it here in the desugarer. Moreover, we have to do something rather similar for dictionaries; see Note [Free dictionaries on rule LHS]. So that's why we look for type variables free on the LHS, and quantify over them. Note [Free dictionaries on rule LHS] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When the LHS of a specialisation rule, (/\as\ds. f es) has a free dict, which is presumably in scope at the function definition site, we can quantify over it too. *Any* dict with that type will do. So for example when you have f :: Eq a => a -> a f = <rhs> ... SPECIALISE f :: Int -> Int ... Then we get the SpecPrag SpecPrag (f Int dInt) And from that we want the rule RULE forall dInt. f Int dInt = f_spec f_spec = let f = <rhs> in f Int dInt But be careful! That dInt might be GHC.Base.$fOrdInt, which is an External Name, and you can't bind them in a lambda or forall without getting things confused. Likewise it might have an InlineRule or something, which would be utterly bogus. So we really make a fresh Id, with the same unique and type as the old one, but with an Internal name and no IdInfo. Note [Drop dictionary bindings on rule LHS] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ drop_dicts drops dictionary bindings on the LHS where possible. E.g. let d:Eq [Int] = $fEqList $fEqInt in f d --> f d Reasoning here is that there is only one d:Eq [Int], and so we can quantify over it. That makes 'd' free in the LHS, but that is later picked up by extra_dict_bndrs (Note [Dead spec binders]). NB 1: We can only drop the binding if the RHS doesn't bind one of the orig_bndrs, which we assume occur on RHS. 
Example f :: (Eq a) => b -> a -> a {-# SPECIALISE f :: Eq a => b -> [a] -> [a] #-} Here we want to end up with RULE forall d:Eq a. f ($dfEqList d) = f_spec d Of course, the ($dfEqlist d) in the pattern makes it less likely to match, but there is no other way to get d:Eq a NB 2: We do drop_dicts *before* simplOptEpxr, so that we expect all the evidence bindings to be wrapped around the outside of the LHS. (After simplOptExpr they'll usually have been inlined.) dsHsWrapper does dependency analysis, so that civilised ones will be simple NonRec bindings. We don't handle recursive dictionaries! NB3: In the common case of a non-overloaded, but perhaps-polymorphic specialisation, we don't need to bind *any* dictionaries for use in the RHS. For example (#8331) {-# SPECIALIZE INLINE useAbstractMonad :: ReaderST s Int #-} useAbstractMonad :: MonadAbstractIOST m => m Int Here, deriving (MonadAbstractIOST (ReaderST s)) is a lot of code but the RHS uses no dictionaries, so we want to end up with RULE forall s (d :: MonadAbstractIOST (ReaderT s)). useAbstractMonad (ReaderT s) d = $suseAbstractMonad s #8848 is a good example of where there are some interesting dictionary bindings to discard. The drop_dicts algorithm is based on these observations: * Given (let d = rhs in e) where d is a DictId, matching 'e' will bind e's free variables. * So we want to keep the binding if one of the needed variables (for which we need a binding) is in fv(rhs) but not already in fv(e). * The "needed variables" are simply the orig_bndrs. Consider f :: (Eq a, Show b) => a -> b -> String ... SPECIALISE f :: (Show b) => Int -> b -> String ... Then orig_bndrs includes the *quantified* dictionaries of the type namely (dsb::Show b), but not the one for Eq Int So we work inside out, applying the above criterion at each step. Note [Simplify rule LHS] ~~~~~~~~~~~~~~~~~~~~~~~~ simplOptExpr occurrence-analyses and simplifies the LHS: (a) Inline any remaining dictionary bindings (which hopefully occur just once) (b) Substitute trivial lets, so that they don't get in the way. Note that we substitute the function too; we might have this as a LHS: let f71 = M.f Int in f71 (c) Do eta reduction. To see why, consider the fold/build rule, which without simplification looked like: fold k z (build (/\a. g a)) ==> ... This doesn't match unless you do eta reduction on the build argument. Similarly for a LHS like augment g (build h) we do not want to get augment (\a. g a) (build h) otherwise we don't match when given an argument like augment (\a. h a a) (build h) Note [Unused spec binders] ~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider f :: a -> a ... SPECIALISE f :: Eq a => a -> a ... It's true that this *is* a more specialised type, but the rule we get is something like this: f_spec d = f RULE: f = f_spec d Note that the rule is bogus, because it mentions a 'd' that is not bound on the LHS! But it's a silly specialisation anyway, because the constraint is unused. We could bind 'd' to (error "unused") but it seems better to reject the program because it's almost certainly a mistake. That's what the isDeadBinder call detects. Note [No RULES on datacons] ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Previously, `RULES` like "JustNothing" forall x . Just x = Nothing were allowed. Simon Peyton Jones says this seems to have been a mistake, that such rules have never been supported intentionally, and that he doesn't know if they can break in horrible ways. 
Furthermore, Ben Gamari and Reid Barton are considering trying to detect the presence of "static data" that the simplifier doesn't need to traverse at all. Such rules do not play well with that. So for now, we ban them altogether as requested by #13290. See also #7398. ************************************************************************ * * Desugaring evidence * * ************************************************************************ -} dsHsWrapper :: HsWrapper -> DsM (CoreExpr -> CoreExpr) dsHsWrapper WpHole = return $ \e -> e dsHsWrapper (WpTyApp ty) = return $ \e -> App e (Type ty) dsHsWrapper (WpEvLam ev) = return $ Lam ev dsHsWrapper (WpTyLam tv) = return $ Lam tv dsHsWrapper (WpLet ev_binds) = do { bs <- dsTcEvBinds ev_binds ; return (mkCoreLets bs) } dsHsWrapper (WpCompose c1 c2) = do { w1 <- dsHsWrapper c1 ; w2 <- dsHsWrapper c2 ; return (w1 . w2) } -- See comments on WpFun in TcEvidence for an explanation of what -- the specification of this clause is dsHsWrapper (WpFun c1 c2 t1 doc) = do { x <- newSysLocalDsNoLP t1 ; w1 <- dsHsWrapper c1 ; w2 <- dsHsWrapper c2 ; let app f a = mkCoreAppDs (text "dsHsWrapper") f a arg = w1 (Var x) ; (_, ok) <- askNoErrsDs $ dsNoLevPolyExpr arg doc ; if ok then return (\e -> (Lam x (w2 (app e arg)))) else return id } -- this return is irrelevant dsHsWrapper (WpCast co) = ASSERT(coercionRole co == Representational) return $ \e -> mkCastDs e co dsHsWrapper (WpEvApp tm) = do { core_tm <- dsEvTerm tm ; return (\e -> App e core_tm) } -------------------------------------- dsTcEvBinds_s :: [TcEvBinds] -> DsM [CoreBind] dsTcEvBinds_s [] = return [] dsTcEvBinds_s (b:rest) = ASSERT( null rest ) -- Zonker ensures null dsTcEvBinds b dsTcEvBinds :: TcEvBinds -> DsM [CoreBind] dsTcEvBinds (TcEvBinds {}) = panic "dsEvBinds" -- Zonker has got rid of this dsTcEvBinds (EvBinds bs) = dsEvBinds bs dsEvBinds :: Bag EvBind -> DsM [CoreBind] dsEvBinds bs = do { ds_bs <- mapBagM dsEvBind bs ; return (mk_ev_binds ds_bs) } mk_ev_binds :: Bag (Id,CoreExpr) -> [CoreBind] -- We do SCC analysis of the evidence bindings, /after/ desugaring -- them. This is convenient: it means we can use the CoreSyn -- free-variable functions rather than having to do accurate free vars -- for EvTerm. mk_ev_binds ds_binds = map ds_scc (stronglyConnCompFromEdgedVerticesUniq edges) where edges :: [ Node EvVar (EvVar,CoreExpr) ] edges = foldr ((:) . mk_node) [] ds_binds mk_node :: (Id, CoreExpr) -> Node EvVar (EvVar,CoreExpr) mk_node b@(var, rhs) = DigraphNode { node_payload = b , node_key = var , node_dependencies = nonDetEltsUniqSet $ exprFreeVars rhs `unionVarSet` coVarsOfType (varType var) } -- It's OK to use nonDetEltsUniqSet here as stronglyConnCompFromEdgedVertices -- is still deterministic even if the edges are in nondeterministic order -- as explained in Note [Deterministic SCC] in Digraph. 
ds_scc (AcyclicSCC (v,r)) = NonRec v r ds_scc (CyclicSCC prs) = Rec prs dsEvBind :: EvBind -> DsM (Id, CoreExpr) dsEvBind (EvBind { eb_lhs = v, eb_rhs = r}) = liftM ((,) v) (dsEvTerm r) {-********************************************************************** * * Desugaring EvTerms * * **********************************************************************-} dsEvTerm :: EvTerm -> DsM CoreExpr dsEvTerm (EvExpr e) = return e dsEvTerm (EvTypeable ty ev) = dsEvTypeable ty ev dsEvTerm (EvFun { et_tvs = tvs, et_given = given , et_binds = ev_binds, et_body = wanted_id }) = do { ds_ev_binds <- dsTcEvBinds ev_binds ; return $ (mkLams (tvs ++ given) $ mkCoreLets ds_ev_binds $ Var wanted_id) } {-********************************************************************** * * Desugaring Typeable dictionaries * * **********************************************************************-} dsEvTypeable :: Type -> EvTypeable -> DsM CoreExpr -- Return a CoreExpr :: Typeable ty -- This code is tightly coupled to the representation -- of TypeRep, in base library Data.Typeable.Internals dsEvTypeable ty ev = do { tyCl <- dsLookupTyCon typeableClassName -- Typeable ; let kind = typeKind ty Just typeable_data_con = tyConSingleDataCon_maybe tyCl -- "Data constructor" -- for Typeable ; rep_expr <- ds_ev_typeable ty ev -- :: TypeRep a -- Package up the method as `Typeable` dictionary ; return $ mkConApp typeable_data_con [Type kind, Type ty, rep_expr] } type TypeRepExpr = CoreExpr -- | Returns a @CoreExpr :: TypeRep ty@ ds_ev_typeable :: Type -> EvTypeable -> DsM CoreExpr ds_ev_typeable ty (EvTypeableTyCon tc kind_ev) = do { mkTrCon <- dsLookupGlobalId mkTrConName -- mkTrCon :: forall k (a :: k). TyCon -> TypeRep k -> TypeRep a ; someTypeRepTyCon <- dsLookupTyCon someTypeRepTyConName ; someTypeRepDataCon <- dsLookupDataCon someTypeRepDataConName -- SomeTypeRep :: forall k (a :: k). TypeRep a -> SomeTypeRep ; tc_rep <- tyConRep tc -- :: TyCon ; let ks = tyConAppArgs ty -- Construct a SomeTypeRep toSomeTypeRep :: Type -> EvTerm -> DsM CoreExpr toSomeTypeRep t ev = do rep <- getRep ev t return $ mkCoreConApps someTypeRepDataCon [Type (typeKind t), Type t, rep] ; kind_arg_reps <- sequence $ zipWith toSomeTypeRep ks kind_ev -- :: TypeRep t ; let -- :: [SomeTypeRep] kind_args = mkListExpr (mkTyConTy someTypeRepTyCon) kind_arg_reps -- Note that we use the kind of the type, not the TyCon from which it -- is constructed since the latter may be kind polymorphic whereas the -- former we know is not (we checked in the solver). ; let expr = mkApps (Var mkTrCon) [ Type (typeKind ty) , Type ty , tc_rep , kind_args ] -- ; pprRuntimeTrace "Trace mkTrTyCon" (ppr expr) expr ; return expr } ds_ev_typeable ty (EvTypeableTyApp ev1 ev2) | Just (t1,t2) <- splitAppTy_maybe ty = do { e1 <- getRep ev1 t1 ; e2 <- getRep ev2 t2 ; mkTrApp <- dsLookupGlobalId mkTrAppName -- mkTrApp :: forall k1 k2 (a :: k1 -> k2) (b :: k1). -- TypeRep a -> TypeRep b -> TypeRep (a b) ; let (k1, k2) = splitFunTy (typeKind t1) ; let expr = mkApps (mkTyApps (Var mkTrApp) [ k1, k2, t1, t2 ]) [ e1, e2 ] -- ; pprRuntimeTrace "Trace mkTrApp" (ppr expr) expr ; return expr } ds_ev_typeable ty (EvTypeableTrFun ev1 ev2) | Just (t1,t2) <- splitFunTy_maybe ty = do { e1 <- getRep ev1 t1 ; e2 <- getRep ev2 t2 ; mkTrFun <- dsLookupGlobalId mkTrFunName -- mkTrFun :: forall r1 r2 (a :: TYPE r1) (b :: TYPE r2). 
-- TypeRep a -> TypeRep b -> TypeRep (a -> b) ; let r1 = getRuntimeRep t1 r2 = getRuntimeRep t2 ; return $ mkApps (mkTyApps (Var mkTrFun) [r1, r2, t1, t2]) [ e1, e2 ] } ds_ev_typeable ty (EvTypeableTyLit ev) = -- See Note [Typeable for Nat and Symbol] in TcInteract do { fun <- dsLookupGlobalId tr_fun ; dict <- dsEvTerm ev -- Of type KnownNat/KnownSymbol ; let proxy = mkTyApps (Var proxyHashId) [ty_kind, ty] ; return (mkApps (mkTyApps (Var fun) [ty]) [ dict, proxy ]) } where ty_kind = typeKind ty -- tr_fun is the Name of -- typeNatTypeRep :: KnownNat a => Proxy# a -> TypeRep a -- of typeSymbolTypeRep :: KnownSymbol a => Proxy# a -> TypeRep a tr_fun | ty_kind `eqType` typeNatKind = typeNatTypeRepName | ty_kind `eqType` typeSymbolKind = typeSymbolTypeRepName | otherwise = panic "dsEvTypeable: unknown type lit kind" ds_ev_typeable ty ev = pprPanic "dsEvTypeable" (ppr ty $$ ppr ev) getRep :: EvTerm -- ^ EvTerm for @Typeable ty@ -> Type -- ^ The type @ty@ -> DsM TypeRepExpr -- ^ Return @CoreExpr :: TypeRep ty@ -- namely @typeRep# dict@ -- Remember that -- typeRep# :: forall k (a::k). Typeable k a -> TypeRep a getRep ev ty = do { typeable_expr <- dsEvTerm ev ; typeRepId <- dsLookupGlobalId typeRepIdName ; let ty_args = [typeKind ty, ty] ; return (mkApps (mkTyApps (Var typeRepId) ty_args) [ typeable_expr ]) } tyConRep :: TyCon -> DsM CoreExpr -- Returns CoreExpr :: TyCon tyConRep tc | Just tc_rep_nm <- tyConRepName_maybe tc = do { tc_rep_id <- dsLookupGlobalId tc_rep_nm ; return (Var tc_rep_id) } | otherwise = pprPanic "tyConRep" (ppr tc)
sdiehl/ghc
compiler/deSugar/DsBinds.hs
bsd-3-clause
54,392
0
20
17,634
7,650
4,014
3,636
549
7
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} module Main where import System.Process (shell, createProcess, readProcess) import System.Directory (getHomeDirectory, doesDirectoryExist, getCurrentDirectory, setCurrentDirectory) import System.FilePath.Posix ((</>)) import System.IO.Unsafe (unsafePerformIO) import Data.Aeson (ToJSON, FromJSON, decode, encode, decode') import Data.List (tails, groupBy, sortBy, intercalate) import Control.Exception (catch) import qualified Data.ByteString.Lazy as BS import qualified Data.ByteString.Lazy.Char8 as BSC import qualified Data.Csv as CSV import GHC.Generics (Generic) import qualified Data.Vector as V import Data.Function (on) homeDir = unsafePerformIO getCurrentDirectory configFile = homeDir </> "mastery.json" reposDir = homeDir </> ".mastery/" statsFile = homeDir </> "stats.json" configJSFile = homeDir </> "config.js" cloc = homeDir </> "cloc" data GlobalStats = GlobalStats { allLanguages :: [LanguageStats] , allRepos :: [RepoStats] } deriving (Show, Generic) data LanguageStats = LanguageStats { language :: String , code :: Int , comment :: Int , blank :: Int , files :: Int } deriving (Show, Generic) data RepoStats = RepoStats { repo :: Repo , languages :: [LanguageStats] } deriving (Show, Generic) data Repo = Repo { name :: String, url :: String, ignore :: [String] } deriving (Show, Generic) data Config = Config { emails :: [String] , repos :: [Repo] } deriving (Show, Generic) instance ToJSON GlobalStats instance ToJSON Repo instance ToJSON LanguageStats instance ToJSON RepoStats instance ToJSON Config instance FromJSON Repo instance FromJSON Config -- Might throw exception readConfig :: IO Config readConfig = do contents <-BS.readFile configFile -- might throws exception :/ config <- case decode' contents of Just conf -> return conf Nothing -> error "Problems parsing config file" return config repoDirName :: Repo -> FilePath repoDirName repo = reposDir </> name where name = reverse $ drop 4 $ reverse $head $ dropWhile ('/' `elem`) $ tails (url repo) -- It actually works !! 
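-- Worked example for repoDirName (the URL is hypothetical, not from any real
-- config): for a Repo whose url is "https://github.com/user/project.git",
-- `tails` enumerates all suffixes of the URL, `dropWhile ('/' `elem`)` skips
-- every suffix that still contains a '/', and `head` picks the first one that
-- does not ("project.git"); the reverse/drop 4/reverse step then strips the
-- ".git" suffix, so the result is reposDir </> "project".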
createRepo :: Repo -> IO () createRepo repo = do putStrLn $ "cloning " ++ (name repo) let repoDir = repoDirName repo _ <- readProcess "git" ["clone", url repo, repoDir] "" return () updateRepo :: Repo -> IO () updateRepo repo = do putStrLn $ "updating " ++ (name repo) let repoDir = repoDirName repo setCurrentDirectory repoDir _ <- readProcess "git" ["pull"] "" return () pullRepo :: Repo -> IO () pullRepo repo = do let repoDir = repoDirName repo repoExists <- doesDirectoryExist repoDir if repoExists then updateRepo repo else createRepo repo repoStats :: Repo -> IO (RepoStats) repoStats repo = do let repoDir = repoDirName repo let ignoredFiles = if ignore repo == [] then [] else [ "--exclude-dir="++(intercalate "," (ignore repo)) ] out <- readProcess cloc ([repoDir, "--csv", "--quiet"] ++ ignoredFiles) "" let processedOut = BSC.pack $ unlines $ drop 2 $ lines out let eitherStats = CSV.decode CSV.NoHeader processedOut :: Either String (V.Vector (Int, String, Int, Int, Int)) return $ case eitherStats of Left errMsg -> error errMsg Right stats -> RepoStats { repo = repo , languages = V.toList $ V.map (\(f, lang, b, c, loc) -> LanguageStats { language = lang , files = f , blank = b , comment = c , code = loc }) stats } aggregateLanguageStats :: [LanguageStats] -> LanguageStats aggregateLanguageStats ls = LanguageStats { language = language (head ls) , code = sum $ map code ls , comment = sum $ map comment ls , blank = sum $ map blank ls , files = sum $ map files ls } computeGlobalStats :: [RepoStats] -> GlobalStats computeGlobalStats repoStats = GlobalStats { allLanguages = map aggregateLanguageStats languageGroups , allRepos = repoStats } where languageGroups = groupBy ((==) `on` language) $ sortBy (\a b -> compare (language a) (language b)) $ concatMap languages repoStats main :: IO () main = do config <- readConfig -- Don't care if crashes mapM_ pullRepo (repos config) perRepoStats <- mapM repoStats (repos config) let globalStats = computeGlobalStats perRepoStats BS.writeFile statsFile $ "var stats_json = " `BS.append` encode globalStats BS.writeFile configJSFile $ "var config = " `BS.append` encode config
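-- A minimal sketch (the numbers are invented for illustration) of how
-- aggregateLanguageStats collapses several per-repo rows for one language
-- into a single row by summing each numeric field:
_exampleAggregate :: LanguageStats
_exampleAggregate = aggregateLanguageStats
  [ LanguageStats { language = "Haskell", code = 100, comment = 20, blank = 10, files = 3 }
  , LanguageStats { language = "Haskell", code = 50,  comment = 5,  blank = 2,  files = 1 }
  ]
-- => LanguageStats { language = "Haskell", code = 150, comment = 25, blank = 12, files = 4 }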
davidrusu/mastery
app/Main.hs
bsd-3-clause
5,205
0
18
1,634
1,460
782
678
104
3
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE TypeSynonymInstances #-} module TransactionServer where import System.Random import Control.Monad.Trans.Except import Control.Monad.Trans.Resource import Control.Monad.IO.Class import Data.Aeson import Data.Aeson.TH import Data.Bson.Generic import GHC.Generics import Network.Wai hiding(Response) import Network.Wai.Handler.Warp import Network.Wai.Logger import Servant import Servant.API import Servant.Client import System.IO import System.Directory import System.Environment (getArgs, getProgName, lookupEnv) import System.Log.Formatter import System.Log.Handler (setFormatter) import System.Log.Handler.Simple import System.Log.Handler.Syslog import System.Log.Logger import Data.Bson.Generic import qualified Data.List as DL import Data.Maybe (catMaybes) import Data.Text (pack, unpack) import Data.Time.Clock (UTCTime, getCurrentTime) import Data.Time.Format (defaultTimeLocale, formatTime) import Database.MongoDB import Control.Monad (when) import Network.HTTP.Client (newManager, defaultManagerSettings) import CommonResources import MongodbHelpers type ApiHandler = ExceptT ServantErr IO transactionApi :: Proxy TransactionApi transactionApi = Proxy server :: Server TransactionApi server = beginTrans :<|> downloadTrans :<|> uploadTrans :<|> commitTrans transactionApp :: Application transactionApp = serve transactionApi server directoryApi :: Proxy DirectoryApi directoryApi = Proxy join :: FileServer -> ClientM Response open :: FileName -> ClientM File close :: FileUpload -> ClientM Response allfiles :: Ticket -> ClientM [String] remove :: FileName -> ClientM Response join :<|> open :<|> close :<|> allfiles :<|> remove = client directoryApi runApp :: IO() runApp = do run (read (transserverport) ::Int) transactionApp beginTrans :: Ticket -> ApiHandler Response beginTrans (Ticket ticket encryptedTimeout) = liftIO $ do let sessionKey = encryptDecrypt sharedSecret ticket let decryptedTimeout = decryptTime sharedSecret encryptedTimeout putStrLn ("Checking Client Credentials...") currentTime <- getCurrentTime if (currentTime > decryptedTimeout) then do putStrLn "Client session timeout" return (Response (encryptDecrypt sessionKey "Failed")) else do putStrLn "Starting transaction" putStrLn "Storing client sessionKey as transaction ID" withMongoDbConnection $ upsert (select ["transactionID" =: sessionKey] "TRANSACTION_ID_RECORD") $ toBSON sessionKey return (Response (encryptDecrypt sessionKey "Successful")) downloadTrans :: FileName -> ApiHandler File downloadTrans fileName@(FileName ticket encryptedTimeout encryptedFN) = liftIO $ do let sessionKey = encryptDecrypt sharedSecret ticket let decryptedTimeout = decryptTime sharedSecret encryptedTimeout let decryptedFN = encryptDecrypt sessionKey encryptedFN putStrLn ("Checking Client Credentials...") currentTime <- getCurrentTime if (currentTime > decryptedTimeout) then do putStrLn "Client session timeout" return (File "Failed" "Failed") else do manager <- newManager defaultManagerSettings res <- runClientM (open fileName) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) "")) case res of Left err -> do putStrLn (show err) return (File "Failed" "Failed") Right file -> do putStrLn "Storing file transaction data" withMongoDbConnection 
$ upsert (select ["userID" =: sessionKey] "TRANSACTION_FILE_RECORD") $ toBSON (TransactionFile decryptedFN sessionKey) return file uploadTrans :: FileUpload -> ApiHandler Response uploadTrans fileUpload@(FileUpload ticket encryptedTimeout (File encryptedFN encryptedFC)) = liftIO $ do let sessionKey = encryptDecrypt sharedSecret ticket let decryptedTimeout = decryptTime sharedSecret encryptedTimeout let decryptedFN = encryptDecrypt sessionKey encryptedFN putStrLn ("Checking Client Credentials...") currentTime <- getCurrentTime if (currentTime > decryptedTimeout) then do putStrLn "Client session timeout" return (Response (encryptDecrypt sessionKey "Failed")) else do manager <- newManager defaultManagerSettings let tempFileName = encryptDecrypt sessionKey ("TMP~"++decryptedFN) let fupload = FileUpload ticket encryptedTimeout (File tempFileName encryptedFC) res <- runClientM (TransactionServer.close fupload) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) "")) case res of Left err -> do putStrLn (show err) return (Response (encryptDecrypt sessionKey "Failed")) Right (Response response) -> do let decryptedres = encryptDecrypt sessionKey response putStrLn ("Uploaded temp file - " ++ decryptedres) return (Response response) commitTrans :: Ticket -> ApiHandler Response commitTrans tic@(Ticket ticket encryptedTimeout) = liftIO $ do let sessionKey = encryptDecrypt sharedSecret ticket let decryptedTimeout = decryptTime sharedSecret encryptedTimeout putStrLn ("Checking Client Credentials...") currentTime <- getCurrentTime if (currentTime > decryptedTimeout) then do putStrLn "Client session timeout" return (Response (encryptDecrypt sessionKey "Failed")) else do transactions <- liftIO $ withMongoDbConnection $ do docs <- find (select ["userID" =: sessionKey] "TRANSACTION_FILE_RECORD") >>= drainCursor return $ catMaybes $ DL.map (\ b -> fromBSON b :: Maybe TransactionFile) docs mapM (commitfile tic) transactions return (Response (encryptDecrypt sessionKey "Successful")) commitfile :: Ticket -> TransactionFile -> IO() commitfile (Ticket ticket encryptedTimeout) (TransactionFile decryptedFN sessionKey) = liftIO $ do putStrLn ("Commiting file: " ++ decryptedFN) manager <- newManager defaultManagerSettings let temp_file = encryptDecrypt sessionKey ("TMP~"++ decryptedFN) let fileName = (FileName ticket encryptedTimeout temp_file) res <- runClientM (open fileName) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) "")) case res of Left err -> putStrLn (show err) Right (File encryptedFN encryptedFC) -> do let fn = encryptDecrypt sessionKey encryptedFN let temp = encryptDecrypt sessionKey temp_file case (temp == fn) of False -> putStrLn "Commit Failed" True -> do let fileupload = (FileUpload ticket encryptedTimeout (File (encryptDecrypt sessionKey decryptedFN) encryptedFC)) res <- runClientM (TransactionServer.close fileupload) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) "")) case res of Left err -> do putStrLn (show err) Right (Response response) -> do case response of "Successful" -> do res <- runClientM (remove (FileName ticket encryptedTimeout temp_file)) (ClientEnv manager (BaseUrl Http dirserverhost (read (dirserverport) :: Int) "")) case res of Left err -> putStrLn (show err) Right (Response response) -> putStrLn (encryptDecrypt sessionKey response) _ -> putStrLn "Shouldnt get here"
Garygunn94/DFS
TransactionServer/.stack-work/intero/intero12761-oC.hs
bsd-3-clause
8,200
42
38
2,045
2,074
1,037
1,037
166
6
module Data.Typewriter.Data ( module Data.Typewriter.Data.List , module Data.Typewriter.Data.Nat ) where import Data.Typewriter.Data.List import Data.Typewriter.Data.Nat
isomorphism/typewriter
Data/Typewriter/Data.hs
bsd-3-clause
227
0
5
71
39
28
11
4
0
module Text.Highlighter.Lexers.CoffeeScript (lexer) where import Text.Regex.PCRE.Light import Text.Highlighter.Types lexer :: Lexer lexer = Lexer { lName = "CoffeeScript" , lAliases = ["coffee-script", "coffeescript"] , lExtensions = [".coffee"] , lMimetypes = ["text/coffeescript"] , lStart = root' , lFlags = [dotall] } commentsandwhitespace' :: TokenMatcher commentsandwhitespace' = [ tok "\\s+" (Arbitrary "Text") , tok "#.*?\\n" (Arbitrary "Comment" :. Arbitrary "Single") ] root' :: TokenMatcher root' = [ tokNext "^(?=\\s|/|<!--)" (Arbitrary "Text") (GoTo slashstartsregex') , anyOf commentsandwhitespace' , tokNext "\\+\\+|--|\126|&&|\\band\\b|\\bor\\b|\\bis\\b|\\bisnt\\b|\\bnot\\b|\\?|:|=|\\|\\||\\\\(?=\\n)|(<<|>>>?|==?|!=?|[-<>+*`%&\\|\\^/])=?" (Arbitrary "Operator") (GoTo slashstartsregex') , tok "\\([^()]*\\)\\s*->" (Arbitrary "Name" :. Arbitrary "Function") , tokNext "[{(\\[;,]" (Arbitrary "Punctuation") (GoTo slashstartsregex') , tok "[})\\].]" (Arbitrary "Punctuation") , tokNext "(for|in|of|while|break|return|continue|switch|when|then|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|super|extends|this|class|by)\\b" (Arbitrary "Keyword") (GoTo slashstartsregex') , tok "(true|false|yes|no|on|off|null|NaN|Infinity|undefined)\\b" (Arbitrary "Keyword" :. Arbitrary "Constant") , tok "(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|window)\\b" (Arbitrary "Name" :. Arbitrary "Builtin") , tokNext "[$a-zA-Z_][a-zA-Z0-9_\\.:]*\\s*[:=]\\s" (Arbitrary "Name" :. Arbitrary "Variable") (GoTo slashstartsregex') , tokNext "@[$a-zA-Z_][a-zA-Z0-9_\\.:]*\\s*[:=]\\s" (Arbitrary "Name" :. Arbitrary "Variable" :. Arbitrary "Instance") (GoTo slashstartsregex') , tokNext "@?[$a-zA-Z_][a-zA-Z0-9_]*" (Arbitrary "Name" :. Arbitrary "Other") (GoTo slashstartsregex') , tok "[0-9][0-9]*\\.[0-9]+([eE][0-9]+)?[fd]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Float") , tok "0x[0-9a-fA-F]+" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Hex") , tok "[0-9]+" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Integer") , tok "\"(\\\\\\\\|\\\\\"|[^\"])*\"" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Double") , tok "'(\\\\\\\\|\\\\'|[^'])*'" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Single") ] slashstartsregex' :: TokenMatcher slashstartsregex' = [ anyOf commentsandwhitespace' , tokNext "/(\\\\.|[^[/\\\\\\n]|\\[(\\\\.|[^\\]\\\\\\n])*])+/([gim]+\\b|\\B)" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Regex") Pop , tokNext "(?=/)" (Arbitrary "Text") (DoAll [Pop, (GoTo badregex')]) , tokNext "" (Arbitrary "Text") Pop ] badregex' :: TokenMatcher badregex' = [ tokNext "\10" (Arbitrary "Text") Pop ]
chemist/highlighter
src/Text/Highlighter/Lexers/CoffeeScript.hs
bsd-3-clause
2,984
0
11
408
671
345
326
43
1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} module Web.XING.Calls.User ( demoUser, demoUser' , demoUsers, demoUsers' , getUsers ) where import Web.XING.Types import Web.XING.API import Data.Aeson (encode, decode, Value(..), object, (.=)) import qualified Data.ByteString.Lazy.Char8 as BSL import Network.HTTP.Conduit (Response(..)) import Control.Monad.Trans.Control (MonadBaseControl) import Control.Monad.Trans.Resource (MonadResource) import Data.Monoid (mappend) import Control.Exception (throw) import Data.Text.Encoding (encodeUtf8) import Data.Text (intercalate) -- | Get user details <https://dev.xing.com/docs/get/users/:id> getUsers :: (MonadResource m, MonadBaseControl IO m) => OAuth -> Manager -> AccessToken -> [UserId] -> m UserList getUsers oa manager cr uids = do res <- apiRequest oa manager cr "GET" ("/v1/users/" `mappend` (encodeUtf8 $ intercalate "," uids)) case decode (responseBody res) of Just a -> return a Nothing -> throw Mapping -- https://dev.xing.com/docs/get/users/:id demoUsers :: Value demoUsers = object [ "users" .= [demoUser] ] demoUsers' :: BSL.ByteString demoUsers' = encode demoUsers demoUser :: Value demoUser = object [ "id" .= ("12345_abcdef" :: BSL.ByteString) , "display_name" .= ("Max Mustermann" :: BSL.ByteString) , "permalink" .= ("https://www.xing.com/profile/Max_Mustermann" :: BSL.ByteString) , "first_name" .= ("Max" :: BSL.ByteString) , "last_name" .= ("Mustermann" :: BSL.ByteString) , "page_name" .= ("Max_Mustermann" :: BSL.ByteString) , "gender" .= ("m" :: BSL.ByteString) , "active_email" .= ("[email protected]" :: BSL.ByteString) , "time_zone" .= object [ "name" .= ("Europe/Copenhagen" :: BSL.ByteString) , "utc_offset" .= (2.0 :: Float) ] , "premium_services" .= (["SEARCH", "PRIVATEMESSAGES"] :: [BSL.ByteString]) , "badges" .= (["PREMIUM", "PRIVATEMESSAGES"] :: [BSL.ByteString]) , "languages" .= object [ "de" .= ("NATIVE" :: BSL.ByteString) , "en" .= ("FLUENT" :: BSL.ByteString) , "fr" .= Null , "zh" .= ("BASIC" :: BSL.ByteString) ] , "wants" .= (encodeUtf8 "einen neuen Job") , "haves" .= (encodeUtf8 "viele tolle Skills") , "interests" .= (encodeUtf8 "Flitzebogen schießen and so on") , "organisation_member" .= (encodeUtf8 "ACM, GI") , "private_address" .= object [ "street" .= (encodeUtf8 "Privatstraße 1") , "zip_code" .= (encodeUtf8 "20357") , "city" .= (encodeUtf8 "Hamburg") , "province" .= (encodeUtf8 "Hamburg") , "country" .= (encodeUtf8 "DE") , "email" .= (encodeUtf8 "[email protected]") , "phone" .= (encodeUtf8 "49|40|1234560") , "fax" .= (encodeUtf8 "||") , "mobile_phone" .= (encodeUtf8 "49|0155|1234567") ] , "business_address" .= object [ "city" .= (encodeUtf8 "Hamburg") , "country" .= (encodeUtf8 "DE") , "zip_code" .= (encodeUtf8 "20357") , "street" .= (encodeUtf8 "GeschΓ€ftsstraße 1a") , "phone" .= (encodeUtf8 "49|40|1234569") , "fax" .= (encodeUtf8 "49|40|1234561") , "province" .= (encodeUtf8 "Hamburg") , "email" .= (encodeUtf8 "[email protected]") , "mobile_phone" .= (encodeUtf8 "49|160|66666661") ] , "web_profiles" .= object [ "qype" .= (["http://qype.de/users/foo"] :: [BSL.ByteString]) , "google_plus" .= (["http://plus.google.com/foo"] :: [BSL.ByteString]) , "blog" .= (["http://blog.example.org"] :: [BSL.ByteString]) , "homepage" .= (["http://example.org", "http://another-example.org"] :: [BSL.ByteString]) ] , "instant_messaging_accounts" .= object [ "skype" .= (encodeUtf8 "1122334455") , "googletalk" .= (encodeUtf8 "max.mustermann") ] , "professional_experience" .= object [ 
"primary_company" .= object [ "name" .= (encodeUtf8 "XING AG") , "title" .= (encodeUtf8 "Softwareentwickler") , "company_size" .= (encodeUtf8 "201-500") , "tag" .= Null , "url" .= (encodeUtf8 "http://www.xing.com") , "career_level" .= (encodeUtf8 "PROFESSIONAL_EXPERIENCED") , "begin_date" .= (encodeUtf8 "2010-01") , "description" .= Null , "end_date" .= Null , "industry" .= (encodeUtf8 "AEROSPACE") ] , "non_primary_companies" .= [ object [ "name" .= (encodeUtf8 "Ninja Ltd.") , "title" .= (encodeUtf8 "DevOps") , "company_size" .= Null , "tag" .= (encodeUtf8 "NINJA") , "url" .= (encodeUtf8 "http://www.ninja-ltd.co.uk") , "career_level" .= Null , "begin_date" .= (encodeUtf8 "2009-04") , "description" .= Null , "end_date" .= (encodeUtf8 "2010-07") , "industry" .= (encodeUtf8 "ALTERNATIVE_MEDICINE") ] , object [ "name" .= Null , "title" .= (encodeUtf8 "Wiss. Mitarbeiter") , "company_size" .= Null , "tag" .= (encodeUtf8 "OFFIS") , "url" .= (encodeUtf8 "http://www.uni.de") , "career_level" .= Null , "begin_date" .= (encodeUtf8 "2007") , "description" .= Null , "end_date" .= (encodeUtf8 "2008") , "industry" .= (encodeUtf8 "APPAREL_AND_FASHION") ] , object [ "name" .= Null , "title" .= (encodeUtf8 "TEST NINJA") , "company_size" .= (encodeUtf8 "201-500") , "tag" .= (encodeUtf8 "TESTCOMPANY") , "url" .= Null , "career_level" .= (encodeUtf8 "ENTRY_LEVEL") , "begin_date" .= (encodeUtf8 "1998-12") , "description" .= Null , "end_date" .= (encodeUtf8 "1999-05") , "industry" .= (encodeUtf8 "ARTS_AND_CRAFTS") ] ] , "awards" .= [ object [ "name" .= (encodeUtf8 "Awesome Dude Of The Year") , "date_awarded" .= (2007 :: Int) , "url" .= Null ] ] ] , "educational_background" .= object [ "schools" .= [ object [ "name" .= (encodeUtf8 "Carl-von-Ossietzky UniverstΓ€t Schellenburg") , "degree" .= (encodeUtf8 "MSc CE/CS") , "notes" .= Null , "subject" .= Null , "begin_date" .= (encodeUtf8 "1998-08") , "end_date" .= (encodeUtf8 "2005-02") ] ] , "qualifications" .= (["TOEFLS", "PADI AOWD"] :: [BSL.ByteString]) ] , "photo_urls" .= object [ "large" .= ("http://www.xing.com/img/users/e/3/d/f94ef165a.123456,1.140x185.jpg" :: BSL.ByteString) , "mini_thumb" .= ("http://www.xing.com/img/users/e/3/d/f94ef165a.123456,1.18x24.jpg" :: BSL.ByteString) , "thumb" .= ("http://www.xing.com/img/users/e/3/d/f94ef165a.123456,1.30x40.jpg" :: BSL.ByteString) , "medium_thumb" .= ("http://www.xing.com/img/users/e/3/d/f94ef165a.123456,1.57x75.jpg" :: BSL.ByteString) , "maxi_thumb" .= ("http://www.xing.com/img/users/e/3/d/f94ef165a.123456,1.70x93.jpg" :: BSL.ByteString) ] , "birth_date" .= object [ "day" .= (12 :: Int) , "month" .= (8 :: Int) , "year" .= (1963 :: Int) ] ] demoUser' :: BSL.ByteString demoUser' = encode demoUser
JanAhrens/xing-api-haskell
lib/Web/XING/Calls/User.hs
bsd-3-clause
8,056
0
16
2,562
1,847
1,044
803
161
2
------------------------------------------------------------------------------ -- | This is the main interface, the only file you should need to import for REPL ------------------------------------------------------------------------------ {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE QuasiQuotes #-} module Graphics.HSD3.D3.Render( Render( .. ), renderGraph ) where import Control.Lens import Data.Monoid import Language.Javascript.JMacro import Text.PrettyPrint.Leijen.Text hiding (group, width, (<$>)) import Text.Regex import Graphics.HSD3.D3.Graph import Graphics.HSD3.D3.JMacro ------------------------------------------------------------------------------ class Render b where render :: ToJExpr a => a -- ^ Chart data -> b a () -- ^ The chart description -> String instance Render (GraphT ()) where render = renderGraph () -- | Renders a `GraphT` s a () as a `String`. renderGraph :: ToJExpr a => s -> a -> GraphT s a () -> String renderGraph st dat graph = renderGraphState . snd . runGraph (setData dat >> graph) $ emptyState st -- | Renders a GraphState as a string. renderGraphState :: GraphState s -> String renderGraphState = renderText . replace "__target__" (jsv "d3") . replace "__index__" 0 . replace "__group__" 0 . replace "__datum__" (jsv "__cursor__") . ($ mempty) . view jstat -- | Renders a `JExpr` as a string. renderText :: JStat -> String renderText = compress . show . renderPretty 1.0 500 . renderJs . onload compress :: String -> String compress = flip (subRegex (mkRegexWithOpts "var (jmId_[0-9]+);[ \\t\n]*jmId_[0-9]+" True True)) "var \\1" -- | Convenience method for attaching behavior to the browser window's -- onload method. onload :: JStat -> JStat onload js = [jmacro| window.onload = function () { `(js)`; var svg = d3.selectAll("svg"); var txt = d3.selectAll("text"); window.onresize = function() { var height = parseFloat(svg.style("height")); var width = parseFloat(svg.style("width")); var scalar = 12 / height; txt.style("font-size", scalar).attr("transform", "scale(" + (height / width) + ", 1)"); }; window.onresize(); }; |] ------------------------------------------------------------------------------
Soostone/hs-d3
src/Graphics/HSD3/D3/Render.hs
bsd-3-clause
2,474
0
13
571
397
220
177
41
1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE FlexibleContexts #-} module Main where import Data.Attoparsec.ByteString.Char8 (Parser) import qualified Data.Attoparsec.ByteString.Char8 as A (parseOnly, manyTill, anyChar, string, endOfLine, eitherP, endOfInput, sepBy1, many1, digit, char) import Data.ByteString.Char8 (ByteString) import qualified Data.ByteString.Char8 as S (pack) import Data.List (uncons, intersperse, isPrefixOf) import Data.Either (isLeft, lefts) import Data.Maybe (fromMaybe) import System.FilePath.Posix ((-<.>), (</>), takeDirectory, takeFileName, takeExtension) import System.Directory (makeAbsolute, doesFileExist, getDirectoryContents) import System.Posix.Files (nullFileMode, ownerReadMode, ownerWriteMode, unionFileModes, setFileMode) import System.Console.GetOpt import System.Environment import System.Exit import Data.UUID.V4 (nextRandom) import qualified Data.UUID as UUID (toString) import System.IO (stderr, hPutStrLn) import Control.Monad (when) import Data.List (nub) -- TODO : add possibility to change cpCFGdir = "/etc/NetworkManager/system-connections" -- TODO : change options -- TODO : split config into config and make -- TODO : check if /etc/NetworkManager/system-connections/file exists before overridding it (it screws up Network Manager) ----------------------- -- arguments / commands management ----------------------- data Options = Options { optHelp :: Bool , optVerbose :: Bool , optShowVersion :: Bool , optRename :: Maybe String , optOutput :: Maybe FilePath , optInput :: Maybe FilePath , optLibDirs :: [FilePath] } deriving Show defaultOptions = Options { optHelp = False , optVerbose = False , optShowVersion = False , optRename = Nothing , optOutput = Nothing , optInput = Nothing , optLibDirs = [] } data ConfigParams = ConfigParams { cpID :: String , cpUUID :: String , cpAuth :: String , cpCipher :: String , cpIP :: String , cpPort :: String , cpProt :: String , cpCAfp :: String , cpCRfp :: String , cpKYfp :: String , cpCFGdir :: String } deriving Show defConfigParams = ConfigParams { cpID = "" , cpUUID = "" , cpAuth = "" , cpCipher = "" , cpIP = "" , cpPort = "" , cpProt = "" , cpCAfp = "" , cpCRfp = "" , cpKYfp = "" , cpCFGdir = "/etc/NetworkManager/system-connections" } data Command = Extract -- extract certificate details and save to files (.cert, .ca, .key) | Config -- make configuration file and save to file | Nil -- no command deriving (Eq,Ord,Show) toCommand :: String -> Command toCommand "extract" = Extract toCommand "config" = Config toCommand _ = Nil arity :: Command -> Int arity Extract = 1 arity Config = 1 arity Nil = 0 arity_err :: Command -> String arity_err Extract = "extract command: Missing file name." arity_err Config = "config command: Missing file name." arity_err Nil = "" ----------------------- -- vpn file params ----------------------- data Params = Params { prms_tag :: (String, String) , prms_sta :: String , prms_end :: String } def = Params { prms_tag = ("<cert>", "</cert>") , prms_sta = "-----BEGIN CERTIFICATE-----" , prms_end = "-----END CERTIFICATE-----" } ca_prms = def {prms_tag = ("<ca>", "</ca>")} cert_prms = def key_prms = def {prms_tag = ("<key>", "</key>") , prms_sta = "-----BEGIN RSA PRIVATE KEY-----" , prms_end = "-----END RSA PRIVATE KEY-----" } ----------------------- -- main ----------------------- -- for testing in ghci: -- *Main> :main arg1, arg2.., opt1, opt2... 
main :: IO () main = getArgs >>= compilerOpts >>= doJobs ----------------------- -- jobs ----------------------- doJobs :: (Options, [FilePath], Command) -> IO () doJobs (args, files, cmd) = do -- head should be safe as compilerOpts fails otherwise -- resolve ambibuity if filepath is "." by replacing by "./" let fp = let fp' = head files in if fp' == "." then "./" else fp' when (takeFileName fp /= "") $ doOneJob args fp cmd -- if the filepath is a directory, then apply the command to all .ovpn files in this directory when (isDirectory fp) $ do fps <- filter (\p -> takeExtension p == ".ovpn") <$> getDirectoryContents fp putStrLn $ "FPS: " ++ (show fps) if fps == [] then do putStrLn $ "Error: No .ovpn files in " ++ fp ++ " directory." exitWith (ExitFailure 1) else mapM_ (\p -> doOneJob args p cmd) fps doOneJob :: Options -> FilePath -> Command -> IO () doOneJob args fn cmd = do putStrLn $ "FILES2: " ++ (show fn) let fn' = fromMaybe fn $ (replacepath fn) <$> optRename args content <- S.pack <$> readFile fn -- extract and save cert files when (cmd == Extract) $ doExtract args fn' content -- extract details and create a config file when (cmd == Config) $ doConfig args fn' content -- extract details and create a config file doConfig :: Options -> FilePath -> ByteString -> IO () doConfig args fn' content = do certfilestest <- filter not <$> (mapM doesFileExist $ certfilesnms fn') -- if cert files aren't there, create them if certfilestest == [] then return () else doExtract args fn' content -- cert files absolute paths fpca:fpcr:fpky:xs <- mapM makeAbsolute $ certfilesnms fn' -- parse ovpn file for relevant details let auth' = maperr1 "auth" $ A.parseOnly pAuth content ciph' = maperr1 "cipher" $ A.parseOnly pCiph content remo' = maperr1 "remote" $ A.parseOnly pIP content proto' = maperr1 "proto" $ A.parseOnly pProto content errs = lefts [auth', ciph', proto'] ++ lefts [remo'] -- exit in case error in parsing details if errs /= [] then do putStrLn $ "Error: " ++ (concat errs) exitWith (ExitFailure 1) else do cpuuid <- UUID.toString <$> nextRandom let Right auth = auth' Right ciph = ciph' Right proto = proto' Right (ip,port) = remo' cpid = fn' -- "vpn_gate_" ++ ip ++ "_" ++ proto ++ "_" ++ port ++ "_TEST" let cp = defConfigParams { cpID = cpid, cpUUID = cpuuid , cpAuth = auth, cpCipher = ciph, cpIP = ip, cpPort = port, cpProt = proto , cpCAfp = fpca, cpCRfp = fpcr, cpKYfp = fpky } if optVerbose args then do hPutStrLn stderr ("auth: " ++ auth) hPutStrLn stderr ("cipher: " ++ ciph) hPutStrLn stderr ("IP: " ++ ip) hPutStrLn stderr ("port: " ++ port) hPutStrLn stderr ("proto: " ++ proto) hPutStrLn stderr ("\nfile output:\n" ++ confStr cp) -- hPutStrLn stderr ("\nall saved in:\n" ++ fpca ++ "\n" ++ fpcr ++ "\n" ++ fpky) else return() -- saving config file let fp = cpCFGdir cp </> cpID cp writeFile fp $ confStr cp -- NetworkManager requires no permission for group and others sequence $ map (setFileMode fp) [nullFileMode, unionFileModes ownerReadMode ownerWriteMode] return () -- extract ca, cert and key strings and store them in .ca, .cert, .key files doExtract :: Options -> FilePath -> ByteString -> IO () doExtract args fn' content = do let ca' = maperr1 "ca" $ A.parseOnly (grab ca_prms) content cert' = maperr1 "cert" $ A.parseOnly (grab cert_prms) content key' = maperr1 "key" $ A.parseOnly (grab key_prms) content errs = lefts [ca', cert', key'] if errs /= [] then do putStrLn $ "Error: " ++ (concat errs) exitWith (ExitFailure 1) else do let Right ca = ca' Right cert = cert' Right key = key' 
fpca:fpcr:fpky:xs = certfilesnms fn' if optVerbose args then do hPutStrLn stderr ("\nca string:\n" ++ ca) hPutStrLn stderr ("\ncert string:\n" ++ cert) hPutStrLn stderr ("\nkey string:\n" ++ key) hPutStrLn stderr ("\nall saved in:\n" ++ fpca ++ "\n" ++ fpcr ++ "\n" ++ fpky) else return() writeFile fpca ca writeFile fpcr cert writeFile fpky key ---------------------- -- argument management ---------------------- options :: [OptDescr (Options -> Options)] options = [ Option ['h','?'] ["help"] (NoArg (\ opts -> opts { optHelp = True })) "print this help message." , Option ['v'] ["verbose"] (NoArg (\ opts -> opts { optVerbose = True })) "chatty output on stderr." , Option [] ["version"] (NoArg (\ opts -> opts { optShowVersion = True })) "show version number." , Option ['r'] ["rename"] (ReqArg (\ fn opts -> opts { optRename = Just fn }) "PATH") "rename the output files." {- , Option ['o'] ["output"] (OptArg ((\ f opts -> opts { optOutput = Just f }) . fromMaybe "output") "FILE") "output FILE" , Option ['c'] [] (OptArg ((\ f opts -> opts { optInput = Just f }) . fromMaybe "input") "FILE") "input FILE" , Option ['L'] ["libdir"] (ReqArg (\ d opts -> opts { optLibDirs = optLibDirs opts ++ [d] }) "DIR") "library directory" -} ] compilerOpts :: [String] -> IO (Options, [String], Command) compilerOpts argv = case getOpt Permute options argv of (o',n'',[] ) -> do let o = foldl (flip id) defaultOptions o' -- print help if -h detected, and success exit when (optHelp o) $ do hPutStrLn stderr (usageInfo header options) exitWith ExitSuccess -- print version if --version detected, and success exit when (optShowVersion o) $ do hPutStrLn stderr "Version 0.1.0." exitWith ExitSuccess -- failure exit if no command names detected (i.e. ovpn somecmd etc...) when (null n'') $ do hPutStrLn stderr "No command name provided." exitWith (ExitFailure 1) let x:n' = n'' cmd = toCommand x -- failure exit if command is not a valid command name when (cmd == Nil) $ do hPutStrLn stderr $ x ++ " is not a command name." exitWith (ExitFailure 1) -- failure exit if not enough command arguments (typically filenames) when (arity cmd > length n') $ do hPutStrLn stderr $ arity_err cmd exitWith (ExitFailure 1) let n = take (arity cmd) n' return (o, n, cmd) (_,_,errs) -> ioError (userError (concat errs ++ usageInfo header options)) where header = "Usage: ovpn [COMMAND...] 
FILE [OPTION...]" ---------------------- -- parser functions ---------------------- -- | return the string delimited tags: tag1 prms_sta RETURNEDSTRING prms_end tag2 grab :: Params -> Parser String grab prms = do let (tag1,tag2) = prms_tag prms A.manyTill A.anyChar $ A.string $ S.pack tag1 A.manyTill A.anyChar $ A.string $ S.pack $ prms_sta prms x <- A.manyTill A.anyChar $ A.string $ S.pack $ prms_end prms return (prms_sta prms ++ x ++ prms_end prms) foldmsg :: String -> (String, (Bool, String)) -> String foldmsg = \m' (tag,(e,m)) -> if e then m' ++ "/" ++ tag ++ m else m' pSimple :: String -> Parser String pSimple s = do A.manyTill A.anyChar $ do A.endOfLine -- FIXME : should add an alternative if the key string is at the begining of the file A.string $ S.pack s A.many1 (A.char ' ') x <- A.manyTill A.anyChar $ A.eitherP A.endOfLine A.endOfInput return x -- | parse what comes after the "auth" line pAuth :: Parser String pAuth = pSimple "auth" -- | parse what comes after the "cipher" line pCiph :: Parser String pCiph = pSimple "cipher" -- | parse what comes after the "proto" line pProto :: Parser String pProto = pSimple "proto" -- | return (IP, Port) from "bla remote 255.1.23.255 1234 bla\n" type strings pIP :: Parser (String, String) pIP = do A.manyTill A.anyChar $ do A.endOfLine -- FIXME : should add an alternative if the key string is at the begining of the file A.string $ "remote" A.many1 (A.char ' ') x <- A.sepBy1 (A.many1 A.digit) (A.char '.') A.many1 (A.char ' ') y <- A.manyTill (A.digit) $ A.eitherP A.endOfLine A.endOfInput return $ (concat $ intersperse "." x, y) ---------------------- -- argtest ---------------------- confStr :: ConfigParams -> String confStr cp = "[connection]\n\ \id="++ cpID cp ++ "\n\ \uuid="++ cpUUID cp ++ "\n\ \type=vpn\n\ \ \n\ \[vpn]\n\ \service-type=org.freedesktop.NetworkManager.openvpn\n\ \connection-type=tls\n\ \auth="++ cpAuth cp ++ "\n\ \remote="++ cpIP cp ++ "\n\ \cipher="++ cpCipher cp ++ "\n\ \cert-pass-flags=0\n\ \port="++ cpPort cp ++ "\n\ \cert="++ cpCRfp cp ++ "\n\ \ca="++ cpCAfp cp ++ "\n\ \key="++ cpKYfp cp ++ "\n\ \ \n\ \[ipv6]\n\ \method=auto\n\ \ \n\ \[ipv4]\n\ \method=auto" ---------------------- -- local question ---------------------- -- needed to improve the error messages sent by ParseOnly mapLeft :: (a -> b) -> Either a c -> Either b c mapLeft f (Left x) = Left $ f x mapLeft _ (Right x) = Right x -- error msg template errmap1 :: String -> (a -> String) errmap1 s = const $ "Couldn't find '" ++ s ++ "' details. " maperr1 s = mapLeft $ errmap1 s certfilesnms :: String -> [String] certfilesnms fn = map (fn -<.>) ["ca", ".cert", ".key"] -- FIXME : check what happen if fp1 or fp2 are "." replacepath :: FilePath -> FilePath -> FilePath replacepath fp1 fp2 = dir2 </> fn2 where dir1 = takeDirectory fp1 fn1 = takeFileName fp1 dir2 = let d2 = takeDirectory fp2 in if d2 == "." && (not $ d2 `isPrefixOf` fp2) then dir1 else d2 fn2 = let f2 = takeFileName fp2 in if f2 == "" then fn1 else f2 -- hasDirectory "foo" is False, hasDirectory "./foo", "./foo/" "./bar/foo", etc are True hasDirectory :: FilePath -> Bool hasDirectory fp = let d = takeDirectory fp in if d == "." && (not $ d `isPrefixOf` fp) then False else True isDirectory :: FilePath -> Bool isDirectory fp = hasDirectory fp && takeFileName fp == ""
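-- A minimal usage sketch (added for illustration, not part of the original
-- program): feeding an inlined .ovpn-style fragment to the tag parser above.
-- The certificate body is a placeholder, not real key material.
grabDemo :: Either String String
grabDemo = A.parseOnly (grab ca_prms) sample
  where
    sample = S.pack $ unlines
      [ "remote 10.0.0.1 1194"
      , "<ca>"
      , "-----BEGIN CERTIFICATE-----"
      , "MIIB...placeholder..."
      , "-----END CERTIFICATE-----"
      , "</ca>"
      ]
-- Expected to evaluate to roughly:
--   Right "-----BEGIN CERTIFICATE-----\nMIIB...placeholder...\n-----END CERTIFICATE-----"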
JAnthelme/ovpn-utilities
app/Main.hs
bsd-3-clause
14,867
0
23
4,344
3,783
1,992
1,791
265
4
module Eval where import EvalAST import qualified Data.Map.Strict as Map import Data.List (find, intercalate) import Control.Monad.State noop :: Block noop = Block Nothing NobodyCares EmptyBlock ---------- applyPrefixOperation :: PrefixOperator -> Value -> Value applyPrefixOperation Not (BoolValue b) = BoolValue $ not b applyPrefixOperation Negate (IntValue i) = IntValue $ -i applyPrefixOperation op arg = error $ "Invalid operand for " ++ show op ++ ": " ++ show arg applyInfixOperation :: InfixOperator -> Value -> Value -> Value applyInfixOperation Times (IntValue a) (IntValue b) = IntValue $ a * b applyInfixOperation Divide (IntValue a) (IntValue b) = IntValue $ a `div` b applyInfixOperation Modulo (IntValue a) (IntValue b) = IntValue $ a `mod` b applyInfixOperation Concat a b = StringValue $ repr a ++ repr b applyInfixOperation Plus (IntValue a) (IntValue b) = IntValue $ a + b applyInfixOperation Minus (IntValue a) (IntValue b) = IntValue $ a - b applyInfixOperation Eq a b = BoolValue $ a == b applyInfixOperation NotEq a b = BoolValue $ a /= b applyInfixOperation Less a b = BoolValue $ a < b applyInfixOperation LessEq a b = BoolValue $ a <= b applyInfixOperation Greater a b = BoolValue $ a > b applyInfixOperation GreaterEq a b = BoolValue $ a >= b applyInfixOperation And (BoolValue a) (BoolValue b) = BoolValue $ a && b applyInfixOperation Or (BoolValue a) (BoolValue b) = BoolValue $ a || b applyInfixOperation op arg1 arg2 = error $ "Invalid operands for " ++ show op ++ ": " ++ show arg1 ++ ", " ++ show arg2 ---------- instance Show Scope where show scope = "{" ++ (intercalate ", " [k ++ " = " ++ show v | (k,v) <- Map.toList $ vars scope]) ++ "; " ++ show (nestedBlockIds scope) ++ "}" -- Given a scope, an lvalue and a value, return that same scope, but with the the lvalue bound to the value bind :: Scope -> LValue -> Value -> Scope bind old pattern val = Scope (bind' (vars old) pattern val) (nestedBlockIds old) where bind' :: Map.Map String Value -> LValue -> Value -> Map.Map String Value bind' old NobodyCares _ = old bind' old (VarBind varName) val = Map.insert varName val old bind' old (TuplePattern patterns) (TupleValue values) | length patterns == length values = foldl (\vars (pattern, val) -> bind' vars pattern val) old (zip patterns values) | otherwise = error $ "Pattern match failed, matching " ++ show patterns ++ " to " ++ show values bind' _ p v = error $ "Pattern match failed, matching " ++ show p ++ " to " ++ show v getAndIncrement :: State Int Int getAndIncrement = do id <- get put $ id + 1 return id -- Evaluate an expression in some scope. 
The state monad is used to assign -- unique IDs to blocks as they're encountered evaluate :: Scope -> Expr -> State Int Value evaluate _ (IntLiteral n) = return $ IntValue n evaluate _ (StringLiteral s) = return $ StringValue s evaluate _ (BoolLiteral b) = return $ BoolValue b evaluate scope (IfElse cond ifTrue ifFalse) = do evaluatedCond <- evaluate scope cond evaluatedTrue <- evaluate scope ifTrue evaluatedFalse <- evaluate scope ifFalse return $ case evaluatedCond of BoolValue True -> evaluatedTrue BoolValue False -> evaluatedFalse evaluate scope (PrefixOperation op arg) = do evaluatedArg <- evaluate scope arg return $ applyPrefixOperation op evaluatedArg evaluate scope (InfixOperation op arg1 arg2) = do evaluatedArg1 <- evaluate scope arg1 evaluatedArg2 <- evaluate scope arg2 return $ applyInfixOperation op evaluatedArg1 evaluatedArg2 evaluate scope (TupleLiteral exprs) = do values <- mapM (evaluate scope) exprs return $ TupleValue values evaluate scope (VarAccess varName) = return $ case Map.lookup varName (vars scope) of Just value -> value Nothing -> error $ "Variable not in scope: " ++ varName evaluate scope (Builtin expr (BuiltinFunc func)) = do evaluated <- evaluate scope expr return $ func evaluated evaluate scope (BlockLiteral block) = do -- get a unique ID blockId <- getAndIncrement -- if the block wanted its ID bound to a variable, add it to its scope now let newScope = case block of Block (Just name) _ _ -> bind scope (VarBind name) (BlockIdValue blockId) Block Nothing _ _ -> scope -- also add the new block ID to the new block's list of nested block IDs return $ BlockValue blockId blockId [] (Scope (vars newScope) (blockId : nestedBlockIds newScope)) block -- Current state of an executing CTU prog data CTUState = CTUState { stateBlockId :: Int, stateStack :: Stack, stateScope :: Scope, stateBody :: BlockBody, stateIdCounter :: Int } instance Show CTUState where show st = "blockId: " ++ show (stateBlockId st) ++ "\n\n" ++ "stack: " ++ simplifiedStack ++ "\n\n" ++ "scope: " ++ show (stateScope st) ++ "\n\n" ++ "body: " ++ take 100 (show $ stateBody st) ++ "\n\n" ++ "idCounter: " ++ show (stateIdCounter st) ++ "\n\n" where simplifiedStack = show $ map (map (\(name, inId, outId, _, _) -> (name, inId, outId))) $ stateStack st makeBlankState :: BlockBody -> CTUState makeBlankState body = CTUState 0 [] (Scope Map.empty [0]) body 1 -- Advance to the next state nextState :: CTUState -> CTUState nextState (CTUState blId stack scope (Bind expr (Block _ pattern body)) idCount) = -- Evaluate the expression, bind it into the scope and switch to the next block let (value, idCount2) = runState (evaluate scope expr) idCount in CTUState blId stack (bind scope pattern value) body idCount2 nextState (CTUState blId stack scope (Advance blockExpr arg listeners) idCount) = -- Evaluate the expression for the block being advanced, and make sure it really is a block let (blockValue, idCount2) = runState (evaluate scope blockExpr) idCount in case blockValue of BlockValue newUpperId newLowerId newStack newScope (Block _ argPattern blockBody) -> -- Evaluate the expression for the argument let (argValue, idCount3) = runState (evaluate scope arg) idCount2 -- bind the argument into the destination scope finalScope = bind newScope argPattern argValue -- construct a list of listeners and add it to the stack, then add all -- of the saved stack frames from the block being advanced newListeners = [(listenerName, newUpperId, blId, scope, listenerBlock) | (listenerName, listenerBlock) <- listeners] joinedStack = 
newStack ++ (newListeners : stack) -- switch to the new block in CTUState newLowerId joinedStack finalScope blockBody idCount3 _ -> error "Tried to advance something that isn't a block" nextState (CTUState blId stack scope (Yield listenerName yieldExpr contBlock) idCount) = -- for a normal yield action, yield only from blocks in the current lexical scope doYield (CTUState blId stack scope (Yield listenerName yieldExpr contBlock) idCount) (`elem` nestedBlockIds scope) nextState (CTUState blId stack scope EmptyBlock idCount) = -- at the end of a block, do an "end" yield action, but only from the current block doYield (CTUState blId stack scope (Yield "end" (TupleLiteral []) noop) idCount) (== blId) nextState (CTUState blId stack scope (QualifiedYield blockIdExpr listenerName yieldExpr contBlock) idCount) = -- evaluate the block ID expression and make sure it really is a block ID let (blockIdValue, idCount2) = runState (evaluate scope blockIdExpr) idCount in case blockIdValue of -- do the yield, but only from that particular block BlockIdValue qualifiedId -> doYield (CTUState blId stack scope (Yield listenerName yieldExpr contBlock) idCount2) (== qualifiedId) _ -> error $ "Qualified yield from something other than block id: " ++ show blockIdValue -- Do the "yield" action, taking a predicate which says which block IDs are OK doYield :: CTUState -> (Int -> Bool) -> CTUState doYield (CTUState blId stack contScope (Yield listenerName yieldExpr contBlock) idCount) idIsOkFunc = -- Evaluate the yield argument let (yieldVal, idCount2) = runState (evaluate contScope yieldExpr) idCount -- Search for a valid listener and the appropriate block to switch to; construct a continuation block (newBlockId, newStack, newScope, (Block _ newPattern newBody), contVal) = searchForListener -- Bind the yield argument and the continuation block to the destination scope finalScope = bind newScope newPattern (TupleValue [yieldVal, contVal]) -- Do the switch in CTUState newBlockId newStack finalScope newBody idCount2 where searchForListener :: (Int, Stack, Scope, Block, Value) searchForListener = searchForListener' stack [] searchForListener' :: Stack -> Stack -> (Int, Stack, Scope, Block, Value) searchForListener' [] contStack = error $ "No listener for " ++ listenerName searchForListener' (listeners : restOfStack) contStack = -- Look through the current stack frame for a valid listener case find (\(name, inId, _, _, _) -> listenerName == name && idIsOkFunc inId) listeners of -- We found one, return the appropriate destination block and construct a continuation block Just (_, inId, outId, scope, block) -> (outId, restOfStack, scope, block, (BlockValue inId blId (reverse contStack) contScope contBlock)) -- We didn't find one, move up the stack and try again; save the failed stack frame -- in the continuation block's saved stack Nothing -> searchForListener' restOfStack (listeners : contStack)
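-- A small illustrative check (added, not part of the original evaluator),
-- exercising the pure helpers above; the Value/LValue constructors come from
-- EvalAST, and the printed form depends on that module's Show instances.
bindDemo :: Scope
bindDemo = bind emptyScope pat val
  where
    emptyScope = Scope Map.empty [0]
    pat        = TuplePattern [VarBind "x", NobodyCares]
    val        = TupleValue [IntValue 42, StringValue "ignored"]
-- Binding the tuple pattern matches component-wise: "x" ends up bound to
-- IntValue 42 and the second component is discarded, so show bindDemo prints
-- something like {x = IntValue 42; [0]}.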
cg5-/continue
src/Eval.hs
bsd-3-clause
9,493
0
22
1,887
2,754
1,401
1,353
127
4
module Web.TED.Types
  ( Cue(..)
  , Paragraph(..)
  , Transcript(..)
  , transcriptToText
  ) where

import Data.Aeson
import Data.Text (Text)
import qualified Data.Text as T
import RIO

data Cue = Cue
    { time :: Int
    , text :: Text
    } deriving (Generic, Show)
instance FromJSON Cue

data Paragraph = Paragraph
    { cues :: [Cue]
    } deriving (Generic, Show)
instance FromJSON Paragraph

data Transcript = Transcript
    { paragraphs :: [Paragraph]
    } deriving (Generic, Show)
instance FromJSON Transcript

transcriptToText :: Transcript -> Text
transcriptToText (Transcript ps) =
    T.intercalate "\n" $
        map (\(Paragraph cues) -> T.intercalate " " $ map (T.replace "\n" " " . text) cues) ps
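-- A tiny illustration (added, not part of the original module): flattening a
-- hand-built transcript. Cues within a paragraph are joined with spaces,
-- paragraphs with newlines, and embedded newlines in cue text are replaced.
demoTranscript :: Text
demoTranscript = transcriptToText $ Transcript
    [ Paragraph [ Cue 0 (T.pack "Hello\neveryone"), Cue 1200 (T.pack "welcome") ]
    , Paragraph [ Cue 2400 (T.pack "to TED") ]
    ]
-- demoTranscript == T.pack "Hello everyone welcome\nto TED"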
rnons/ted2srt
backend/src/Web/TED/Types.hs
bsd-3-clause
749
0
14
189
249
141
108
26
1
{- (c) The University of Glasgow 2006 (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 -} {-# LANGUAGE CPP, TupleSections, ViewPatterns #-} module TcValidity ( Rank, UserTypeCtxt(..), checkValidType, checkValidMonoType, ContextKind(..), expectedKindInCtxt, checkValidTheta, checkValidFamPats, checkValidInstance, validDerivPred, checkInstTermination, ClsInstInfo, checkValidCoAxiom, checkValidCoAxBranch, checkValidTyFamEqn, arityErr, badATErr, checkValidTelescope, checkZonkValidTelescope, checkValidInferredKinds, allDistinctTyVars ) where #include "HsVersions.h" import Maybes -- friends: import TcUnify ( tcSubType_NC ) import TcSimplify ( simplifyAmbiguityCheck ) import TyCoRep import TcType hiding ( sizeType, sizeTypes ) import TcMType import PrelNames import Type import Coercion import Kind import CoAxiom import Class import TyCon -- others: import HsSyn -- HsType import TcRnMonad -- TcType, amongst others import TcHsSyn ( checkForRepresentationPolymorphism ) import FunDeps import FamInstEnv ( isDominatedBy, injectiveBranches, InjectivityCheckResult(..) ) import FamInst ( makeInjectivityErrors ) import Name import VarEnv import VarSet import Var ( mkTyVar ) import ErrUtils import DynFlags import Util import ListSetOps import SrcLoc import Outputable import BasicTypes import Module import Unique ( mkAlphaTyVarUnique ) import qualified GHC.LanguageExtensions as LangExt import Control.Monad import Data.List ( (\\) ) {- ************************************************************************ * * Checking for ambiguity * * ************************************************************************ Note [The ambiguity check for type signatures] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ checkAmbiguity is a check on *user-supplied type signatures*. It is *purely* there to report functions that cannot possibly be called. So for example we want to reject: f :: C a => Int The idea is there can be no legal calls to 'f' because every call will give rise to an ambiguous constraint. We could soundly omit the ambiguity check on type signatures entirely, at the expense of delaying ambiguity errors to call sites. Indeed, the flag -XAllowAmbiguousTypes switches off the ambiguity check. What about things like this: class D a b | a -> b where .. h :: D Int b => Int The Int may well fix 'b' at the call site, so that signature should not be rejected. Moreover, using *visible* fundeps is too conservative. Consider class X a b where ... class D a b | a -> b where ... instance D a b => X [a] b where... h :: X a b => a -> a Here h's type looks ambiguous in 'b', but here's a legal call: ...(h [True])... That gives rise to a (X [Bool] beta) constraint, and using the instance means we need (D Bool beta) and that fixes 'beta' via D's fundep! Behind all these special cases there is a simple guiding principle. Consider f :: <type> f = ...blah... g :: <type> g = f You would think that the definition of g would surely typecheck! After all f has exactly the same type, and g=f. But in fact f's type is instantiated and the instantiated constraints are solved against the originals, so in the case an ambiguous type it won't work. Consider our earlier example f :: C a => Int. Then in g's definition, we'll instantiate to (C alpha) and try to deduce (C alpha) from (C a), and fail. So in fact we use this as our *definition* of ambiguity. We use a very similar test for *inferred* types, to ensure that they are unambiguous. See Note [Impedence matching] in TcBinds. 
This test is very conveniently implemented by calling tcSubType <type> <type> This neatly takes account of the functional dependecy stuff above, and implicit parameter (see Note [Implicit parameters and ambiguity]). And this is what checkAmbiguity does. What about this, though? g :: C [a] => Int Is every call to 'g' ambiguous? After all, we might have intance C [a] where ... at the call site. So maybe that type is ok! Indeed even f's quintessentially ambiguous type might, just possibly be callable: with -XFlexibleInstances we could have instance C a where ... and now a call could be legal after all! Well, we'll reject this unless the instance is available *here*. Note [When to call checkAmbiguity] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We call checkAmbiguity (a) on user-specified type signatures (b) in checkValidType Conncerning (b), you might wonder about nested foralls. What about f :: forall b. (forall a. Eq a => b) -> b The nested forall is ambiguous. Originally we called checkAmbiguity in the forall case of check_type, but that had two bad consequences: * We got two error messages about (Eq b) in a nested forall like this: g :: forall a. Eq a => forall b. Eq b => a -> a * If we try to check for ambiguity of an nested forall like (forall a. Eq a => b), the implication constraint doesn't bind all the skolems, which results in "No skolem info" in error messages (see Trac #10432). To avoid this, we call checkAmbiguity once, at the top, in checkValidType. (I'm still a bit worried about unbound skolems when the type mentions in-scope type variables.) In fact, because of the co/contra-variance implemented in tcSubType, this *does* catch function f above. too. Concerning (a) the ambiguity check is only used for *user* types, not for types coming from inteface files. The latter can legitimately have ambiguous types. Example class S a where s :: a -> (Int,Int) instance S Char where s _ = (1,1) f:: S a => [a] -> Int -> (Int,Int) f (_::[a]) x = (a*x,b) where (a,b) = s (undefined::a) Here the worker for f gets the type fw :: forall a. S a => Int -> (# Int, Int #) Note [Implicit parameters and ambiguity] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Only a *class* predicate can give rise to ambiguity An *implicit parameter* cannot. For example: foo :: (?x :: [a]) => Int foo = length ?x is fine. The call site will supply a particular 'x' Furthermore, the type variables fixed by an implicit parameter propagate to the others. E.g. foo :: (Show a, ?x::[a]) => Int foo = show (?x++?x) The type of foo looks ambiguous. But it isn't, because at a call site we might have let ?x = 5::Int in foo and all is well. In effect, implicit parameters are, well, parameters, so we can take their type variables into account as part of the "tau-tvs" stuff. This is done in the function 'FunDeps.grow'. -} checkAmbiguity :: UserTypeCtxt -> Type -> TcM () checkAmbiguity ctxt ty | wantAmbiguityCheck ctxt = do { traceTc "Ambiguity check for" (ppr ty) -- Solve the constraints eagerly because an ambiguous type -- can cause a cascade of further errors. 
Since the free -- tyvars are skolemised, we can safely use tcSimplifyTop ; allow_ambiguous <- xoptM LangExt.AllowAmbiguousTypes ; (_wrap, wanted) <- addErrCtxt (mk_msg allow_ambiguous) $ captureConstraints $ tcSubType_NC ctxt ty (mkCheckExpType ty) ; simplifyAmbiguityCheck ty wanted ; traceTc "Done ambiguity check for" (ppr ty) } | otherwise = return () where mk_msg allow_ambiguous = vcat [ text "In the ambiguity check for" <+> what , ppUnless allow_ambiguous ambig_msg ] ambig_msg = text "To defer the ambiguity check to use sites, enable AllowAmbiguousTypes" what | Just n <- isSigMaybe ctxt = quotes (ppr n) | otherwise = pprUserTypeCtxt ctxt wantAmbiguityCheck :: UserTypeCtxt -> Bool wantAmbiguityCheck ctxt = case ctxt of -- See Note [When we don't check for ambiguity] GhciCtxt -> False TySynCtxt {} -> False _ -> True checkUserTypeError :: Type -> TcM () -- Check to see if the type signature mentions "TypeError blah" -- anywhere in it, and fail if so. -- -- Very unsatisfactorily (Trac #11144) we need to tidy the type -- because it may have come from an /inferred/ signature, not a -- user-supplied one. This is really only a half-baked fix; -- the other errors in checkValidType don't do tidying, and so -- may give bad error messages when given an inferred type. checkUserTypeError = check where check ty | Just msg <- userTypeError_maybe ty = fail_with msg | Just (_,ts) <- splitTyConApp_maybe ty = mapM_ check ts | Just (t1,t2) <- splitAppTy_maybe ty = check t1 >> check t2 | Just (_,t1) <- splitForAllTy_maybe ty = check t1 | otherwise = return () fail_with msg = do { env0 <- tcInitTidyEnv ; let (env1, tidy_msg) = tidyOpenType env0 msg ; failWithTcM (env1, pprUserTypeErrorTy tidy_msg) } {- Note [When we don't check for ambiguity] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In a few places we do not want to check a user-specified type for ambiguity * GhciCtxt: Allow ambiguous types in GHCi's :kind command E.g. type family T a :: * -- T :: forall k. k -> * Then :k T should work in GHCi, not complain that (T k) is ambiguous! * TySynCtxt: type T a b = C a b => blah It may be that when we /use/ T, we'll give an 'a' or 'b' that somehow cure the ambiguity. So we defer the ambiguity check to the use site. There is also an implementation reason (Trac #11608). In the RHS of a type synonym we don't (currently) instantiate 'a' and 'b' with TcTyVars before calling checkValidType, so we get asertion failures from doing an ambiguity check on a type with TyVars in it. Fixing this would not be hard, but let's wait till there's a reason. ************************************************************************ * * Checking validity of a user-defined type * * ************************************************************************ When dealing with a user-written type, we first translate it from an HsType to a Type, performing kind checking, and then check various things that should be true about it. We don't want to perform these checks at the same time as the initial translation because (a) they are unnecessary for interface-file types and (b) when checking a mutually recursive group of type and class decls, we can't "look" at the tycons/classes yet. Also, the checks are rather diverse, and used to really mess up the other code. One thing we check for is 'rank'. Rank 0: monotypes (no foralls) Rank 1: foralls at the front only, Rank 0 inside Rank 2: foralls at the front, Rank 1 on left of fn arrow, basic ::= tyvar | T basic ... basic r2 ::= forall tvs. cxt => r2a r2a ::= r1 -> r2a | basic r1 ::= forall tvs. 
cxt => r0 r0 ::= r0 -> r0 | basic Another thing is to check that type synonyms are saturated. This might not necessarily show up in kind checking. type A i = i data T k = MkT (k Int) f :: T A -- BAD! -} checkValidType :: UserTypeCtxt -> Type -> TcM () -- Checks that a user-written type is valid for the given context -- Assumes arguemt is fully zonked -- Not used for instance decls; checkValidInstance instead checkValidType ctxt ty = do { traceTc "checkValidType" (ppr ty <+> text "::" <+> ppr (typeKind ty)) ; rankn_flag <- xoptM LangExt.RankNTypes ; impred_flag <- xoptM LangExt.ImpredicativeTypes ; let gen_rank :: Rank -> Rank gen_rank r | rankn_flag = ArbitraryRank | otherwise = r rank1 = gen_rank r1 rank0 = gen_rank r0 r0 = rankZeroMonoType r1 = LimitedRank True r0 rank = case ctxt of DefaultDeclCtxt-> MustBeMonoType ResSigCtxt -> MustBeMonoType PatSigCtxt -> rank0 RuleSigCtxt _ -> rank1 TySynCtxt _ -> rank0 ExprSigCtxt -> rank1 TypeAppCtxt | impred_flag -> ArbitraryRank | otherwise -> tyConArgMonoType -- Normally, ImpredicativeTypes is handled in check_arg_type, -- but visible type applications don't go through there. -- So we do this check here. FunSigCtxt {} -> rank1 InfSigCtxt _ -> ArbitraryRank -- Inferred type ConArgCtxt _ -> rank1 -- We are given the type of the entire -- constructor, hence rank 1 ForSigCtxt _ -> rank1 SpecInstCtxt -> rank1 ThBrackCtxt -> rank1 GhciCtxt -> ArbitraryRank _ -> panic "checkValidType" -- Can't happen; not used for *user* sigs ; env <- tcInitOpenTidyEnv (tyCoVarsOfType ty) -- Check the internal validity of the type itself ; check_type env ctxt rank ty -- Check that the thing has kind Type, and is lifted if necessary. -- Do this *after* check_type, because we can't usefully take -- the kind of an ill-formed type such as (a~Int) ; check_kind env ctxt ty ; checkUserTypeError ty -- Check for ambiguous types. See Note [When to call checkAmbiguity] -- NB: this will happen even for monotypes, but that should be cheap; -- and there may be nested foralls for the subtype test to examine ; checkAmbiguity ctxt ty ; traceTc "checkValidType done" (ppr ty <+> text "::" <+> ppr (typeKind ty)) } checkValidMonoType :: Type -> TcM () -- Assumes arguemt is fully zonked checkValidMonoType ty = do { env <- tcInitOpenTidyEnv (tyCoVarsOfType ty) ; check_type env SigmaCtxt MustBeMonoType ty } check_kind :: TidyEnv -> UserTypeCtxt -> TcType -> TcM () -- Check that the type's kind is acceptable for the context check_kind env ctxt ty | TySynCtxt {} <- ctxt , returnsConstraintKind actual_kind = do { ck <- xoptM LangExt.ConstraintKinds ; if ck then when (isConstraintKind actual_kind) (do { dflags <- getDynFlags ; check_pred_ty env dflags ctxt ty }) else addErrTcM (constraintSynErr env actual_kind) } | otherwise = case expectedKindInCtxt ctxt of TheKind k -> checkTcM (tcEqType actual_kind k) (kindErr env actual_kind) OpenKind -> checkTcM (classifiesTypeWithValues actual_kind) (kindErr env actual_kind) AnythingKind -> return () where actual_kind = typeKind ty -- | The kind expected in a certain context. data ContextKind = TheKind Kind -- ^ a specific kind | AnythingKind -- ^ any kind will do | OpenKind -- ^ something of the form @TYPE _@ -- Depending on the context, we might accept any kind (for instance, in a TH -- splice), or only certain kinds (like in type signatures). 
expectedKindInCtxt :: UserTypeCtxt -> ContextKind expectedKindInCtxt (TySynCtxt _) = AnythingKind expectedKindInCtxt ThBrackCtxt = AnythingKind expectedKindInCtxt GhciCtxt = AnythingKind -- The types in a 'default' decl can have varying kinds -- See Note [Extended defaults]" in TcEnv expectedKindInCtxt DefaultDeclCtxt = AnythingKind expectedKindInCtxt TypeAppCtxt = AnythingKind expectedKindInCtxt (ForSigCtxt _) = TheKind liftedTypeKind expectedKindInCtxt InstDeclCtxt = TheKind constraintKind expectedKindInCtxt SpecInstCtxt = TheKind constraintKind expectedKindInCtxt _ = OpenKind {- Note [Higher rank types] ~~~~~~~~~~~~~~~~~~~~~~~~ Technically Int -> forall a. a->a is still a rank-1 type, but it's not Haskell 98 (Trac #5957). So the validity checker allow a forall after an arrow only if we allow it before -- that is, with Rank2Types or RankNTypes -} data Rank = ArbitraryRank -- Any rank ok | LimitedRank -- Note [Higher rank types] Bool -- Forall ok at top Rank -- Use for function arguments | MonoType SDoc -- Monotype, with a suggestion of how it could be a polytype | MustBeMonoType -- Monotype regardless of flags rankZeroMonoType, tyConArgMonoType, synArgMonoType, constraintMonoType :: Rank rankZeroMonoType = MonoType (text "Perhaps you intended to use RankNTypes or Rank2Types") tyConArgMonoType = MonoType (text "GHC doesn't yet support impredicative polymorphism") synArgMonoType = MonoType (text "Perhaps you intended to use LiberalTypeSynonyms") constraintMonoType = MonoType (text "A constraint must be a monotype") funArgResRank :: Rank -> (Rank, Rank) -- Function argument and result funArgResRank (LimitedRank _ arg_rank) = (arg_rank, LimitedRank (forAllAllowed arg_rank) arg_rank) funArgResRank other_rank = (other_rank, other_rank) forAllAllowed :: Rank -> Bool forAllAllowed ArbitraryRank = True forAllAllowed (LimitedRank forall_ok _) = forall_ok forAllAllowed _ = False -- The zonker issues errors if it zonks a representation-polymorphic binder -- But sometimes it's nice to check a little more eagerly, trying to report -- errors earlier. representationPolymorphismForbidden :: UserTypeCtxt -> Bool representationPolymorphismForbidden = go where go (ConArgCtxt _) = True -- A rep-polymorphic datacon won't be useful go (PatSynCtxt _) = True -- Similar to previous case go _ = False -- Other cases are caught by zonker ---------------------------------------- -- | Fail with error message if the type is unlifted check_lifted :: Type -> TcM () check_lifted _ = return () {- ------ Legacy comment --------- The check_unlifted function seems entirely redundant. The kind system should check for uses of unlifted types. So I've removed the check. See Trac #11120 comment:19. check_lifted ty = do { env <- tcInitOpenTidyEnv (tyCoVarsOfType ty) ; checkTcM (not (isUnliftedType ty)) (unliftedArgErr env ty) } unliftedArgErr :: TidyEnv -> Type -> (TidyEnv, SDoc) unliftedArgErr env ty = (env, sep [text "Illegal unlifted type:", ppr_tidy env ty]) ------ End of legacy comment --------- -} check_type :: TidyEnv -> UserTypeCtxt -> Rank -> Type -> TcM () -- The args say what the *type context* requires, independent -- of *flag* settings. You test the flag settings at usage sites. -- -- Rank is allowed rank for function args -- Rank 0 means no for-alls anywhere check_type env ctxt rank ty | not (null tvs && null theta) = do { traceTc "check_type" (ppr ty $$ ppr (forAllAllowed rank)) ; checkTcM (forAllAllowed rank) (forAllTyErr env rank ty) -- Reject e.g. 
(Maybe (?x::Int => Int)), -- with a decent error message ; check_valid_theta env' SigmaCtxt theta -- Allow type T = ?x::Int => Int -> Int -- but not type T = ?x::Int ; check_type env' ctxt rank tau -- Allow foralls to right of arrow ; checkTcM (not (any (`elemVarSet` tyCoVarsOfType phi_kind) tvs)) (forAllEscapeErr env' ty tau_kind) } where (tvs, theta, tau) = tcSplitSigmaTy ty tau_kind = typeKind tau (env', _) = tidyTyCoVarBndrs env tvs phi_kind | null theta = tau_kind | otherwise = liftedTypeKind -- If there are any constraints, the kind is *. (#11405) check_type _ _ _ (TyVarTy _) = return () check_type env ctxt rank (ForAllTy (Anon arg_ty) res_ty) = do { check_type env ctxt arg_rank arg_ty ; when (representationPolymorphismForbidden ctxt) $ checkForRepresentationPolymorphism empty arg_ty ; check_type env ctxt res_rank res_ty } where (arg_rank, res_rank) = funArgResRank rank check_type env ctxt rank (AppTy ty1 ty2) = do { check_arg_type env ctxt rank ty1 ; check_arg_type env ctxt rank ty2 } check_type env ctxt rank ty@(TyConApp tc tys) | isTypeSynonymTyCon tc || isTypeFamilyTyCon tc = check_syn_tc_app env ctxt rank ty tc tys | isUnboxedTupleTyCon tc = check_ubx_tuple env ctxt ty tys | otherwise = mapM_ (check_arg_type env ctxt rank) tys check_type _ _ _ (LitTy {}) = return () check_type env ctxt rank (CastTy ty _) = check_type env ctxt rank ty check_type _ _ _ ty = pprPanic "check_type" (ppr ty) ---------------------------------------- check_syn_tc_app :: TidyEnv -> UserTypeCtxt -> Rank -> KindOrType -> TyCon -> [KindOrType] -> TcM () -- Used for type synonyms and type synonym families, -- which must be saturated, -- but not data families, which need not be saturated check_syn_tc_app env ctxt rank ty tc tys | tc_arity <= length tys -- Saturated -- Check that the synonym has enough args -- This applies equally to open and closed synonyms -- It's OK to have an *over-applied* type synonym -- data Tree a b = ... -- type Foo a = Tree [a] -- f :: Foo a b -> ... = do { -- See Note [Liberal type synonyms] ; liberal <- xoptM LangExt.LiberalTypeSynonyms ; if not liberal || isTypeFamilyTyCon tc then -- For H98 and synonym families, do check the type args mapM_ check_arg tys else -- In the liberal case (only for closed syns), expand then check case coreView ty of Just ty' -> check_type env ctxt rank ty' Nothing -> pprPanic "check_tau_type" (ppr ty) } | GhciCtxt <- ctxt -- Accept under-saturated type synonyms in -- GHCi :kind commands; see Trac #7586 = mapM_ check_arg tys | otherwise = failWithTc (tyConArityErr tc tys) where tc_arity = tyConArity tc check_arg | isTypeFamilyTyCon tc = check_arg_type env ctxt rank | otherwise = check_type env ctxt synArgMonoType ---------------------------------------- check_ubx_tuple :: TidyEnv -> UserTypeCtxt -> KindOrType -> [KindOrType] -> TcM () check_ubx_tuple env ctxt ty tys = do { ub_tuples_allowed <- xoptM LangExt.UnboxedTuples ; checkTcM ub_tuples_allowed (ubxArgTyErr env ty) ; impred <- xoptM LangExt.ImpredicativeTypes ; let rank' = if impred then ArbitraryRank else tyConArgMonoType -- c.f. check_arg_type -- However, args are allowed to be unlifted, or -- more unboxed tuples, so can't use check_arg_ty ; mapM_ (check_type env ctxt rank') tys } ---------------------------------------- check_arg_type :: TidyEnv -> UserTypeCtxt -> Rank -> KindOrType -> TcM () -- The sort of type that can instantiate a type variable, -- or be the argument of a type constructor. 
-- Not an unboxed tuple, but now *can* be a forall (since impredicativity) -- Other unboxed types are very occasionally allowed as type -- arguments depending on the kind of the type constructor -- -- For example, we want to reject things like: -- -- instance Ord a => Ord (forall s. T s a) -- and -- g :: T s (forall b.b) -- -- NB: unboxed tuples can have polymorphic or unboxed args. -- This happens in the workers for functions returning -- product types with polymorphic components. -- But not in user code. -- Anyway, they are dealt with by a special case in check_tau_type check_arg_type _ _ _ (CoercionTy {}) = return () check_arg_type env ctxt rank ty = do { impred <- xoptM LangExt.ImpredicativeTypes ; let rank' = case rank of -- Predictive => must be monotype MustBeMonoType -> MustBeMonoType -- Monotype, regardless _other | impred -> ArbitraryRank | otherwise -> tyConArgMonoType -- Make sure that MustBeMonoType is propagated, -- so that we don't suggest -XImpredicativeTypes in -- (Ord (forall a.a)) => a -> a -- and so that if it Must be a monotype, we check that it is! ; check_type env ctxt rank' ty ; check_lifted ty } -- NB the isUnliftedType test also checks for -- T State# -- where there is an illegal partial application of State# (which has -- kind * -> #); see Note [The kind invariant] in TyCoRep ---------------------------------------- forAllTyErr :: TidyEnv -> Rank -> Type -> (TidyEnv, SDoc) forAllTyErr env rank ty = ( env , vcat [ hang herald 2 (ppr_tidy env ty) , suggestion ] ) where (tvs, _theta, _tau) = tcSplitSigmaTy ty herald | null tvs = text "Illegal qualified type:" | otherwise = text "Illegal polymorphic type:" suggestion = case rank of LimitedRank {} -> text "Perhaps you intended to use RankNTypes or Rank2Types" MonoType d -> d _ -> Outputable.empty -- Polytype is always illegal forAllEscapeErr :: TidyEnv -> Type -> Kind -> (TidyEnv, SDoc) forAllEscapeErr env ty tau_kind = ( env , hang (vcat [ text "Quantified type's kind mentions quantified type variable" , text "(skolem escape)" ]) 2 (vcat [ text " type:" <+> ppr_tidy env ty , text "of kind:" <+> ppr_tidy env tau_kind ]) ) ubxArgTyErr :: TidyEnv -> Type -> (TidyEnv, SDoc) ubxArgTyErr env ty = (env, sep [text "Illegal unboxed tuple type as function argument:", ppr_tidy env ty]) kindErr :: TidyEnv -> Kind -> (TidyEnv, SDoc) kindErr env kind = (env, sep [text "Expecting an ordinary type, but found a type of kind", ppr_tidy env kind]) {- Note [Liberal type synonyms] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If -XLiberalTypeSynonyms is on, expand closed type synonyms *before* doing validity checking. This allows us to instantiate a synonym defn with a for-all type, or with a partially-applied type synonym. e.g. type T a b = a type S m = m () f :: S (T Int) Here, T is partially applied, so it's illegal in H98. But if you expand S first, then T we get just f :: Int which is fine. IMPORTANT: suppose T is a type synonym. Then we must do validity checking on an appliation (T ty1 ty2) *either* before expansion (i.e. check ty1, ty2) *or* after expansion (i.e. expand T ty1 ty2, and then check) BUT NOT BOTH If we do both, we get exponential behaviour!! 
data TIACons1 i r c = c i ::: r c type TIACons2 t x = TIACons1 t (TIACons1 t x) type TIACons3 t x = TIACons2 t (TIACons1 t x) type TIACons4 t x = TIACons2 t (TIACons2 t x) type TIACons7 t x = TIACons4 t (TIACons3 t x) ************************************************************************ * * \subsection{Checking a theta or source type} * * ************************************************************************ Note [Implicit parameters in instance decls] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Implicit parameters _only_ allowed in type signatures; not in instance decls, superclasses etc. The reason for not allowing implicit params in instances is a bit subtle. If we allowed instance (?x::Int, Eq a) => Foo [a] where ... then when we saw (e :: (?x::Int) => t) it would be unclear how to discharge all the potential uses of the ?x in e. For example, a constraint Foo [Int] might come out of e, and applying the instance decl would show up two uses of ?x. Trac #8912. -} checkValidTheta :: UserTypeCtxt -> ThetaType -> TcM () -- Assumes arguemt is fully zonked checkValidTheta ctxt theta = do { env <- tcInitOpenTidyEnv (tyCoVarsOfTypes theta) ; addErrCtxtM (checkThetaCtxt ctxt theta) $ check_valid_theta env ctxt theta } ------------------------- check_valid_theta :: TidyEnv -> UserTypeCtxt -> [PredType] -> TcM () check_valid_theta _ _ [] = return () check_valid_theta env ctxt theta = do { dflags <- getDynFlags ; warnTcM (Reason Opt_WarnDuplicateConstraints) (wopt Opt_WarnDuplicateConstraints dflags && notNull dups) (dupPredWarn env dups) ; traceTc "check_valid_theta" (ppr theta) ; mapM_ (check_pred_ty env dflags ctxt) theta } where (_,dups) = removeDups cmpType theta ------------------------- {- Note [Validity checking for constraints] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We look through constraint synonyms so that we can see the underlying constraint(s). For example type Foo = ?x::Int instance Foo => C T We should reject the instance because it has an implicit parameter in the context. But we record, in 'under_syn', whether we have looked under a synonym to avoid requiring language extensions at the use site. Main example (Trac #9838): {-# LANGUAGE ConstraintKinds #-} module A where type EqShow a = (Eq a, Show a) module B where import A foo :: EqShow a => a -> String We don't want to require ConstraintKinds in module B. -} check_pred_ty :: TidyEnv -> DynFlags -> UserTypeCtxt -> PredType -> TcM () -- Check the validity of a predicate in a signature -- See Note [Validity checking for constraints] check_pred_ty env dflags ctxt pred = do { check_type env SigmaCtxt constraintMonoType pred ; check_pred_help False env dflags ctxt pred } check_pred_help :: Bool -- True <=> under a type synonym -> TidyEnv -> DynFlags -> UserTypeCtxt -> PredType -> TcM () check_pred_help under_syn env dflags ctxt pred | Just pred' <- coreView pred -- Switch on under_syn when going under a -- synonym (Trac #9838, yuk) = check_pred_help True env dflags ctxt pred' | otherwise = case splitTyConApp_maybe pred of Just (tc, tys) | isTupleTyCon tc -> check_tuple_pred under_syn env dflags ctxt pred tys -- NB: this equality check must come first, because (~) is a class, -- too. 
| tc `hasKey` heqTyConKey || tc `hasKey` eqTyConKey || tc `hasKey` eqPrimTyConKey -> check_eq_pred env dflags pred tc tys | Just cls <- tyConClass_maybe tc -> check_class_pred env dflags ctxt pred cls tys -- Includes Coercible _ -> check_irred_pred under_syn env dflags ctxt pred check_eq_pred :: TidyEnv -> DynFlags -> PredType -> TyCon -> [TcType] -> TcM () check_eq_pred env dflags pred tc tys = -- Equational constraints are valid in all contexts if type -- families are permitted do { checkTc (length tys == tyConArity tc) (tyConArityErr tc tys) ; checkTcM (xopt LangExt.TypeFamilies dflags || xopt LangExt.GADTs dflags) (eqPredTyErr env pred) } check_tuple_pred :: Bool -> TidyEnv -> DynFlags -> UserTypeCtxt -> PredType -> [PredType] -> TcM () check_tuple_pred under_syn env dflags ctxt pred ts = do { -- See Note [ConstraintKinds in predicates] checkTcM (under_syn || xopt LangExt.ConstraintKinds dflags) (predTupleErr env pred) ; mapM_ (check_pred_help under_syn env dflags ctxt) ts } -- This case will not normally be executed because without -- -XConstraintKinds tuple types are only kind-checked as * check_irred_pred :: Bool -> TidyEnv -> DynFlags -> UserTypeCtxt -> PredType -> TcM () check_irred_pred under_syn env dflags ctxt pred -- The predicate looks like (X t1 t2) or (x t1 t2) :: Constraint -- where X is a type function = do { -- If it looks like (x t1 t2), require ConstraintKinds -- see Note [ConstraintKinds in predicates] -- But (X t1 t2) is always ok because we just require ConstraintKinds -- at the definition site (Trac #9838) failIfTcM (not under_syn && not (xopt LangExt.ConstraintKinds dflags) && hasTyVarHead pred) (predIrredErr env pred) -- Make sure it is OK to have an irred pred in this context -- See Note [Irreducible predicates in superclasses] ; failIfTcM (is_superclass ctxt && not (xopt LangExt.UndecidableInstances dflags) && has_tyfun_head pred) (predSuperClassErr env pred) } where is_superclass ctxt = case ctxt of { ClassSCCtxt _ -> True; _ -> False } has_tyfun_head ty = case tcSplitTyConApp_maybe ty of Just (tc, _) -> isTypeFamilyTyCon tc Nothing -> False {- Note [ConstraintKinds in predicates] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Don't check for -XConstraintKinds under a type synonym, because that was done at the type synonym definition site; see Trac #9838 e.g. module A where type C a = (Eq a, Ix a) -- Needs -XConstraintKinds module B where import A f :: C a => a -> a -- Does *not* need -XConstraintKinds Note [Irreducible predicates in superclasses] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Allowing type-family calls in class superclasses is somewhat dangerous because we can write: type family Fooish x :: * -> Constraint type instance Fooish () = Foo class Fooish () a => Foo a where This will cause the constraint simplifier to loop because every time we canonicalise a (Foo a) class constraint we add a (Fooish () a) constraint which will be immediately solved to add+canonicalise another (Foo a) constraint. 
-} ------------------------- check_class_pred :: TidyEnv -> DynFlags -> UserTypeCtxt -> PredType -> Class -> [TcType] -> TcM () check_class_pred env dflags ctxt pred cls tys | isIPClass cls = do { check_arity ; checkTcM (okIPCtxt ctxt) (badIPPred env pred) } | otherwise = do { check_arity ; checkTcM arg_tys_ok (env, predTyVarErr (tidyType env pred)) } where check_arity = checkTc (classArity cls == length tys) (tyConArityErr (classTyCon cls) tys) flexible_contexts = xopt LangExt.FlexibleContexts dflags undecidable_ok = xopt LangExt.UndecidableInstances dflags arg_tys_ok = case ctxt of SpecInstCtxt -> True -- {-# SPECIALISE instance Eq (T Int) #-} is fine InstDeclCtxt -> checkValidClsArgs (flexible_contexts || undecidable_ok) cls tys -- Further checks on head and theta -- in checkInstTermination _ -> checkValidClsArgs flexible_contexts cls tys ------------------------- okIPCtxt :: UserTypeCtxt -> Bool -- See Note [Implicit parameters in instance decls] okIPCtxt (FunSigCtxt {}) = True okIPCtxt (InfSigCtxt {}) = True okIPCtxt ExprSigCtxt = True okIPCtxt TypeAppCtxt = True okIPCtxt PatSigCtxt = True okIPCtxt ResSigCtxt = True okIPCtxt GenSigCtxt = True okIPCtxt (ConArgCtxt {}) = True okIPCtxt (ForSigCtxt {}) = True -- ?? okIPCtxt ThBrackCtxt = True okIPCtxt GhciCtxt = True okIPCtxt SigmaCtxt = True okIPCtxt (DataTyCtxt {}) = True okIPCtxt (PatSynCtxt {}) = True okIPCtxt (TySynCtxt {}) = True -- e.g. type Blah = ?x::Int -- Trac #11466 okIPCtxt (ClassSCCtxt {}) = False okIPCtxt (InstDeclCtxt {}) = False okIPCtxt (SpecInstCtxt {}) = False okIPCtxt (RuleSigCtxt {}) = False okIPCtxt DefaultDeclCtxt = False badIPPred :: TidyEnv -> PredType -> (TidyEnv, SDoc) badIPPred env pred = ( env , text "Illegal implicit parameter" <+> quotes (ppr_tidy env pred) ) {- Note [Kind polymorphic type classes] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ MultiParam check: class C f where... -- C :: forall k. k -> Constraint instance C Maybe where... The dictionary gets type [C * Maybe] even if it's not a MultiParam type class. Flexibility check: class C f where... -- C :: forall k. k -> Constraint data D a = D a instance C D where The dictionary gets type [C * (D *)]. IA0_TODO it should be generalized actually. -} checkThetaCtxt :: UserTypeCtxt -> ThetaType -> TidyEnv -> TcM (TidyEnv, SDoc) checkThetaCtxt ctxt theta env = return ( env , vcat [ text "In the context:" <+> pprTheta (tidyTypes env theta) , text "While checking" <+> pprUserTypeCtxt ctxt ] ) eqPredTyErr, predTupleErr, predIrredErr, predSuperClassErr :: TidyEnv -> PredType -> (TidyEnv, SDoc) eqPredTyErr env pred = ( env , text "Illegal equational constraint" <+> ppr_tidy env pred $$ parens (text "Use GADTs or TypeFamilies to permit this") ) predTupleErr env pred = ( env , hang (text "Illegal tuple constraint:" <+> ppr_tidy env pred) 2 (parens constraintKindsMsg) ) predIrredErr env pred = ( env , hang (text "Illegal constraint:" <+> ppr_tidy env pred) 2 (parens constraintKindsMsg) ) predSuperClassErr env pred = ( env , hang (text "Illegal constraint" <+> quotes (ppr_tidy env pred) <+> text "in a superclass context") 2 (parens undecidableMsg) ) predTyVarErr :: PredType -> SDoc -- type is already tidied! 
predTyVarErr pred = vcat [ hang (text "Non type-variable argument") 2 (text "in the constraint:" <+> ppr pred) , parens (text "Use FlexibleContexts to permit this") ] constraintSynErr :: TidyEnv -> Type -> (TidyEnv, SDoc) constraintSynErr env kind = ( env , hang (text "Illegal constraint synonym of kind:" <+> quotes (ppr_tidy env kind)) 2 (parens constraintKindsMsg) ) dupPredWarn :: TidyEnv -> [[PredType]] -> (TidyEnv, SDoc) dupPredWarn env dups = ( env , text "Duplicate constraint" <> plural primaryDups <> text ":" <+> pprWithCommas (ppr_tidy env) primaryDups ) where primaryDups = map head dups tyConArityErr :: TyCon -> [TcType] -> SDoc -- For type-constructor arity errors, be careful to report -- the number of /visible/ arguments required and supplied, -- ignoring the /invisible/ arguments, which the user does not see. -- (e.g. Trac #10516) tyConArityErr tc tks = arityErr (tyConFlavour tc) (tyConName tc) tc_type_arity tc_type_args where vis_tks = filterOutInvisibleTypes tc tks -- tc_type_arity = number of *type* args expected -- tc_type_args = number of *type* args encountered tc_type_arity = count isVisibleBinder $ tyConBinders tc tc_type_args = length vis_tks arityErr :: Outputable a => String -> a -> Int -> Int -> SDoc arityErr what name n m = hsep [ text "The" <+> text what, quotes (ppr name), text "should have", n_arguments <> comma, text "but has been given", if m==0 then text "none" else int m] where n_arguments | n == 0 = text "no arguments" | n == 1 = text "1 argument" | True = hsep [int n, text "arguments"] {- ************************************************************************ * * \subsection{Checking for a decent instance head type} * * ************************************************************************ @checkValidInstHead@ checks the type {\em and} its syntactic constraints: it must normally look like: @instance Foo (Tycon a b c ...) ...@ The exceptions to this syntactic checking: (1)~if the @GlasgowExts@ flag is on, or (2)~the instance is imported (they must have been compiled elsewhere). In these cases, we let them go through anyway. We can also have instances for functions: @instance Foo (a -> b) ...@. -} checkValidInstHead :: UserTypeCtxt -> Class -> [Type] -> TcM () checkValidInstHead ctxt clas cls_args = do { dflags <- getDynFlags ; mod <- getModule ; checkTc (getUnique clas `notElem` abstractClassKeys || nameModule (getName clas) == mod) (instTypeErr clas cls_args abstract_class_msg) -- Check language restrictions; -- but not for SPECIALISE instance pragmas ; let ty_args = filterOutInvisibleTypes (classTyCon clas) cls_args ; unless spec_inst_prag $ do { checkTc (xopt LangExt.TypeSynonymInstances dflags || all tcInstHeadTyNotSynonym ty_args) (instTypeErr clas cls_args head_type_synonym_msg) ; checkTc (xopt LangExt.FlexibleInstances dflags || all tcInstHeadTyAppAllTyVars ty_args) (instTypeErr clas cls_args head_type_args_tyvars_msg) ; checkTc (xopt LangExt.MultiParamTypeClasses dflags || length ty_args == 1 || -- Only count type arguments (xopt LangExt.NullaryTypeClasses dflags && null ty_args)) (instTypeErr clas cls_args head_one_type_msg) } ; mapM_ checkValidTypePat ty_args } where spec_inst_prag = case ctxt of { SpecInstCtxt -> True; _ -> False } head_type_synonym_msg = parens ( text "All instance types must be of the form (T t1 ... tn)" $$ text "where T is not a synonym." $$ text "Use TypeSynonymInstances if you want to disable this.") head_type_args_tyvars_msg = parens (vcat [ text "All instance types must be of the form (T a1 ... an)", text "where a1 ... 
an are *distinct type variables*,", text "and each type variable appears at most once in the instance head.", text "Use FlexibleInstances if you want to disable this."]) head_one_type_msg = parens ( text "Only one type can be given in an instance head." $$ text "Use MultiParamTypeClasses if you want to allow more, or zero.") abstract_class_msg = text "Manual instances of this class are not permitted." tcInstHeadTyNotSynonym :: Type -> Bool -- Used in Haskell-98 mode, for the argument types of an instance head -- These must not be type synonyms, but everywhere else type synonyms -- are transparent, so we need a special function here tcInstHeadTyNotSynonym ty = case ty of -- Do not use splitTyConApp, -- because that expands synonyms! TyConApp tc _ -> not (isTypeSynonymTyCon tc) _ -> True tcInstHeadTyAppAllTyVars :: Type -> Bool -- Used in Haskell-98 mode, for the argument types of an instance head -- These must be a constructor applied to type variable arguments. -- But we allow kind instantiations. tcInstHeadTyAppAllTyVars ty | Just (tc, tys) <- tcSplitTyConApp_maybe (dropCasts ty) = ok (filterOutInvisibleTypes tc tys) -- avoid kinds | otherwise = False where -- Check that all the types are type variables, -- and that each is distinct ok tys = equalLength tvs tys && hasNoDups tvs where tvs = mapMaybe tcGetTyVar_maybe tys dropCasts :: Type -> Type -- See Note [Casts during validity checking] -- This function can turn a well-kinded type into an ill-kinded -- one, so I've kept it local to this module -- To consider: drop only UnivCo(HoleProv) casts dropCasts (CastTy ty _) = dropCasts ty dropCasts (AppTy t1 t2) = mkAppTy (dropCasts t1) (dropCasts t2) dropCasts (TyConApp tc tys) = mkTyConApp tc (map dropCasts tys) dropCasts (ForAllTy b ty) = ForAllTy (dropCastsB b) (dropCasts ty) dropCasts ty = ty -- LitTy, TyVarTy, CoercionTy dropCastsB :: TyBinder -> TyBinder dropCastsB (Anon ty) = Anon (dropCasts ty) dropCastsB b = b -- Don't bother in the kind of a forall abstractClassKeys :: [Unique] abstractClassKeys = [ heqTyConKey , eqTyConKey , coercibleTyConKey ] -- See Note [Equality class instances] instTypeErr :: Class -> [Type] -> SDoc -> SDoc instTypeErr cls tys msg = hang (hang (text "Illegal instance declaration for") 2 (quotes (pprClassPred cls tys))) 2 msg {- Note [Casts during validity checking] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider the (bogus) instance Eq Char# We elaborate to 'Eq (Char# |> UnivCo(hole))' where the hole is an insoluble equality constraint for * ~ #. We'll report the insoluble constraint separately, but we don't want to *also* complain that Eq is not applied to a type constructor. So we look gaily look through CastTys here. Another example: Eq (Either a). Then we actually get a cast in the middle: Eq ((Either |> g) a) Note [Valid 'deriving' predicate] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ validDerivPred checks for OK 'deriving' context. See Note [Exotic derived instance contexts] in TcDeriv. However the predicate is here because it uses sizeTypes, fvTypes. It checks for three things * No repeated variables (hasNoDups fvs) * No type constructors. This is done by comparing sizeTypes tys == length (fvTypes tys) sizeTypes counts variables and constructors; fvTypes returns variables. So if they are the same, there must be no constructors. But there might be applications thus (f (g x)). * Also check for a bizarre corner case, when the derived instance decl would look like instance C a b => D (T a) where ... Note that 'b' isn't a parameter of T. 
This gives rise to all sorts of problems; in particular, it's hard to compare solutions for equality when finding the fixpoint, and that means the inferContext loop does not converge. See Trac #5287. Note [Equality class instances] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We can't have users writing instances for the equality classes. But we still need to be able to write instances for them ourselves. So we allow instances only in the defining module. -} validDerivPred :: TyVarSet -> PredType -> Bool -- See Note [Valid 'deriving' predicate] validDerivPred tv_set pred = case classifyPredType pred of ClassPred cls _ -> cls `hasKey` typeableClassKey -- Typeable constraints are bigger than they appear due -- to kind polymorphism, but that's OK || check_tys EqPred {} -> False -- reject equality constraints _ -> True -- Non-class predicates are ok where check_tys = hasNoDups fvs -- use sizePred to ignore implicit args && sizePred pred == fromIntegral (length fvs) && all (`elemVarSet` tv_set) fvs fvs = fvType pred {- ************************************************************************ * * \subsection{Checking instance for termination} * * ************************************************************************ -} checkValidInstance :: UserTypeCtxt -> LHsSigType Name -> Type -> TcM ([TyVar], ThetaType, Class, [Type]) checkValidInstance ctxt hs_type ty | Just (clas,inst_tys) <- getClassPredTys_maybe tau , inst_tys `lengthIs` classArity clas = do { setSrcSpan head_loc (checkValidInstHead ctxt clas inst_tys) ; traceTc "checkValidInstance {" (ppr ty) ; checkValidTheta ctxt theta -- The Termination and Coverate Conditions -- Check that instance inference will terminate (if we care) -- For Haskell 98 this will already have been done by checkValidTheta, -- but as we may be using other extensions we need to check. -- -- Note that the Termination Condition is *more conservative* than -- the checkAmbiguity test we do on other type signatures -- e.g. Bar a => Bar Int is ambiguous, but it also fails -- the termination condition, because 'a' appears more often -- in the constraint than in the head ; undecidable_ok <- xoptM LangExt.UndecidableInstances ; if undecidable_ok then checkAmbiguity ctxt ty else checkInstTermination inst_tys theta ; traceTc "cvi 2" (ppr ty) ; case (checkInstCoverage undecidable_ok clas theta inst_tys) of IsValid -> return () -- Check succeeded NotValid msg -> addErrTc (instTypeErr clas inst_tys msg) ; traceTc "End checkValidInstance }" empty ; return (tvs, theta, clas, inst_tys) } | otherwise = failWithTc (text "Malformed instance head:" <+> ppr tau) where (tvs, theta, tau) = tcSplitSigmaTy ty -- The location of the "head" of the instance head_loc = getLoc (getLHsInstDeclHead hs_type) {- Note [Paterson conditions] ~~~~~~~~~~~~~~~~~~~~~~~~~~ Termination test: the so-called "Paterson conditions" (see Section 5 of "Understanding functional dependencies via Constraint Handling Rules, JFP Jan 2007). We check that each assertion in the context satisfies: (1) no variable has more occurrences in the assertion than in the head, and (2) the assertion has fewer constructors and variables (taken together and counting repetitions) than the head. This is only needed with -fglasgow-exts, as Haskell 98 restrictions (which have already been checked) guarantee termination. The underlying idea is that for any ground substitution, each assertion in the context has fewer type constructors than the head. 
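As a quick added illustration (not from the original Note): with
UndecidableInstances off,
    instance C a => C [a]
is accepted, because 'a' occurs no more often in the assertion than in the
head (condition 1) and the assertion (size 1) is smaller than the head
(size 2: one constructor plus one variable) (condition 2). By contrast
    instance C (a,a) => C [a]
fails condition (1), since 'a' occurs twice in the assertion but only once
in the head, and
    instance C [[a]] => C [a]
fails condition (2), since the assertion (size 3) is no smaller than the head.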
-} checkInstTermination :: [TcType] -> ThetaType -> TcM () -- See Note [Paterson conditions] checkInstTermination tys theta = check_preds theta where head_fvs = fvTypes tys head_size = sizeTypes tys check_preds :: [PredType] -> TcM () check_preds preds = mapM_ check preds check :: PredType -> TcM () check pred = case classifyPredType pred of EqPred {} -> return () -- See Trac #4200. IrredPred {} -> check2 pred (sizeType pred) ClassPred cls tys | isTerminatingClass cls -> return () | isCTupleClass cls -- Look inside tuple predicates; Trac #8359 -> check_preds tys | otherwise -> check2 pred (sizeTypes $ filterOutInvisibleTypes (classTyCon cls) tys) -- Other ClassPreds check2 pred pred_size | not (null bad_tvs) = addErrTc (noMoreMsg bad_tvs what) | pred_size >= head_size = addErrTc (smallerMsg what) | otherwise = return () where what = text "constraint" <+> quotes (ppr pred) bad_tvs = fvType pred \\ head_fvs smallerMsg :: SDoc -> SDoc smallerMsg what = vcat [ hang (text "The" <+> what) 2 (text "is no smaller than the instance head") , parens undecidableMsg ] noMoreMsg :: [TcTyVar] -> SDoc -> SDoc noMoreMsg tvs what = vcat [ hang (text "Variable" <> plural tvs <+> quotes (pprWithCommas ppr tvs) <+> occurs <+> text "more often") 2 (sep [ text "in the" <+> what , text "than in the instance head" ]) , parens undecidableMsg ] where occurs = if isSingleton tvs then text "occurs" else text "occur" undecidableMsg, constraintKindsMsg :: SDoc undecidableMsg = text "Use UndecidableInstances to permit this" constraintKindsMsg = text "Use ConstraintKinds to permit this" {- Note [Associated type instances] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We allow this: class C a where type T x a instance C Int where type T (S y) Int = y type T Z Int = Char Note that a) The variable 'x' is not bound by the class decl b) 'x' is instantiated to a non-type-variable in the instance c) There are several type instance decls for T in the instance All this is fine. Of course, you can't give any *more* instances for (T ty Int) elsewhere, because it's an *associated* type. Note [Checking consistent instantiation] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ See Trac #11450 for background discussion on this check. class C a b where type T a x b With this class decl, if we have an instance decl instance C ty1 ty2 where ... then the type instance must look like type T ty1 v ty2 = ... with exactly 'ty1' for 'a', 'ty2' for 'b', and a variable for 'x'. For example: instance C [p] Int type T [p] y Int = (p,y,y) Note that * We used to allow completely different bound variables in the associated type instance; e.g. instance C [p] Int type T [q] y Int = ... But from GHC 8.2 onwards, we don't. It's much simpler this way. See Trac #11450. * When the class variable isn't used on the RHS of the type instance, it's tempting to allow wildcards, thus instance C [p] Int type T [_] y Int = (y,y) But it's awkward to do the test, and it doesn't work if the variable is repeated: instance C (p,p) Int type T (_,_) y Int = (y,y) Even though 'p' is not used on the RHS, we still need to use 'p' on the LHS to establish the repeated pattern. So to keep it simple we just require equality. * We also check that any non-class-tyvars are instantiated with distinct tyvars. 
That rules out instance C [p] Int where type T [p] Bool Int = p -- Note Bool type T [p] Char Int = p -- Note Char and instance C [p] Int where type T [p] p Int = p -- Note repeated 'p' on LHS It's consistent to do this because we don't allow this kind of instantiation for the class-tyvar arguments of the family. Overall, we can have exactly one type instance for each associated type. If you wantmore, use an auxiliary family. Implementation * Form the mini-envt from the class type variables a,b to the instance decl types [p],Int: [a->[p], b->Int] * Look at the tyvars a,x,b of the type family constructor T (it shares tyvars with the class C) * Apply the mini-evnt to them, and check that the result is consistent with the instance types [p] y Int We make all the instance type variables scope over the type instances, of course, which picks up non-obvious kinds. Eg class Foo (a :: k) where type F a instance Foo (b :: k -> k) where type F b = Int Here the instance is kind-indexed and really looks like type F (k->k) (b::k->k) = Int But if the 'b' didn't scope, we would make F's instance too poly-kinded. -} -- | Extra information about the parent instance declaration, needed -- when type-checking associated types. The 'Class' is the enclosing -- class, the [TyVar] are the type variable of the instance decl, -- and and the @VarEnv Type@ maps class variables to their instance -- types. type ClsInstInfo = (Class, [TyVar], VarEnv Type) type AssocInstArgShape = (Maybe Type, Type) -- AssocInstArgShape is used only for associated family instances -- (mb_exp, actual) -- mb_exp = Just ty => this arg corresponds to a class variable -- = Nothing => it doesn't correspond to a class variable -- e.g. class C b where -- type F a b c -- instance C [x] where -- type F p [x] q -- We get [AssocInstArgShape] = [ (Nothing, p) -- , (Just [x], [x]) -- , (Nothing, q)] checkConsistentFamInst :: Maybe ClsInstInfo -> TyCon -- ^ Family tycon -> [TyVar] -- ^ Type variables of the family instance -> [Type] -- ^ Type patterns from instance -> TcM () -- See Note [Checking consistent instantiation] checkConsistentFamInst Nothing _ _ _ = return () checkConsistentFamInst (Just (clas, inst_tvs, mini_env)) fam_tc _at_tvs at_tys = do { -- Check that the associated type indeed comes from this class checkTc (Just clas == tyConAssoc_maybe fam_tc) (badATErr (className clas) (tyConName fam_tc)) -- Check type args first (more comprehensible) ; checkTc (all check_arg type_shapes) pp_wrong_at_arg ; checkTc (check_poly_args type_shapes) pp_wrong_at_tyvars -- And now kind args ; checkTc (all check_arg kind_shapes) (pp_wrong_at_arg $$ ppSuggestExplicitKinds) ; checkTc (check_poly_args kind_shapes) (pp_wrong_at_tyvars $$ ppSuggestExplicitKinds) ; traceTc "cfi" (vcat [ ppr inst_tvs , ppr arg_shapes , ppr mini_env ]) } where arg_shapes :: [AssocInstArgShape] arg_shapes = [ (lookupVarEnv mini_env fam_tc_tv, at_ty) | (fam_tc_tv, at_ty) <- tyConTyVars fam_tc `zip` at_tys ] (kind_shapes, type_shapes) = partitionInvisibles fam_tc snd arg_shapes check_arg :: AssocInstArgShape -> Bool check_arg (Just exp_ty, at_ty) = exp_ty `tcEqType` at_ty check_arg (Nothing, _ ) = True -- Arg position does not correspond -- to a class variable check_poly_args :: [(Maybe Type,Type)] -> Bool check_poly_args arg_shapes = allDistinctTyVars (mkVarSet inst_tvs) [ at_ty | (Nothing, at_ty) <- arg_shapes ] pp_wrong_at_arg = vcat [ text "Type indexes must match class instance head" , pp_exp_act ] pp_wrong_at_tyvars = vcat [ text "Polymorphic type indexes of associated type" <+> 
quotes (ppr fam_tc) , nest 2 $ vcat [ text "(i.e. ones independent of the class type variables)" , text "must be distinct type variables" ] , pp_exp_act ] pp_exp_act = vcat [ text "Expected:" <+> ppr (mkTyConApp fam_tc expected_args) , text " Actual:" <+> ppr (mkTyConApp fam_tc at_tys) , sdocWithDynFlags $ \dflags -> ppWhen (has_poly_args dflags) $ vcat [ text "where the `<tv>' arguments are type variables," , text "distinct from each other and from the instance variables" ] ] expected_args = [ exp_ty `orElse` mk_tv at_ty | (exp_ty, at_ty) <- arg_shapes ] mk_tv at_ty = mkTyVarTy (mkTyVar tv_name (typeKind at_ty)) tv_name = mkInternalName (mkAlphaTyVarUnique 1) (mkTyVarOcc "<tv>") noSrcSpan has_poly_args dflags = any (isNothing . fst) shapes where shapes | gopt Opt_PrintExplicitKinds dflags = arg_shapes | otherwise = type_shapes badATErr :: Name -> Name -> SDoc badATErr clas op = hsep [text "Class", quotes (ppr clas), text "does not have an associated type", quotes (ppr op)] {- ************************************************************************ * * Checking type instance well-formedness and termination * * ************************************************************************ -} checkValidCoAxiom :: CoAxiom Branched -> TcM () checkValidCoAxiom ax@(CoAxiom { co_ax_tc = fam_tc, co_ax_branches = branches }) = do { mapM_ (checkValidCoAxBranch Nothing fam_tc) branch_list ; foldlM_ check_branch_compat [] branch_list } where branch_list = fromBranches branches injectivity = familyTyConInjectivityInfo fam_tc check_branch_compat :: [CoAxBranch] -- previous branches in reverse order -> CoAxBranch -- current branch -> TcM [CoAxBranch]-- current branch : previous branches -- Check for -- (a) this branch is dominated by previous ones -- (b) failure of injectivity check_branch_compat prev_branches cur_branch | cur_branch `isDominatedBy` prev_branches = do { addWarnAt NoReason (coAxBranchSpan cur_branch) $ inaccessibleCoAxBranch ax cur_branch ; return prev_branches } | otherwise = do { check_injectivity prev_branches cur_branch ; return (cur_branch : prev_branches) } -- Injectivity check: check whether a new (CoAxBranch) can extend -- already checked equations without violating injectivity -- annotation supplied by the user. -- See Note [Verifying injectivity annotation] in FamInstEnv check_injectivity prev_branches cur_branch | Injective inj <- injectivity = do { let conflicts = fst $ foldl (gather_conflicts inj prev_branches cur_branch) ([], 0) prev_branches ; mapM_ (\(err, span) -> setSrcSpan span $ addErr err) (makeInjectivityErrors ax cur_branch inj conflicts) } | otherwise = return () gather_conflicts inj prev_branches cur_branch (acc, n) branch -- n is 0-based index of branch in prev_branches = case injectiveBranches inj cur_branch branch of InjectivityUnified ax1 ax2 | ax1 `isDominatedBy` (replace_br prev_branches n ax2) -> (acc, n + 1) | otherwise -> (branch : acc, n + 1) InjectivityAccepted -> (acc, n + 1) -- Replace n-th element in the list. Assumes 0-based indexing. replace_br :: [CoAxBranch] -> Int -> CoAxBranch -> [CoAxBranch] replace_br brs n br = take n brs ++ [br] ++ drop (n+1) brs -- Check that a "type instance" is well-formed (which includes decidability -- unless -XUndecidableInstances is given). 
-- checkValidCoAxBranch :: Maybe ClsInstInfo -> TyCon -> CoAxBranch -> TcM () checkValidCoAxBranch mb_clsinfo fam_tc (CoAxBranch { cab_tvs = tvs, cab_cvs = cvs , cab_lhs = typats , cab_rhs = rhs, cab_loc = loc }) = checkValidTyFamEqn mb_clsinfo fam_tc tvs cvs typats rhs loc -- | Do validity checks on a type family equation, including consistency -- with any enclosing class instance head, termination, and lack of -- polytypes. checkValidTyFamEqn :: Maybe ClsInstInfo -> TyCon -- ^ of the type family -> [TyVar] -- ^ bound tyvars in the equation -> [CoVar] -- ^ bound covars in the equation -> [Type] -- ^ type patterns -> Type -- ^ rhs -> SrcSpan -> TcM () checkValidTyFamEqn mb_clsinfo fam_tc tvs cvs typats rhs loc = setSrcSpan loc $ do { checkValidFamPats mb_clsinfo fam_tc tvs cvs typats -- The argument patterns, and RHS, are all boxed tau types -- E.g Reject type family F (a :: k1) :: k2 -- type instance F (forall a. a->a) = ... -- type instance F Int# = ... -- type instance F Int = forall a. a->a -- type instance F Int = Int# -- See Trac #9357 ; checkValidMonoType rhs ; check_lifted rhs -- We have a decidable instance unless otherwise permitted ; undecidable_ok <- xoptM LangExt.UndecidableInstances ; unless undecidable_ok $ mapM_ addErrTc (checkFamInstRhs typats (tcTyFamInsts rhs)) } -- Make sure that each type family application is -- (1) strictly smaller than the lhs, -- (2) mentions no type variable more often than the lhs, and -- (3) does not contain any further type family instances. -- checkFamInstRhs :: [Type] -- lhs -> [(TyCon, [Type])] -- type family instances -> [MsgDoc] checkFamInstRhs lhsTys famInsts = mapMaybe check famInsts where size = sizeTypes lhsTys fvs = fvTypes lhsTys check (tc, tys) | not (all isTyFamFree tys) = Just (nestedMsg what) | not (null bad_tvs) = Just (noMoreMsg bad_tvs what) | size <= sizeTypes tys = Just (smallerMsg what) | otherwise = Nothing where what = text "type family application" <+> quotes (pprType (TyConApp tc tys)) bad_tvs = fvTypes tys \\ fvs checkValidFamPats :: Maybe ClsInstInfo -> TyCon -> [TyVar] -> [CoVar] -> [Type] -> TcM () -- Patterns in a 'type instance' or 'data instance' decl should -- a) contain no type family applications -- (vanilla synonyms are fine, though) -- b) properly bind all their free type variables -- e.g. we disallow (Trac #7536) -- type T a = Int -- type instance F (T a) = a -- c) Have the right number of patterns -- d) For associated types, are consistently instantiated checkValidFamPats mb_clsinfo fam_tc tvs cvs ty_pats = do { -- A family instance must have exactly the same number of type -- parameters as the family declaration. 
You can't write -- type family F a :: * -> * -- type instance F Int y = y -- because then the type (F Int) would be like (\y.y) checkTc (length ty_pats == fam_arity) $ wrongNumberOfParmsErr (fam_arity - count isInvisibleBinder fam_bndrs) -- report only explicit arguments ; mapM_ checkValidTypePat ty_pats ; let unbound_tcvs = filterOut (`elemVarSet` exactTyCoVarsOfTypes ty_pats) (tvs ++ cvs) ; checkTc (null unbound_tcvs) (famPatErr fam_tc unbound_tcvs ty_pats) -- Check that type patterns match the class instance head ; checkConsistentFamInst mb_clsinfo fam_tc tvs ty_pats } where fam_arity = tyConArity fam_tc fam_bndrs = tyConBinders fam_tc checkValidTypePat :: Type -> TcM () -- Used for type patterns in class instances, -- and in type/data family instances checkValidTypePat pat_ty = do { -- Check that pat_ty is a monotype checkValidMonoType pat_ty -- One could imagine generalising to allow -- instance C (forall a. a->a) -- but we don't know what all the consequences might be -- Ensure that no type family instances occur a type pattern ; checkTc (isTyFamFree pat_ty) $ tyFamInstIllegalErr pat_ty ; check_lifted pat_ty } isTyFamFree :: Type -> Bool -- ^ Check that a type does not contain any type family applications. isTyFamFree = null . tcTyFamInsts -- Error messages wrongNumberOfParmsErr :: Arity -> SDoc wrongNumberOfParmsErr exp_arity = text "Number of parameters must match family declaration; expected" <+> ppr exp_arity inaccessibleCoAxBranch :: CoAxiom br -> CoAxBranch -> SDoc inaccessibleCoAxBranch fi_ax cur_branch = text "Type family instance equation is overlapped:" $$ nest 2 (pprCoAxBranch fi_ax cur_branch) tyFamInstIllegalErr :: Type -> SDoc tyFamInstIllegalErr ty = hang (text "Illegal type synonym family application in instance" <> colon) 2 $ ppr ty nestedMsg :: SDoc -> SDoc nestedMsg what = sep [ text "Illegal nested" <+> what , parens undecidableMsg ] famPatErr :: TyCon -> [TyVar] -> [Type] -> SDoc famPatErr fam_tc tvs pats = hang (text "Family instance purports to bind type variable" <> plural tvs <+> pprQuotedList tvs) 2 (hang (text "but the real LHS (expanding synonyms) is:") 2 (pprTypeApp fam_tc (map expandTypeSynonyms pats) <+> text "= ...")) {- ************************************************************************ * * Telescope checking * * ************************************************************************ Note [Bad telescopes] ~~~~~~~~~~~~~~~~~~~~~ Now that we can mix type and kind variables, there are an awful lot of ways to shoot yourself in the foot. Here are some. data SameKind :: k -> k -> * -- just to force unification 1. data T1 a k (b :: k) (x :: SameKind a b) The problem here is that we discover that a and b should have the same kind. But this kind mentions k, which is bound *after* a. (Testcase: dependent/should_fail/BadTelescope) 2. data T2 a (c :: Proxy b) (d :: Proxy a) (x :: SameKind b d) Note that b is not bound. Yet its kind mentions a. Because we have a nice rule that all implicitly bound variables come before others, this is bogus. (We could probably figure out to put b between a and c. But I think this is doing users a disservice, in the long run.) (Testcase: dependent/should_fail/BadTelescope4) 3. t3 :: forall a. (forall k (b :: k). SameKind a b) -> () This is a straightforward skolem escape. Note that a and b need to have the same kind. (Testcase: polykinds/T11142) How do we deal with all of this? For TyCons, we have checkValidTyConTyVars. That function looks to see if any of the tyConTyVars are repeated, but it's really a telescope check. 
It works because all tycons are kind-generalized. If there is a bad telescope, the kind-generalization will end up generalizing over a variable bound later in the telescope. For non-tycons, we do scope checking when we bring tyvars into scope, in tcImplicitTKBndrs and tcExplicitTKBndrs. Note that we also have to sort implicit binders into a well-scoped order whenever we have implicit binders to worry about. This is done in quantifyTyVars and in tcImplicitTKBndrs. -} -- | Check a list of binders to see if they make a valid telescope. -- The key property we're checking for is scoping. For example: -- > data SameKind :: k -> k -> * -- > data X a k (b :: k) (c :: SameKind a b) -- Kind inference says that a's kind should be k. But that's impossible, -- because k isn't in scope when a is bound. This check has to come before -- general validity checking, because once we kind-generalise, this sort -- of problem is harder to spot (as we'll generalise over the unbound -- k in a's type.) See also Note [Bad telescopes]. checkValidTelescope :: SDoc -- the original user-written telescope -> [TyVar] -- explicit vars (not necessarily zonked) -> SDoc -- note to put at bottom of message -> TcM () checkValidTelescope hs_tvs orig_tvs extra = discardResult $ checkZonkValidTelescope hs_tvs orig_tvs extra -- | Like 'checkZonkValidTelescope', but returns the zonked tyvars checkZonkValidTelescope :: SDoc -> [TyVar] -> SDoc -> TcM [TyVar] checkZonkValidTelescope hs_tvs orig_tvs extra = do { orig_tvs <- mapM zonkTyCoVarKind orig_tvs ; let (_, sorted_tidied_tvs) = tidyTyCoVarBndrs emptyTidyEnv $ toposortTyVars orig_tvs ; unless (go [] emptyVarSet orig_tvs) $ addErr $ vcat [ hang (text "These kind and type variables:" <+> hs_tvs $$ text "are out of dependency order. Perhaps try this ordering:") 2 (sep (map pprTvBndr sorted_tidied_tvs)) , extra ] ; return orig_tvs } where go :: [TyVar] -- misplaced variables -> TyVarSet -> [TyVar] -> Bool go errs in_scope [] = null (filter (`elemVarSet` in_scope) errs) -- report an error only when the variable in the kind is brought -- into scope later in the telescope. Otherwise, we'll just quantify -- over it in kindGeneralize, as we should. go errs in_scope (tv:tvs) = let bad_tvs = filterOut (`elemVarSet` in_scope) $ tyCoVarsOfTypeList (tyVarKind tv) in go (bad_tvs ++ errs) (in_scope `extendVarSet` tv) tvs -- | After inferring kinds of type variables, check to make sure that the -- inferred kinds any of the type variables bound in a smaller scope. -- This is a skolem escape check. See also Note [Bad telescopes]. 
checkValidInferredKinds :: [TyVar] -- ^ vars to check (zonked) -> TyVarSet -- ^ vars out of scope -> SDoc -- ^ suffix to error message -> TcM () checkValidInferredKinds orig_kvs out_of_scope extra = do { let bad_pairs = [ (tv, kv) | kv <- orig_kvs , Just tv <- map (lookupVarSet out_of_scope) (tyCoVarsOfTypeList (tyVarKind kv)) ] report (tidyTyVarOcc env -> tv, tidyTyVarOcc env -> kv) = addErr $ text "The kind of variable" <+> quotes (ppr kv) <> text ", namely" <+> quotes (ppr (tyVarKind kv)) <> comma $$ text "depends on variable" <+> quotes (ppr tv) <+> text "from an inner scope" $$ text "Perhaps bind" <+> quotes (ppr kv) <+> text "sometime after binding" <+> quotes (ppr tv) $$ extra ; mapM_ report bad_pairs } where (env1, _) = tidyTyCoVarBndrs emptyTidyEnv orig_kvs (env, _) = tidyTyCoVarBndrs env1 (varSetElems out_of_scope) {- ************************************************************************ * * \subsection{Auxiliary functions} * * ************************************************************************ -} -- Free variables of a type, retaining repetitions, and expanding synonyms fvType :: Type -> [TyCoVar] fvType ty | Just exp_ty <- coreView ty = fvType exp_ty fvType (TyVarTy tv) = [tv] fvType (TyConApp _ tys) = fvTypes tys fvType (LitTy {}) = [] fvType (AppTy fun arg) = fvType fun ++ fvType arg fvType (ForAllTy bndr ty) = fvType (binderType bndr) ++ caseBinder bndr (\tv -> filter (/= tv)) (const id) (fvType ty) fvType (CastTy ty co) = fvType ty ++ fvCo co fvType (CoercionTy co) = fvCo co fvTypes :: [Type] -> [TyVar] fvTypes tys = concat (map fvType tys) fvCo :: Coercion -> [TyCoVar] fvCo (Refl _ ty) = fvType ty fvCo (TyConAppCo _ _ args) = concatMap fvCo args fvCo (AppCo co arg) = fvCo co ++ fvCo arg fvCo (ForAllCo tv h co) = filter (/= tv) (fvCo co) ++ fvCo h fvCo (CoVarCo v) = [v] fvCo (AxiomInstCo _ _ args) = concatMap fvCo args fvCo (UnivCo p _ t1 t2) = fvProv p ++ fvType t1 ++ fvType t2 fvCo (SymCo co) = fvCo co fvCo (TransCo co1 co2) = fvCo co1 ++ fvCo co2 fvCo (NthCo _ co) = fvCo co fvCo (LRCo _ co) = fvCo co fvCo (InstCo co arg) = fvCo co ++ fvCo arg fvCo (CoherenceCo co1 co2) = fvCo co1 ++ fvCo co2 fvCo (KindCo co) = fvCo co fvCo (SubCo co) = fvCo co fvCo (AxiomRuleCo _ cs) = concatMap fvCo cs fvProv :: UnivCoProvenance -> [TyCoVar] fvProv UnsafeCoerceProv = [] fvProv (PhantomProv co) = fvCo co fvProv (ProofIrrelProv co) = fvCo co fvProv (PluginProv _) = [] fvProv (HoleProv h) = pprPanic "fvProv falls into a hole" (ppr h) sizeType :: Type -> Int -- Size of a type: the number of variables and constructors sizeType ty | Just exp_ty <- coreView ty = sizeType exp_ty sizeType (TyVarTy {}) = 1 sizeType (TyConApp _ tys) = sizeTypes tys + 1 sizeType (LitTy {}) = 1 sizeType (AppTy fun arg) = sizeType fun + sizeType arg sizeType (ForAllTy (Anon arg) res) = sizeType arg + sizeType res + 1 sizeType (ForAllTy (Named {}) ty) = sizeType ty sizeType (CastTy ty _) = sizeType ty sizeType (CoercionTy _) = 1 sizeTypes :: [Type] -> Int sizeTypes = sum . map sizeType -- Size of a predicate -- -- We are considering whether class constraints terminate. -- Equality constraints and constraints for the implicit -- parameter class always termiante so it is safe to say "size 0". -- (Implicit parameter constraints always terminate because -- there are no instances for them---they are only solved by -- "local instances" in expressions). -- See Trac #4200. 
sizePred :: PredType -> Int sizePred ty = goClass ty where goClass p = go (classifyPredType p) go (ClassPred cls tys') | isTerminatingClass cls = 0 | otherwise = sizeTypes tys' go (EqPred {}) = 0 go (IrredPred ty) = sizeType ty -- | When this says "True", ignore this class constraint during -- a termination check isTerminatingClass :: Class -> Bool isTerminatingClass cls = isIPClass cls || cls `hasKey` typeableClassKey || cls `hasKey` coercibleTyConKey || cls `hasKey` eqTyConKey || cls `hasKey` heqTyConKey -- | Tidy before printing a type ppr_tidy :: TidyEnv -> Type -> SDoc ppr_tidy env ty = pprType (tidyType env ty) allDistinctTyVars :: TyVarSet -> [KindOrType] -> Bool -- (allDistinctTyVars tvs tys) returns True if tys are -- a) all tyvars -- b) all distinct -- c) disjoint from tvs allDistinctTyVars _ [] = True allDistinctTyVars tkvs (ty : tys) = case getTyVar_maybe ty of Nothing -> False Just tv | tv `elemVarSet` tkvs -> False | otherwise -> allDistinctTyVars (tkvs `extendVarSet` tv) tys
tjakway/ghcjvm
compiler/typecheck/TcValidity.hs
bsd-3-clause
77,534
4
24
21,622
11,818
6,079
5,739
872
15
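-- Editor's note (illustrative only, not part of TcValidity.hs): the kind of
-- user-written instance that checkInstTermination above rejects. The 'Bar'
-- example is the one quoted in the comment inside checkValidInstance; the
-- class 'D' is invented for the second case (FlexibleInstances assumed for
-- the non-variable heads).
--
--   class Bar a
--   instance Bar a => Bar Int      -- rejected (noMoreMsg): 'a' occurs more
--                                  -- often in the constraint than in the head
--
--   class D a
--   instance D (Maybe a) => D [a]  -- rejected (smallerMsg): the constraint
--                                  -- counts two constructors/variables, so it
--                                  -- is no smaller than the head
--
-- With UndecidableInstances the termination check is skipped and
-- checkAmbiguity runs instead; as the comment in checkValidInstance notes,
-- 'Bar a => Bar Int' would then still be flagged as ambiguous.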
-- | Song format parser module Vimus.Song.Format ( SongFormat(..) , parser -- * exported for testing , FormatTree(..) , format , meta , alternatives , parse ) where import Control.Applicative (Alternative(..), pure, liftA2) import Data.Default (Default(..)) import Data.Foldable (asum) import Data.List (intercalate) import Data.Maybe (fromMaybe) import Data.Map (Map) import qualified Data.Map as Map import Data.Monoid (Monoid(..)) import Text.Printf (printf) import qualified Network.MPD as MPD import qualified Vimus.Command.Parser as Parser import Vimus.Song infixr 4 :+: -- | AST for formats: data FormatTree s m a = Empty | Pure a | FormatTree s m a :+: FormatTree s m a | Meta (s -> m a) | Alt [FormatTree s m a] -- | Format AST format :: (Alternative m, Monoid a) => a -- ^ default value for failed top-level metadata query -> s -- ^ container for metadata -> FormatTree s m a -> m a format d n = top where top Empty = empty top (Pure a) = pure a top (x :+: y) = top x <#> top y top (Alt xs) = asum $ fmap nested xs top (Meta f) = f n <|> pure d -- if metadata query failed, replace failure with 'd' nested (Meta f) = f n nested (x :+: y) = nested x <#> nested y nested t = top t (<#>) = liftA2 mappend newtype SongFormat = SongFormat (MPD.Song -> String) instance Default SongFormat where def = SongFormat $ \song -> printf "%s - %s - %s - %s" (orNone $ artist song) (orNone $ album song) (orNone $ track song) (orNone $ title song <|> filename song) where orNone = fromMaybe "(none)" parser :: Map String (MPD.Song -> Maybe String) -> Parser.Parser SongFormat parser queries = Parser.Parser $ \str -> do tree <- parse queries str return (SongFormat (\song -> fromMaybe "(none)" $ format "none" song tree), "") parse :: Map String (MPD.Song -> Maybe String) -> String -> Either Parser.ParseError (FormatTree MPD.Song Maybe String) parse queries = go "" where go acc ('\\':'(':cs) = go ('(':acc) cs go acc ('\\':')':cs) = go (')':acc) cs go acc ('\\':'%':cs) = go ('%':acc) cs go acc ('(':cs) = do (xs, ys) <- alternatives cs alts <- mapM (go "") xs rest <- go "" ys return $ Pure (reverse acc) <+> Alt alts <+> rest go acc ('%':cs) = do (key, ys) <- meta cs case Map.lookup key queries of Nothing -> Left (Parser.ParseError $ "non-supported meta pattern: %" ++ key ++ "%") Just metadata -> do rest <- go "" ys return $ Pure (reverse acc) <+> Meta metadata <+> rest go acc (c:cs) = go (c:acc) cs go acc [] = Right (Pure (reverse acc)) infixr 4 <+> Pure "" <+> x = x x <+> Pure "" = x x <+> y = x :+: y data Nat = Z | S Nat -- | Parse alternatives pattern alternatives :: String -> Either Parser.ParseError ([String], String) alternatives = go Z [] "" where go n strings acc ('\\':'(':xs) = go n strings ('(':'\\':acc) xs go n strings acc ('\\':')':xs) = go n strings (')':'\\':acc) xs go n strings acc ('\\':'|':xs) = go n strings ('|':'\\':acc) xs go n strings acc ('(':xs) = go (S n) strings ('(':acc) xs go Z strings acc (')':xs) = Right (reverse (reverse acc : strings), xs) go (S n) strings acc (')':xs) = go n strings (')':acc) xs go Z strings acc ('|':xs) = go Z (reverse acc : strings) [] xs go n strings acc (x:xs) = go n strings (x:acc) xs go _ strings acc [] = Left . 
Parser.ParseError $ "unterminated alternatives pattern: (" ++ intercalate "|" (reverse acc : strings) -- | Parse meta pattern meta :: String -> Either Parser.ParseError (String, String) meta = go "" where go acc ('\\':'%':xs) = go ('%':acc) xs go acc ('%':xs) = Right (reverse acc, xs) go acc (x:xs) = go (x:acc) xs go acc [] = Left (Parser.ParseError $ "unterminated meta pattern: %" ++ reverse acc)
haasn/vimus
src/Vimus/Song/Format.hs
mit
3,978
0
19
1,055
1,708
885
823
100
10
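-- Editor's sketch (hypothetical usage; as written it would live inside the
-- Vimus.Song.Format module above, reusing its imports). The query names, the
-- format string and the "unknown" fallback are invented; 'artist' and
-- 'title' are the accessors imported from Vimus.Song.
exampleQueries :: Map String (MPD.Song -> Maybe String)
exampleQueries = Map.fromList [("artist", artist), ("title", title)]

-- "%artist% - (%title%|unknown)" renders the artist (falling back to an empty
-- string), then the title if the song has one, otherwise the word "unknown".
renderExample :: MPD.Song -> Maybe String
renderExample song =
  case parse exampleQueries "%artist% - (%title%|unknown)" of
    Left _ -> Nothing
    Right tree -> format "" song tree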
{-# LANGUAGE OverloadedStrings #-} module FuncTorrent.Peer (Peer(..), PeerState(..), handShake, msgLoop ) where import Prelude hiding (lookup, concat, replicate, splitAt) import Control.Applicative (liftA3) import Control.Monad (replicateM, liftM, forever) import Data.Binary (Binary(..), decode) import Data.Binary.Get (getWord32be, getWord16be, getWord8, runGet) import Data.Binary.Put (putWord32be, putWord16be, putWord8) import Data.ByteString (ByteString, pack, unpack, concat, hGet, hPut, singleton) import Data.ByteString.Lazy (fromStrict, fromChunks) import Data.Functor ((<$>)) -- This will cause a warning in 7.10. import Network (connectTo, PortID(..)) import System.IO import qualified Data.ByteString.Char8 as BC (replicate, pack) type ID = String type IP = String type Port = Integer data PeerState = PeerState { handle :: Handle , amChoking :: Bool , amInterested :: Bool , peerChoking :: Bool , peerInterested :: Bool } data PieceState = Pending | InProgress | Have deriving (Show) -- | Peer is a PeerID, IP address, port tuple data Peer = Peer ID IP Port deriving (Show, Eq) data PeerMsg = KeepAliveMsg | ChokeMsg | UnChokeMsg | InterestedMsg | NotInterestedMsg | HaveMsg Integer | BitFieldMsg ByteString | RequestMsg Integer Integer Integer | PieceMsg Integer Integer ByteString | CancelMsg Integer Integer Integer | PortMsg Port deriving (Show) genHandShakeMsg :: ByteString -> String -> ByteString genHandShakeMsg infoHash peer_id = concat [pstrlen, pstr, reserved, infoHash, peerID] where pstrlen = singleton 19 pstr = BC.pack "BitTorrent protocol" reserved = BC.replicate 8 '\0' peerID = BC.pack peer_id handShake :: Peer -> ByteString -> String -> IO Handle handShake (Peer _ ip port) infoHash peerid = do let hs = genHandShakeMsg infoHash peerid h <- connectTo ip (PortNumber (fromIntegral port)) hSetBuffering h LineBuffering hPut h hs rlenBS <- hGet h (length (unpack hs)) putStrLn $ "got handshake from peer: " ++ show rlenBS return h instance Binary PeerMsg where put msg = case msg of KeepAliveMsg -> putWord32be 0 ChokeMsg -> do putWord32be 1 putWord8 0 UnChokeMsg -> do putWord32be 1 putWord8 1 InterestedMsg -> do putWord32be 1 putWord8 2 NotInterestedMsg -> do putWord32be 1 putWord8 3 HaveMsg i -> do putWord32be 5 putWord8 4 putWord32be (fromIntegral i) BitFieldMsg bf -> do putWord32be $ fromIntegral (1 + bfListLen) putWord8 5 mapM_ putWord8 bfList where bfList = unpack bf bfListLen = length bfList RequestMsg i o l -> do putWord32be 13 putWord8 6 putWord32be (fromIntegral i) putWord32be (fromIntegral o) putWord32be (fromIntegral l) PieceMsg i o b -> do putWord32be $ fromIntegral (9 + blocklen) putWord8 7 putWord32be (fromIntegral i) putWord32be (fromIntegral o) mapM_ putWord8 blockList where blockList = unpack b blocklen = length blockList CancelMsg i o l -> do putWord32be 13 putWord8 8 putWord32be (fromIntegral i) putWord32be (fromIntegral o) putWord32be (fromIntegral l) PortMsg p -> do putWord32be 3 putWord8 9 putWord16be (fromIntegral p) get = do l <- getWord32be msgid <- getWord8 case msgid of 0 -> return ChokeMsg 1 -> return UnChokeMsg 2 -> return InterestedMsg 3 -> return NotInterestedMsg 4 -> liftM (HaveMsg . fromIntegral) getWord32be 5 -> liftM (BitFieldMsg . 
pack) (replicateM (fromIntegral l - 1) getWord8) 6 -> liftA3 RequestMsg getInteger getInteger getInteger where getInteger = fromIntegral <$> getWord32be 7 -> liftA3 PieceMsg getInteger getInteger (pack <$> replicateM (fromIntegral l - 9) getWord8) where getInteger = fromIntegral <$> getWord32be 8 -> liftA3 CancelMsg getInteger getInteger getInteger where getInteger = fromIntegral <$> getWord32be 9 -> liftM (PortMsg . fromIntegral) getWord16be _ -> error ("unknown message ID: " ++ show msgid) getMsg :: Handle -> IO PeerMsg getMsg h = do lBS <- hGet h 4 let l = bsToInt lBS if l == 0 then return KeepAliveMsg else do putStrLn $ "len: " ++ show l msgID <- hGet h 1 putStrLn $ "msg Type: " ++ show msgID msg <- hGet h (l - 1) return $ decode $ fromStrict $ concat [lBS, msgID, msg] bsToInt :: ByteString -> Int bsToInt x = fromIntegral (runGet getWord32be (fromChunks (return x))) -- loop1 :: shake hands with all peers, find out the pieces they have, form PieceData. -- recvMsg :: Peer -> Handle -> Msg msgLoop :: Handle -> IO () msgLoop h = forever $ do msg <- getMsg h putStrLn $ "got a " ++ show msg
harshavardhana/functorrent
src/FuncTorrent/Peer.hs
gpl-3.0
5,794
0
17
2,140
1,538
773
765
135
2
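-- Editor's note (illustrative only): the Binary instance above follows the
-- BitTorrent peer wire framing -- a 4-byte big-endian length prefix, a 1-byte
-- message id, then the payload. For example, Data.Binary.encode applied to
-- 'RequestMsg 0 0 16384' yields 17 bytes:
--
--   00 00 00 0d   length prefix (13 bytes follow)
--   06            message id (request)
--   00 00 00 00   piece index
--   00 00 00 00   block offset
--   00 00 40 00   block length (16 KiB)
--
-- getMsg reads the same framing back: 4 length bytes, 1 id byte, then
-- (length - 1) payload bytes, and feeds the concatenation to 'decode'.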
-- Copyright 2017 Google Inc.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.

module Language.Kythe.Schema.Raw.VName
    ( VName(..)
    ) where

import Data.Text (Text)

-- | A VName (Vector-Name) is the primary unit of naming in the Kythe graph.
-- It is a set of basis facts about a graph node that can uniquely identify it.
data VName = VName
    { vnSignature :: !Text -- ^ Opaque signature generated by an analyser.
    , vnCorpus :: !Text    -- ^ Loosely, a collection of related files.
    , vnRoot :: !Text      -- ^ Corpus-specific subtree root. Can be empty.
    , vnPath :: !Text      -- ^ Location relative to the corpus and root.
    , vnLanguage :: !Text  -- ^ Schema-defined label for supported languages.
    }
robinp/haskell-indexer
kythe-schema/src/Language/Kythe/Schema/Raw/VName.hs
apache-2.0
1,244
0
9
259
92
64
28
19
0
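-- Editor's sketch (hypothetical values, OverloadedStrings assumed for the
-- Text literals): constructing a VName with the record fields defined above.
exampleVName :: VName
exampleVName = VName
  { vnSignature = ""
  , vnCorpus    = "my-corpus"
  , vnRoot      = ""
  , vnPath      = "src/Main.hs"
  , vnLanguage  = "haskell"
  }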
{-# LANGUAGE NoImplicitPrelude , DataKinds , TypeOperators , TypeFamilies , ScopedTypeVariables , FlexibleContexts #-} {-# OPTIONS_GHC -Wall -fwarn-tabs #-} module Tests.Relationships (allTests) where import Prelude ((.), id, ($), asTypeOf) import Language.Hakaru.Syntax.Prelude import Language.Hakaru.Types.DataKind import Language.Hakaru.Syntax.AST (Term) import Language.Hakaru.Syntax.ABT (ABT) import Test.HUnit import Tests.TestTools import Tests.Models (normal_0_1, uniform_0_1) allTests :: Test allTests = test [ testRelationships ] testRelationships :: Test testRelationships = test [ "t1" ~: testSStriv [t1] (lam $ \_ -> lam $ \_ -> normal_0_1), "t2" ~: testSStriv [t2] (lam $ \b -> gamma b (prob_ 2)), "t3" ~: testSStriv [t3, t3'] (lam $ \a -> lam $ \x -> gamma a (prob_ 2)), "t4" ~: testSStriv [t4] (lam $ \a -> lam $ \b -> lam $ \_ -> beta a b), -- "t5" ~: testSStriv [t5, t5'] (lam $ \alpha -> gamma one (unsafeProb alpha)), --"t6" ~: testSS [t5] (lam $ \mu -> poisson mu >>= \x -> dirac (fromInt x)), "t7" ~: testSStriv [t7] (normal_0_1 >>= \x1 -> normal_0_1 >>= \x2 -> dirac (x1 * recip x2)), "t8" ~: testSStriv [t8] (lam $ \a -> lam $ \alpha -> (normal_0_1 >>= \x1 -> normal_0_1 >>= \x2 -> dirac (a + fromProb alpha * (x1 / x2)))), "t9" ~: testSStriv [t9] (lam $ \p -> bern p >>= \x -> dirac (if_ x one zero)), --Doesn't (if_ x one zero) simplify to just x?--Carl 2016Jul16 "t10" ~: testSStriv [t10] (unsafeProb <$> uniform_0_1), "t11" ~: testSStriv [t11] (lam $ \a1 -> lam $ \a2 -> gamma one (unsafeProb a1) >>= \x1 -> gamma one a2 >>= \x2 -> dirac ((fromProb x1) - (fromProb x2))), -- sum of n exponential(b) random variables is a gamma(n, b) random variable "t12" ~: testSStriv [t12] (lam $ \b -> gamma (prob_ 2) b), -- Weibull(b, 1) random variable is an exponential random variable with mean b --Above comment is wrong. Should be: --X ~ Weibull(a,1) => X ~ Exponential(1/a) --"t13" ~: testSS [t13] (lam $ \b -> exponential (recip b)), --Above line is wrong. Should be: "t13" ~: testSStriv [t13] (lam $ \a -> exponential(recip a)), --Carl 2016Jul14 -- If X is a standard normal random variable and U is a chi-squared random variable with v degrees of freedom, -- then X/sqrt(U/v) is a Student's t(v) random variable "t14" ~: testSStriv [t14] (lam $ \v -> studentT zero one v), "t15" ~: testSStriv [t15] (lam $ \k -> lam $ \t -> gamma k t), -- Linear combination property "t16" ~: testSStriv [t16] (normal zero (sqrt (prob_ 2))), "t17" ~: testSStriv [t17] (lam $ \mu -> lam $ \sigma -> normal mu (sqrt (one + sigma * sigma))), "t18" ~: testSStriv [t18] (lam $ \a1 -> lam $ \a2 -> normal zero (sqrt (a1 * a1 + a2 * a2))), -- Convolution property "t19" ~: testSStriv [t19] (lam $ \n1 -> lam $ \n2 -> lam $ \p -> binomial (n1 + n2) p), "t20" ~: testSStriv [t20] (lam $ \n -> lam $ \p -> binomial n p), "t21" ~: testSStriv [t21] (lam $ \l1 -> lam $ \l2 -> poisson (l1 + l2)), "t22" ~: testSStriv [t22] (lam $ \a1 -> lam $ \a2 -> lam $ \b -> gamma (a1 + a2) b), "t23" ~: testSStriv [t23] (lam $ \n -> lam $ \t -> gamma n t), --I can't find any evidence for the truth of relationship t24. Indeed, --it's trivial to prove false.--Carl 2016Jul16 -- -- Scaling property -- "t24" ~: testSS [t24] -- (lam $ \a -> -- lam $ \b -> -- lam $ \k -> -- weibull (a * (k ** fromProb b)) b), --The next test is wrong. The log x should be exp x (or whatever the --exponential function is in Haskell). 
-- Product property "t25" ~: testSStriv [t25] (lam $ \mu1 -> lam $ \mu2 -> lam $ \sigma1 -> lam $ \sigma2 -> normal (mu1 + mu2) (sigma1 + sigma2) >>= \x -> dirac (log (unsafeProb x))), -- Inverse property --I can't verify the relationship below. It's easy to prove false, except for --the case l=0, where it's true. Where did it come from? It's too complex to --have been entered by mistake.--Carl 2016Jul17 "t26" ~: testSStriv [t26] (lam $ \l -> lam $ \s -> cauchy (l / (l*l + fromProb (s*s))) (s / (unsafeProb (l*l) + s*s))), -- Multiple of a random variable "t27" ~: testSStriv [t27] (lam $ \r -> lam $ \lambda -> lam $ \a -> gamma r (a * lambda)) -- If X is a beta (a, b) random variable then (1 - X) is a beta (b, a) random variable. -- "t28" ~: testSStriv [t28] (lam $ \a -> lam $ \b -> beta b a) -- Cannot resolve type mismatch -- If X is a binomial (n, p) random variable then (n - X) is a binomial (n, 1-p) random variable. -- "t29" ~: testSStriv [t29] (lam $ \n -> lam $ \p -> binomial n (one - p)) ] t1 :: (ABT Term abt) => abt '[] ('HReal ':-> 'HProb ':-> 'HMeasure 'HReal) t1 = lam (\mu -> (lam (\sigma -> normal mu sigma >>= \x -> dirac ((x - mu) / (fromProb sigma))))) t2 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HMeasure 'HProb) t2 = lam $ \b -> chi2 ((prob_ 2) * b) -- This test (and probably many others involving gamma) is wrong, -- because the argument order to our gamma is the opposite of -- the order used by 2008amstat.pdf t3 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HProb ':-> 'HMeasure 'HProb) t3 = lam $ \alpha -> lam $ \bet -> gamma alpha bet >>= \x -> dirac ((prob_ 2) * x / bet) t3' :: (ABT Term abt) => abt '[] ('HProb ':-> 'HProb ':-> 'HMeasure 'HProb) t3' = lam $ \_ -> lam $ \bet -> chi2 ((prob_ 2) * bet) t4 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HProb ':-> 'HProb ':-> 'HMeasure 'HProb) t4 = lam $ \a -> lam $ \b -> lam $ \t -> gamma a t >>= \x1 -> gamma b t >>= \x2 -> dirac (x1 / (x1+x2)) -- t5 :: (ABT Term abt) => abt '[] ('HReal ':-> 'HMeasure 'HProb) -- t5 = -- lam $ \alpha -> -- uniform_0_1 >>= \x -> -- dirac (unsafeProb (-1 * alpha) * unsafeProb (log (unsafeProb x))) -- t5' :: (ABT Term abt) => abt '[] ('HReal ':-> 'HMeasure 'HProb) -- t5' = -- lam $ \alpha -> -- laplace alpha (unsafeProb alpha) >>= \x -> -- dirac (abs (unsafeProb x)) -- Untestable right now with mu -> infinity, maybe later? 
--t6 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HMeasure 'HReal) --t6 = lam (\mu -> normal infinity mu) t7 :: (ABT Term abt) => abt '[] ('HMeasure 'HReal) t7 = cauchy zero one t8 :: (ABT Term abt) => abt '[] ('HReal ':-> 'HProb ':-> 'HMeasure 'HReal) t8 = lam $ \a -> lam $ \alpha -> cauchy a alpha t9 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HMeasure 'HInt) t9 = lam $ \p -> binomial one p t10 :: (ABT Term abt) => abt '[] ('HMeasure 'HProb) t10 = beta one one t11 :: (ABT Term abt) => abt '[] ('HReal ':-> 'HProb ':-> 'HMeasure 'HReal) t11 = lam $ \a1 -> lam $ \a2 -> laplace a1 a2 t12 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HMeasure 'HProb) t12 = lam $ \b -> exponential b >>= \x1 -> exponential b >>= \x2 -> dirac (x1 + x2) t13 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HMeasure 'HProb) --t13 = lam $ \b -> weibull one b --Parameter order wrong in line above.--Carl 2016Jul14 t13 = lam $ \a -> weibull a one t14 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HMeasure 'HReal) t14 = lam $ \v -> normal_0_1 >>= \x -> chi2 v >>= \u -> dirac (x / fromProb (sqrt (u / v))) t15 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HProb ':-> 'HMeasure 'HProb) t15 = lam $ \k -> lam $ \t -> invgamma k (recip t) >>= \x -> dirac (recip x) t16 :: (ABT Term abt) => abt '[] ('HMeasure 'HReal) t16 = normal_0_1 >>= \x1 -> normal_0_1 >>= \x2 -> dirac (x1 + x2) t17 :: (ABT Term abt) => abt '[] ('HReal ':-> 'HProb ':-> 'HMeasure 'HReal) t17 = lam $ \mu -> lam $ \sigma -> normal_0_1 >>= \x1 -> normal mu sigma >>= \x2 -> dirac (x1 + x2) --I corrected the below. The relationship is about two rvs, not one. t18 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HProb ':-> 'HMeasure 'HReal) t18 = lam $ \a1 -> lam $ \a2 -> normal_0_1 >>= \x -> normal_0_1 >>= \y -> --dirac (fromProb a1 * x + fromProb a2 * x) dirac (fromProb a1 * x + fromProb a2 * y) --Actually, this relation is also true if a1 < 0 and/or a2 < 0. t19 :: (ABT Term abt) => abt '[] ('HNat ':-> 'HNat ':-> 'HProb ':-> 'HMeasure 'HInt) t19 = lam $ \n1 -> lam $ \n2 -> lam $ \p -> binomial n1 p >>= \x1 -> binomial n2 p >>= \x2 -> dirac (x1 + x2) --The next test is completely wrong. It's supposed to express something about --the sum of n iid Bernoulli rvs. That's not the same thing as n times a single --rv. Also, if_ x one zero simplifies to simply x. t20 :: (ABT Term abt) => abt '[] ('HNat ':-> 'HProb ':-> 'HMeasure 'HInt) t20 = lam $ \n -> lam $ \p -> bern p >>= \x -> dirac (nat2int (n * if_ x one zero)) t21 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HProb ':-> 'HMeasure 'HNat) t21 = lam $ \l1 -> lam $ \l2 -> poisson l1 >>= \x1 -> poisson l2 >>= \x2 -> dirac (x1 + x2) t22 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HProb ':-> 'HProb ':-> 'HMeasure 'HProb) t22 = lam $ \a1 -> lam $ \a2 -> lam $ \b -> gamma a1 b >>= \x1 -> gamma a2 b >>= \x2 -> dirac (x1 + x2) --The next test is completely wrong. It's supposed to express something about --the sum of n iid Exponential rvs. That's not the same thing as n times a single --rv. t23 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HProb ':-> 'HMeasure 'HProb) t23 = lam $ \n -> lam $ \t -> exponential t >>= \x -> dirac (n * x) --I can find no evidence for the truth of relationship t24. Indeed, it's --trivial to prove false, --t24 :: (ABT Term abt) -- => abt '[] ('HProb ':-> 'HProb ':-> 'HProb ':-> 'HMeasure 'HProb) --t24 = -- lam $ \a -> -- lam $ \b -> -- lam $ \k -> -- weibull a b >>= \x -> -- dirac (k * x) --The next test is wrong. The logs should be exps. 
t25 :: (ABT Term abt) => abt '[] ('HReal ':-> 'HReal ':-> 'HProb ':-> 'HProb ':-> 'HMeasure 'HReal) t25 = lam $ \mu1 -> lam $ \mu2 -> lam $ \sigma1 -> lam $ \sigma2 -> normal mu1 sigma1 >>= \x1 -> normal mu2 sigma2 >>= \x2 -> dirac (log (unsafeProb x1) * log (unsafeProb x2)) t26 :: (ABT Term abt) => abt '[] ('HReal ':-> 'HProb ':-> 'HMeasure 'HReal) t26 = lam $ \l -> lam $ \s -> cauchy l s >>= \x -> dirac (recip x) t27 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HProb ':-> 'HProb ':-> 'HMeasure 'HProb) t27 = lam $ \r -> lam $ \lambda -> lam $ \a -> gamma r lambda >>= \x -> dirac (a * x) -- t28 :: (ABT Term abt) => abt '[] ('HProb ':-> 'HProb ':-> 'HMeasure 'HProb) -- t28 = -- lam $ \a -> -- lam $ \b -> -- beta a b >>= \x -> -- dirac ((prob_ 1) - x) -- Cannot resolve type mismatch -- t29 :: (ABT Term abt) => abt '[] ('HNat ':-> 'HProb ':-> 'HMeasure 'HInt) -- t29 = -- lam $ \n -> -- lam $ \p -> -- binomial n p >>= \x -> -- dirac (n - x)
zachsully/hakaru
haskell/Tests/Relationships.hs
bsd-3-clause
11,372
0
24
3,282
3,910
2,106
1,804
226
1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE TypeSynonymInstances #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE OverloadedStrings #-} ----------------------------------------------------------------------------- -- -- Module : IDE.Pane.PackageFlags -- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie -- License : GNU-GPL -- -- Maintainer : <maintainer at leksah.org> -- Stability : provisional -- Portability : portable -- -- -- | Pane for saving, restoring and editing flags for specific cabal -- commands. -- --------------------------------------------------------------------------------- module IDE.Pane.PackageFlags ( readFlags , writeFlags , IDEFlags(..) , FlagsState , getFlags ) where import Graphics.UI.Gtk import qualified Text.PrettyPrint.HughesPJ as PP import Data.Typeable import System.FilePath.Posix import IDE.Core.State import Graphics.UI.Editor.Basics import Graphics.UI.Editor.MakeEditor import Graphics.UI.Editor.Simple import Graphics.UI.Editor.Parameters import Text.PrinterParser hiding (fieldParser,parameters) import Control.Event (registerEvent) import Graphics.UI.Editor.DescriptionPP (flattenFieldDescriptionPPToS, extractFieldDescription, FieldDescriptionPP(..), mkFieldPP) import Text.ParserCombinators.Parsec hiding(Parser) import IDE.Utils.GUIUtils (__) import Control.Monad (void) import Data.Text (Text) import Data.Monoid ((<>)) import qualified Data.Text as T (unwords, unpack, pack) import Control.Applicative ((<$>)) data IDEFlags = IDEFlags { flagsBox :: VBox } deriving Typeable data FlagsState = FlagsState deriving(Eq,Ord,Read,Show,Typeable) instance Pane IDEFlags IDEM where primPaneName _ = __ "Package Flags" getAddedIndex _ = 0 getTopWidget = castToWidget . 
flagsBox paneId b = "*Flags" instance RecoverablePane IDEFlags FlagsState IDEM where saveState p = do mbFlags :: Maybe IDEFlags <- getPane case mbFlags of Nothing -> return Nothing Just p -> return (Just FlagsState) recoverState pp st = do mbPack <- readIDE activePack case mbPack of Just pack -> do pp <- getBestPathForId "*Flags" nb <- getNotebook pp case mbPack of Nothing -> return Nothing Just pack -> buildThisPane pp nb builder Nothing -> return Nothing builder pp nb w = let flagsDesc = extractFieldDescription flagsDescription flatflagsDesc = flattenFieldDescription flagsDesc in do mbPack <- readIDE activePack case mbPack of Nothing -> return (Nothing,[]) Just p -> reifyIDE $ \ideR -> builder' p flagsDesc flatflagsDesc pp nb window ideR -- | Builds the Flags pane builder' idePackage flagsDesc flatflagsDesc pp nb window ideR = do vb <- vBoxNew False 0 let flagsPane = IDEFlags vb bb <- hButtonBoxNew boxSetSpacing bb 6 buttonBoxSetLayout bb ButtonboxSpread saveB <- buttonNewFromStock "gtk-save" widgetSetSensitive saveB False cancelB <- buttonNewFromStock "gtk-cancel" boxPackStart bb cancelB PackNatural 0 boxPackStart bb saveB PackNatural 0 (widget,injb,ext,notifier) <- buildEditor flagsDesc idePackage sw <- scrolledWindowNew Nothing Nothing scrolledWindowSetShadowType sw ShadowIn scrolledWindowAddWithViewport sw widget scrolledWindowSetPolicy sw PolicyAutomatic PolicyAutomatic on saveB buttonActivated (do mbPackWithNewFlags <- extract idePackage [ext] case mbPackWithNewFlags of Nothing -> return () Just packWithNewFlags -> do reflectIDE (do changePackage packWithNewFlags closePane flagsPane) ideR writeFields (dropExtension (ipdCabalFile packWithNewFlags) ++ leksahFlagFileExtension) packWithNewFlags flatFlagsDescription) on cancelB buttonActivated (reflectIDE (void (closePane flagsPane)) ideR) registerEvent notifier FocusIn (\e -> do reflectIDE (makeActive flagsPane) ideR return (e{gtkReturn=False})) registerEvent notifier MayHaveChanged (\e -> do mbP <- extract idePackage [ext] let hasChanged = case mbP of Nothing -> False Just p -> p /= idePackage markLabel nb (getTopWidget flagsPane) hasChanged widgetSetSensitive saveB hasChanged return (e{gtkReturn=False})) boxPackStart vb sw PackGrow 0 boxPackEnd vb bb PackNatural 6 return (Just flagsPane,[]) -- | Gets the Flags pane getFlags :: Maybe PanePath -> IDEM IDEFlags getFlags Nothing = forceGetPane (Right "*Flags") getFlags (Just pp) = forceGetPane (Left pp) -- | Quote the string if it contains spaces and escape -- any other quotes. quoteArg :: String -> String quoteArg s | ' ' `elem` s = "\"" <> escapeQuotes s <> "\"" quoteArg s = s escapeQuotes = foldr (\c s -> if c == '"' then '\\':c:s else c:s) "" -- | Parse any (escaped) character (ignoring a prefixed @\@) quotedArgCharParser :: CharParser () Char quotedArgCharParser = try (do char '\\' anyChar) <|> try ( noneOf "\"") <?> "argsParser" -- | Parse an argument that is either quoted or does not -- contain spaces argParser :: CharParser () Text argParser = try (do char '"' s <- many quotedArgCharParser char '"' return $ T.pack s) <|> try ( T.pack <$> many1 (noneOf " ")) <?> "argParser" -- | Parse many arguments, possibly seperated by spaces argsParser :: CharParser () [Text] argsParser = try ( many (do many (char ' ') argParser)) <?> "argsParser" -- | Quote all arguments and concatenate them unargs :: [Text] -> Text unargs = T.unwords . map (T.pack . quoteArg . 
T.unpack) -- | Parse a list of arguments from a given string args :: Text -> [Text] args s = case parse argsParser "" $ T.unpack s of Right result -> result _ -> [s] -- | The flattened description of the fields in the pane flatFlagsDescription :: [FieldDescriptionS IDEPackage] flatFlagsDescription = flattenFieldDescriptionPPToS flagsDescription -- | The description of the fields in the pane flagsDescription :: FieldDescriptionPP IDEPackage IDEM flagsDescription = VFDPP emptyParams [ mkFieldPP (paraName <<<- ParaName (__ "Config flags") $ emptyParams) (PP.text . show) readParser (unargs . ipdConfigFlags) (\ b a -> a{ipdConfigFlags = args b}) (textEditor (const True) True) (\ _ -> return ()) , mkFieldPP (paraName <<<- ParaName (__ "Build flags") $ emptyParams) (PP.text . show) readParser (unargs . ipdBuildFlags) (\ b a -> a{ipdBuildFlags = args b}) (textEditor (const True) True) (\ _ -> return ()) , mkFieldPP (paraName <<<- ParaName (__ "Test flags") $ emptyParams) (PP.text . show) readParser (unargs . ipdTestFlags) (\ b a -> a{ipdTestFlags = args b}) (textEditor (const True) True) (\ _ -> return ()) , mkFieldPP (paraName <<<- ParaName (__ "Haddock flags") $ emptyParams) (PP.text . show) readParser (unargs . ipdHaddockFlags) (\ b a -> a{ipdHaddockFlags = args b}) (textEditor (const True) True) (\ _ -> return ()) , mkFieldPP (paraName <<<- ParaName (__ "Executable flags") $ emptyParams) (PP.text . show) readParser (unargs . ipdExeFlags) (\ b a -> a{ipdExeFlags = args b}) (textEditor (const True) True) (\ _ -> return ()) , mkFieldPP (paraName <<<- ParaName (__ "Install flags") $ emptyParams) (PP.text . show) readParser (unargs . ipdInstallFlags) (\ b a -> a{ipdInstallFlags = args b}) (textEditor (const True) True) (\ _ -> return ()) , mkFieldPP (paraName <<<- ParaName (__ "Register flags") $ emptyParams) (PP.text . show) readParser (unargs . ipdRegisterFlags) (\ b a -> a{ipdRegisterFlags = args b}) (textEditor (const True) True) (\ _ -> return ()) , mkFieldPP (paraName <<<- ParaName (__ "Unregister flags") $ emptyParams) (PP.text . show) readParser (unargs . ipdUnregisterFlags) (\ b a -> a{ipdUnregisterFlags = args b}) (textEditor (const True) True) (\ _ -> return ()) , mkFieldPP (paraName <<<- ParaName (__ "Source Distribution flags") $ emptyParams) (PP.text . show) readParser (unargs . ipdSdistFlags) (\ b a -> a{ipdSdistFlags = args b}) (textEditor (const True) True) (\ _ -> return ())] -- ------------------------------------------------------------ -- * Parsing -- ------------------------------------------------------------ -- | Read all the field values from the given 'FilePath' readFlags :: FilePath -> IDEPackage -> IO IDEPackage readFlags fn = readFields fn flatFlagsDescription -- ------------------------------------------------------------ -- * Printing -- ------------------------------------------------------------ -- | Write all field values to the given 'FilePath' writeFlags :: FilePath -> IDEPackage -> IO () writeFlags fpath flags = writeFields fpath flags flatFlagsDescription
cocreature/leksah
src/IDE/Pane/PackageFlags.hs
gpl-2.0
10,233
0
21
3,101
2,586
1,339
1,247
227
3
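-- Editor's note (illustrative only, values invented): how package flags
-- round-trip through the 'unargs' and 'args' helpers above (both work on
-- Text, so the module's OverloadedStrings pragma applies).
--
--   unargs ["--ghc-option=-O2", "some arg"]
--     == "--ghc-option=-O2 \"some arg\""
--
--   args "--ghc-option=-O2 \"some arg\""
--     == ["--ghc-option=-O2", "some arg"]
--
-- quoteArg only adds the surrounding quotes when an argument contains a
-- space (escaping any embedded double quotes), and argParser undoes both.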
module Base.Renderable.VBox (vBox) where

import Data.Abelian
import Graphics.Qt
import Utils
import Base.Types
import Base.Renderable.Common ()

data VBox = VBox Int [RenderableInstance]

-- | Creates a VBox that will display at least n items.
-- Displays as many further items as fit into the available height.
vBox :: Renderable r => Int -> [r] -> VBox
vBox n = VBox n . map RenderableInstance

instance Renderable VBox where
    label = const "VBox"
    render ptr app config parentSize vBox@(VBox minimalItems items) = do
        itemRenders <- inner minimalItems (height parentSize) items
        return (vBoxSize (fmap fst itemRenders), renderVBox itemRenders)
      where
        -- If not every item fits, the box claims the full parent height;
        -- otherwise it is just as tall as its rendered items.
        vBoxSize itemSizes = case itemSizes of
            [] -> zero
            _ -> Size (maximum (fmap width itemSizes)) (boxHeight itemSizes)
        boxHeight itemSizes =
            if length itemSizes < length items
                then height parentSize
                else sum (fmap height itemSizes)
        -- Renders items top to bottom with the remaining height, keeping an
        -- item as long as it still fits or the minimal item count has not
        -- been reached yet.
        inner :: Int -> Double -> [RenderableInstance] -> IO [(Size Double, IO ())]
        inner minimalItems h (a : r) = do
            t@(itemSize, action) <- render ptr app config (Size (width parentSize) h) a
            if (h >= height itemSize) || minimalItems > 0
                then do
                    rest <- inner (pred minimalItems) (h - height itemSize) r
                    return (t : rest)
                else return []
        inner _ _ [] = return []
        -- Runs every item's render action and moves the painter down by the
        -- item's height afterwards.
        renderVBox = fmapM_ $ \ (itemSize, action) -> do
            recoverMatrix ptr action
            translate ptr (Position 0 (height itemSize))
changlinli/nikki
src/Base/Renderable/VBox.hs
lgpl-3.0
1,576
0
17
472
524
266
258
32
1
-------------------------------------------------------------------------------- -- | Demultiplexing of frames into messages {-# LANGUAGE DeriveDataTypeable, OverloadedStrings #-} module Network.WebSockets.Hybi13.Demultiplex ( FrameType (..) , Frame (..) , DemultiplexState , emptyDemultiplexState , demultiplex ) where -------------------------------------------------------------------------------- import Blaze.ByteString.Builder (Builder) import qualified Blaze.ByteString.Builder as B import Control.Exception (Exception, throw) import Data.Binary.Get (runGet, getWord16be) import qualified Data.ByteString.Lazy as BL import Data.Monoid (mappend) import Data.Typeable (Typeable) -------------------------------------------------------------------------------- import Network.WebSockets.Types -------------------------------------------------------------------------------- -- | A low-level representation of a WebSocket packet data Frame = Frame { frameFin :: !Bool , frameRsv1 :: !Bool , frameRsv2 :: !Bool , frameRsv3 :: !Bool , frameType :: !FrameType , framePayload :: !BL.ByteString } deriving (Eq, Show) -------------------------------------------------------------------------------- -- | The type of a frame. Not all types are allowed for all protocols. data FrameType = ContinuationFrame | TextFrame | BinaryFrame | CloseFrame | PingFrame | PongFrame deriving (Eq, Show) -------------------------------------------------------------------------------- -- | Thrown if the client sends invalid multiplexed data data DemultiplexException = DemultiplexException deriving (Show, Typeable) -------------------------------------------------------------------------------- instance Exception DemultiplexException -------------------------------------------------------------------------------- -- | Internal state used by the demultiplexer data DemultiplexState = EmptyDemultiplexState | DemultiplexState !FrameType !Builder -------------------------------------------------------------------------------- emptyDemultiplexState :: DemultiplexState emptyDemultiplexState = EmptyDemultiplexState -------------------------------------------------------------------------------- demultiplex :: DemultiplexState -> Frame -> (Maybe Message, DemultiplexState) demultiplex state (Frame fin _ _ _ tp pl) = case tp of -- Return control messages immediately, they have no influence on the state CloseFrame -> (Just (ControlMessage (uncurry Close parsedClose)), state) PingFrame -> (Just (ControlMessage (Ping pl)), state) PongFrame -> (Just (ControlMessage (Pong pl)), state) -- If we're dealing with a continuation... ContinuationFrame -> case state of -- We received a continuation but we don't have any state. Let's ignore -- this fragment... 
EmptyDemultiplexState -> (Nothing, EmptyDemultiplexState) -- Append the payload to the state -- TODO: protect against overflows DemultiplexState amt b | not fin -> (Nothing, DemultiplexState amt b') | otherwise -> case amt of TextFrame -> (Just (DataMessage (Text m)), e) BinaryFrame -> (Just (DataMessage (Binary m)), e) _ -> throw DemultiplexException where b' = b `mappend` plb m = B.toLazyByteString b' TextFrame | fin -> (Just (DataMessage (Text pl)), e) | otherwise -> (Nothing, DemultiplexState TextFrame plb) BinaryFrame | fin -> (Just (DataMessage (Binary pl)), e) | otherwise -> (Nothing, DemultiplexState BinaryFrame plb) where e = emptyDemultiplexState plb = B.fromLazyByteString pl -- The Close frame MAY contain a body (the "Application data" portion of the -- frame) that indicates a reason for closing, such as an endpoint shutting -- down, an endpoint having received a frame too large, or an endpoint -- having received a frame that does not conform to the format expected by -- the endpoint. If there is a body, the first two bytes of the body MUST -- be a 2-byte unsigned integer (in network byte order) representing a -- status code with value /code/ defined in Section 7.4. parsedClose | BL.length pl >= 2 = (runGet getWord16be pl, BL.drop 2 pl) | otherwise = (1000, BL.empty)
zodiac/websockets
src/Network/WebSockets/Hybi13/Demultiplex.hs
bsd-3-clause
4,612
0
20
1,049
757
424
333
83
9
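A minimal usage sketch for the demultiplexer above, assuming the Message type (with its DataMessage/Text constructors) is importable from Network.WebSockets.Types, which is not shown in this entry: threading the state over a fragmented text message yields one reassembled message on the final frame.

import Data.List (mapAccumL)
import Data.Maybe (catMaybes)
import qualified Data.ByteString.Lazy.Char8 as BL
import Network.WebSockets.Hybi13.Demultiplex
import Network.WebSockets.Types (Message)

-- Three frames carrying a single fragmented text message.
fragments :: [Frame]
fragments =
  [ Frame False False False False TextFrame         (BL.pack "Hel")
  , Frame False False False False ContinuationFrame (BL.pack "lo, ")
  , Frame True  False False False ContinuationFrame (BL.pack "world")
  ]

-- Thread the demultiplexer state left to right; only the frame with
-- frameFin = True produces a message, here DataMessage (Text "Hello, world").
reassemble :: [Frame] -> [Message]
reassemble = catMaybes . snd . mapAccumL step emptyDemultiplexState
  where
    step st f = let (msg, st') = demultiplex st f in (st', msg)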
{-# LANGUAGE OverloadedStrings #-} module IntegrationTests where import Control.Exception import Foreign.Ptr import Foreign.Storable import Test.QuickCheck import Thrift.Protocol.Binary import Thrift.Protocol.Compact import Thrift.Protocol.JSON import Thrift.Protocol.SimpleJSON import Thrift.Transport import Interface import Util import Hs_test_Types -- | Serialize a TestStruct from C++ and deserialize in Haskell propCToHs :: Protocol p => (Ptr MemoryBuffer -> p (Ptr MemoryBuffer)) -> (Ptr MemoryBuffer -> Ptr TestStruct -> IO ()) -> TestStruct -> Property propCToHs pCons cToHS struct = ioProperty $ bracket c_newStructPtr c_freeTestStruct $ \structPtr -> bracket c_openMB tClose $ \mb -> do poke structPtr struct cToHS mb structPtr (== struct) <$> read_TestStruct (pCons mb) -- | Serialize a TestStruct in Haskell and deserialize in C++ propHsToC :: Protocol p => (Ptr MemoryBuffer -> p (Ptr MemoryBuffer)) -> (Ptr MemoryBuffer -> IO (Ptr TestStruct)) -> TestStruct -> Property propHsToC pCons hsToC struct = ioProperty $ bracket c_openMB tClose $ \mb -> do write_TestStruct (pCons mb) struct bracket (hsToC mb) c_freeTestStruct $ \structPtr -> (== struct) <$> peek structPtr main :: IO () main = aggregateResults [ quickCheckWithResult args (propCToHs BinaryProtocol c_serializeBinary) , quickCheckWithResult args (propHsToC BinaryProtocol c_deserializeBinary) , quickCheckWithResult args (propCToHs JSONProtocol c_serializeJSON) , quickCheckWithResult args (propHsToC JSONProtocol c_deserializeJSON) , quickCheckWithResult args $ propCToHs SimpleJSONProtocol c_serializeSimpleJSON , quickCheckWithResult args $ propHsToC SimpleJSONProtocol c_deserializeSimpleJSON , quickCheckWithResult args (propCToHs CompactProtocol c_serializeCompact) , quickCheckWithResult args (propHsToC CompactProtocol c_deserializeCompact) ] where args = Args Nothing 100 10 100 True
Orvid/fbthrift
thrift/lib/hs/tests/IntegrationTests.hs
apache-2.0
2,041
0
14
400
512
260
252
48
1
{-# OPTIONS_JHC -fno-prelude -fffi #-} ----------------------------------------------------------------------------- -- | -- Module : Foreign.C.Error -- Copyright : (c) The FFI task force 2001 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : [email protected] -- Stability : provisional -- Portability : portable -- -- C-specific Marshalling support: Handling of C \"errno\" error codes. -- ----------------------------------------------------------------------------- module Foreign.C.Error ( -- * Haskell representations of @errno@ values Errno(..), -- instance: Eq -- ** Common @errno@ symbols -- | Different operating systems and\/or C libraries often support -- different values of @errno@. This module defines the common values, -- but due to the open definition of 'Errno' users may add definitions -- which are not predefined. eOK,{- e2BIG, eACCES, eADDRINUSE, eADDRNOTAVAIL, eADV, eAFNOSUPPORT, eAGAIN, eALREADY, eBADF, eBADMSG, eBADRPC, eBUSY, eCHILD, eCOMM, eCONNABORTED, eCONNREFUSED, eCONNRESET, eDEADLK, eDESTADDRREQ, eDIRTY, eDOM, eDQUOT, eEXIST, eFAULT, eFBIG, eFTYPE, eHOSTDOWN, eHOSTUNREACH, eIDRM, eILSEQ, eINPROGRESS, eINTR, eINVAL, eIO, eISCONN, eISDIR, eLOOP, eMFILE, eMLINK, eMSGSIZE, eMULTIHOP, eNAMETOOLONG, eNETDOWN, eNETRESET, eNETUNREACH, eNFILE, eNOBUFS, eNODATA, eNODEV, eNOENT, eNOEXEC, eNOLCK, eNOLINK, eNOMEM, eNOMSG, eNONET, eNOPROTOOPT, eNOSPC, eNOSR, eNOSTR, eNOSYS, eNOTBLK, eNOTCONN, eNOTDIR, eNOTEMPTY, eNOTSOCK, eNOTTY, eNXIO, eOPNOTSUPP, ePERM, ePFNOSUPPORT, ePIPE, ePROCLIM, ePROCUNAVAIL, ePROGMISMATCH, ePROGUNAVAIL, ePROTO, ePROTONOSUPPORT, ePROTOTYPE, eRANGE, eREMCHG, eREMOTE, eROFS, eRPCMISMATCH, eRREMOTE, eSHUTDOWN, eSOCKTNOSUPPORT, eSPIPE, eSRCH, eSRMNT, eSTALE, eTIME, eTIMEDOUT, eTOOMANYREFS, eTXTBSY, eUSERS, eWOULDBLOCK, eXDEV, -} -- ** 'Errno' functions -- :: Errno isValidErrno, -- :: Errno -> Bool -- access to the current thread's "errno" value -- getErrno, -- :: IO Errno resetErrno, -- :: IO () -- conversion of an "errno" value into IO error -- errnoToIOError, -- :: String -- location -- -> Errno -- errno -- -> Maybe Handle -- handle -- -> Maybe String -- filename -- -> IOError -- throw current "errno" value -- throwErrno, -- :: String -> IO a -- ** Guards for IO operations that may fail throwErrnoIf, -- :: (a -> Bool) -> String -> IO a -> IO a throwErrnoIf_, -- :: (a -> Bool) -> String -> IO a -> IO () throwErrnoIfRetry, -- :: (a -> Bool) -> String -> IO a -> IO a throwErrnoIfRetry_, -- :: (a -> Bool) -> String -> IO a -> IO () throwErrnoIfMinus1, -- :: Num a -- => String -> IO a -> IO a throwErrnoIfMinus1_, -- :: Num a -- => String -> IO a -> IO () throwErrnoIfMinus1Retry, -- :: Num a -- => String -> IO a -> IO a throwErrnoIfMinus1Retry_, -- :: Num a -- => String -> IO a -> IO () throwErrnoIfNull, -- :: String -> IO (Ptr a) -> IO (Ptr a) throwErrnoIfNullRetry,-- :: String -> IO (Ptr a) -> IO (Ptr a) throwErrnoIfRetryMayBlock, throwErrnoIfRetryMayBlock_, throwErrnoIfMinus1RetryMayBlock, throwErrnoIfMinus1RetryMayBlock_, throwErrnoIfNullRetryMayBlock ) where import Foreign.C.String import Foreign.Ptr import Foreign.Storable import Jhc.Basics import Jhc.Basics import Jhc.IO import Jhc.Maybe import Jhc.Monad import Jhc.Num import Jhc.Order import Jhc.Type.C instance Eq Errno where errno1@(Errno no1) == errno2@(Errno no2) | isValidErrno errno1 && isValidErrno errno2 = no1 == no2 | otherwise = False -- common "errno" symbols -- {- eOK, e2BIG, eACCES, eADDRINUSE, eADDRNOTAVAIL, eADV, eAFNOSUPPORT, eAGAIN, eALREADY, eBADF, eBADMSG, 
eBADRPC, eBUSY, eCHILD, eCOMM, eCONNABORTED, eCONNREFUSED, eCONNRESET, eDEADLK, eDESTADDRREQ, eDIRTY, eDOM, eDQUOT, eEXIST, eFAULT, eFBIG, eFTYPE, eHOSTDOWN, eHOSTUNREACH, eIDRM, eILSEQ, eINPROGRESS, eINTR, eINVAL, eIO, eISCONN, eISDIR, eLOOP, eMFILE, eMLINK, eMSGSIZE, eMULTIHOP, eNAMETOOLONG, eNETDOWN, eNETRESET, eNETUNREACH, eNFILE, eNOBUFS, eNODATA, eNODEV, eNOENT, eNOEXEC, eNOLCK, eNOLINK, eNOMEM, eNOMSG, eNONET, eNOPROTOOPT, eNOSPC, eNOSR, eNOSTR, eNOSYS, eNOTBLK, eNOTCONN, eNOTDIR, eNOTEMPTY, eNOTSOCK, eNOTTY, eNXIO, eOPNOTSUPP, ePERM, ePFNOSUPPORT, ePIPE, ePROCLIM, ePROCUNAVAIL, ePROGMISMATCH, ePROGUNAVAIL, ePROTO, ePROTONOSUPPORT, ePROTOTYPE, eRANGE, eREMCHG, eREMOTE, eROFS, eRPCMISMATCH, eRREMOTE, eSHUTDOWN, eSOCKTNOSUPPORT, eSPIPE, eSRCH, eSRMNT, eSTALE, eTIME, eTIMEDOUT, eTOOMANYREFS, eTXTBSY, eUSERS, eWOULDBLOCK, eXDEV :: Errno -- -} -- the cCONST_XXX identifiers are cpp symbols whose value is computed by -- configure -- eOK = Errno 0 {- #ifdef __NHC__ #include "Errno.hs" #else e2BIG = Errno (CONST_E2BIG) eACCES = Errno (CONST_EACCES) eADDRINUSE = Errno (CONST_EADDRINUSE) eADDRNOTAVAIL = Errno (CONST_EADDRNOTAVAIL) eADV = Errno (CONST_EADV) eAFNOSUPPORT = Errno (CONST_EAFNOSUPPORT) eAGAIN = Errno (CONST_EAGAIN) eALREADY = Errno (CONST_EALREADY) eBADF = Errno (CONST_EBADF) eBADMSG = Errno (CONST_EBADMSG) eBADRPC = Errno (CONST_EBADRPC) eBUSY = Errno (CONST_EBUSY) eCHILD = Errno (CONST_ECHILD) eCOMM = Errno (CONST_ECOMM) eCONNABORTED = Errno (CONST_ECONNABORTED) eCONNREFUSED = Errno (CONST_ECONNREFUSED) eCONNRESET = Errno (CONST_ECONNRESET) eDEADLK = Errno (CONST_EDEADLK) eDESTADDRREQ = Errno (CONST_EDESTADDRREQ) eDIRTY = Errno (CONST_EDIRTY) eDOM = Errno (CONST_EDOM) eDQUOT = Errno (CONST_EDQUOT) eEXIST = Errno (CONST_EEXIST) eFAULT = Errno (CONST_EFAULT) eFBIG = Errno (CONST_EFBIG) eFTYPE = Errno (CONST_EFTYPE) eHOSTDOWN = Errno (CONST_EHOSTDOWN) eHOSTUNREACH = Errno (CONST_EHOSTUNREACH) eIDRM = Errno (CONST_EIDRM) eILSEQ = Errno (CONST_EILSEQ) eINPROGRESS = Errno (CONST_EINPROGRESS) eINTR = Errno (CONST_EINTR) eINVAL = Errno (CONST_EINVAL) eIO = Errno (CONST_EIO) eISCONN = Errno (CONST_EISCONN) eISDIR = Errno (CONST_EISDIR) eLOOP = Errno (CONST_ELOOP) eMFILE = Errno (CONST_EMFILE) eMLINK = Errno (CONST_EMLINK) eMSGSIZE = Errno (CONST_EMSGSIZE) eMULTIHOP = Errno (CONST_EMULTIHOP) eNAMETOOLONG = Errno (CONST_ENAMETOOLONG) eNETDOWN = Errno (CONST_ENETDOWN) eNETRESET = Errno (CONST_ENETRESET) eNETUNREACH = Errno (CONST_ENETUNREACH) eNFILE = Errno (CONST_ENFILE) eNOBUFS = Errno (CONST_ENOBUFS) eNODATA = Errno (CONST_ENODATA) eNODEV = Errno (CONST_ENODEV) eNOENT = Errno (CONST_ENOENT) eNOEXEC = Errno (CONST_ENOEXEC) eNOLCK = Errno (CONST_ENOLCK) eNOLINK = Errno (CONST_ENOLINK) eNOMEM = Errno (CONST_ENOMEM) eNOMSG = Errno (CONST_ENOMSG) eNONET = Errno (CONST_ENONET) eNOPROTOOPT = Errno (CONST_ENOPROTOOPT) eNOSPC = Errno (CONST_ENOSPC) eNOSR = Errno (CONST_ENOSR) eNOSTR = Errno (CONST_ENOSTR) eNOSYS = Errno (CONST_ENOSYS) eNOTBLK = Errno (CONST_ENOTBLK) eNOTCONN = Errno (CONST_ENOTCONN) eNOTDIR = Errno (CONST_ENOTDIR) eNOTEMPTY = Errno (CONST_ENOTEMPTY) eNOTSOCK = Errno (CONST_ENOTSOCK) eNOTTY = Errno (CONST_ENOTTY) eNXIO = Errno (CONST_ENXIO) eOPNOTSUPP = Errno (CONST_EOPNOTSUPP) ePERM = Errno (CONST_EPERM) ePFNOSUPPORT = Errno (CONST_EPFNOSUPPORT) ePIPE = Errno (CONST_EPIPE) ePROCLIM = Errno (CONST_EPROCLIM) ePROCUNAVAIL = Errno (CONST_EPROCUNAVAIL) ePROGMISMATCH = Errno (CONST_EPROGMISMATCH) ePROGUNAVAIL = Errno (CONST_EPROGUNAVAIL) ePROTO = Errno (CONST_EPROTO) ePROTONOSUPPORT = 
Errno (CONST_EPROTONOSUPPORT) ePROTOTYPE = Errno (CONST_EPROTOTYPE) eRANGE = Errno (CONST_ERANGE) eREMCHG = Errno (CONST_EREMCHG) eREMOTE = Errno (CONST_EREMOTE) eROFS = Errno (CONST_EROFS) eRPCMISMATCH = Errno (CONST_ERPCMISMATCH) eRREMOTE = Errno (CONST_ERREMOTE) eSHUTDOWN = Errno (CONST_ESHUTDOWN) eSOCKTNOSUPPORT = Errno (CONST_ESOCKTNOSUPPORT) eSPIPE = Errno (CONST_ESPIPE) eSRCH = Errno (CONST_ESRCH) eSRMNT = Errno (CONST_ESRMNT) eSTALE = Errno (CONST_ESTALE) eTIME = Errno (CONST_ETIME) eTIMEDOUT = Errno (CONST_ETIMEDOUT) eTOOMANYREFS = Errno (CONST_ETOOMANYREFS) eTXTBSY = Errno (CONST_ETXTBSY) eUSERS = Errno (CONST_EUSERS) eWOULDBLOCK = Errno (CONST_EWOULDBLOCK) eXDEV = Errno (CONST_EXDEV) #endif -} -- | Yield 'True' if the given 'Errno' value is valid on the system. -- This implies that the 'Eq' instance of 'Errno' is also system dependent -- as it is only defined for valid values of 'Errno'. -- isValidErrno :: Errno -> Bool -- -- the configure script sets all invalid "errno"s to -1 -- isValidErrno (Errno errno) = errno /= -1 -- access to the current thread's "errno" value -- -------------------------------------------- -- | Get the current value of @errno@ in the current thread. -- getErrno :: IO Errno -- We must call a C function to get the value of errno in general. On -- threaded systems, errno is hidden behind a C macro so that each OS -- thread gets its own copy. getErrno = do e <- peek _errno; return (Errno e) foreign import ccall "errno.h &errno" _errno :: Ptr CInt -- | Reset the current thread\'s @errno@ value to 'eOK'. -- resetErrno :: IO () -- Again, setting errno has to be done via a C function. resetErrno = poke _errno 0 -- throw current "errno" value -- --------------------------- -- | Throw an 'IOError' corresponding to the current value of 'getErrno'. -- throwErrno :: String -- ^ textual description of the error location -> IO a throwErrno loc = do errno <- getErrno ioError (errnoToIOError loc errno Nothing Nothing) -- guards for IO operations that may fail -- -------------------------------------- -- | Throw an 'IOError' corresponding to the current value of 'getErrno' -- if the result value of the 'IO' action meets the given predicate. -- throwErrnoIf :: (a -> Bool) -- ^ predicate to apply to the result value -- of the 'IO' operation -> String -- ^ textual description of the location -> IO a -- ^ the 'IO' operation to be executed -> IO a throwErrnoIf pred loc f = do res <- f if pred res then throwErrno loc else return res -- | as 'throwErrnoIf', but discards the result of the 'IO' action after -- error handling. -- throwErrnoIf_ :: (a -> Bool) -> String -> IO a -> IO () throwErrnoIf_ pred loc f = throwErrnoIf pred loc f >> return () -- | as 'throwErrnoIf', but retry the 'IO' action when it yields the -- error code 'eINTR' - this amounts to the standard retry loop for -- interrupted POSIX system calls. -- throwErrnoIfRetry :: (a -> Bool) -> String -> IO a -> IO a throwErrnoIfRetry pred loc f = do res <- f if pred res then do err <- getErrno if err == eINTR then throwErrnoIfRetry pred loc f else throwErrno loc else return res -- | as 'throwErrnoIfRetry', but checks for operations that would block and -- executes an alternative action before retrying in that case. 
-- throwErrnoIfRetryMayBlock :: (a -> Bool) -- ^ predicate to apply to the result value -- of the 'IO' operation -> String -- ^ textual description of the location -> IO a -- ^ the 'IO' operation to be executed -> IO b -- ^ action to execute before retrying if -- an immediate retry would block -> IO a throwErrnoIfRetryMayBlock pred loc f on_block = do res <- f if pred res then do err <- getErrno if err == eINTR then throwErrnoIfRetryMayBlock pred loc f on_block else if err == eWOULDBLOCK || err == eAGAIN then do on_block; throwErrnoIfRetryMayBlock pred loc f on_block else throwErrno loc else return res -- | as 'throwErrnoIfRetry', but discards the result. -- throwErrnoIfRetry_ :: (a -> Bool) -> String -> IO a -> IO () throwErrnoIfRetry_ pred loc f = throwErrnoIfRetry pred loc f >> return () -- | as 'throwErrnoIfRetryMayBlock', but discards the result. -- throwErrnoIfRetryMayBlock_ :: (a -> Bool) -> String -> IO a -> IO b -> IO () throwErrnoIfRetryMayBlock_ pred loc f on_block = throwErrnoIfRetryMayBlock pred loc f on_block >> return () -- | Throw an 'IOError' corresponding to the current value of 'getErrno' -- if the 'IO' action returns a result of @-1@. -- throwErrnoIfMinus1 :: Num a => String -> IO a -> IO a throwErrnoIfMinus1 = throwErrnoIf (== -1) -- | as 'throwErrnoIfMinus1', but discards the result. -- throwErrnoIfMinus1_ :: Num a => String -> IO a -> IO () throwErrnoIfMinus1_ = throwErrnoIf_ (== -1) -- | Throw an 'IOError' corresponding to the current value of 'getErrno' -- if the 'IO' action returns a result of @-1@, but retries in case of -- an interrupted operation. -- throwErrnoIfMinus1Retry :: Num a => String -> IO a -> IO a throwErrnoIfMinus1Retry = throwErrnoIfRetry (== -1) -- | as 'throwErrnoIfMinus1', but discards the result. -- throwErrnoIfMinus1Retry_ :: Num a => String -> IO a -> IO () throwErrnoIfMinus1Retry_ = throwErrnoIfRetry_ (== -1) -- | as 'throwErrnoIfMinus1Retry', but checks for operations that would block. -- throwErrnoIfMinus1RetryMayBlock :: Num a => String -> IO a -> IO b -> IO a throwErrnoIfMinus1RetryMayBlock = throwErrnoIfRetryMayBlock (== -1) -- | as 'throwErrnoIfMinus1RetryMayBlock', but discards the result. -- throwErrnoIfMinus1RetryMayBlock_ :: Num a => String -> IO a -> IO b -> IO () throwErrnoIfMinus1RetryMayBlock_ = throwErrnoIfRetryMayBlock_ (== -1) -- | Throw an 'IOError' corresponding to the current value of 'getErrno' -- if the 'IO' action returns 'nullPtr'. -- throwErrnoIfNull :: String -> IO (Ptr a) -> IO (Ptr a) throwErrnoIfNull = throwErrnoIf (== nullPtr) -- | Throw an 'IOError' corresponding to the current value of 'getErrno' -- if the 'IO' action returns 'nullPtr', -- but retry in case of an interrupted operation. -- throwErrnoIfNullRetry :: String -> IO (Ptr a) -> IO (Ptr a) throwErrnoIfNullRetry = throwErrnoIfRetry (== nullPtr) -- | as 'throwErrnoIfNullRetry', but checks for operations that would block. -- throwErrnoIfNullRetryMayBlock :: String -> IO (Ptr a) -> IO b -> IO (Ptr a) throwErrnoIfNullRetryMayBlock = throwErrnoIfRetryMayBlock (== nullPtr) -- conversion of an "errno" value into IO error -- -------------------------------------------- -- | Construct a Haskell 98 I\/O error based on the given 'Errno' value. -- The optional information can be used to improve the accuracy of -- error messages. 
-- errnoToIOError :: String -- ^ the location where the error occurred -> Errno -- ^ the error number -> Maybe a -- ^ optional handle associated with the error -> Maybe String -- ^ optional filename associated with the error -> IOError errnoToIOError loc errno maybeHdl maybeName = unsafePerformIO $ do str <- strerror errno >>= peekCString {- #if __GLASGOW_HASKELL__ return (IOError maybeHdl errType loc str maybeName) where errType | errno == eOK = OtherError | errno == e2BIG = ResourceExhausted | errno == eACCES = PermissionDenied | errno == eADDRINUSE = ResourceBusy | errno == eADDRNOTAVAIL = UnsupportedOperation | errno == eADV = OtherError | errno == eAFNOSUPPORT = UnsupportedOperation | errno == eAGAIN = ResourceExhausted | errno == eALREADY = AlreadyExists | errno == eBADF = OtherError | errno == eBADMSG = InappropriateType | errno == eBADRPC = OtherError | errno == eBUSY = ResourceBusy | errno == eCHILD = NoSuchThing | errno == eCOMM = ResourceVanished | errno == eCONNABORTED = OtherError | errno == eCONNREFUSED = NoSuchThing | errno == eCONNRESET = ResourceVanished | errno == eDEADLK = ResourceBusy | errno == eDESTADDRREQ = InvalidArgument | errno == eDIRTY = UnsatisfiedConstraints | errno == eDOM = InvalidArgument | errno == eDQUOT = PermissionDenied | errno == eEXIST = AlreadyExists | errno == eFAULT = OtherError | errno == eFBIG = PermissionDenied | errno == eFTYPE = InappropriateType | errno == eHOSTDOWN = NoSuchThing | errno == eHOSTUNREACH = NoSuchThing | errno == eIDRM = ResourceVanished | errno == eILSEQ = InvalidArgument | errno == eINPROGRESS = AlreadyExists | errno == eINTR = Interrupted | errno == eINVAL = InvalidArgument | errno == eIO = HardwareFault | errno == eISCONN = AlreadyExists | errno == eISDIR = InappropriateType | errno == eLOOP = InvalidArgument | errno == eMFILE = ResourceExhausted | errno == eMLINK = ResourceExhausted | errno == eMSGSIZE = ResourceExhausted | errno == eMULTIHOP = UnsupportedOperation | errno == eNAMETOOLONG = InvalidArgument | errno == eNETDOWN = ResourceVanished | errno == eNETRESET = ResourceVanished | errno == eNETUNREACH = NoSuchThing | errno == eNFILE = ResourceExhausted | errno == eNOBUFS = ResourceExhausted | errno == eNODATA = NoSuchThing | errno == eNODEV = UnsupportedOperation | errno == eNOENT = NoSuchThing | errno == eNOEXEC = InvalidArgument | errno == eNOLCK = ResourceExhausted | errno == eNOLINK = ResourceVanished | errno == eNOMEM = ResourceExhausted | errno == eNOMSG = NoSuchThing | errno == eNONET = NoSuchThing | errno == eNOPROTOOPT = UnsupportedOperation | errno == eNOSPC = ResourceExhausted | errno == eNOSR = ResourceExhausted | errno == eNOSTR = InvalidArgument | errno == eNOSYS = UnsupportedOperation | errno == eNOTBLK = InvalidArgument | errno == eNOTCONN = InvalidArgument | errno == eNOTDIR = InappropriateType | errno == eNOTEMPTY = UnsatisfiedConstraints | errno == eNOTSOCK = InvalidArgument | errno == eNOTTY = IllegalOperation | errno == eNXIO = NoSuchThing | errno == eOPNOTSUPP = UnsupportedOperation | errno == ePERM = PermissionDenied | errno == ePFNOSUPPORT = UnsupportedOperation | errno == ePIPE = ResourceVanished | errno == ePROCLIM = PermissionDenied | errno == ePROCUNAVAIL = UnsupportedOperation | errno == ePROGMISMATCH = ProtocolError | errno == ePROGUNAVAIL = UnsupportedOperation | errno == ePROTO = ProtocolError | errno == ePROTONOSUPPORT = ProtocolError | errno == ePROTOTYPE = ProtocolError | errno == eRANGE = UnsupportedOperation | errno == eREMCHG = ResourceVanished | errno == eREMOTE = 
IllegalOperation | errno == eROFS = PermissionDenied | errno == eRPCMISMATCH = ProtocolError | errno == eRREMOTE = IllegalOperation | errno == eSHUTDOWN = IllegalOperation | errno == eSOCKTNOSUPPORT = UnsupportedOperation | errno == eSPIPE = UnsupportedOperation | errno == eSRCH = NoSuchThing | errno == eSRMNT = UnsatisfiedConstraints | errno == eSTALE = ResourceVanished | errno == eTIME = TimeExpired | errno == eTIMEDOUT = TimeExpired | errno == eTOOMANYREFS = ResourceExhausted | errno == eTXTBSY = ResourceBusy | errno == eUSERS = ResourceExhausted | errno == eWOULDBLOCK = OtherError | errno == eXDEV = UnsupportedOperation | otherwise = OtherError #else #endif -} return (userError (loc ++ ": " ++ str ++ maybe "" (": "++) maybeName)) -- #endif foreign import ccall "string.h strerror" strerror :: Errno -> IO (Ptr CChar) foreign import primitive "const.EINTR" eINTR :: Errno foreign import primitive "const.EWOULDBLOCK" eWOULDBLOCK :: Errno foreign import primitive "const.EAGAIN" eAGAIN :: Errno
m-alvarez/jhc
lib/jhc/Foreign/C/Error.hs
mit
20,685
85
11
5,218
1,557
888
669
-1
-1
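A minimal sketch of the most common guard above, assuming a POSIX close(2) is available; the names c_close and closeFd are illustrative only. A -1 return is turned into an IOError built from the current errno.

{-# LANGUAGE ForeignFunctionInterface #-}
import Foreign.C.Types (CInt(..))
import Foreign.C.Error (throwErrnoIfMinus1_)

foreign import ccall unsafe "unistd.h close"
  c_close :: CInt -> IO CInt

-- Reads errno and throws the corresponding IOError, annotated with the
-- location string "closeFd", whenever close(2) returns -1.
closeFd :: CInt -> IO ()
closeFd fd = throwErrnoIfMinus1_ "closeFd" (c_close fd)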
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE RecursiveDo #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE KindSignatures #-} module Graphics.Oedel.Terminal.Widget ( Widget ) where import Control.Reactive import qualified Control.Reactive.IO as IO import qualified Graphics.Oedel.Layout as Layout import Graphics.Oedel.Style hiding (key) import Graphics.Oedel.Terminal.Base import Graphics.Oedel.Terminal.Input import Graphics.Oedel.Terminal.Flow (Flow, TextStyle) import Graphics.Oedel.Terminal.Block (Block) import Graphics.Oedel.Terminal.Paint (runPaint) import qualified Graphics.Oedel.Terminal.Block as Block import Data.Set (Set) import qualified Data.Set as Set import Data.Monoid import Control.Concurrent import Control.Monad.State import Control.Applicative -- | Identifies a key that can be assigned to an action. type Key = Char -- | Gives a unique name to a persistent thing (or group of things) in a -- widget. type Name = [Int] -- | Gives a unique name to a group of things in a widget. type Group = Name -- | A context in which names can be created. type NameGen = State (Int, Name) -- | Generates a new name within the context of a 'NameGen'. newName :: NameGen Name newName = do (head, tail) <- get put (head + 1, tail) return $ reverse (head : tail) -- | Encapsulates all requests from a widget to its environment. data Request e = Request { -- | The initial key requests for the widget. Each distinct name -- represents a feature to which a key can be assigned. Each feature is -- paired with the keys it may potentially be assigned to, in order -- of preference. initialKeyRequest :: [(Name, [Key])], -- | Dynamically requests keys, with similar behavior to -- 'initialKeyRequest'. keyRequest :: e (Name, [Key]) } instance Event e => Monoid (Request e) where mempty = Request { initialKeyRequest = [], keyRequest = never } mappend x y = Request { initialKeyRequest = initialKeyRequest x <> initialKeyRequest y, keyRequest = union (keyRequest x) (keyRequest y) } -- | Encapsulates all responses from an environment to a widget. data Response e = Response { -- | The initial assignments for requested keys. If a feature is assigned -- a key, the key will be returned along with an event that occurs when -- the key is pressed. initialKeyAssign :: Name -> Maybe (Key, e ()), -- | Responds to dynamic key assignments made by 'keyRequest' (and -- some that haven't, so make sure to check the name). keyAssign :: e (Name, Maybe (Key, e ())) } -- | Augments a figure of type @q f@ with the ability to interact with the -- user and access an environment of type @a@. Widgets live in a reactive -- system with event type @e@ and behavior type @f@. data Widget e (f :: * -> *) q a = Widget (NameGen (a -> (a, Request e, Response e -> q f))) -- | Converts a static figure into a widget. augment :: (Event e) => q f -> Widget e f q a augment source = Widget $ return $ \env -> (env, mempty, const source) -- | Applies a function to the underlying figure for a widget. decorate :: (Event e) => (q f -> r f) -> Widget e f q a -> Widget e f r a decorate f (Widget x) = Widget $ (\inner env -> let (nEnv, req, fig) = inner env in (nEnv, req, f . fig)) <$> x -- | Composes widgets by composing their underlying figures naturally. 
compose :: (Event e) => (q f -> p f -> r f) -> Widget e f q a -> Widget e f p a -> Widget e f r a compose f (Widget x) (Widget y) = Widget $ (\xInner yInner env -> let (xEnv, xReq, xFig) = xInner env (yEnv, yReq, yFig) = yInner xEnv in (yEnv, xReq <> yReq, \res -> f (xFig res) (yFig res))) <$> x <*> y instance Reactive e f => Monoid (Widget e f Flow a) where mempty = augment mempty mappend = compose mappend instance Reactive e f => Layout.Flow (Widget e f Flow a) where tight = decorate Layout.tight
dzamkov/Oedel
src/Graphics/Oedel/Terminal/Widget.hs
mit
4,186
0
16
974
1,009
575
434
68
1
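A small illustration of the name supply described above; newName is internal to the module, so this sketch assumes it is in scope.

import Control.Monad (replicateM)
import Control.Monad.State (evalState)

-- Successive names drawn from the root state (0, []) are distinct:
-- demoNames == [[0], [1], [2]]
demoNames :: [Name]
demoNames = evalState (replicateM 3 newName) (0, [])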
-- | -- Module: Math.NumberTheory.Primes.Factorisation.Montgomery -- Copyright: (c) 2011 Daniel Fischer -- Licence: MIT -- Maintainer: Daniel Fischer <[email protected]> -- -- Factorisation of 'Integer's by the elliptic curve algorithm after Montgomery. -- The algorithm is explained at -- <http://programmingpraxis.com/2010/04/23/modern-elliptic-curve-factorization-part-1/> -- and -- <http://programmingpraxis.com/2010/04/27/modern-elliptic-curve-factorization-part-2/> -- {-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE MagicHash #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE UnboxedTuples #-} {-# OPTIONS_GHC -fno-warn-type-defaults #-} module Math.NumberTheory.Primes.Factorisation.Montgomery ( -- * Complete factorisation functions -- ** Functions with input checking factorise -- -- * Partial factorisation , smallFactors -- -- ** Single curve worker , montgomeryFactorisation , findParms ) where import Control.Arrow import Control.Monad.Trans.State.Lazy import Data.Array.Base (bounds, unsafeAt) import Data.Bits import Data.IntMap (IntMap) import qualified Data.IntMap as IM import Data.List (foldl') import Data.Maybe import Data.Mod import Data.Proxy #if __GLASGOW_HASKELL__ < 803 import Data.Semigroup #endif import Data.Traversable import GHC.Exts import GHC.Integer.GMP.Internals hiding (integerToInt, wordToInteger) import GHC.Natural import GHC.TypeNats (KnownNat, SomeNat(..), natVal, someNatVal) import System.Random import Math.NumberTheory.Curves.Montgomery import Math.NumberTheory.Euclidean.Coprimes (splitIntoCoprimes, unCoprimes) import Math.NumberTheory.Logarithms (integerLogBase') import Math.NumberTheory.Roots import Math.NumberTheory.Primes.Sieve.Eratosthenes (PrimeSieve(..), psieveFrom) import Math.NumberTheory.Primes.Sieve.Indexing (toPrim) import Math.NumberTheory.Primes.Small import Math.NumberTheory.Primes.Testing.Probabilistic import Math.NumberTheory.Utils hiding (splitOff) import Math.NumberTheory.Utils.FromIntegral -- | @'factorise' n@ produces the prime factorisation of @n@. @'factorise' 0@ is -- an error and the factorisation of @1@ is empty. Uses a 'StdGen' produced in -- an arbitrary manner from the bit-pattern of @n@. -- -- __Warning:__ there are no guarantees of any particular -- order of prime factors, do not expect them to be ascending. factorise :: Integral a => a -> [(a, Word)] factorise 0 = error "0 has no prime factorisation" factorise n' = map (first fromIntegral) sfs <> map (first fromInteger) rest where n = abs n' (sfs, mb) = smallFactors (fromIntegral n) sg = mkStdGen (fromIntegral n `xor` 0xdeadbeef) rest = case mb of Nothing -> [] Just m -> stdGenFactorisation (Just $ 65536 * 65536) sg Nothing (toInteger m) ---------------------------------------------------------------------------------------------------- -- Factorisation wrappers -- ---------------------------------------------------------------------------------------------------- -- | A wrapper around 'curveFactorisation' providing a few default arguments. -- The primality test is 'bailliePSW', the @prng@ function - naturally - -- 'randomR'. This function also requires small prime factors to have been -- stripped before. 
stdGenFactorisation :: Maybe Integer -- ^ Lower bound for composite divisors -> StdGen -- ^ Standard PRNG -> Maybe Int -- ^ Estimated number of digits of smallest prime factor -> Integer -- ^ The number to factorise -> [(Integer, Word)] -- ^ List of prime factors and exponents stdGenFactorisation primeBound = curveFactorisation primeBound bailliePSW (\m -> randomR (6, m - 2)) -- | 'curveFactorisation' is the driver for the factorisation. Its performance (and success) -- can be influenced by passing appropriate arguments. If you know that @n@ has no prime divisors -- below @b@, any divisor found less than @b*b@ must be prime, thus giving @Just (b*b)@ as the -- first argument allows skipping the comparatively expensive primality test for those. -- If @n@ is such that all prime divisors must have a specific easy to test for structure, a -- custom primality test can improve the performance (normally, it will make very little -- difference, since @n@ has not many divisors, and many curves have to be tried to find one). -- More influence has the pseudo random generator (a function @prng@ with @6 <= fst (prng k s) <= k-2@ -- and an initial state for the PRNG) used to generate the curves to try. A lucky choice here can -- make a huge difference. So, if the default takes too long, try another one; or you can improve your -- chances for a quick result by running several instances in parallel. -- -- 'curveFactorisation' @n@ requires that small (< 65536) prime factors of @n@ -- have been stripped before. Otherwise it is likely to cycle forever. -- -- 'curveFactorisation' is unlikely to succeed if @n@ has more than one (really) large prime factor. -- curveFactorisation :: forall g. Maybe Integer -- ^ Lower bound for composite divisors -> (Integer -> Bool) -- ^ A primality test -> (Integer -> g -> (Integer, g)) -- ^ A PRNG -> g -- ^ Initial PRNG state -> Maybe Int -- ^ Estimated number of digits of the smallest prime factor -> Integer -- ^ The number to factorise -> [(Integer, Word)] -- ^ List of prime factors and exponents curveFactorisation primeBound primeTest prng seed mbdigs n | n == 1 = [] | ptest n = [(n, 1)] | otherwise = evalState (fact n digits) seed where digits :: Int digits = fromMaybe 8 mbdigs ptest :: Integer -> Bool ptest = maybe primeTest (\bd k -> k <= bd || primeTest k) primeBound rndR :: Integer -> State g Integer rndR k = state (prng k) perfPw :: Integer -> (Integer, Word) perfPw = maybe highestPower (largePFPower . integerSquareRoot) primeBound fact :: Integer -> Int -> State g [(Integer, Word)] fact 1 _ = return mempty fact m digs = do let (b1, b2, ct) = findParms digs -- All factors (both @pfs@ and @cfs@), are pairwise coprime. This is -- because 'repFact' returns either a single factor, or output of 'workFact'. -- In its turn, 'workFact' returns either a single factor, -- or concats 'repFact's over coprime integers. Induction completes the proof. 
Factors pfs cfs <- repFact m b1 b2 ct case cfs of [] -> return pfs _ -> do nfs <- forM cfs $ \(k, j) -> map (second (* j)) <$> fact k (if null pfs then digs + 5 else digs) return $ mconcat (pfs : nfs) repFact :: Integer -> Word -> Word -> Word -> State g Factors repFact 1 _ _ _ = return mempty repFact m b1 b2 count = case perfPw m of (_, 1) -> workFact m b1 b2 count (b, e) | ptest b -> return $ singlePrimeFactor b e | otherwise -> modifyPowers (* e) <$> workFact b b1 b2 count workFact :: Integer -> Word -> Word -> Word -> State g Factors workFact 1 _ _ _ = return mempty workFact m _ _ 0 = return $ singleCompositeFactor m 1 workFact m b1 b2 count = do s <- rndR m case someNatVal (fromInteger m) of SomeNat (_ :: Proxy t) -> case montgomeryFactorisation b1 b2 (fromInteger s :: Mod t) of Nothing -> workFact m b1 b2 (count - 1) Just d -> do let cs = unCoprimes $ splitIntoCoprimes [(d, 1), (m `quot` d, 1)] -- Since all @cs@ are coprime, we can factor each of -- them and just concat results, without summing up -- powers of the same primes in different elements. fmap mconcat $ forM cs $ \(x, xm) -> if ptest x then pure $ singlePrimeFactor x xm else repFact x b1 b2 (count - 1) data Factors = Factors { _primeFactors :: [(Integer, Word)] , _compositeFactors :: [(Integer, Word)] } singlePrimeFactor :: Integer -> Word -> Factors singlePrimeFactor a b = Factors [(a, b)] [] singleCompositeFactor :: Integer -> Word -> Factors singleCompositeFactor a b = Factors [] [(a, b)] instance Semigroup Factors where Factors pfs1 cfs1 <> Factors pfs2 cfs2 = Factors (pfs1 <> pfs2) (cfs1 <> cfs2) instance Monoid Factors where mempty = Factors [] [] mappend = (<>) modifyPowers :: (Word -> Word) -> Factors -> Factors modifyPowers f (Factors pfs cfs) = Factors (map (second f) pfs) (map (second f) cfs) ------------------------------------------------------------------------------- -- largePFPower -- | @'largePFPower' bd n@ produces the pair @(b,k)@ with the largest -- exponent @k@ such that @n == b^k@, where @bd > 1@ (it is expected -- that @bd@ is much larger, at least @1000@ or so), @n > bd^2@ and @n@ -- has no prime factors @p <= bd@, skipping the trial division phase -- of @'highestPower'@ when that is a priori known to be superfluous. -- It is only present to avoid duplication of work in factorisation -- and primality testing, it is not expected to be generally useful. -- The assumptions are not checked, if they are not satisfied, wrong -- results and wasted work may be the consequence. 
largePFPower :: Integer -> Integer -> (Integer, Word) largePFPower bd n = rawPower ln n where ln = intToWord (integerLogBase' (bd+1) n) rawPower :: Word -> Integer -> (Integer, Word) rawPower mx n = case exactRoot 4 n of Just r -> case rawPower (mx `quot` 4) r of (m,e) -> (m, 4*e) Nothing -> case exactSquareRoot n of Just r -> case rawOddPower (mx `quot` 2) r of (m,e) -> (m, 2*e) Nothing -> rawOddPower mx n rawOddPower :: Word -> Integer -> (Integer, Word) rawOddPower mx n | mx < 3 = (n,1) rawOddPower mx n = case exactCubeRoot n of Just r -> case rawOddPower (mx `quot` 3) r of (m,e) -> (m, 3*e) Nothing -> badPower mx n badPower :: Word -> Integer -> (Integer, Word) badPower mx n | mx < 5 = (n,1) | otherwise = go 1 mx n (takeWhile (<= mx) $ scanl (+) 5 $ cycle [2,4]) where go !e b m (k:ks) | b < k = (m,e) | otherwise = case exactRoot k m of Just r -> go (e*k) (b `quot` k) r (k:ks) Nothing -> go e b m ks go e _ m [] = (m,e) ---------------------------------------------------------------------------------------------------- -- The workhorse -- ---------------------------------------------------------------------------------------------------- -- | @'montgomeryFactorisation' n b1 b2 s@ tries to find a factor of @n@ using the -- curve and point determined by the seed @s@ (@6 <= s < n-1@), multiplying the -- point by the least common multiple of all numbers @<= b1@ and all primes -- between @b1@ and @b2@. The idea is that there's a good chance that the order -- of the point in the curve over one prime factor divides the multiplier, but the -- order over another factor doesn't, if @b1@ and @b2@ are appropriately chosen. -- If they are too small, none of the orders will probably divide the multiplier, -- if they are too large, all probably will, so they should be chosen to fit -- the expected size of the smallest factor. -- -- It is assumed that @n@ has no small prime factors. -- -- The result is maybe a nontrivial divisor of @n@. montgomeryFactorisation :: KnownNat n => Word -> Word -> Mod n -> Maybe Integer montgomeryFactorisation b1 b2 s = case newPoint (toInteger (unMod s)) n of Nothing -> Nothing Just (SomePoint p0) -> do -- Small step: for each prime p <= b1 -- multiply point 'p0' by the highest power p^k <= b1. let q = foldl (flip multiply) p0 smallPowers z = pointZ q case gcd n z of -- If small step did not succeed, perform a big step. 1 -> case gcd n (bigStep q b1 b2) of 1 -> Nothing g -> Just g g -> Just g where n = toInteger (natVal s) smallPowers = map findPower $ takeWhile (<= b1) (2 : 3 : 5 : list primeStore) findPower p = go p where go acc | acc <= b1 `quot` p = go (acc * p) | otherwise = acc -- | The implementation follows the algorithm at p. 6-7 -- of <http://www.hyperelliptic.org/tanja/SHARCS/talks06/Gaj.pdf Implementing the Elliptic Curve Method of Factoring in Reconfigurable Hardware> -- by K. Gaj, S. Kwon et al. bigStep :: (KnownNat a24, KnownNat n) => Point a24 n -> Word -> Word -> Integer bigStep q b1 b2 = rs where n = pointN q b0 = b1 - b1 `rem` wheel qks = zip [0..] $ map (`multiply` q) wheelCoprimes qs = enumAndMultiplyFromThenTo q b0 (b0 + wheel) b2 rs = foldl' (\ts (_cHi, p) -> foldl' (\us (_cLo, pq) -> us * (pointZ p * pointX pq - pointX p * pointZ pq) `rem` n ) ts qks) 1 qs wheel :: Word wheel = 210 wheelCoprimes :: [Word] wheelCoprimes = [ k | k <- [1 .. wheel `div` 2], k `gcd` wheel == 1 ] -- | Same as map (id *** flip multiply p) [from, thn .. to], -- but calculated in more efficient way. 
enumAndMultiplyFromThenTo :: (KnownNat a24, KnownNat n) => Point a24 n -> Word -> Word -> Word -> [(Word, Point a24 n)] enumAndMultiplyFromThenTo p from thn to = zip [from, thn .. to] progression where step = thn - from pFrom = multiply from p pThen = multiply thn p pStep = multiply step p progression = pFrom : pThen : zipWith (`add` pStep) progression (tail progression) -- primes, compactly stored as a bit sieve primeStore :: [PrimeSieve] primeStore = psieveFrom 7 -- generate list of primes from arrays list :: [PrimeSieve] -> [Word] list sieves = concat [[off + toPrim i | i <- [0 .. li], unsafeAt bs i] | PS vO bs <- sieves, let { (_,li) = bounds bs; off = fromInteger vO; }] -- | @'smallFactors' n@ finds all prime divisors of @n > 1@ up to 2^16 by trial division and returns the -- list of these together with their multiplicities, and a possible remaining factor which may be composite. smallFactors :: Natural -> ([(Natural, Word)], Maybe Natural) smallFactors = \case NatS# 0## -> error "0 has no prime factorisation" NatS# n# -> case shiftToOddCount# n# of (# 0##, m# #) -> goWord m# 1 (# k#, m# #) -> (2, W# k#) <: goWord m# 1 NatJ# n -> case shiftToOddCountBigNat n of (0, m) -> goBigNat m 1 (k, m) -> (2, k) <: goBigNat m 1 where x <: ~(l,b) = (x:l,b) !(Ptr smallPrimesAddr#) = smallPrimesPtr goBigNat :: BigNat -> Int -> ([(Natural, Word)], Maybe Natural) goBigNat !m i@(I# i#) | isTrue# (sizeofBigNat# m ==# 1#) = goWord (bigNatToWord m) i | i >= smallPrimesLength = ([], Just (NatJ# m)) | otherwise = let p# = indexWord16OffAddr# smallPrimesAddr# i# in case m `quotRemBigNatWord` p# of (# mp, 0## #) -> let (# k, r #) = splitOff 1 mp in (NatS# p#, k) <: goBigNat r (i + 1) where splitOff !k x = case x `quotRemBigNatWord` p# of (# xp, 0## #) -> splitOff (k + 1) xp _ -> (# k, x #) _ -> goBigNat m (i + 1) goWord :: Word# -> Int -> ([(Natural, Word)], Maybe Natural) goWord 1## !_ = ([], Nothing) goWord m# !i | i >= smallPrimesLength = if isTrue# (m# `leWord#` 4294967295##) -- 65536 * 65536 - 1 then ([(NatS# m#, 1)], Nothing) else ([], Just (NatS# m#)) goWord m# i@(I# i#) = let p# = indexWord16OffAddr# smallPrimesAddr# i# in if isTrue# (m# `ltWord#` (p# `timesWord#` p#)) then ([(NatS# m#, 1)], Nothing) else case m# `quotRemWord#` p# of (# mp#, 0## #) -> let !(# k#, r# #) = splitOff 1## mp# in (NatS# p#, W# k#) <: goWord r# (i + 1) where splitOff k# x# = case x# `quotRemWord#` p# of (# xp#, 0## #) -> splitOff (k# `plusWord#` 1##) xp# _ -> (# k#, x# #) _ -> goWord m# (i + 1) -- | For a given estimated decimal length of the smallest prime factor -- ("tier") return parameters B1, B2 and the number of curves to try -- before next "tier". -- Roughly based on http://www.mersennewiki.org/index.php/Elliptic_Curve_Method#Choosing_the_best_parameters_for_ECM testParms :: IntMap (Word, Word, Word) testParms = IM.fromList [ (12, ( 400, 40000, 10)) , (15, ( 2000, 200000, 25)) , (20, ( 11000, 1100000, 90)) , (25, ( 50000, 5000000, 300)) , (30, ( 250000, 25000000, 700)) , (35, ( 1000000, 100000000, 1800)) , (40, ( 3000000, 300000000, 5100)) , (45, ( 11000000, 1100000000, 10600)) , (50, ( 43000000, 4300000000, 19300)) , (55, ( 110000000, 11000000000, 49000)) , (60, ( 260000000, 26000000000, 124000)) , (65, ( 850000000, 85000000000, 210000)) , (70, (2900000000, 290000000000, 340000)) ] findParms :: Int -> (Word, Word, Word) findParms digs = maybe (wheel, 1000, 7) snd (IM.lookupLT digs testParms)
cartazio/arithmoi
Math/NumberTheory/Primes/Factorisation/Montgomery.hs
mit
17,967
0
24
5,010
4,360
2,390
1,970
268
13
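A minimal sketch of the public entry point; the expected pair is the well-known factorisation of the sixth Fermat number, and the sort is needed because the order of factors is not guaranteed.

import Data.List (sort)
import Math.NumberTheory.Primes.Factorisation.Montgomery (factorise)

-- 2^64 + 1 == 274177 * 67280421310721, each prime to the first power.
fermat6Check :: Bool
fermat6Check =
  sort (factorise (2 ^ 64 + 1 :: Integer))
    == [(274177, 1), (67280421310721, 1)]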
module Main where allEven :: [Integer] -> [Integer] allEven [] = [] allEven (h:t) = if even h then h : allEven t else allEven t main = print (allEven [1,2,3,4,5,6,7])
skywind3000/language
haskell/all_even.hs
mit
193
0
8
53
108
61
47
5
2
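For reference, the explicit recursion above is equivalent to a one-line use of the standard library; a minimal sketch:

-- Same behaviour as allEven, written with filter.
allEven' :: [Integer] -> [Integer]
allEven' = filter even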
{-| Description : Nix-relevant interfaces to NaCl signatures. -} module System.Nix.Signature ( Signature , NarSignature(..) ) where import System.Nix.Internal.Signature
shlevy/hnix-store
hnix-store-core/src/System/Nix/Signature.hs
mit
176
0
5
26
27
19
8
4
0
{-# LANGUAGE OverloadedStrings #-} module BackendSpec.GamingSpec.HelperSpec (main, spec) where import Test.Hspec import Test.QuickCheck import Game.Helper import Data.Maybe import Debug.Trace import Linear.V2 firstLocation = (Location (V2 1.0 1.0) (V2 1.0 0.0)) main :: IO() main = hspec spec spec :: Spec spec = describe "HelperSpec" $ do it "add" $ do add firstLocation (Location (V2 2.0 2.0) (V2 0.0 1.0) ) `shouldBe` (Location (V2 3.0 3.0) (V2 0.0 1.0)) it "add nothing" $ do add firstLocation (Location (V2 0.0 0.0) (V2 0.0 1.0) ) `shouldBe` (Location (V2 1.0 1.0) (V2 0.0 1.0)) it "moveLocation" $ do moveLocation firstLocation (V2 1.0 0.0) `shouldBe` (Location (V2 2.0 1.0) (V2 1.0 0.0)) it "changeOri" $ do changeOri firstLocation (V2 0.0 1.0) `shouldBe` (Location (V2 1.0 1.0) (V2 0.0 1.0)) it "normalize" $ do normalize (V2 0.0 3.0) `shouldBe` (V2 0 1) it "normalize zero" $ do normalize (V2 0.0 0.0) `shouldBe` (V2 0 0) it "normalize nega" $ do normalize (V2 (negate 9.0) 0.0) `shouldBe` (V2 (negate 1) 0) it "divide" $ do divide 2 (V2 0.0 3.0) `shouldBe` (V2 0 1.5) it "divide zero" $ do divide 0 (V2 5.0 5.0) `shouldBe` (V2 0 0) it "divide zero 2" $ do divide 3 (V2 0.0 0.0) `shouldBe` (V2 0 0) it "divide nega" $ do divide 3 (V2 (negate 9.0) 0.0) `shouldBe` (V2 (negate 3) 0)
bruno-cadorette/TheLambdaReactor
test/BackendSpec/GamingSpec/HelperSpec.hs
mit
1,493
0
16
426
664
328
336
35
1
{-# LANGUAGE PackageImports #-} import "frelm-packages" Application (develMain) import Prelude (IO) main :: IO () main = develMain
rgrempel/frelm.org
app/devel.hs
mit
132
0
6
19
34
20
14
5
1
{-# LANGUAGE TypeApplications #-} {-# LANGUAGE GADTs #-} module Vyom.Term.UnitSym where import Data.Kind (Type) import Vyom import Util (safeRead) class UnitSym r where unit :: r h () instance UnitSym Run where unit = rop0 () instance UnitSym Pretty where unit = sop0 () instance UnitSym Expr where unit = eop0 "()" () deserialise :: UnitSym r => ExtensibleDeserialiser r deserialise _ _ (Node "()" [Leaf s]) _ | Just "()" <- safeRead s = return $ Dyn (typeRep @()) unit | otherwise = Left $ "Bad unit literal " ++ s deserialise _ _ (Node "()" es) _ = Left $ "Invalid number of arguments, expected 1, found " ++ show (length es) deserialise old self e env = old self e env
ajnsit/vyom
src/Vyom/Term/UnitSym.hs
mit
694
0
11
145
262
131
131
20
1
import Data.Time import Data.Time.Calendar (Day) import Data.Time.Clock (diffUTCTime) import Plow.Extras.Crontab import Plow.Extras.Time import Test.QuickCheck.Property (ioProperty) import Test.QuickCheck.Instances () import Test.Tasty (TestTree, defaultMain, testGroup) import Test.Tasty.HUnit import Test.Tasty.QuickCheck (testProperty) import Text.ParserCombinators.ReadP import System.Process (readProcess) main :: IO () main = defaultMain tests tests :: TestTree tests = testGroup "tests" [timeTests, cronParseTests, cronShouldSendTests] timeTests = testGroup "plow-extras-time" [ testProperty "intToUTCTime . utcTimeToInt ~= id" $ \t -> round (diffUTCTime (intToUTCTime $ utcTimeToInt t) t) == (0 :: Int) , testProperty "utcTimeToInt . intToUTCTime = id" $ \t -> (utcTimeToInt . intToUTCTime) t == t , testProperty "Time accuracy of getCurrentEpochTime" $ ioProperty $ do tstr <- init <$> readProcess "date" ["+%s"] [] t <- getCurrentEpochTime pure $ abs (t - read tstr) <= 1 ] cronParseTests = testGroup "Crontab Parser" [ testCase "all asteriks 1" $ testParser cron "* * * * *" @?= [(True,"")] , testCase "all ranges 1 - true" $ testParser cron "0-59 0-23 1-31 1-12 1-7" @?= [(True,"")] , testCase "all ranges 1 - false" $ testParser cron "1-59 1-23 2-31 2-12 2-7" @?= [(False,"")] , testCase "asteriks, ranges, and ints 1 - true" $ testParser cron "* 0-23 * 1 7" @?= [(True,"")] , testCase "all lists 1 - true" $ testParser cron "0,1,2,7 0,4,5,6 1,4 1,3 0,1,2,3,4,5,6,7" @?= [(True,"")] , testCase "all lists 1 - false" $ testParser cron "1,2,7 0,4,5,6 1,4 1,3 1,2" @?= [(False,"")] , testCase "asteriks, ranges, ints, lists 1 - true" $ testParser cron "* 0-6 1,4 1 7,1" @?= [(True,"")] , testCase "asteriks, ranges, ints, lists 1 - false" $ testParser cron "0 1,4 1-4 * 1,2" @?= [(False,"")] , testCase "all lists 2 - true" $ testParser cronTwo "0,1,2,5 0,4,5,6 1,4,13 1,3 1,2,3" @?= [(True,"")] , testCase "all lists 2 - false" $ testParser cronTwo "0,1,2 0,4,5,6 4,13 1,3 0,2,3" @?= [(False,"")] , testCase "asteriks, ranges, ints, lists 2 - true" $ testParser cronTwo "* 0-6 1,4,13 3 1,2,3" @?= [(True,"")] , testCase "asteriks, ranges, ints, lists 2 - false" $ testParser cronTwo "5 * 1-4 1,3 1,2" @?= [(False,"")] , testCase "asteriks, ranges, and ints 1 - false" $ testParser cron "5-10 * * 4 2" @?= [(False,"")] , testCase "imposible range 1" $ testParser cron "5-3 * * 4 2" @?= [(False,"")] , testCase "all asteriks 2" $ testParser cronTwo "* * * * *" @?= [(True,"")] , testCase "all ranges 2 - true" $ testParser cronTwo "0-10 2-9 13-30 1-12 2-7" @?= [(True,"")] , testCase "all ranges 2 - false" $ testParser cronTwo "1-10 1-23 2-31 5-12 1-7" @?= [(False,"")] , testCase "asteriks, ranges, and ints 2 - true" $ testParser cronTwo "* 0-23 13 3 *" @?= [(True,"")] , testCase "asteriks, ranges, and ints 2 - false" $ testParser cronTwo "5-10 * * 4 2" @?= [(False,"")] , testCase "imposible range 2" $ testParser cronTwo "5-10 * * 4-1 2" @?= [(False,"")] ] where testParser a = readP_to_S $ compareToParsedCron a cron = CronTab 0 0 1 January Sunday cronTwo = CronTab 5 4 13 March Wednesday cronShouldSendTests = testGroup "Crontab shouldSend" [ testCase "all asteriks 1" $ testShouldSend time "* * * * *" @?= [(True,"")] , testCase "all ranges 1 - true" $ testShouldSend time "0-59 0-23 1-31 1-12 1-7" @?= [(True,"")] , testCase "all ranges 1 - false" $ testShouldSend time "1-59 1-23 2-31 2-12 2-7" @?= [(False,"")] , testCase "asteriks, ranges, and ints 1 - true" $ testShouldSend time "* 0-23 * 1 4" @?= [(True,"")] , testCase 
"asteriks, ranges, and ints 1 - false" $ testShouldSend time "5-10 * * 4 2" @?= [(False,"")] , testCase "imposible range 1" $ testShouldSend time "5-3 * * 4 2" @?= [(False,"")] , testCase "all asteriks 2" $ testShouldSend timeTwo "* * * * *" @?= [(True,"")] , testCase "all ranges 2 - true" $ testShouldSend timeTwo "0-10 2-9 13-30 1-12 1-7" @?= [(True,"")] , testCase "all ranges 2 - false" $ testShouldSend timeTwo "1-10 1-23 2-31 5-12 2-7" @?= [(False,"")] , testCase "asteriks, ranges, and ints 2 - true" $ testShouldSend timeTwo "* 0-23 16 10 *" @?= [(True,"")] , testCase "asteriks, ranges, and ints 2 - false" $ testShouldSend timeTwo "5-10 * * 4 2" @?= [(False,"")] , testCase "imposible range 2" $ testShouldSend timeTwo "5-10 * * 4-1 2" @?= [(False,"")] ] where shouldSend currentTime = compareToParsedCron $ utcToCronTab currentTime testShouldSend currentTime = readP_to_S $ shouldSend currentTime time = UTCTime (ModifiedJulianDay 57023) 0 --January 1, 2015, 12:00 AM timeTwo = UTCTime (ModifiedJulianDay 50372) 21600 --October 16, 1996, 6:00 AM
plow-technologies/plow-extras
plow-extras-time/test/Main.hs
mit
5,114
0
15
1,258
1,334
715
619
98
1
module Redmine (initRedmine) where import Control.Applicative import Control.Concurrent (withMVar, MVar, forkIO, threadDelay) import Control.Exception (handle, IOException) import Data.Aeson (eitherDecode, (.:), Object, encode) import Data.Aeson.Types (FromJSON, Parser) import qualified Data.ByteString.Char8 as BSC import qualified Data.ByteString.Lazy as LBS import Data.Foldable (for_) import Data.Monoid import Data.Text (Text) import qualified Data.Text as T import Data.Text.Encoding (encodeUtf8) import Data.Traversable (traverse) import Network.HTTP.Client (applyBasicAuth) import Network.SimpleIRC import System.Directory (renameFile) import System.IO (hPutStrLn, stderr) import Config (Config(channel, redmine), Redmine(..)) import Utils data Issue = Issue { issueID :: Int , issueAssignee :: Text , issueSubject :: Text } project :: Redmine -> Redmine project rm = rm { rmURL = rmURL rm ++ "/projects/" ++ rmProject rm ++ "/issues.json?" } option :: String -> Redmine -> Redmine option opt rm = rm { rmURL = rmURL rm ++ opt ++ "&" } closed :: Redmine -> Redmine closed = option "status_id=4" lastUpdatedFirst :: Redmine -> Redmine lastUpdatedFirst = option "sort=closed_on:desc" closedAfter :: String -> Redmine -> Redmine closedAfter date = option ("closed_on=%3E%3D" ++ date) . lastUpdatedFirst latest :: Redmine -> Redmine latest = option "limit=1" . lastUpdatedFirst getRedmine :: Redmine -> IO LBS.ByteString getRedmine rm = myGetURL (rmURL rm) where myGetURL = case (,) <$> rmUser rm <*> rmPassword rm of Just (user, pass) -> getURLTransformRQ (applyBasicAuth (BSC.pack user) (BSC.pack pass)) Nothing -> getURL getRedmineJSON :: FromJSON a => Redmine -> (a -> Parser b) -> IO (Either String b) getRedmineJSON = getJSONWith . getRedmine getLatestClosed :: Redmine -> IO (Either String String) getLatestClosed rm = getRedmineJSON (latest $ closed $ project rm) parseDate parseDate :: FromJSON a => Object -> Parser a parseDate o = do is <- o .: "issues" i <- case is of [] -> fail "No closed issue found." 
i:_ -> pure i i .: "closed_on" getClosedByAfter :: Redmine -> String -> IO (Either String ([Issue], String)) getClosedByAfter rm date = getRedmineJSON (closedAfter date $ closed $ project rm) $ \o -> (,) <$> parseIssue o <*> parseDate o where parseIssue o = do assignees <- o .: "issues" >>= traverse (\i -> optional $ do issue <- Issue <$> i .: "id" <*> assignee i <*> i .: "subject" closedOn <- i .: "closed_on" return (issue, closedOn)) return [ issue | Just (issue, date') <- assignees -- the API can only filter >= date, so we receive one update again , date' /= date ] assignee o = o .: "assigned_to" >>= (.: "name") initRedmine :: Config -> MIrc -> MVar String -> IO () initRedmine cfg serv karmaVar = for_ (redmine cfg) $ \rm -> forkIO $ do eitherDate <- getLatestClosed rm case eitherDate of Left err -> hPutStrLn stderr $ "[redmineLoop] getLatestClosed: " ++ err Right date -> redmineLoop rm date where redmineLoop rm date = do threadDelay $ 10^6 * 60 * rmInterval rm eitherNew <- getClosedByAfter rm date newDate <- case eitherNew of Left err -> do hPutStrLn stderr $ "[redmineLoop] getClosedByAfter: " ++ err return date Right (closedTasks, newDate) -> withMVar karmaVar $ \karmaFile -> do giveKarma rm closedTasks karmaFile return newDate redmineLoop rm newDate giveKarma :: Redmine -> [Issue] -> String -> IO () giveKarma rm closedTasks karmaFile = handle (\e -> hPutStrLn stderr $ "[redmineLoop] " ++ show (e :: IOException)) (do updateKarmaFile karmaFile $ \obj -> let inc o i = snd $ increment (sanitize (issueAssignee i)) o in foldl inc obj closedTasks for_ closedTasks $ \i -> sendMsg serv (BSC.pack $ channel cfg) $ encodeUtf8 $ issueAssignee i <> " completed ticket \"" <> issueSubject i <> "\" <" <> T.pack (rmURL rm) <> "/issues/" <> T.pack (show (issueID i)) <> ">" ) updateKarmaFile :: String -> (Object -> Object) -> IO () updateKarmaFile file f = do json <- LBS.readFile file case eitherDecode json of Left err -> hPutStrLn stderr $ "[redmineLoop] eitherDecode: " ++ err Right obj -> do let newFile = file ++ ".new" LBS.writeFile newFile (encode $ f obj) renameFile newFile file
k00mi/bckspc-bot
src/Redmine.hs
mit
5,238
0
25
1,764
1,547
789
758
114
3
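A small sketch of how the query combinators above compose; project, closed, lastUpdatedFirst and latest are internal to the module, and the base URL and project name in the comment are hypothetical.

-- The combinators are plain record updates on rmURL, so composing them
-- appends query parameters in application order.
latestClosedURL :: Redmine -> String
latestClosedURL rm = rmURL (latest (closed (project rm)))

-- For rmURL = "https://redmine.example.org" and rmProject = "infra" this is
-- "https://redmine.example.org/projects/infra/issues.json?status_id=4&sort=closed_on:desc&limit=1&"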
{-# OPTIONS_GHC -Wall #-} {-# LANGUAGE RecordWildCards #-} import qualified Control.Foldl as L import Data.Functor.Identity import Data.Maybe (mapMaybe) import Data.Sampling.Types import Numeric.MCMC.Metropolis (chain, chain') import System.Random.MWC import Test.Hspec withinPercent :: Double -> Double -> Double -> Bool withinPercent b n a | b == 0 = a == 0 | otherwise = d / b < n / 100 where d = abs (a - b) mean :: [Double] -> Double mean = L.fold L.mean variance :: [Double] -> Double variance xs = L.fold alg xs where alg = (/) <$> L.premap csq L.sum <*> L.genericLength - 1 csq = (** 2.0) . subtract m m = mean xs stdDev :: [Double] -> Double stdDev = sqrt . variance stdErr :: [Double] -> Double stdErr xs = stdDev xs / sqrt n where n = fromIntegral (length xs) thin :: Int -> [a] -> [a] thin n xs = case xs of (h:t) -> h : thin n (drop (pred n) t) _ -> mempty data Params = Params { pepochs :: Int , pradial :: Double , porigin :: Identity Double , ptunable :: Maybe (Identity Double -> Double) , pltarget :: Identity Double -> Double , pthin :: Int } testParams :: Params testParams = Params { pepochs = 1000000 , pradial = 0.2 , porigin = Identity 1.0 , ptunable = Just (\(Identity x) -> x ** 3.0) , pltarget = \(Identity x) -> if x > 0 then negate x else negate 1 / 0 , pthin = 1000 } vanillaTrace :: IO [Double] vanillaTrace = do let Params {..} = testParams boxed <- withSystemRandom . asGenIO $ chain pepochs pradial porigin pltarget let positions = fmap (runIdentity . chainPosition) boxed pure (thin pthin positions) tunedTrace :: IO [Double] tunedTrace = do let Params {..} = testParams boxed <- withSystemRandom . asGenIO $ chain' pepochs pradial porigin pltarget ptunable let positions = mapMaybe chainTunables boxed pure (thin pthin positions) testWithinPercent :: SpecWith () testWithinPercent = describe "withinPercent" $ it "works as expected" $ do 106 `shouldNotSatisfy` withinPercent 100 5 105 `shouldNotSatisfy` withinPercent 100 5 104 `shouldSatisfy` withinPercent 100 5 96 `shouldSatisfy` withinPercent 100 5 95 `shouldNotSatisfy` withinPercent 100 5 94 `shouldNotSatisfy` withinPercent 100 5 testMean :: SpecWith () testMean = describe "mean" $ it "works as expected" $ do mean [1, 2, 3] `shouldSatisfy` withinPercent 2 1e-3 mean [1..100] `shouldSatisfy` withinPercent 50.5 1e-3 mean [1..1000000] `shouldSatisfy` withinPercent 500000.5 1e-3 testVariance :: SpecWith () testVariance = describe "variance" $ it "works as expected" $ do variance [0, 1] `shouldSatisfy` withinPercent 0.5 1e-3 variance [1, 1, 1] `shouldSatisfy` withinPercent 0 1e-3 variance [1..100] `shouldSatisfy` withinPercent 841.66666666 1e-3 testStdErr :: SpecWith () testStdErr = describe "stdErr" $ it "works as expected" $ do stdErr [1..100] `shouldSatisfy` withinPercent 2.901149 1e-3 stdErr [1..1000] `shouldSatisfy` withinPercent 9.133273 1e-3 testHelperFunctions :: SpecWith () testHelperFunctions = describe "helper functions" $ do testWithinPercent testMean testVariance testStdErr testSamples :: [Double] -> SpecWith () testSamples xs = describe "sampled trace over exp(1)" $ do let meanStdErr = stdErr xs varStdErr = stdErr (fmap (\x -> pred x ** 2.0) xs) context "within three standard errors" $ do it "has the expected mean" $ do mean xs `shouldSatisfy` (< 1 + 3 * meanStdErr) mean xs `shouldSatisfy` (> 1 - 3 * meanStdErr) it "has the expected variance" $ do variance xs `shouldSatisfy` (< 1 + 3 * varStdErr) variance xs `shouldSatisfy` (> 1 - 3 * varStdErr) testTunables :: [Double] -> SpecWith () testTunables ts = describe "sampled 
tunables over exp(1)" $ do let meanStdErr = stdErr ts context "within three standard errors" $ it "has the expected third moment" $ do mean ts `shouldSatisfy` (< 6 + 3 * meanStdErr) mean ts `shouldSatisfy` (> 6 - 3 * meanStdErr) main :: IO () main = do xs <- vanillaTrace ts <- tunedTrace hspec $ do testHelperFunctions testSamples xs testTunables ts
jtobin/mighty-metropolis
test/Spec.hs
mit
4,204
0
17
948
1,518
776
742
117
2
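A small illustration of the thin helper above, which keeps the head and then every n-th element of the trace.

-- thin 3 [1 .. 10] == [1, 4, 7, 10]; the thinned trace here happens to keep
-- the same mean (5.5) as the full list.
thinned :: [Double]
thinned = thin 3 [1 .. 10]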
{-# LANGUAGE NoMonomorphismRestriction, DeriveFunctor #-} module ProjectEuler.Problem149 ( problem ) where import Control.Monad import Control.Monad.ST import Data.Int import Petbox import qualified Data.Vector.Unboxed as VU import qualified Data.Vector.Unboxed.Mutable as VUM import ProjectEuler.Types problem :: Problem problem = pureProblem 149 Solved result {- Idea: I don't think this is a particularly large array to build, but it might improve performance if we just compute the value on the fly knowing the coordinate. Then rest of it is just generic algorithm for figuring out max adjacent sum (with negative elements taking into account) might be helpful: https://en.wikipedia.org/wiki/Lagged_Fibonacci_generator perhaps, given that it is designed for generating random numbers, we aren't supposed to find any patterns that can help speeding up the process. but I'll need to take a closer look. Assuming that we can learn nothing of use from the way that these numbers are generated, we can still use Kadane's algorithm as describe from: https://en.wikipedia.org/wiki/Maximum_subarray_problem Afterthoughts: (TODO) Note that it is not necessary to have the full matrix available before running the algorithm: for every coordinate (r,c), we send the value in question to: - an array indexed by r - another array indexed by c - another array indexed by r+c - another array indexed by r-c then after we have gone through all the numbers, we can simply scan these 4 arrays to get the max out of them. -} result :: Int32 result = findMatrixMax 2000 cells type Coord = (Int,Int) {- Two vectors of the same length for running Kadane's algorithm. - First vector holds current best value. - Second vector holds current sum. -} data Kadane v = Kadane v v deriving Functor {- Kadane's algorithm to compute sum of adjacent numbers. Note that if the array (or the sequence of things) are all negative, the algorithm will return 0, which is fine for our case, because there are definitely positive numbers in our array (for example, s_100 = 86613) -} kadaneAux :: (Num a, Ord a) => (a, a) -> a -> (a, a) kadaneAux (bestSum, prevSum) curVal = (bestSum', curSum) where curSum = max 0 (prevSum + curVal) bestSum' = max bestSum curSum findMatrixMax :: Int -> [] (Coord, Int32) -> Int32 findMatrixMax l cs = runST $ do let initKadane sz = Kadane <$> VUM.new sz <*> VUM.new sz updateKadane (Kadane vecBest vecCur) i val = do vBest <- VUM.read vecBest i vCur <- VUM.read vecCur i let (vBest', vCur') = kadaneAux (vBest, vCur) val VUM.write vecBest i vBest' VUM.write vecCur i vCur' rowsKn <- initKadane l colsKn <- initKadane l diags0Kn <- initKadane (l+l-1) diags1Kn <- initKadane (l+l-1) let base = l-1 -- this base allows diags1Kn's index to start from 0. forM_ cs $ \((r,c), val) -> do updateKadane rowsKn r val updateKadane colsKn c val updateKadane diags0Kn (r+c) val updateKadane diags1Kn (r-c+base) val pure () let getMax (Kadane mvec _) = do vec <- VU.unsafeFreeze mvec pure (maximum (VU.toList vec)) rowsMax <- getMax rowsKn colsMax <- getMax colsKn diags0Max <- getMax diags0Kn diags1Max <- getMax diags1Kn pure $ maximum [rowsMax, colsMax, diags0Max, diags1Max] cells :: [] (Coord, Int32) cells = zip [ (r,c) | r <- [0..1999], c <- [0..1999] ] vals where _ : vals = VU.toList numTable numTable :: VU.Vector Int32 numTable = runST $ do vec <- VUM.unsafeNew sz -- for 1 <= k <= 55 forM_ [1..55] $ \k -> do let -- being careful here not to overflow. 
v0 = 100003 - 200003 * k v1 = modMul (k*k) (modMul k 300007) val = modPlus v0 v1 - 500000 VUM.write vec k (fromIntegral val) forM_ [56..l*l] $ \k -> do kM24 <- VUM.read vec (k-24) kM55 <- VUM.read vec (k-55) let v0 :: Int v0 = modPlus (modPlus (fInt kM24) (fInt kM55)) 1000000 val = v0 - 500000 VUM.write vec k (fromIntegral val) VU.unsafeFreeze vec where -- `rem` and `mod` produces the same result when m is non-negative, -- but rem is slightly more efficient to use. modPlus a b = (a + b) `rem` m modMul a b = (a * b) `rem` m m = 1000000 l = 2000 sz = 1 + l * l
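-- Illustrative sketch, not part of the original solution (the helper name
-- 'kadaneDemo' is made up for this example): folding 'kadaneAux' over a small
-- list gives the classic maximum-subarray sum. For [1, -2, 3, 4, -1] the best
-- contiguous sum is 3 + 4 = 7.
kadaneDemo :: Int
kadaneDemo = fst (Prelude.foldl kadaneAux (0, 0) [1, -2, 3, 4, -1])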
Javran/Project-Euler
src/ProjectEuler/Problem149.hs
mit
4,357
0
19
1,035
1,076
549
527
73
1
{-# LANGUAGE ConstraintKinds, DataKinds, FlexibleContexts, FlexibleInstances,
             MultiParamTypeClasses, OverlappingInstances, TypeFamilies,
             UndecidableInstances #-}

{- |
   Module      : Control.Monad.Levels.ConstraintPassing
   Description : Whether a transformer allows a constraint to pass through
   Copyright   : (c) Ivan Lazar Miljenovic
   License     : MIT
   Maintainer  : [email protected]

   This module is defined separately as it's the only one that uses
   the @OverlappingInstances@ extension.
 -}
module Control.Monad.Levels.ConstraintPassing where

import Control.Monad.Levels.Definitions

-- -----------------------------------------------------------------------------

-- | Indicates whether a specified constraint is allowed to pass
-- through a particular level.
--
-- (It may not be recognisable in Haddock documentation, but the @b@
-- parameter is of kind @'Bool'@ using the @DataKinds@ extension).
--
-- By default, for all monad levels this is set to the value of
-- 'DefaultAllowConstraints' for all constraints, with the exception
-- of 'IsBaseMonad' for which it is set to 'True'.
--
-- Instances of this class can - and should when appropriate - be
-- overlapped\/overridden.
class (ValidConstraint c, MonadLevel m) => ConstraintPassThrough c m (b :: Bool)

instance (ValidConstraint c, MonadLevel m, DefaultAllowConstraints m ~ b)
         => ConstraintPassThrough c m b

instance (MonadLevel m) => ConstraintPassThrough IsBaseMonad m True
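-- As an illustrative sketch only (the names @MyConstraint@ and @MyT@ are
-- hypothetical and not part of this package), overriding the default for a
-- particular constraint\/level pair would look like:
--
-- > instance (MonadLevel (MyT m)) => ConstraintPassThrough MyConstraint (MyT m) 'True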
ivan-m/monad-levels
Control/Monad/Levels/ConstraintPassing.hs
mit
1,515
0
8
271
119
71
48
-1
-1
{-# LANGUAGE CPP #-}
module GHCJS.DOM.WorkerLocation (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
  module GHCJS.DOM.JSFFI.Generated.WorkerLocation
#else
#endif
  ) where

#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.WorkerLocation
#else
#endif
plow-technologies/ghcjs-dom
src/GHCJS/DOM/WorkerLocation.hs
mit
355
0
5
33
33
26
7
4
0
-- The solution of exercise 1.1
-- Give the result printed by the interpreter in response to each express-
-- ion. Assume that the sequence is to be evaluated in the order in which
-- it is presented.

a = 3
b = a + 1

result = do {
    print 10;
    print (5 + 3 + 4);
    print (9 - 1);
    print (6 / 2);
    print ((2 * 4) + (4 - 6));
    print (a + b + a * b);
    print (a == b);
    print (if b > a && b < a * b then b else a);
    print (if a == 4 then 6 else if b == 4 then 6 + 7 + a else 25);
    print (if b > a then b + 2 else a + 2);
    print (let temp = a + 1 in if a > b then temp * a else if a < b then temp * b else temp * (-1));
    }
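-- For reference (not part of the original file, and assuming GHC's default
-- numeric defaulting), running 'result' should print, in order:
-- 10, 12, 8, 3.0, 6, 19, False, 4, 16, 6, 16.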
perryleo/sicp
ch1/sicpc1e01.hs
mit
1,065
0
15
625
315
168
147
26
7
{- ============================================================================ | Copyright 2011 Matthew D. Steele <[email protected]> | | | | This file is part of Fallback. | | | | Fallback is free software: you can redistribute it and/or modify it under | | the terms of the GNU General Public License as published by the Free | | Software Foundation, either version 3 of the License, or (at your option) | | any later version. | | | | Fallback is distributed in the hope that it will be useful, but WITHOUT | | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for | | more details. | | | | You should have received a copy of the GNU General Public License along | | with Fallback. If not, see <http://www.gnu.org/licenses/>. | ============================================================================ -} module Fallback.View.Editor (EditorState(..), tickEditorState, EditorAction(..), newEditorView) where import Control.Applicative ((<$), (<$>)) import Control.Arrow ((&&&)) import Control.Monad (forM_, unless, when) import Data.Array (Array, bounds, range) import Data.List (find) import Fallback.Constants (cameraWidth, cameraHeight, sidebarWidth, tileHeight, tileWidth) import Fallback.Data.Clock (Clock, clockInc, clockMod) import Fallback.Data.Color (Tint(Tint), blackColor, whiteColor) import Fallback.Data.Point import Fallback.Draw import Fallback.Event import Fallback.State.Resources (FontTag(..), Resources, rsrcFont, rsrcTerrainOverlaySprite, rsrcTerrainSprite) import Fallback.State.Terrain import Fallback.State.Tileset import Fallback.Utility (ceilDiv, maybeM) import Fallback.View.Base import Fallback.View.Camera (paintTerrainFullyExplored) import Fallback.View.Hover import Fallback.View.Sidebar (newMinimapView) import Fallback.View.Widget (makeLabel, newScrollZone) ------------------------------------------------------------------------------- data EditorAction = ScrollMap IPoint | JumpMapTo Position | ScrollPalette Int | PickTile TerrainTile | AutoPaintAt Position | PaintAt Position | FloodFill Position | ChangeMarks Position | ChangeRects Position | DoLoad | DoSave | DoUndo | DoRedo data EditorState = EditorState { esBrush :: TerrainTile, esCameraTopleft :: IPoint, esClock :: Clock, esFilename :: String, esMinimap :: Minimap, esNullTile :: TerrainTile, esPaletteTop :: Int, esRedoStack :: [TerrainMap], esTerrain :: TerrainMap, esTileArray :: Array Int TerrainTile, esTileset :: Tileset, esUndoStack :: [TerrainMap], esUnsaved :: Bool } esCameraRect :: EditorState -> IRect esCameraRect es = let Point x y = esCameraTopleft es in Rect x y cameraWidth cameraHeight tickEditorState :: EditorState -> EditorState tickEditorState es = es { esClock = clockInc (esClock es) } ------------------------------------------------------------------------------- newEditorView :: (MonadDraw m) => Resources -> m (View EditorState EditorAction) newEditorView resources = do ref <- newHoverRef Nothing let mapRect _ (w, h) = Rect sidebarWidth 0 (w - sidebarWidth) h let sidebarRect _ (_, h) = Rect 0 0 sidebarWidth h hoverJunction ref <$> compoundViewM [ (subView mapRect <$> newEditorMapView resources (hoverSink ref)), (subView sidebarRect <$> newEditorSidebarView resources ref)] ------------------------------------------------------------------------------- newEditorMapView :: (MonadDraw m) => Resources -> HoverSink (Maybe Position) -> m (View EditorState EditorAction) newEditorMapView resources sink = do dragRef <- newDrawRef 
False let paint es = do let topleft = esCameraTopleft es paintTerrainFullyExplored resources topleft (esTerrain es) (esClock es) let font = rsrcFont resources FontGeorgia10 forM_ (tmapAllMarks $ esTerrain es) $ \(str, pos) -> do let (w, h) = textRenderSize font str let Point x y = positionCenter pos `pSub` topleft tintRect (Tint 0 0 0 192) (Rect (x - half w - 2) (y - half h - 2) (w + 4) (h + 4)) drawText font whiteColor (LocCenter $ Point x y) str forM_ (tmapAllRects $ esTerrain es) $ \(str, prect) -> do let rect = prectRect prect `rectMinus` topleft tintRect (Tint 255 0 255 64) rect drawRect (Tint 255 0 255 192) rect drawText font whiteColor (LocTopleft $ rectTopleft rect) str handler state EvTick = do mbMousePt <- getRelativeMousePos maybeM mbMousePt $ \pt -> do rect <- canvasRect writeHoverSink sink $ positionAt state rect pt mouseHeld <- getMouseButtonState unless mouseHeld $ writeDrawRef dragRef False maybe Ignore (Action . ScrollMap . (`pMul` 8) . dirDelta) <$> getArrowKeysDirection handler state (EvMouseMotion pt _) = do rect <- canvasRect writeHoverSink sink $ positionAt state rect pt drag <- readDrawRef dragRef if drag then paintAt state rect pt else return Ignore handler state (EvMouseDown pt) = do rect <- canvasRect let posAction act = return $ maybe Ignore (Action . act) $ positionAt state rect pt chMarks <- getKeyState KeyM if chMarks then posAction ChangeMarks else do chRects <- getKeyState KeyR if chRects then posAction ChangeRects else do eyedrop <- getKeyState KeyE if eyedrop then posAction (PickTile . tmapGet (esTerrain state)) else do fill <- getKeyState KeyF if fill then posAction FloodFill else do writeDrawRef dragRef True >> paintAt state rect pt handler _ (EvMouseUp _) = Ignore <$ writeDrawRef dragRef False handler _ (EvKeyDown KeyO [KeyModCmd] _) = return (Action DoLoad) handler _ (EvKeyDown KeyS [KeyModCmd] _) = return (Action DoSave) handler _ (EvKeyDown KeyZ [KeyModCmd] _) = return (Action DoUndo) handler _ (EvKeyDown KeyZ [KeyModCmd, KeyModShift] _) = return (Action DoRedo) handler _ _ = return Ignore paintAt state rect pt = do auto <- getKeyState KeyA let action = if auto then AutoPaintAt else PaintAt return $ maybe Ignore (Action . action) $ positionAt state rect pt positionAt state rect pt = if not (rectContains rect pt) then Nothing else Just $ pointPosition $ pt `pSub` rectTopleft rect `pAdd` esCameraTopleft state return $ View paint handler ------------------------------------------------------------------------------- newEditorSidebarView :: (MonadDraw m) => Resources -> HoverRef (Maybe Position) -> m (View EditorState EditorAction) newEditorSidebarView resources ref = do bgSprite <- loadSprite "gui/sidebar-background.png" let font = rsrcFont resources FontGeorgia10 hoverView (hoverSink ref) Nothing <$> compoundViewM [ (return $ inertView $ const $ canvasRect >>= blitStretch bgSprite), -- Minimap: (subView_ (Rect 2 2 (sidebarWidth - 10) 92) . viewMap (esCameraRect &&& esMinimap) JumpMapTo <$> newMinimapView), -- Palette and scroll bar: (subView_ (Rect 12 114 88 340) <$> newEditorPaletteView resources), (subView_ (Rect 12 114 101 340) . vmap (\es -> (0, (snd $ bounds $ esTileArray es) `ceilDiv` 3, 9, esPaletteTop es)) . fmap ScrollPalette <$> newScrollZone), -- Unsaved data indicator: (let paint bool = when bool $ tintCanvas (Tint 255 0 0 255) in return $ subView_ (Rect 100 466 10 10) $ vmap esUnsaved $ inertView paint), -- Position label: (viewMapM (const (readHoverRef ref)) (return . 
Action) <$> (newMaybeView (fmap show) $ makeLabel font blackColor $ \(_, h) -> LocBottomleft $ Point 2 (h - 2)))] newEditorPaletteView :: (MonadDraw m) => Resources -> m (View EditorState EditorAction) newEditorPaletteView resources = do let paint state = do let paintTile (rect, tile) = do case ttAppearance tile of Still row col -> do blitStretch (rsrcTerrainSprite resources (row, col)) rect Anim row c0 slowdown overlay -> do let col = c0 + clockMod 4 slowdown (esClock state) blitStretch (rsrcTerrainSprite resources (row, col)) rect case overlay of NoOverlay -> return () Overlay r c -> do blitStretch (rsrcTerrainOverlaySprite resources r c) rect when (ttId tile == ttId (esBrush state)) $ do drawRect (Tint 255 0 255 255) rect canvasSize >>= (mapM_ paintTile . rectsAndTiles state) handler state (EvMouseDown pt) = do rect <- canvasRect let hit rAndT = rectContains (fst rAndT) (pt `pSub` rectTopleft rect) return $ maybe Ignore (Action . PickTile . snd) $ find hit $ rectsAndTiles state $ rectSize rect handler _ _ = return Ignore rectsAndTiles state (width, height) = let gap = 2 numCols = (width + gap) `div` (tileWidth + gap) numRows = (height + gap) `div` (tileHeight + gap) (lo, hi) = bounds (esTileArray state) start = min hi (esPaletteTop state * numCols + lo + 1) rAndT index = let (row, col) = (index - start) `divMod` numCols in (Rect (col * (tileWidth + gap)) (row * (tileHeight + gap)) tileWidth tileHeight, esTileArray state ! index) in map rAndT $ range (start, min hi (start + numCols * numRows - 1)) return $ View paint handler -------------------------------------------------------------------------------
mdsteele/fallback
src/Fallback/View/Editor.hs
gpl-3.0
10,528
0
29
3,094
2,893
1,481
1,412
-1
-1
module FAtypes

( BDFA(..)
, BNFA(..)
, TNFA(..)
, ETNFA(..)
, Auto
, emptyTNFA
, trinfo
, info
, sons
)

where

import Set
import FiniteMap

import Options

import TA
import Ids -- provides instance Show Id
-- import Stuff -- provides instances Show Set, Show FiniteMap

-- bottom up deterministic
data BDFA a = BDFA TCons                      -- what algebra we're in
                   (Set a)                    -- all states
                   (Set a)                    -- accepting states
                   (FiniteMap (STerm a) a)    -- transition table
    deriving (Eq, Show)

-- bottom up nondeterministic
data BNFA a = BNFA TCons                            -- what algebra we're in
                   (Set a)                          -- all states
                   (Set a)                          -- accepting states
                   (FiniteMap (STerm a) (Set a))    -- transition table
    deriving (Eq, Show)

-- top down non deterministic
data TNFA a = TNFA TCons                            -- algebra
                   (Set a)                          -- all states
                   (Set a)                          -- start states
                   (FiniteMap a (Set (STerm a)))    -- production rules
    deriving (Eq, Show)

emptyTNFA = TNFA emptySet emptySet emptySet emptyFM

-- this is what we normally use
type Auto = TNFA Int

-- top down non deterministic with epsilon moves
data ETNFA a = ETNFA TCons                            -- algebra
                     (Set a)                          -- all states
                     (Set a)                          -- start states
                     (FiniteMap a (Set (STerm a)))    -- production rules
                     (FiniteMap a (Set a))            -- epsilon moves
    deriving (Eq, Show)

---------------------------------------------------------------

sons :: TNFA Int -> TCon -> Int -> [[Int]]
sons (TNFA cons all starts moves) con p =
    let ts = lookupWithDefaultFM moves (error "BackS.sons.ts") p
        lrs = [ stargs t | t <- setToList ts , stcon t == con ]
    in  lrs

---------------------------------------------------------------

info msg (TNFA cons all starts moves) =
    let sc = " cons: " ++ show cons
        sa = " |all|: " ++ show (cardinality all)
        sm = " |moves|: " ++ show (sizeFM moves)
    in  ("\n" ++ msg ++ sc ++ sa ++ sm)

trinfo opts msg aut = troff opts $ info msg aut
jwaldmann/rx
src/FAtypes.hs
gpl-3.0
1,858
52
12
415
637
348
289
56
1
module QHaskell.Environment.Map (Env,pattern Emp,pattern Ext,len,get) where

import QHaskell.MyPrelude
import qualified QHaskell.Nat.ADT as NA

type Env a b = [(a , b)]

pattern Emp = []
pattern Ext x xs = x : xs

len :: Env a b -> NA.Nat
len Emp = NA.Zro
len (Ext _ xs) = NA.Suc (len xs)
len _ = impossible

get :: (Monad m , Eq a , Show a) => a -> Env a b -> m b
get x xs = maybe (fail ("Scope Error: cannot find '"++ show x ++ "'"))
                 return (lookup x xs)
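-- Illustrative usage, not part of the original module (the example
-- environment and key below are made up):
--
-- > get 'x' (Ext ('x', 1) (Ext ('y', 2) Emp)) :: Maybe Int
-- > -- Just 1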
shayan-najd/QHaskell
QHaskell/Environment/Map.hs
gpl-3.0
480
0
11
122
234
126
108
-1
-1
module ConfigSpec where import Control.Monad (unless) import qualified Data.ByteString.Char8 as Bytes import Data.Map import qualified Data.YAML as Yaml import Hadolint.Config import Hadolint.Rule as Rule import Test.HUnit import Test.Hspec tests :: SpecWith () tests = describe "Config" $ do it "Parses config with only error severities" $ let configFile = [ "override:", " error:", " - DL3000", " - SC1010" ] override = Just (OverrideConfig (Just ["DL3000", "SC1010"]) Nothing Nothing Nothing) expected = ConfigFile override Nothing Nothing Nothing Nothing Nothing in assertConfig expected (Bytes.unlines configFile) it "Parses config with only warning severities" $ let configFile = [ "override:", " warning:", " - DL3000", " - SC1010" ] override = Just (OverrideConfig Nothing (Just ["DL3000", "SC1010"]) Nothing Nothing) expected = ConfigFile override Nothing Nothing Nothing Nothing Nothing in assertConfig expected (Bytes.unlines configFile) it "Parses config with only info severities" $ let configFile = [ "override:", " info:", " - DL3000", " - SC1010" ] override = Just (OverrideConfig Nothing Nothing (Just ["DL3000", "SC1010"]) Nothing) expected = ConfigFile override Nothing Nothing Nothing Nothing Nothing in assertConfig expected (Bytes.unlines configFile) it "Parses config with only style severities" $ let configFile = [ "override:", " style:", " - DL3000", " - SC1010" ] override = Just (OverrideConfig Nothing Nothing Nothing (Just ["DL3000", "SC1010"])) expected = ConfigFile override Nothing Nothing Nothing Nothing Nothing in assertConfig expected (Bytes.unlines configFile) it "Parses config with only ignores" $ let configFile = [ "ignored:", "- DL3000", "- SC1010" ] expected = ConfigFile Nothing (Just ["DL3000", "SC1010"]) Nothing Nothing Nothing Nothing in assertConfig expected (Bytes.unlines configFile) it "Parses config with only trustedRegistries" $ let configFile = [ "trustedRegistries:", "- hub.docker.com", "- my.shady.xyz" ] expected = ConfigFile Nothing Nothing (Just ["hub.docker.com", "my.shady.xyz"]) Nothing Nothing Nothing in assertConfig expected (Bytes.unlines configFile) it "Parses config with only label-schema" $ let configFile = [ "label-schema:", " author: text", " url: url" ] expected = ConfigFile Nothing Nothing Nothing (Just (fromList [("author", Rule.RawText), ("url", Rule.Url)])) Nothing Nothing in assertConfig expected (Bytes.unlines configFile) it "Parses config with only label-schema" $ let configFile = [ "strict-labels: true" ] expected = ConfigFile Nothing Nothing Nothing Nothing (Just True) Nothing in assertConfig expected (Bytes.unlines configFile) it "Parses config with failure-threshold" $ let configFile = [ "failure-threshold: error" ] expected = ConfigFile Nothing Nothing Nothing Nothing Nothing (Just Rule.DLErrorC) in assertConfig expected (Bytes.unlines configFile) it "Parses full file" $ let configFile = [ "override:", " info:", " - DL3002", " style:", " - DL3004", " warning:", " - DL3003", " error:", " - DL3001", "", "trustedRegistries:", "- hub.docker.com", "", "ignored:", "- DL3000", "", "strict-labels: false", "label-schema:", " author: text", " url: url", "", "failure-threshold: style" ] override = Just (OverrideConfig (Just ["DL3001"]) (Just ["DL3003"]) (Just ["DL3002"]) (Just ["DL3004"])) labelschema = Just (fromList [("author", Rule.RawText), ("url", Rule.Url)]) expected = ConfigFile override (Just ["DL3000"]) (Just ["hub.docker.com"]) labelschema (Just False) (Just Rule.DLStyleC) in assertConfig expected (Bytes.unlines configFile) assertConfig :: 
HasCallStack => ConfigFile -> Bytes.ByteString -> Assertion assertConfig config s = case Yaml.decode1Strict s of Left (_, err) -> assertFailure err Right result -> checkResult result where checkResult result = unless (result == config) $ assertFailure ("Config \n\n" ++ show config ++ "\n\n is not \n\n" ++ show result)
lukasmartinelli/hadolint
test/ConfigSpec.hs
gpl-3.0
5,047
0
20
1,696
1,188
621
567
-1
-1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.DeploymentManager.Types -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Network.Google.DeploymentManager.Types ( -- * Service Configuration deploymentManagerService -- * OAuth Scopes , cloudPlatformReadOnlyScope , cloudPlatformScope , ndevCloudmanScope , ndevCloudmanReadOnlyScope -- * OperationWarningsItemDataItem , OperationWarningsItemDataItem , operationWarningsItemDataItem , owidiValue , owidiKey -- * ConfigFile , ConfigFile , configFile , cfContent -- * OperationWarningsItemCode , OperationWarningsItemCode (..) -- * AuditConfig , AuditConfig , auditConfig , acService , acAuditLogConfigs -- * DeploymentsUpdateCreatePolicy , DeploymentsUpdateCreatePolicy (..) -- * Expr , Expr , expr , eLocation , eExpression , eTitle , eDescription -- * OperationsListResponse , OperationsListResponse , operationsListResponse , olrNextPageToken , olrOperations -- * ResourceUpdateWarningsItemDataItem , ResourceUpdateWarningsItemDataItem , resourceUpdateWarningsItemDataItem , ruwidiValue , ruwidiKey -- * ResourceUpdateWarningsItemCode , ResourceUpdateWarningsItemCode (..) -- * DeploymentsDeleteDeletePolicy , DeploymentsDeleteDeletePolicy (..) -- * TypesListResponse , TypesListResponse , typesListResponse , tlrNextPageToken , tlrTypes -- * DeploymentsUpdateDeletePolicy , DeploymentsUpdateDeletePolicy (..) -- * DeploymentsPatchDeletePolicy , DeploymentsPatchDeletePolicy (..) -- * Operation , Operation , operation , oTargetId , oStatus , oOperationGroupId , oInsertTime , oProgress , oStartTime , oKind , oError , oHTTPErrorMessage , oZone , oWarnings , oHTTPErrorStatusCode , oUser , oSelfLink , oName , oStatusMessage , oCreationTimestamp , oEndTime , oId , oOperationType , oRegion , oDescription , oTargetLink , oClientOperationId -- * TestPermissionsResponse , TestPermissionsResponse , testPermissionsResponse , tprPermissions -- * DeploymentsPatchCreatePolicy , DeploymentsPatchCreatePolicy (..) -- * ResourcesListResponse , ResourcesListResponse , resourcesListResponse , rlrNextPageToken , rlrResources -- * DeploymentUpdate , DeploymentUpdate , deploymentUpdate , duManifest , duLabels , duDescription -- * ResourceUpdate , ResourceUpdate , resourceUpdate , ruState , ruError , ruAccessControl , ruWarnings , ruIntent , ruManifest , ruFinalProperties , ruProperties -- * DeploymentLabelEntry , DeploymentLabelEntry , deploymentLabelEntry , dleValue , dleKey -- * OperationStatus , OperationStatus (..) -- * ResourceUpdateState , ResourceUpdateState (..) -- * ResourceUpdateIntent , ResourceUpdateIntent (..) -- * TestPermissionsRequest , TestPermissionsRequest , testPermissionsRequest , tPermissions -- * Manifest , Manifest , manifest , mInsertTime , mLayout , mConfig , mExpandedConfig , mManifestSizeBytes , mImports , mSelfLink , mName , mId , mManifestSizeLimitBytes -- * ResourceUpdateWarningsItem , ResourceUpdateWarningsItem , resourceUpdateWarningsItem , ruwiData , ruwiCode , ruwiMessage -- * DeploymentsCancelPreviewRequest , DeploymentsCancelPreviewRequest , deploymentsCancelPreviewRequest , dcprFingerprint -- * AuditLogConfigLogType , AuditLogConfigLogType (..) 
-- * Resource , Resource , resource , rInsertTime , rAccessControl , rURL , rWarnings , rUpdateTime , rName , rManifest , rFinalProperties , rId , rType , rUpdate , rProperties -- * Xgafv , Xgafv (..) -- * DeploymentUpdateLabelEntry , DeploymentUpdateLabelEntry , deploymentUpdateLabelEntry , duleValue , duleKey -- * ResourceUpdateErrorErrorsItem , ResourceUpdateErrorErrorsItem , resourceUpdateErrorErrorsItem , rueeiLocation , rueeiCode , rueeiMessage -- * ManifestsListResponse , ManifestsListResponse , manifestsListResponse , mlrNextPageToken , mlrManifests -- * OperationError , OperationError , operationError , oeErrors -- * GlobalSetPolicyRequest , GlobalSetPolicyRequest , globalSetPolicyRequest , gsprEtag , gsprBindings , gsprPolicy -- * Policy , Policy , policy , pAuditConfigs , pEtag , pVersion , pBindings -- * Type , Type , type' , tInsertTime , tOperation , tSelfLink , tName , tId -- * ImportFile , ImportFile , importFile , ifContent , ifName -- * OperationErrorErrorsItem , OperationErrorErrorsItem , operationErrorErrorsItem , oeeiLocation , oeeiCode , oeeiMessage -- * DeploymentsStopRequest , DeploymentsStopRequest , deploymentsStopRequest , dsrFingerprint -- * ResourceWarningsItemDataItem , ResourceWarningsItemDataItem , resourceWarningsItemDataItem , rwidiValue , rwidiKey -- * AuditLogConfig , AuditLogConfig , auditLogConfig , alcLogType , alcExemptedMembers -- * ResourceUpdateError , ResourceUpdateError , resourceUpdateError , rueErrors -- * ResourceWarningsItemCode , ResourceWarningsItemCode (..) -- * DeploymentsListResponse , DeploymentsListResponse , deploymentsListResponse , dlrNextPageToken , dlrDeployments -- * ResourceWarningsItem , ResourceWarningsItem , resourceWarningsItem , rwiData , rwiCode , rwiMessage -- * ResourceAccessControl , ResourceAccessControl , resourceAccessControl , racGcpIAMPolicy -- * TargetConfiguration , TargetConfiguration , targetConfiguration , tcConfig , tcImports -- * OperationWarningsItem , OperationWarningsItem , operationWarningsItem , owiData , owiCode , owiMessage -- * Binding , Binding , binding , bMembers , bRole , bCondition -- * Deployment , Deployment , deployment , dInsertTime , dOperation , dFingerprint , dUpdateTime , dSelfLink , dName , dManifest , dId , dLabels , dDescription , dUpdate , dTarget -- * DeploymentsInsertCreatePolicy , DeploymentsInsertCreatePolicy (..) ) where import Network.Google.DeploymentManager.Types.Product import Network.Google.DeploymentManager.Types.Sum import Network.Google.Prelude -- | Default request referring to version 'v2' of the Cloud Deployment Manager V2 API. This contains the host and root path used as a starting point for constructing service requests. 
deploymentManagerService :: ServiceConfig deploymentManagerService = defaultService (ServiceId "deploymentmanager:v2") "deploymentmanager.googleapis.com" -- | View your data across Google Cloud Platform services cloudPlatformReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/cloud-platform.read-only"] cloudPlatformReadOnlyScope = Proxy -- | See, edit, configure, and delete your Google Cloud Platform data cloudPlatformScope :: Proxy '["https://www.googleapis.com/auth/cloud-platform"] cloudPlatformScope = Proxy -- | View and manage your Google Cloud Platform management resources and -- deployment status information ndevCloudmanScope :: Proxy '["https://www.googleapis.com/auth/ndev.cloudman"] ndevCloudmanScope = Proxy -- | View your Google Cloud Platform management resources and deployment -- status information ndevCloudmanReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/ndev.cloudman.readonly"] ndevCloudmanReadOnlyScope = Proxy
brendanhay/gogol
gogol-deploymentmanager/gen/Network/Google/DeploymentManager/Types.hs
mpl-2.0
8,422
0
7
2,099
958
659
299
257
1
{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} -- Module : Gen.Syntax -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : provisional -- Portability : non-portable (GHC extensions) module Gen.Syntax where import Control.Lens hiding (iso, mapping, op, pre, strict) import Data.Either import Data.Foldable (foldl', foldr') import qualified Data.HashMap.Strict as Map import Data.List (delete, nub) import Data.Maybe import Data.Text (Text) import qualified Data.Text as Text import Data.Text.Manipulate import Gen.Text import Gen.Types import Language.Haskell.Exts.Build (app, appFun, infixApp, lamE, listE, name, noBinds, paren, patBind, pvar, sfun, strE, sym, var) import Language.Haskell.Exts.Syntax hiding (Alt, Int, Lit) serviceSig :: Name () -> Decl () serviceSig n = TypeSig () [n] (TyCon () "ServiceConfig") serviceDecl :: Service a -> Name () -> Decl () serviceDecl s n = sfun n [] (UnGuardedRhs () rhs) noBinds where rhs = appFun (var "defaultService") [ app (var "ServiceId") (str (s ^. dId)) , str . stripSuffix "/" $ stripPrefix "https://" (s ^. dRootUrl) ] scopeSig :: Name () -> Text -> Decl () scopeSig n v = TypeSig () [n] $ TyApp () (TyCon () "Proxy") $ TyPromoted () $ PromotedList () True [ TyPromoted () $ PromotedString () (Text.unpack v) (Text.unpack v) ] scopeDecl :: Name () -> Decl () scopeDecl n = sfun n [] (UnGuardedRhs () (var "Proxy")) noBinds apiAlias :: Name () -> [Name ()] -> Decl () apiAlias n ls = TypeDecl () (DHead () n) alias where alias = case map (TyCon () . UnQual ()) ls of [] -> unit_tycon () x:xs -> foldl' (\l r -> TyInfix () l ((UnpromotedName () . UnQual ()) (sym ":<|>")) r) x xs verbAlias :: HasDescription a s => a -> Name () -> Method Solved -> Decl () verbAlias d n m = TypeDecl () (DHead () n) $ servantOr meta (catMaybes [down, up]) where meta = path $ metadataAlias m down = path <$> downloadAlias m up = uploadAlias root m path = flip servantSub root root = subPaths (d ^. dServicePath) ++ requestPath m (_mPath m) metadataAlias :: Method Solved -> Type () metadataAlias m = servantSub (jsonVerb m) (params ++ media) where params = requestQuery m (requestQueryParams m) media = case _mRequest m of Nothing -> [] Just b -> [ TyApp () (TyApp () (TyCon () "ReqBody") jsonMedia) (tycon (ref b)) ] downloadAlias :: Method Solved -> Maybe (Type ()) downloadAlias m | _mSupportsMediaDownload m = Just download | otherwise = Nothing where download = servantSub (downloadVerb m) (params ++ [downloadParam]) params = requestQuery m . Map.delete "alt" $ requestQueryParams m uploadAlias :: [Type ()] -> Method Solved -> Maybe (Type ()) uploadAlias sub m | _mSupportsMediaUpload m = Just upload | otherwise = Nothing where upload = servantSub (jsonVerb m) (path ++ params ++ media) params = requestQuery m (requestQueryParams m) path = case _mMediaUpload m of Nothing -> sub Just u -> requestPath m (_muSimplePath u) media = case _mRequest m of Just b -> [ multipartParam , TyApp () (TyApp () (TyCon () "MultipartRelated") jsonMedia) (tycon (ref b)) ] Nothing -> [ mediaParam , TyCon () "AltMedia" ] requestPath :: Method Solved -> Text -> [Type ()] requestPath m = map go . extractPath where go (Left t) = sing t go (Right (l, c)) = case Map.lookup l (_mParameters m) of Nothing -> error $ "Unable to find path parameter " ++ show l Just x -> case c of Nothing | x ^. 
iRepeated -> TyApp () (TyApp () (TyCon () "Captures") (sing (local l))) (terminalType (_type (_pParam x))) Nothing -> TyApp () (TyApp () (TyCon () "Capture") (sing (local l))) (terminalType (_type (_pParam x))) Just y -> TyApp () (TyApp () (TyApp () (TyCon () "CaptureMode") (sing (local l))) (sing y)) (terminalType (_type (_pParam x))) requestQuery :: Method Solved -> Map Local (Param Solved) -> [Type ()] requestQuery m xs = mapMaybe go $ orderParams fst (Map.toList xs) (_mParameterOrder m) where go (k, x) = case _pLocation x of Query | x ^. iRepeated -> Just $ TyApp () (TyApp () (TyCon () "QueryParams") n) t Query -> Just $ TyApp () (TyApp () (TyCon () "QueryParam") n) t Path -> Nothing where t = terminalType (_type (_pParam x)) n = sing (local k) requestQueryParams :: Method a -> Map Local (Param a) requestQueryParams = Map.filter ((/= Path) . _pLocation) . _mParameters servantOr, servantSub :: Type () -> [Type ()] -> Type () servantOr = foldl' (\l r -> TyInfix () l ((UnpromotedName () . UnQual ()) (sym ":<|>")) r) servantSub = foldr' (\l r -> TyInfix () l ((UnpromotedName () . UnQual ()) (sym ":>")) r) jsonVerb :: Method a -> Type () jsonVerb m = TyApp () (TyApp () (httpMethod m) jsonMedia) $ maybe (TyCon () "()") (tycon . ref) (_mResponse m) downloadVerb :: Method a -> Type () downloadVerb m = TyApp () (TyApp () (httpMethod m) streamMedia) (TyCon () "Stream") httpMethod :: Method a -> Type () httpMethod = TyCon () . unqual . Text.unpack . Text.toTitle . _mHttpMethod subPaths :: Text -> [Type ()] subPaths = map sing . filter (not . Text.null) . Text.split (== '/') jsonMedia, streamMedia :: Type () jsonMedia = tylist ["JSON"] streamMedia = tylist ["OctetStream"] downloadParam :: Type () downloadParam = TyApp () (TyApp () (TyCon () "QueryParam") (sing "alt")) (TyCon () "AltMedia") mediaParam :: Type () mediaParam = TyApp () (TyApp () (TyCon () "QueryParam") (sing "uploadType")) (TyCon () "AltMedia") multipartParam :: Type () multipartParam = TyApp () (TyApp () (TyCon () "QueryParam") (sing "uploadType")) (TyCon () "Multipart") pattern' :: Integer -> Method a -> Pat () pattern' n m = case (n, down, up) of (1, True, True) -> infixOr go (infixOr wild wild) (2, True, True) -> infixOr wild (infixOr go wild) (_, True, True) -> infixOr wild (infixOr wild go) (1, True, _) -> infixOr go wild (_, True, _) -> infixOr wild go (1, _, True) -> infixOr go wild (_, _, True) -> infixOr wild go (_, _, _) -> go where down = _mSupportsMediaDownload m up = _mSupportsMediaUpload m go = pvar "go" wild = PWildCard () infixOr l = PInfixApp () l (UnQual () (sym ":<|>")) metadataPat, downloadPat, uploadPat :: Method a -> Pat () metadataPat = pattern' 1 downloadPat = pattern' 2 uploadPat = pattern' 3 downloadDecl :: Global -> Prefix -> Name () -> Name () -> [Local] -> Method Solved -> Decl () downloadDecl n pre api url fs m = googleRequestDecl (TyParen () ty) [rs, ss] [alt] pre api url m pat prec where ty = TyApp () (TyCon () "MediaDownload") (tycon n) rs = InsType () (TyApp () (TyCon () "Rs") ty) (TyCon () "Stream") ss = InsType () (TyApp () (TyCon () "Scopes") ty) $ TyApp () (TyCon () "Scopes") (tycon n) alt = app (var "Just") (var "AltMedia") pat = downloadPat m prec = PApp () (UnQual () "MediaDownload") [ PRec () (UnQual () (dname' n)) [PFieldWildcard () | not (null fs)] ] uploadDecl :: Global -> Prefix -> Name () -> Name () -> [Local] -> Method Solved -> Decl () uploadDecl n pre api url fs m = googleRequestDecl (TyParen () ty) [rs, ss] extras pre api url m pat prec where ty = TyApp () (TyCon () "MediaUpload") 
(tycon n) rs = InsType () (TyApp () (TyCon () "Rs") ty) $ maybe (unit_tycon ()) (tycon . ref) (_mResponse m) ss = InsType () (TyApp () (TyCon () "Scopes") ty) $ TyApp () (TyCon () "Scopes") (tycon n) extras = maybeToList alt ++ [upl] ++ payload ++ [var media] where upl = app (var "Just") $ if isJust (_mRequest m) then var "Multipart" else var "AltMedia" alt = app (var "Just") . var . name . Text.unpack . alternate <$> (Map.lookup "alt" (_mParameters m) >>= view iDefault) payload | isJust (_mRequest m) = [var (fname pre "payload")] | otherwise = [] pat = uploadPat m prec = PApp () (UnQual () "MediaUpload") [ PRec () (UnQual () (dname' n)) [PFieldWildcard () | not (null fs)] , PVar () media ] media = name "body" requestDecl :: Global -> Prefix -> Name () -> Name () -> [Local] -> Method Solved -> Decl () requestDecl n pre api url fs m = googleRequestDecl (tycon n) [rs, ss] extras pre api url m pat prec where rs :: InstDecl () rs = InsType () (TyApp () (TyCon () "Rs") (tycon n)) $ maybe (unit_tycon ()) (tycon . ref) (_mResponse m) ss :: InstDecl () ss = InsType () (TyApp () (TyCon () "Scopes") (tycon n)) $ TyPromoted () $ PromotedList () True $ map (\m' -> TyPromoted () (PromotedString () (Text.unpack m') "Scopes")) (_mScopes m) extras = catMaybes [alt, payload] where alt = app (var "Just") . var . name . Text.unpack . alternate <$> (Map.lookup "alt" (_mParameters m) >>= view iDefault) payload | isJust (_mRequest m) = Just $ var (fname pre "payload") | otherwise = Nothing pat = metadataPat m prec = PRec () (UnQual () (dname' n)) [PFieldWildcard () | not (null fs)] googleRequestDecl :: Type () -> [InstDecl ()] -> [Exp ()] -> Prefix -> Name () -> Name () -> Method Solved -> Pat () -> Pat () -> Decl () googleRequestDecl n assoc extras pre api url m pat prec = InstDecl () Nothing (instrule "GoogleRequest" n) (Just (assoc ++ [request])) where request = InsDecl () (FunBind () [match]) match = Match () (name "requestClient") [prec] rhs (Just decls) decls = BDecls () [ patBind pat $ appFun (var "buildClient") [ ExpTypeSig () (var "Proxy") $ TyApp () (TyCon () "Proxy") (TyCon () (UnQual () api)) , var "mempty" ] ] rhs = UnGuardedRhs () . appFun (var "go") $ map go fs ++ extras ++ [var url] where go l = case Map.lookup l ps of Just p | _pLocation p == Query , defaulted p , p ^. iRepeated -> v l Just p | _pLocation p == Query , not (required p) , p ^. iRepeated -> infixApp (v l) "^." (var "_Default") Just p | _pLocation p == Query , not (p ^. iRepeated) , parameter p || defaulted p -> app (var "Just") (v l) _ -> v l ps = _mParameters m v = var . fname pre fs = delete "alt" . orderParams id (Map.keys (_mParameters m)) . nub $ map fst (rights (extractPath (_mPath m))) ++ _mParameterOrder m jsonDecls :: Global -> Prefix -> Map Local Solved -> [Decl ()] jsonDecls g p (Map.toList -> rs) = [from', to'] where from' = InstDecl () Nothing (instrule "FromJSON" (tycon g)) (Just [ funD "parseJSON" $ app (app (var "withObject") (dstr g)) $ lamE [pvar "o"] $ ctorE g (map decode rs) ]) decode (l, s) | _additional s = app (var "parseJSONObject") (var "o") | Just x <- def s = defJS l x | required s = reqJS l | monoid s = defJS l (var "mempty") | otherwise = optJS l to' = case rs of [(k, v)] | _additional v -> InstDecl () Nothing (instrule "ToJSON" (tycon g)) (Just [ funD "toJSON" $ infixApp (var "toJSON") "." (var (fname p k)) ]) _ -> InstDecl () Nothing (instrule "ToJSON" (tycon g)) (Just [ wildcardD "toJSON" g omit emptyObj (map encode rs) ]) omit = app (var "object") . app (var "catMaybes") . 
listE emptyObj = var "emptyObject" encode (l, s) | TMaybe {} <- _type s = infixApp (paren (app n o)) "<$>" a | otherwise = app (var "Just") (infixApp n ".=" a) where n = fstr l a = var (fname p l) o = var ".=" wildcardD :: String -> Global -> ([Exp ()] -> Exp ()) -> Exp () -> [Exp ()] -> InstDecl () wildcardD f n enc x = \case [] -> constD f x xs -> InsDecl () (FunBind () [match prec xs]) where match p es = Match () (name f) [p] (UnGuardedRhs () (enc es)) noBinds prec = PRec () (UnQual () (dname' n)) [PFieldWildcard ()] defJS :: Local -> Exp () -> Exp () defJS n = infixApp (infixApp (var "o") ".:?" (fstr n)) ".!=" reqJS :: Local -> Exp () reqJS = infixApp (var "o") ".:" . fstr optJS :: Local -> Exp () optJS = infixApp (var "o") ".:?" . fstr funD :: String -> Exp () -> InstDecl () funD f = InsDecl () . patBind (pvar (name f)) constD :: String -> Exp () -> InstDecl () constD f x = InsDecl () $ sfun (name f) [] (UnGuardedRhs () (app (var "const") x)) noBinds ctorE :: Global -> [Exp ()] -> Exp () ctorE n = seqE (var (dname' n)) . map paren seqE :: Exp () -> [Exp ()] -> Exp () seqE l [] = app (var "pure") l seqE l (r:rs) = infixApp l "<$>" (infixE r "<*>" rs) objDecl :: Global -> Prefix -> [Derive] -> Map Local Solved -> Decl () objDecl n p ds rs = DataDecl () arity Nothing (DHead () (dname n)) [conDecl (dname' n) p rs] [der ds] where arity | Map.size rs == 1 = NewType () | otherwise = DataType () der = Deriving () Nothing . map (unqualrule . drop 1 . show) -- decl = -- DataDecl -- () -- (DataType ()) -- Nothing -- (DHead () (Ident () "AboutGet")) -- [QualConDecl () Nothing Nothing (ConDecl () (Ident () "AboutGet'") [])] -- [ Deriving -- () -- Nothing -- [ IRule () Nothing Nothing (IHCon () (UnQual () (Ident () "Eq"))) -- , IRule () Nothing Nothing (IHCon () (UnQual () (Ident () "Show"))) -- , IRule () Nothing Nothing (IHCon () (UnQual () (Ident () "Data"))) -- , IRule () Nothing Nothing (IHCon () (UnQual () (Ident () "Typeable"))) -- , IRule () Nothing Nothing (IHCon () (UnQual () (Ident () "Generic"))) -- ] -- ] conDecl :: Name () -> Prefix -> Map Local Solved -> QualConDecl () conDecl n p rs = QualConDecl () Nothing Nothing body where body = case Map.toList rs of [] -> ConDecl () n [] [x] -> RecDecl () n [field internalType x] xs -> RecDecl () n (map (field (strict . internalType)) xs) field f (l, v) = FieldDecl () [fname p l] (f (_type v)) ctorSig :: Global -> Map Local Solved -> Decl () ctorSig n rs = TypeSig () [cname n] ts where ts = foldr' (TyFun ()) (TyCon () (UnQual () (dname n))) ps ps = parameters (Map.elems rs) ctorDecl :: Global -> Prefix -> Map Local Solved -> Decl () ctorDecl n p rs = sfun c ps (UnGuardedRhs () rhs) noBinds where c = cname n d = dname' n rhs | Map.null rs = var d | otherwise = RecConstr () (UnQual () d) $ map (uncurry (fieldUpdate p)) (Map.toList rs) ps = map (pname p) . Map.keys $ Map.filter parameter rs fieldUpdate :: Prefix -> Local -> Solved -> FieldUpdate () fieldUpdate p l s = FieldUpdate () (UnQual () (fname p l)) rhs where rhs | Just x <- def s, s ^. 
iRepeated = listE [x] | Just x <- def s = x | Just x <- iso (_type s) = infixApp x "#" v | parameter s = v | otherwise = var (name "Nothing") v = var (pname p l) lensSig :: Global -> Prefix -> Local -> Solved -> Decl () lensSig n p l s = TypeSig () [lname p l] $ TyApp () (TyApp () (TyCon () "Lens'") (tycon n)) (externalType (_type s)) lensDecl :: Prefix -> Local -> Solved -> Decl () lensDecl p l s = sfun (lname p l) [] (UnGuardedRhs () rhs) noBinds where f = fname p l t = _type s rhs = mapping t $ app (app (var "lens") (var f)) (paren (lamE [pvar "s", pvar "a"] (RecUpdate () (var "s") [FieldUpdate () (UnQual () f) (var "a")]))) parameters :: [Solved] -> [Type ()] parameters = map (externalType . _type) . filter parameter def :: Solved -> Maybe (Exp ()) def s | Just x <- s ^. iDefault = Just (go x (_prefix s) (_schema s)) | otherwise = Nothing where go x p = \case SEnm {} -> var (bname p x) SLit _ Bool -> lit (upperHead x) SLit _ Text -> str x SLit {} -> lit x e -> error $ "Unsupported default value: " ++ show e lit = var . name . Text.unpack terminalType :: TType -> Type () terminalType = internalType . go where go (TMaybe x) = go x go (TList x) = go x go x = x externalType :: TType -> Type () externalType = \case TType r -> tycon r TMaybe t@TList {} -> externalType t TMaybe t -> TyApp () (TyCon () "Maybe") (externalType t) TList t -> TyList () (externalType t) TLit l -> externalLit l TMap k v -> TyApp () (TyApp () (TyCon () "HashMap") (externalType k)) (externalType (require v)) internalType :: TType -> Type () internalType = \case TType r -> tycon r TMaybe t -> TyApp () (TyCon () "Maybe") (internalType t) TList t -> TyList () (internalType t) TLit l -> internalLit l TMap k v -> TyApp () (TyApp () (TyCon () "HashMap") (internalType k)) (internalType (require v)) externalLit :: Lit -> Type () externalLit = \case Text -> TyCon () "Text" Bool -> TyCon () "Bool" Time -> TyCon () "TimeOfDay" Date -> TyCon () "Day" DateTime -> TyCon () "UTCTime" Nat -> TyCon () "Natural" Float -> TyCon () "Double" Double -> TyCon () "Double" Byte -> TyCon () "ByteString" UInt32 -> TyCon () "Word32" UInt64 -> TyCon () "Word64" Int32 -> TyCon () "Int32" Int64 -> TyCon () "Int64" Alt t -> TyCon () (unqual (Text.unpack t)) RqBody -> TyCon () "RequestBody" RsBody -> TyCon () "Stream" JSONValue -> TyCon () "JSONValue" GFieldMask -> TyCon () "GFieldMask" GDuration -> TyCon () "Scientific" internalLit :: Lit -> Type () internalLit = \case Text -> TyCon () "Text" Bool -> TyCon () "Bool" Time -> TyCon () "Time'" Date -> TyCon () "Date'" DateTime -> TyCon () "DateTime'" Nat -> TyApp () (TyCon () "Textual") (TyCon () "Nat") Float -> TyApp () (TyCon () "Textual") (TyCon () "Double") Double -> TyApp () (TyCon () "Textual") (TyCon () "Double") Byte -> TyCon () "Bytes" UInt32 -> TyApp () (TyCon () "Textual") (TyCon () "Word32") UInt64 -> TyApp () (TyCon () "Textual") (TyCon () "Word64") Int32 -> TyApp () (TyCon () "Textual") (TyCon () "Int32") Int64 -> TyApp () (TyCon () "Textual") (TyCon () "Int64") Alt t -> TyCon () (unqual (Text.unpack t)) RqBody -> TyCon () "RequestBody" RsBody -> TyCon () "Stream" JSONValue -> TyCon () "JSONValue" GFieldMask -> TyCon () "GFieldMask" GDuration -> TyCon () "GDuration" mapping :: TType -> Exp () -> Exp () mapping t e = infixE e "." (go t) where go = \case TMaybe x@TList {} -> var "_Default" : go x TMaybe x -> nest (go x) x -> maybeToList (iso x) nest [] = [] nest (x:xs) = [app (var "mapping") (infixE x "." 
xs)] iso :: TType -> Maybe (Exp ()) iso = \case TList {} -> Just (var "_Coerce") TMap {} -> Just (var "_Coerce") TLit Nat -> Just (var "_Coerce") TLit Time -> Just (var "_Time") TLit Date -> Just (var "_Date") TLit DateTime -> Just (var "_DateTime") TLit GDuration -> Just (var "_GDuration") TLit Float -> Just (var "_Coerce") TLit Double -> Just (var "_Coerce") TLit Byte -> Just (var "_Bytes") TLit UInt32 -> Just (var "_Coerce") TLit UInt64 -> Just (var "_Coerce") TLit Int32 -> Just (var "_Coerce") TLit Int64 -> Just (var "_Coerce") _ -> Nothing require :: TType -> TType require (TMaybe t) = t require t = t strict :: Type () -> Type () strict = TyBang () (BangedTy ()) (NoUnpackPragma ()) . \case t@TyApp{} -> TyParen () t t -> t sing :: Text -> Type () sing = TyCon () . unqual . Text.unpack . flip mappend "\"" . mappend "\"" tylist :: [Text] -> Type () tylist xs = TyCon () . UnQual () . name . Text.unpack $ "'[" <> Text.intercalate ", " xs <> "]" tycon :: Global -> Type () tycon = TyCon () . UnQual () . dname unqual :: String -> QName () unqual = UnQual () . name unqualrule :: String -> InstRule () unqualrule = IRule () Nothing Nothing . IHCon () . unqual instrule :: String -> Type () -> InstRule () instrule n t = IRule () Nothing Nothing (IHApp () (IHCon () (unqual n)) t) infixE :: Exp () -> QOp () -> [Exp ()] -> Exp () infixE l _ [] = l infixE l o (r:rs) = infixE (infixApp l o r) o rs str :: Text -> Exp () str = strE . Text.unpack
brendanhay/gogol
gen/src/Gen/Syntax.hs
mpl-2.0
22,588
0
21
7,573
9,681
4,756
4,925
495
19
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Language.Types -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Network.Google.Language.Types ( -- * Service Configuration languageService -- * OAuth Scopes , cloudLanguageScope , cloudPlatformScope -- * AnalyzeSyntaxRequest , AnalyzeSyntaxRequest , analyzeSyntaxRequest , asrEncodingType , asrDocument -- * DependencyEdge , DependencyEdge , dependencyEdge , deHeadTokenIndex , deLabel -- * ClassifyTextRequest , ClassifyTextRequest , classifyTextRequest , ctrDocument -- * Status , Status , status , sDetails , sCode , sMessage -- * PartOfSpeechProper , PartOfSpeechProper (..) -- * PartOfSpeechTag , PartOfSpeechTag (..) -- * Sentiment , Sentiment , sentiment , sScore , sMagnitude -- * DocumentType , DocumentType (..) -- * AnalyzeSyntaxRequestEncodingType , AnalyzeSyntaxRequestEncodingType (..) -- * AnalyzeEntitySentimentRequestEncodingType , AnalyzeEntitySentimentRequestEncodingType (..) -- * DependencyEdgeLabel , DependencyEdgeLabel (..) -- * PartOfSpeechVoice , PartOfSpeechVoice (..) -- * PartOfSpeechForm , PartOfSpeechForm (..) -- * PartOfSpeechPerson , PartOfSpeechPerson (..) -- * Token , Token , token , tDependencyEdge , tText , tLemma , tPartOfSpeech -- * EntityType , EntityType (..) -- * StatusDetailsItem , StatusDetailsItem , statusDetailsItem , sdiAddtional -- * ClassificationCategory , ClassificationCategory , classificationCategory , ccConfidence , ccName -- * AnnotateTextRequest , AnnotateTextRequest , annotateTextRequest , atrEncodingType , atrFeatures , atrDocument -- * EntityMention , EntityMention , entityMention , emSentiment , emText , emType -- * TextSpan , TextSpan , textSpan , tsBeginOffSet , tsContent -- * AnalyzeEntitySentimentRequest , AnalyzeEntitySentimentRequest , analyzeEntitySentimentRequest , aesrEncodingType , aesrDocument -- * AnnotateTextResponse , AnnotateTextResponse , annotateTextResponse , atrEntities , atrTokens , atrDocumentSentiment , atrCategories , atrSentences , atrLanguage -- * PartOfSpeechTense , PartOfSpeechTense (..) -- * Features , Features , features , fExtractSyntax , fExtractDocumentSentiment , fClassifyText , fExtractEntitySentiment , fExtractEntities -- * Document , Document , document , dContent , dLanguage , dGcsContentURI , dType -- * PartOfSpeechMood , PartOfSpeechMood (..) -- * PartOfSpeechCase , PartOfSpeechCase (..) -- * AnalyzeSentimentRequest , AnalyzeSentimentRequest , analyzeSentimentRequest , aEncodingType , aDocument -- * Xgafv , Xgafv (..) -- * AnalyzeEntitiesResponse , AnalyzeEntitiesResponse , analyzeEntitiesResponse , aerEntities , aerLanguage -- * AnnotateTextRequestEncodingType , AnnotateTextRequestEncodingType (..) -- * PartOfSpeechNumber , PartOfSpeechNumber (..) -- * AnalyzeSentimentResponse , AnalyzeSentimentResponse , analyzeSentimentResponse , asrDocumentSentiment , asrSentences , asrLanguage -- * AnalyzeEntitiesRequest , AnalyzeEntitiesRequest , analyzeEntitiesRequest , aerEncodingType , aerDocument -- * AnalyzeEntitiesRequestEncodingType , AnalyzeEntitiesRequestEncodingType (..) 
-- * Entity , Entity , entity , eSentiment , eName , eSalience , eMetadata , eType , eMentions -- * AnalyzeEntitySentimentResponse , AnalyzeEntitySentimentResponse , analyzeEntitySentimentResponse , aesrEntities , aesrLanguage -- * AnalyzeSyntaxResponse , AnalyzeSyntaxResponse , analyzeSyntaxResponse , aTokens , aSentences , aLanguage -- * EntityMetadata , EntityMetadata , entityMetadata , emAddtional -- * PartOfSpeechAspect , PartOfSpeechAspect (..) -- * ClassifyTextResponse , ClassifyTextResponse , classifyTextResponse , ctrCategories -- * PartOfSpeech , PartOfSpeech , partOfSpeech , posProper , posTag , posPerson , posAspect , posCase , posGender , posReciprocity , posNumber , posVoice , posForm , posTense , posMood -- * PartOfSpeechReciprocity , PartOfSpeechReciprocity (..) -- * PartOfSpeechGender , PartOfSpeechGender (..) -- * AnalyzeSentimentRequestEncodingType , AnalyzeSentimentRequestEncodingType (..) -- * EntityMentionType , EntityMentionType (..) -- * Sentence , Sentence , sentence , sSentiment , sText ) where import Network.Google.Language.Types.Product import Network.Google.Language.Types.Sum import Network.Google.Prelude -- | Default request referring to version 'v1' of the Cloud Natural Language API. This contains the host and root path used as a starting point for constructing service requests. languageService :: ServiceConfig languageService = defaultService (ServiceId "language:v1") "language.googleapis.com" -- | Apply machine learning models to reveal the structure and meaning of -- text cloudLanguageScope :: Proxy '["https://www.googleapis.com/auth/cloud-language"] cloudLanguageScope = Proxy -- | See, edit, configure, and delete your Google Cloud Platform data cloudPlatformScope :: Proxy '["https://www.googleapis.com/auth/cloud-platform"] cloudPlatformScope = Proxy
brendanhay/gogol
gogol-language/gen/Network/Google/Language/Types.hs
mpl-2.0
6,164
0
7
1,559
710
498
212
174
1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.DoubleClickBids.Types.Sum -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Network.Google.DoubleClickBids.Types.Sum where import Network.Google.Prelude hiding (Bytes) -- | Format of the generated report. data QueryMetadataFormat = CSV -- ^ @CSV@ | ExcelCSV -- ^ @EXCEL_CSV@ | Xlsx -- ^ @XLSX@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable QueryMetadataFormat instance FromHttpApiData QueryMetadataFormat where parseQueryParam = \case "CSV" -> Right CSV "EXCEL_CSV" -> Right ExcelCSV "XLSX" -> Right Xlsx x -> Left ("Unable to parse QueryMetadataFormat from: " <> x) instance ToHttpApiData QueryMetadataFormat where toQueryParam = \case CSV -> "CSV" ExcelCSV -> "EXCEL_CSV" Xlsx -> "XLSX" instance FromJSON QueryMetadataFormat where parseJSON = parseJSONText "QueryMetadataFormat" instance ToJSON QueryMetadataFormat where toJSON = toJSONText -- | Dimension the filter is applied to. data PathQueryOptionsFilterFilter = FilterUnknown -- ^ @FILTER_UNKNOWN@ | FilterDate -- ^ @FILTER_DATE@ | FilterDayOfWeek -- ^ @FILTER_DAY_OF_WEEK@ | FilterWeek -- ^ @FILTER_WEEK@ | FilterMonth -- ^ @FILTER_MONTH@ | FilterYear -- ^ @FILTER_YEAR@ | FilterTimeOfDay -- ^ @FILTER_TIME_OF_DAY@ | FilterConversionDelay -- ^ @FILTER_CONVERSION_DELAY@ | FilterCreativeId -- ^ @FILTER_CREATIVE_ID@ | FilterCreativeSize -- ^ @FILTER_CREATIVE_SIZE@ | FilterCreativeType -- ^ @FILTER_CREATIVE_TYPE@ | FilterExchangeId -- ^ @FILTER_EXCHANGE_ID@ | FilterAdPosition -- ^ @FILTER_AD_POSITION@ | FilterPublicInventory -- ^ @FILTER_PUBLIC_INVENTORY@ | FilterInventorySource -- ^ @FILTER_INVENTORY_SOURCE@ | FilterCity -- ^ @FILTER_CITY@ | FilterRegion -- ^ @FILTER_REGION@ | FilterDma -- ^ @FILTER_DMA@ | FilterCountry -- ^ @FILTER_COUNTRY@ | FilterSiteId -- ^ @FILTER_SITE_ID@ | FilterChannelId -- ^ @FILTER_CHANNEL_ID@ | FilterPartner -- ^ @FILTER_PARTNER@ | FilterAdvertiser -- ^ @FILTER_ADVERTISER@ | FilterInsertionOrder -- ^ @FILTER_INSERTION_ORDER@ | FilterLineItem -- ^ @FILTER_LINE_ITEM@ | FilterPartnerCurrency -- ^ @FILTER_PARTNER_CURRENCY@ | FilterAdvertiserCurrency -- ^ @FILTER_ADVERTISER_CURRENCY@ | FilterAdvertiserTimezone -- ^ @FILTER_ADVERTISER_TIMEZONE@ | FilterLineItemType -- ^ @FILTER_LINE_ITEM_TYPE@ | FilterUserList -- ^ @FILTER_USER_LIST@ | FilterUserListFirstParty -- ^ @FILTER_USER_LIST_FIRST_PARTY@ | FilterUserListThirdParty -- ^ @FILTER_USER_LIST_THIRD_PARTY@ | FilterTargetedUserList -- ^ @FILTER_TARGETED_USER_LIST@ | FilterDataProvider -- ^ @FILTER_DATA_PROVIDER@ | FilterOrderId -- ^ @FILTER_ORDER_ID@ | FilterVideoPlayerSize -- ^ @FILTER_VIDEO_PLAYER_SIZE@ | FilterVideoDurationSeconds -- ^ @FILTER_VIDEO_DURATION_SECONDS@ | FilterKeyword -- ^ @FILTER_KEYWORD@ | FilterPageCategory -- ^ @FILTER_PAGE_CATEGORY@ | FilterCampaignDailyFrequency -- ^ @FILTER_CAMPAIGN_DAILY_FREQUENCY@ | FilterLineItemDailyFrequency -- ^ @FILTER_LINE_ITEM_DAILY_FREQUENCY@ | FilterLineItemLifetimeFrequency -- ^ @FILTER_LINE_ITEM_LIFETIME_FREQUENCY@ | FilterOS -- ^ @FILTER_OS@ | FilterBrowser -- ^ @FILTER_BROWSER@ | FilterCarrier -- ^ @FILTER_CARRIER@ | FilterSiteLanguage -- ^ @FILTER_SITE_LANGUAGE@ | FilterInventoryFormat 
-- ^ @FILTER_INVENTORY_FORMAT@ | FilterZipCode -- ^ @FILTER_ZIP_CODE@ | FilterVideoRatingTier -- ^ @FILTER_VIDEO_RATING_TIER@ | FilterVideoFormatSupport -- ^ @FILTER_VIDEO_FORMAT_SUPPORT@ | FilterVideoSkippableSupport -- ^ @FILTER_VIDEO_SKIPPABLE_SUPPORT@ | FilterVideoCreativeDuration -- ^ @FILTER_VIDEO_CREATIVE_DURATION@ | FilterPageLayout -- ^ @FILTER_PAGE_LAYOUT@ | FilterVideoAdPositionInStream -- ^ @FILTER_VIDEO_AD_POSITION_IN_STREAM@ | FilterAge -- ^ @FILTER_AGE@ | FilterGender -- ^ @FILTER_GENDER@ | FilterQuarter -- ^ @FILTER_QUARTER@ | FilterTrueviewConversionType -- ^ @FILTER_TRUEVIEW_CONVERSION_TYPE@ | FilterMobileGeo -- ^ @FILTER_MOBILE_GEO@ | FilterMraidSupport -- ^ @FILTER_MRAID_SUPPORT@ | FilterActiveViewExpectedViewability -- ^ @FILTER_ACTIVE_VIEW_EXPECTED_VIEWABILITY@ | FilterVideoCreativeDurationSkippable -- ^ @FILTER_VIDEO_CREATIVE_DURATION_SKIPPABLE@ | FilterNielsenCountryCode -- ^ @FILTER_NIELSEN_COUNTRY_CODE@ | FilterNielsenDeviceId -- ^ @FILTER_NIELSEN_DEVICE_ID@ | FilterNielsenGender -- ^ @FILTER_NIELSEN_GENDER@ | FilterNielsenAge -- ^ @FILTER_NIELSEN_AGE@ | FilterInventorySourceType -- ^ @FILTER_INVENTORY_SOURCE_TYPE@ | FilterCreativeWidth -- ^ @FILTER_CREATIVE_WIDTH@ | FilterCreativeHeight -- ^ @FILTER_CREATIVE_HEIGHT@ | FilterDfpOrderId -- ^ @FILTER_DFP_ORDER_ID@ | FilterTrueviewAge -- ^ @FILTER_TRUEVIEW_AGE@ | FilterTrueviewGender -- ^ @FILTER_TRUEVIEW_GENDER@ | FilterTrueviewParentalStatus -- ^ @FILTER_TRUEVIEW_PARENTAL_STATUS@ | FilterTrueviewRemarketingList -- ^ @FILTER_TRUEVIEW_REMARKETING_LIST@ | FilterTrueviewInterest -- ^ @FILTER_TRUEVIEW_INTEREST@ | FilterTrueviewAdGroupId -- ^ @FILTER_TRUEVIEW_AD_GROUP_ID@ | FilterTrueviewAdGroupAdId -- ^ @FILTER_TRUEVIEW_AD_GROUP_AD_ID@ | FilterTrueviewIarLanguage -- ^ @FILTER_TRUEVIEW_IAR_LANGUAGE@ | FilterTrueviewIarGender -- ^ @FILTER_TRUEVIEW_IAR_GENDER@ | FilterTrueviewIarAge -- ^ @FILTER_TRUEVIEW_IAR_AGE@ | FilterTrueviewIarCategory -- ^ @FILTER_TRUEVIEW_IAR_CATEGORY@ | FilterTrueviewIarCountry -- ^ @FILTER_TRUEVIEW_IAR_COUNTRY@ | FilterTrueviewIarCity -- ^ @FILTER_TRUEVIEW_IAR_CITY@ | FilterTrueviewIarRegion -- ^ @FILTER_TRUEVIEW_IAR_REGION@ | FilterTrueviewIarZipcode -- ^ @FILTER_TRUEVIEW_IAR_ZIPCODE@ | FilterTrueviewIarRemarketingList -- ^ @FILTER_TRUEVIEW_IAR_REMARKETING_LIST@ | FilterTrueviewIarInterest -- ^ @FILTER_TRUEVIEW_IAR_INTEREST@ | FilterTrueviewIarParentalStatus -- ^ @FILTER_TRUEVIEW_IAR_PARENTAL_STATUS@ | FilterTrueviewIarTimeOfDay -- ^ @FILTER_TRUEVIEW_IAR_TIME_OF_DAY@ | FilterTrueviewCustomAffinity -- ^ @FILTER_TRUEVIEW_CUSTOM_AFFINITY@ | FilterTrueviewCategory -- ^ @FILTER_TRUEVIEW_CATEGORY@ | FilterTrueviewKeyword -- ^ @FILTER_TRUEVIEW_KEYWORD@ | FilterTrueviewPlacement -- ^ @FILTER_TRUEVIEW_PLACEMENT@ | FilterTrueviewURL -- ^ @FILTER_TRUEVIEW_URL@ | FilterTrueviewCountry -- ^ @FILTER_TRUEVIEW_COUNTRY@ | FilterTrueviewRegion -- ^ @FILTER_TRUEVIEW_REGION@ | FilterTrueviewCity -- ^ @FILTER_TRUEVIEW_CITY@ | FilterTrueviewDma -- ^ @FILTER_TRUEVIEW_DMA@ | FilterTrueviewZipcode -- ^ @FILTER_TRUEVIEW_ZIPCODE@ | FilterNotSupported -- ^ @FILTER_NOT_SUPPORTED@ | FilterMediaPlan -- ^ @FILTER_MEDIA_PLAN@ | FilterTrueviewIarYouTubeChannel -- ^ @FILTER_TRUEVIEW_IAR_YOUTUBE_CHANNEL@ | FilterTrueviewIarYouTubeVideo -- ^ @FILTER_TRUEVIEW_IAR_YOUTUBE_VIDEO@ | FilterSkippableSupport -- ^ @FILTER_SKIPPABLE_SUPPORT@ | FilterCompanionCreativeId -- ^ @FILTER_COMPANION_CREATIVE_ID@ | FilterBudgetSegmentDescription -- ^ @FILTER_BUDGET_SEGMENT_DESCRIPTION@ | FilterFloodlightActivityId -- ^ @FILTER_FLOODLIGHT_ACTIVITY_ID@ 
| FilterDeviceModel -- ^ @FILTER_DEVICE_MODEL@ | FilterDeviceMake -- ^ @FILTER_DEVICE_MAKE@ | FilterDeviceType -- ^ @FILTER_DEVICE_TYPE@ | FilterCreativeAttribute -- ^ @FILTER_CREATIVE_ATTRIBUTE@ | FilterInventoryCommitmentType -- ^ @FILTER_INVENTORY_COMMITMENT_TYPE@ | FilterInventoryRateType -- ^ @FILTER_INVENTORY_RATE_TYPE@ | FilterInventoryDeliveryMethod -- ^ @FILTER_INVENTORY_DELIVERY_METHOD@ | FilterInventorySourceExternalId -- ^ @FILTER_INVENTORY_SOURCE_EXTERNAL_ID@ | FilterAuthorizedSellerState -- ^ @FILTER_AUTHORIZED_SELLER_STATE@ | FilterVideoDurationSecondsRange -- ^ @FILTER_VIDEO_DURATION_SECONDS_RANGE@ | FilterPartnerName -- ^ @FILTER_PARTNER_NAME@ | FilterPartnerStatus -- ^ @FILTER_PARTNER_STATUS@ | FilterAdvertiserName -- ^ @FILTER_ADVERTISER_NAME@ | FilterAdvertiserIntegrationCode -- ^ @FILTER_ADVERTISER_INTEGRATION_CODE@ | FilterAdvertiserIntegrationStatus -- ^ @FILTER_ADVERTISER_INTEGRATION_STATUS@ | FilterCarrierName -- ^ @FILTER_CARRIER_NAME@ | FilterChannelName -- ^ @FILTER_CHANNEL_NAME@ | FilterCityName -- ^ @FILTER_CITY_NAME@ | FilterCompanionCreativeName -- ^ @FILTER_COMPANION_CREATIVE_NAME@ | FilterUserListFirstPartyName -- ^ @FILTER_USER_LIST_FIRST_PARTY_NAME@ | FilterUserListThirdPartyName -- ^ @FILTER_USER_LIST_THIRD_PARTY_NAME@ | FilterNielsenReStatementDate -- ^ @FILTER_NIELSEN_RESTATEMENT_DATE@ | FilterNielsenDateRange -- ^ @FILTER_NIELSEN_DATE_RANGE@ | FilterInsertionOrderName -- ^ @FILTER_INSERTION_ORDER_NAME@ | FilterRegionName -- ^ @FILTER_REGION_NAME@ | FilterDmaName -- ^ @FILTER_DMA_NAME@ | FilterTrueviewIarRegionName -- ^ @FILTER_TRUEVIEW_IAR_REGION_NAME@ | FilterTrueviewDmaName -- ^ @FILTER_TRUEVIEW_DMA_NAME@ | FilterTrueviewRegionName -- ^ @FILTER_TRUEVIEW_REGION_NAME@ | FilterActiveViewCustomMetricId -- ^ @FILTER_ACTIVE_VIEW_CUSTOM_METRIC_ID@ | FilterActiveViewCustomMetricName -- ^ @FILTER_ACTIVE_VIEW_CUSTOM_METRIC_NAME@ | FilterAdType -- ^ @FILTER_AD_TYPE@ | FilterAlgorithm -- ^ @FILTER_ALGORITHM@ | FilterAlgorithmId -- ^ @FILTER_ALGORITHM_ID@ | FilterAmpPageRequest -- ^ @FILTER_AMP_PAGE_REQUEST@ | FilterAnonymousInventoryModeling -- ^ @FILTER_ANONYMOUS_INVENTORY_MODELING@ | FilterAppURL -- ^ @FILTER_APP_URL@ | FilterAppURLExcluded -- ^ @FILTER_APP_URL_EXCLUDED@ | FilterAttributedUserList -- ^ @FILTER_ATTRIBUTED_USERLIST@ | FilterAttributedUserListCost -- ^ @FILTER_ATTRIBUTED_USERLIST_COST@ | FilterAttributedUserListType -- ^ @FILTER_ATTRIBUTED_USERLIST_TYPE@ | FilterAttributionModel -- ^ @FILTER_ATTRIBUTION_MODEL@ | FilterAudienceList -- ^ @FILTER_AUDIENCE_LIST@ | FilterAudienceListCost -- ^ @FILTER_AUDIENCE_LIST_COST@ | FilterAudienceListType -- ^ @FILTER_AUDIENCE_LIST_TYPE@ | FilterAudienceName -- ^ @FILTER_AUDIENCE_NAME@ | FilterAudienceType -- ^ @FILTER_AUDIENCE_TYPE@ | FilterBillableOutcome -- ^ @FILTER_BILLABLE_OUTCOME@ | FilterBrandLiftType -- ^ @FILTER_BRAND_LIFT_TYPE@ | FilterChannelType -- ^ @FILTER_CHANNEL_TYPE@ | FilterCmPlacementId -- ^ @FILTER_CM_PLACEMENT_ID@ | FilterConversionSource -- ^ @FILTER_CONVERSION_SOURCE@ | FilterConversionSourceId -- ^ @FILTER_CONVERSION_SOURCE_ID@ | FilterCountryId -- ^ @FILTER_COUNTRY_ID@ | FilterCreative -- ^ @FILTER_CREATIVE@ | FilterCreativeAsset -- ^ @FILTER_CREATIVE_ASSET@ | FilterCreativeIntegrationCode -- ^ @FILTER_CREATIVE_INTEGRATION_CODE@ | FilterCreativeRenderedInAmp -- ^ @FILTER_CREATIVE_RENDERED_IN_AMP@ | FilterCreativeSource -- ^ @FILTER_CREATIVE_SOURCE@ | FilterCreativeStatus -- ^ @FILTER_CREATIVE_STATUS@ | FilterDataProviderName -- ^ @FILTER_DATA_PROVIDER_NAME@ | 
FilterDetailedDemographics -- ^ @FILTER_DETAILED_DEMOGRAPHICS@ | FilterDetailedDemographicsId -- ^ @FILTER_DETAILED_DEMOGRAPHICS_ID@ | FilterDevice -- ^ @FILTER_DEVICE@ | FilterGamInsertionOrder -- ^ @FILTER_GAM_INSERTION_ORDER@ | FilterGamLineItem -- ^ @FILTER_GAM_LINE_ITEM@ | FilterGamLineItemId -- ^ @FILTER_GAM_LINE_ITEM_ID@ | FilterDigitalContentLabel -- ^ @FILTER_DIGITAL_CONTENT_LABEL@ | FilterDomain -- ^ @FILTER_DOMAIN@ | FilterEligibleCookiesOnFirstPartyAudienceList -- ^ @FILTER_ELIGIBLE_COOKIES_ON_FIRST_PARTY_AUDIENCE_LIST@ | FilterEligibleCookiesOnThirdPartyAudienceListAndInterest -- ^ @FILTER_ELIGIBLE_COOKIES_ON_THIRD_PARTY_AUDIENCE_LIST_AND_INTEREST@ | FilterExchange -- ^ @FILTER_EXCHANGE@ | FilterExchangeCode -- ^ @FILTER_EXCHANGE_CODE@ | FilterExtension -- ^ @FILTER_EXTENSION@ | FilterExtensionStatus -- ^ @FILTER_EXTENSION_STATUS@ | FilterExtensionType -- ^ @FILTER_EXTENSION_TYPE@ | FilterFirstPartyAudienceListCost -- ^ @FILTER_FIRST_PARTY_AUDIENCE_LIST_COST@ | FilterFirstPartyAudienceListType -- ^ @FILTER_FIRST_PARTY_AUDIENCE_LIST_TYPE@ | FilterFloodlightActivity -- ^ @FILTER_FLOODLIGHT_ACTIVITY@ | FilterFormat -- ^ @FILTER_FORMAT@ | FilterGmailAge -- ^ @FILTER_GMAIL_AGE@ | FilterGmailCity -- ^ @FILTER_GMAIL_CITY@ | FilterGmailCountry -- ^ @FILTER_GMAIL_COUNTRY@ | FilterGmailCountryName -- ^ @FILTER_GMAIL_COUNTRY_NAME@ | FilterGmailDeviceType -- ^ @FILTER_GMAIL_DEVICE_TYPE@ | FilterGmailDeviceTypeName -- ^ @FILTER_GMAIL_DEVICE_TYPE_NAME@ | FilterGmailGender -- ^ @FILTER_GMAIL_GENDER@ | FilterGmailRegion -- ^ @FILTER_GMAIL_REGION@ | FilterGmailRemarketingList -- ^ @FILTER_GMAIL_REMARKETING_LIST@ | FilterHouseholdIncome -- ^ @FILTER_HOUSEHOLD_INCOME@ | FilterImpressionCountingMethod -- ^ @FILTER_IMPRESSION_COUNTING_METHOD@ | FilterYouTubeProgrammaticGuaranteedInsertionOrder -- ^ @FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_INSERTION_ORDER@ | FilterInsertionOrderIntegrationCode -- ^ @FILTER_INSERTION_ORDER_INTEGRATION_CODE@ | FilterInsertionOrderStatus -- ^ @FILTER_INSERTION_ORDER_STATUS@ | FilterInterest -- ^ @FILTER_INTEREST@ | FilterInventorySourceGroup -- ^ @FILTER_INVENTORY_SOURCE_GROUP@ | FilterInventorySourceGroupId -- ^ @FILTER_INVENTORY_SOURCE_GROUP_ID@ | FilterInventorySourceId -- ^ @FILTER_INVENTORY_SOURCE_ID@ | FilterInventorySourceName -- ^ @FILTER_INVENTORY_SOURCE_NAME@ | FilterLifeEvent -- ^ @FILTER_LIFE_EVENT@ | FilterLifeEvents -- ^ @FILTER_LIFE_EVENTS@ | FilterLineItemIntegrationCode -- ^ @FILTER_LINE_ITEM_INTEGRATION_CODE@ | FilterLineItemName -- ^ @FILTER_LINE_ITEM_NAME@ | FilterLineItemStatus -- ^ @FILTER_LINE_ITEM_STATUS@ | FilterMatchRatio -- ^ @FILTER_MATCH_RATIO@ | FilterMeasurementSource -- ^ @FILTER_MEASUREMENT_SOURCE@ | FilterMediaPlanName -- ^ @FILTER_MEDIA_PLAN_NAME@ | FilterParentalStatus -- ^ @FILTER_PARENTAL_STATUS@ | FilterPlacementAllYouTubeChannels -- ^ @FILTER_PLACEMENT_ALL_YOUTUBE_CHANNELS@ | FilterPlatform -- ^ @FILTER_PLATFORM@ | FilterPlaybackMethod -- ^ @FILTER_PLAYBACK_METHOD@ | FilterPositionInContent -- ^ @FILTER_POSITION_IN_CONTENT@ | FilterPublisherProperty -- ^ @FILTER_PUBLISHER_PROPERTY@ | FilterPublisherPropertyId -- ^ @FILTER_PUBLISHER_PROPERTY_ID@ | FilterPublisherPropertySection -- ^ @FILTER_PUBLISHER_PROPERTY_SECTION@ | FilterPublisherPropertySectionId -- ^ @FILTER_PUBLISHER_PROPERTY_SECTION_ID@ | FilterRefundReason -- ^ @FILTER_REFUND_REASON@ | FilterRemarketingList -- ^ @FILTER_REMARKETING_LIST@ | FilterRewarded -- ^ @FILTER_REWARDED@ | FilterSensitiveCategory -- ^ @FILTER_SENSITIVE_CATEGORY@ | FilterServedPixelDensity -- ^ 
@FILTER_SERVED_PIXEL_DENSITY@ | FilterTargetedDataProviders -- ^ @FILTER_TARGETED_DATA_PROVIDERS@ | FilterThirdPartyAudienceListCost -- ^ @FILTER_THIRD_PARTY_AUDIENCE_LIST_COST@ | FilterThirdPartyAudienceListType -- ^ @FILTER_THIRD_PARTY_AUDIENCE_LIST_TYPE@ | FilterTrueviewAd -- ^ @FILTER_TRUEVIEW_AD@ | FilterTrueviewAdGroup -- ^ @FILTER_TRUEVIEW_AD_GROUP@ | FilterTrueviewDetailedDemographics -- ^ @FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS@ | FilterTrueviewDetailedDemographicsId -- ^ @FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS_ID@ | FilterTrueviewHouseholdIncome -- ^ @FILTER_TRUEVIEW_HOUSEHOLD_INCOME@ | FilterTrueviewIarCountryName -- ^ @FILTER_TRUEVIEW_IAR_COUNTRY_NAME@ | FilterTrueviewRemarketingListName -- ^ @FILTER_TRUEVIEW_REMARKETING_LIST_NAME@ | FilterVariantId -- ^ @FILTER_VARIANT_ID@ | FilterVariantName -- ^ @FILTER_VARIANT_NAME@ | FilterVariantVersion -- ^ @FILTER_VARIANT_VERSION@ | FilterVerificationVideoPlayerSize -- ^ @FILTER_VERIFICATION_VIDEO_PLAYER_SIZE@ | FilterVerificationVideoPosition -- ^ @FILTER_VERIFICATION_VIDEO_POSITION@ | FilterVideoCompanionCreativeSize -- ^ @FILTER_VIDEO_COMPANION_CREATIVE_SIZE@ | FilterVideoContinuousPlay -- ^ @FILTER_VIDEO_CONTINUOUS_PLAY@ | FilterVideoDuration -- ^ @FILTER_VIDEO_DURATION@ | FilterYouTubeAdaptedAudienceList -- ^ @FILTER_YOUTUBE_ADAPTED_AUDIENCE_LIST@ | FilterYouTubeAdVideo -- ^ @FILTER_YOUTUBE_AD_VIDEO@ | FilterYouTubeAdVideoId -- ^ @FILTER_YOUTUBE_AD_VIDEO_ID@ | FilterYouTubeChannel -- ^ @FILTER_YOUTUBE_CHANNEL@ | FilterYouTubeProgrammaticGuaranteedAdvertiser -- ^ @FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_ADVERTISER@ | FilterYouTubeProgrammaticGuaranteedPartner -- ^ @FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_PARTNER@ | FilterYouTubeVideo -- ^ @FILTER_YOUTUBE_VIDEO@ | FilterZipPostalCode -- ^ @FILTER_ZIP_POSTAL_CODE@ | FilterPlacementNameAllYouTubeChannels -- ^ @FILTER_PLACEMENT_NAME_ALL_YOUTUBE_CHANNELS@ | FilterTrueviewPlacementId -- ^ @FILTER_TRUEVIEW_PLACEMENT_ID@ | FilterPathPatternId -- ^ @FILTER_PATH_PATTERN_ID@ | FilterPathEventIndex -- ^ @FILTER_PATH_EVENT_INDEX@ | FilterEventType -- ^ @FILTER_EVENT_TYPE@ | FilterChannelGrouping -- ^ @FILTER_CHANNEL_GROUPING@ | FilterOmSdkAvailable -- ^ @FILTER_OM_SDK_AVAILABLE@ | FilterDataSource -- ^ @FILTER_DATA_SOURCE@ | FilterCM360PlacementId -- ^ @FILTER_CM360_PLACEMENT_ID@ | FilterTrueviewClickTypeName -- ^ @FILTER_TRUEVIEW_CLICK_TYPE_NAME@ | FilterTrueviewAdTypeName -- ^ @FILTER_TRUEVIEW_AD_TYPE_NAME@ | FilterVideoContentDuration -- ^ @FILTER_VIDEO_CONTENT_DURATION@ | FilterMatchedGenreTarget -- ^ @FILTER_MATCHED_GENRE_TARGET@ | FilterVideoContentLiveStream -- ^ @FILTER_VIDEO_CONTENT_LIVE_STREAM@ | FilterBudgetSegmentType -- ^ @FILTER_BUDGET_SEGMENT_TYPE@ | FilterBudgetSegmentBudget -- ^ @FILTER_BUDGET_SEGMENT_BUDGET@ | FilterBudgetSegmentStartDate -- ^ @FILTER_BUDGET_SEGMENT_START_DATE@ | FilterBudgetSegmentEndDate -- ^ @FILTER_BUDGET_SEGMENT_END_DATE@ | FilterBudgetSegmentPacingPercentage -- ^ @FILTER_BUDGET_SEGMENT_PACING_PERCENTAGE@ | FilterLineItemBudget -- ^ @FILTER_LINE_ITEM_BUDGET@ | FilterLineItemStartDate -- ^ @FILTER_LINE_ITEM_START_DATE@ | FilterLineItemEndDate -- ^ @FILTER_LINE_ITEM_END_DATE@ | FilterInsertionOrderGoalType -- ^ @FILTER_INSERTION_ORDER_GOAL_TYPE@ | FilterLineItemPacingPercentage -- ^ @FILTER_LINE_ITEM_PACING_PERCENTAGE@ | FilterInsertionOrderGoalValue -- ^ @FILTER_INSERTION_ORDER_GOAL_VALUE@ | FilterOmidCapable -- ^ @FILTER_OMID_CAPABLE@ | FilterVendorMeasurementMode -- ^ @FILTER_VENDOR_MEASUREMENT_MODE@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, 
Generic) instance Hashable PathQueryOptionsFilterFilter instance FromHttpApiData PathQueryOptionsFilterFilter where parseQueryParam = \case "FILTER_UNKNOWN" -> Right FilterUnknown "FILTER_DATE" -> Right FilterDate "FILTER_DAY_OF_WEEK" -> Right FilterDayOfWeek "FILTER_WEEK" -> Right FilterWeek "FILTER_MONTH" -> Right FilterMonth "FILTER_YEAR" -> Right FilterYear "FILTER_TIME_OF_DAY" -> Right FilterTimeOfDay "FILTER_CONVERSION_DELAY" -> Right FilterConversionDelay "FILTER_CREATIVE_ID" -> Right FilterCreativeId "FILTER_CREATIVE_SIZE" -> Right FilterCreativeSize "FILTER_CREATIVE_TYPE" -> Right FilterCreativeType "FILTER_EXCHANGE_ID" -> Right FilterExchangeId "FILTER_AD_POSITION" -> Right FilterAdPosition "FILTER_PUBLIC_INVENTORY" -> Right FilterPublicInventory "FILTER_INVENTORY_SOURCE" -> Right FilterInventorySource "FILTER_CITY" -> Right FilterCity "FILTER_REGION" -> Right FilterRegion "FILTER_DMA" -> Right FilterDma "FILTER_COUNTRY" -> Right FilterCountry "FILTER_SITE_ID" -> Right FilterSiteId "FILTER_CHANNEL_ID" -> Right FilterChannelId "FILTER_PARTNER" -> Right FilterPartner "FILTER_ADVERTISER" -> Right FilterAdvertiser "FILTER_INSERTION_ORDER" -> Right FilterInsertionOrder "FILTER_LINE_ITEM" -> Right FilterLineItem "FILTER_PARTNER_CURRENCY" -> Right FilterPartnerCurrency "FILTER_ADVERTISER_CURRENCY" -> Right FilterAdvertiserCurrency "FILTER_ADVERTISER_TIMEZONE" -> Right FilterAdvertiserTimezone "FILTER_LINE_ITEM_TYPE" -> Right FilterLineItemType "FILTER_USER_LIST" -> Right FilterUserList "FILTER_USER_LIST_FIRST_PARTY" -> Right FilterUserListFirstParty "FILTER_USER_LIST_THIRD_PARTY" -> Right FilterUserListThirdParty "FILTER_TARGETED_USER_LIST" -> Right FilterTargetedUserList "FILTER_DATA_PROVIDER" -> Right FilterDataProvider "FILTER_ORDER_ID" -> Right FilterOrderId "FILTER_VIDEO_PLAYER_SIZE" -> Right FilterVideoPlayerSize "FILTER_VIDEO_DURATION_SECONDS" -> Right FilterVideoDurationSeconds "FILTER_KEYWORD" -> Right FilterKeyword "FILTER_PAGE_CATEGORY" -> Right FilterPageCategory "FILTER_CAMPAIGN_DAILY_FREQUENCY" -> Right FilterCampaignDailyFrequency "FILTER_LINE_ITEM_DAILY_FREQUENCY" -> Right FilterLineItemDailyFrequency "FILTER_LINE_ITEM_LIFETIME_FREQUENCY" -> Right FilterLineItemLifetimeFrequency "FILTER_OS" -> Right FilterOS "FILTER_BROWSER" -> Right FilterBrowser "FILTER_CARRIER" -> Right FilterCarrier "FILTER_SITE_LANGUAGE" -> Right FilterSiteLanguage "FILTER_INVENTORY_FORMAT" -> Right FilterInventoryFormat "FILTER_ZIP_CODE" -> Right FilterZipCode "FILTER_VIDEO_RATING_TIER" -> Right FilterVideoRatingTier "FILTER_VIDEO_FORMAT_SUPPORT" -> Right FilterVideoFormatSupport "FILTER_VIDEO_SKIPPABLE_SUPPORT" -> Right FilterVideoSkippableSupport "FILTER_VIDEO_CREATIVE_DURATION" -> Right FilterVideoCreativeDuration "FILTER_PAGE_LAYOUT" -> Right FilterPageLayout "FILTER_VIDEO_AD_POSITION_IN_STREAM" -> Right FilterVideoAdPositionInStream "FILTER_AGE" -> Right FilterAge "FILTER_GENDER" -> Right FilterGender "FILTER_QUARTER" -> Right FilterQuarter "FILTER_TRUEVIEW_CONVERSION_TYPE" -> Right FilterTrueviewConversionType "FILTER_MOBILE_GEO" -> Right FilterMobileGeo "FILTER_MRAID_SUPPORT" -> Right FilterMraidSupport "FILTER_ACTIVE_VIEW_EXPECTED_VIEWABILITY" -> Right FilterActiveViewExpectedViewability "FILTER_VIDEO_CREATIVE_DURATION_SKIPPABLE" -> Right FilterVideoCreativeDurationSkippable "FILTER_NIELSEN_COUNTRY_CODE" -> Right FilterNielsenCountryCode "FILTER_NIELSEN_DEVICE_ID" -> Right FilterNielsenDeviceId "FILTER_NIELSEN_GENDER" -> Right FilterNielsenGender "FILTER_NIELSEN_AGE" -> Right 
FilterNielsenAge "FILTER_INVENTORY_SOURCE_TYPE" -> Right FilterInventorySourceType "FILTER_CREATIVE_WIDTH" -> Right FilterCreativeWidth "FILTER_CREATIVE_HEIGHT" -> Right FilterCreativeHeight "FILTER_DFP_ORDER_ID" -> Right FilterDfpOrderId "FILTER_TRUEVIEW_AGE" -> Right FilterTrueviewAge "FILTER_TRUEVIEW_GENDER" -> Right FilterTrueviewGender "FILTER_TRUEVIEW_PARENTAL_STATUS" -> Right FilterTrueviewParentalStatus "FILTER_TRUEVIEW_REMARKETING_LIST" -> Right FilterTrueviewRemarketingList "FILTER_TRUEVIEW_INTEREST" -> Right FilterTrueviewInterest "FILTER_TRUEVIEW_AD_GROUP_ID" -> Right FilterTrueviewAdGroupId "FILTER_TRUEVIEW_AD_GROUP_AD_ID" -> Right FilterTrueviewAdGroupAdId "FILTER_TRUEVIEW_IAR_LANGUAGE" -> Right FilterTrueviewIarLanguage "FILTER_TRUEVIEW_IAR_GENDER" -> Right FilterTrueviewIarGender "FILTER_TRUEVIEW_IAR_AGE" -> Right FilterTrueviewIarAge "FILTER_TRUEVIEW_IAR_CATEGORY" -> Right FilterTrueviewIarCategory "FILTER_TRUEVIEW_IAR_COUNTRY" -> Right FilterTrueviewIarCountry "FILTER_TRUEVIEW_IAR_CITY" -> Right FilterTrueviewIarCity "FILTER_TRUEVIEW_IAR_REGION" -> Right FilterTrueviewIarRegion "FILTER_TRUEVIEW_IAR_ZIPCODE" -> Right FilterTrueviewIarZipcode "FILTER_TRUEVIEW_IAR_REMARKETING_LIST" -> Right FilterTrueviewIarRemarketingList "FILTER_TRUEVIEW_IAR_INTEREST" -> Right FilterTrueviewIarInterest "FILTER_TRUEVIEW_IAR_PARENTAL_STATUS" -> Right FilterTrueviewIarParentalStatus "FILTER_TRUEVIEW_IAR_TIME_OF_DAY" -> Right FilterTrueviewIarTimeOfDay "FILTER_TRUEVIEW_CUSTOM_AFFINITY" -> Right FilterTrueviewCustomAffinity "FILTER_TRUEVIEW_CATEGORY" -> Right FilterTrueviewCategory "FILTER_TRUEVIEW_KEYWORD" -> Right FilterTrueviewKeyword "FILTER_TRUEVIEW_PLACEMENT" -> Right FilterTrueviewPlacement "FILTER_TRUEVIEW_URL" -> Right FilterTrueviewURL "FILTER_TRUEVIEW_COUNTRY" -> Right FilterTrueviewCountry "FILTER_TRUEVIEW_REGION" -> Right FilterTrueviewRegion "FILTER_TRUEVIEW_CITY" -> Right FilterTrueviewCity "FILTER_TRUEVIEW_DMA" -> Right FilterTrueviewDma "FILTER_TRUEVIEW_ZIPCODE" -> Right FilterTrueviewZipcode "FILTER_NOT_SUPPORTED" -> Right FilterNotSupported "FILTER_MEDIA_PLAN" -> Right FilterMediaPlan "FILTER_TRUEVIEW_IAR_YOUTUBE_CHANNEL" -> Right FilterTrueviewIarYouTubeChannel "FILTER_TRUEVIEW_IAR_YOUTUBE_VIDEO" -> Right FilterTrueviewIarYouTubeVideo "FILTER_SKIPPABLE_SUPPORT" -> Right FilterSkippableSupport "FILTER_COMPANION_CREATIVE_ID" -> Right FilterCompanionCreativeId "FILTER_BUDGET_SEGMENT_DESCRIPTION" -> Right FilterBudgetSegmentDescription "FILTER_FLOODLIGHT_ACTIVITY_ID" -> Right FilterFloodlightActivityId "FILTER_DEVICE_MODEL" -> Right FilterDeviceModel "FILTER_DEVICE_MAKE" -> Right FilterDeviceMake "FILTER_DEVICE_TYPE" -> Right FilterDeviceType "FILTER_CREATIVE_ATTRIBUTE" -> Right FilterCreativeAttribute "FILTER_INVENTORY_COMMITMENT_TYPE" -> Right FilterInventoryCommitmentType "FILTER_INVENTORY_RATE_TYPE" -> Right FilterInventoryRateType "FILTER_INVENTORY_DELIVERY_METHOD" -> Right FilterInventoryDeliveryMethod "FILTER_INVENTORY_SOURCE_EXTERNAL_ID" -> Right FilterInventorySourceExternalId "FILTER_AUTHORIZED_SELLER_STATE" -> Right FilterAuthorizedSellerState "FILTER_VIDEO_DURATION_SECONDS_RANGE" -> Right FilterVideoDurationSecondsRange "FILTER_PARTNER_NAME" -> Right FilterPartnerName "FILTER_PARTNER_STATUS" -> Right FilterPartnerStatus "FILTER_ADVERTISER_NAME" -> Right FilterAdvertiserName "FILTER_ADVERTISER_INTEGRATION_CODE" -> Right FilterAdvertiserIntegrationCode "FILTER_ADVERTISER_INTEGRATION_STATUS" -> Right FilterAdvertiserIntegrationStatus "FILTER_CARRIER_NAME" -> Right 
FilterCarrierName "FILTER_CHANNEL_NAME" -> Right FilterChannelName "FILTER_CITY_NAME" -> Right FilterCityName "FILTER_COMPANION_CREATIVE_NAME" -> Right FilterCompanionCreativeName "FILTER_USER_LIST_FIRST_PARTY_NAME" -> Right FilterUserListFirstPartyName "FILTER_USER_LIST_THIRD_PARTY_NAME" -> Right FilterUserListThirdPartyName "FILTER_NIELSEN_RESTATEMENT_DATE" -> Right FilterNielsenReStatementDate "FILTER_NIELSEN_DATE_RANGE" -> Right FilterNielsenDateRange "FILTER_INSERTION_ORDER_NAME" -> Right FilterInsertionOrderName "FILTER_REGION_NAME" -> Right FilterRegionName "FILTER_DMA_NAME" -> Right FilterDmaName "FILTER_TRUEVIEW_IAR_REGION_NAME" -> Right FilterTrueviewIarRegionName "FILTER_TRUEVIEW_DMA_NAME" -> Right FilterTrueviewDmaName "FILTER_TRUEVIEW_REGION_NAME" -> Right FilterTrueviewRegionName "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_ID" -> Right FilterActiveViewCustomMetricId "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_NAME" -> Right FilterActiveViewCustomMetricName "FILTER_AD_TYPE" -> Right FilterAdType "FILTER_ALGORITHM" -> Right FilterAlgorithm "FILTER_ALGORITHM_ID" -> Right FilterAlgorithmId "FILTER_AMP_PAGE_REQUEST" -> Right FilterAmpPageRequest "FILTER_ANONYMOUS_INVENTORY_MODELING" -> Right FilterAnonymousInventoryModeling "FILTER_APP_URL" -> Right FilterAppURL "FILTER_APP_URL_EXCLUDED" -> Right FilterAppURLExcluded "FILTER_ATTRIBUTED_USERLIST" -> Right FilterAttributedUserList "FILTER_ATTRIBUTED_USERLIST_COST" -> Right FilterAttributedUserListCost "FILTER_ATTRIBUTED_USERLIST_TYPE" -> Right FilterAttributedUserListType "FILTER_ATTRIBUTION_MODEL" -> Right FilterAttributionModel "FILTER_AUDIENCE_LIST" -> Right FilterAudienceList "FILTER_AUDIENCE_LIST_COST" -> Right FilterAudienceListCost "FILTER_AUDIENCE_LIST_TYPE" -> Right FilterAudienceListType "FILTER_AUDIENCE_NAME" -> Right FilterAudienceName "FILTER_AUDIENCE_TYPE" -> Right FilterAudienceType "FILTER_BILLABLE_OUTCOME" -> Right FilterBillableOutcome "FILTER_BRAND_LIFT_TYPE" -> Right FilterBrandLiftType "FILTER_CHANNEL_TYPE" -> Right FilterChannelType "FILTER_CM_PLACEMENT_ID" -> Right FilterCmPlacementId "FILTER_CONVERSION_SOURCE" -> Right FilterConversionSource "FILTER_CONVERSION_SOURCE_ID" -> Right FilterConversionSourceId "FILTER_COUNTRY_ID" -> Right FilterCountryId "FILTER_CREATIVE" -> Right FilterCreative "FILTER_CREATIVE_ASSET" -> Right FilterCreativeAsset "FILTER_CREATIVE_INTEGRATION_CODE" -> Right FilterCreativeIntegrationCode "FILTER_CREATIVE_RENDERED_IN_AMP" -> Right FilterCreativeRenderedInAmp "FILTER_CREATIVE_SOURCE" -> Right FilterCreativeSource "FILTER_CREATIVE_STATUS" -> Right FilterCreativeStatus "FILTER_DATA_PROVIDER_NAME" -> Right FilterDataProviderName "FILTER_DETAILED_DEMOGRAPHICS" -> Right FilterDetailedDemographics "FILTER_DETAILED_DEMOGRAPHICS_ID" -> Right FilterDetailedDemographicsId "FILTER_DEVICE" -> Right FilterDevice "FILTER_GAM_INSERTION_ORDER" -> Right FilterGamInsertionOrder "FILTER_GAM_LINE_ITEM" -> Right FilterGamLineItem "FILTER_GAM_LINE_ITEM_ID" -> Right FilterGamLineItemId "FILTER_DIGITAL_CONTENT_LABEL" -> Right FilterDigitalContentLabel "FILTER_DOMAIN" -> Right FilterDomain "FILTER_ELIGIBLE_COOKIES_ON_FIRST_PARTY_AUDIENCE_LIST" -> Right FilterEligibleCookiesOnFirstPartyAudienceList "FILTER_ELIGIBLE_COOKIES_ON_THIRD_PARTY_AUDIENCE_LIST_AND_INTEREST" -> Right FilterEligibleCookiesOnThirdPartyAudienceListAndInterest "FILTER_EXCHANGE" -> Right FilterExchange "FILTER_EXCHANGE_CODE" -> Right FilterExchangeCode "FILTER_EXTENSION" -> Right FilterExtension "FILTER_EXTENSION_STATUS" -> Right FilterExtensionStatus 
"FILTER_EXTENSION_TYPE" -> Right FilterExtensionType "FILTER_FIRST_PARTY_AUDIENCE_LIST_COST" -> Right FilterFirstPartyAudienceListCost "FILTER_FIRST_PARTY_AUDIENCE_LIST_TYPE" -> Right FilterFirstPartyAudienceListType "FILTER_FLOODLIGHT_ACTIVITY" -> Right FilterFloodlightActivity "FILTER_FORMAT" -> Right FilterFormat "FILTER_GMAIL_AGE" -> Right FilterGmailAge "FILTER_GMAIL_CITY" -> Right FilterGmailCity "FILTER_GMAIL_COUNTRY" -> Right FilterGmailCountry "FILTER_GMAIL_COUNTRY_NAME" -> Right FilterGmailCountryName "FILTER_GMAIL_DEVICE_TYPE" -> Right FilterGmailDeviceType "FILTER_GMAIL_DEVICE_TYPE_NAME" -> Right FilterGmailDeviceTypeName "FILTER_GMAIL_GENDER" -> Right FilterGmailGender "FILTER_GMAIL_REGION" -> Right FilterGmailRegion "FILTER_GMAIL_REMARKETING_LIST" -> Right FilterGmailRemarketingList "FILTER_HOUSEHOLD_INCOME" -> Right FilterHouseholdIncome "FILTER_IMPRESSION_COUNTING_METHOD" -> Right FilterImpressionCountingMethod "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_INSERTION_ORDER" -> Right FilterYouTubeProgrammaticGuaranteedInsertionOrder "FILTER_INSERTION_ORDER_INTEGRATION_CODE" -> Right FilterInsertionOrderIntegrationCode "FILTER_INSERTION_ORDER_STATUS" -> Right FilterInsertionOrderStatus "FILTER_INTEREST" -> Right FilterInterest "FILTER_INVENTORY_SOURCE_GROUP" -> Right FilterInventorySourceGroup "FILTER_INVENTORY_SOURCE_GROUP_ID" -> Right FilterInventorySourceGroupId "FILTER_INVENTORY_SOURCE_ID" -> Right FilterInventorySourceId "FILTER_INVENTORY_SOURCE_NAME" -> Right FilterInventorySourceName "FILTER_LIFE_EVENT" -> Right FilterLifeEvent "FILTER_LIFE_EVENTS" -> Right FilterLifeEvents "FILTER_LINE_ITEM_INTEGRATION_CODE" -> Right FilterLineItemIntegrationCode "FILTER_LINE_ITEM_NAME" -> Right FilterLineItemName "FILTER_LINE_ITEM_STATUS" -> Right FilterLineItemStatus "FILTER_MATCH_RATIO" -> Right FilterMatchRatio "FILTER_MEASUREMENT_SOURCE" -> Right FilterMeasurementSource "FILTER_MEDIA_PLAN_NAME" -> Right FilterMediaPlanName "FILTER_PARENTAL_STATUS" -> Right FilterParentalStatus "FILTER_PLACEMENT_ALL_YOUTUBE_CHANNELS" -> Right FilterPlacementAllYouTubeChannels "FILTER_PLATFORM" -> Right FilterPlatform "FILTER_PLAYBACK_METHOD" -> Right FilterPlaybackMethod "FILTER_POSITION_IN_CONTENT" -> Right FilterPositionInContent "FILTER_PUBLISHER_PROPERTY" -> Right FilterPublisherProperty "FILTER_PUBLISHER_PROPERTY_ID" -> Right FilterPublisherPropertyId "FILTER_PUBLISHER_PROPERTY_SECTION" -> Right FilterPublisherPropertySection "FILTER_PUBLISHER_PROPERTY_SECTION_ID" -> Right FilterPublisherPropertySectionId "FILTER_REFUND_REASON" -> Right FilterRefundReason "FILTER_REMARKETING_LIST" -> Right FilterRemarketingList "FILTER_REWARDED" -> Right FilterRewarded "FILTER_SENSITIVE_CATEGORY" -> Right FilterSensitiveCategory "FILTER_SERVED_PIXEL_DENSITY" -> Right FilterServedPixelDensity "FILTER_TARGETED_DATA_PROVIDERS" -> Right FilterTargetedDataProviders "FILTER_THIRD_PARTY_AUDIENCE_LIST_COST" -> Right FilterThirdPartyAudienceListCost "FILTER_THIRD_PARTY_AUDIENCE_LIST_TYPE" -> Right FilterThirdPartyAudienceListType "FILTER_TRUEVIEW_AD" -> Right FilterTrueviewAd "FILTER_TRUEVIEW_AD_GROUP" -> Right FilterTrueviewAdGroup "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS" -> Right FilterTrueviewDetailedDemographics "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS_ID" -> Right FilterTrueviewDetailedDemographicsId "FILTER_TRUEVIEW_HOUSEHOLD_INCOME" -> Right FilterTrueviewHouseholdIncome "FILTER_TRUEVIEW_IAR_COUNTRY_NAME" -> Right FilterTrueviewIarCountryName "FILTER_TRUEVIEW_REMARKETING_LIST_NAME" -> Right 
FilterTrueviewRemarketingListName "FILTER_VARIANT_ID" -> Right FilterVariantId "FILTER_VARIANT_NAME" -> Right FilterVariantName "FILTER_VARIANT_VERSION" -> Right FilterVariantVersion "FILTER_VERIFICATION_VIDEO_PLAYER_SIZE" -> Right FilterVerificationVideoPlayerSize "FILTER_VERIFICATION_VIDEO_POSITION" -> Right FilterVerificationVideoPosition "FILTER_VIDEO_COMPANION_CREATIVE_SIZE" -> Right FilterVideoCompanionCreativeSize "FILTER_VIDEO_CONTINUOUS_PLAY" -> Right FilterVideoContinuousPlay "FILTER_VIDEO_DURATION" -> Right FilterVideoDuration "FILTER_YOUTUBE_ADAPTED_AUDIENCE_LIST" -> Right FilterYouTubeAdaptedAudienceList "FILTER_YOUTUBE_AD_VIDEO" -> Right FilterYouTubeAdVideo "FILTER_YOUTUBE_AD_VIDEO_ID" -> Right FilterYouTubeAdVideoId "FILTER_YOUTUBE_CHANNEL" -> Right FilterYouTubeChannel "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_ADVERTISER" -> Right FilterYouTubeProgrammaticGuaranteedAdvertiser "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_PARTNER" -> Right FilterYouTubeProgrammaticGuaranteedPartner "FILTER_YOUTUBE_VIDEO" -> Right FilterYouTubeVideo "FILTER_ZIP_POSTAL_CODE" -> Right FilterZipPostalCode "FILTER_PLACEMENT_NAME_ALL_YOUTUBE_CHANNELS" -> Right FilterPlacementNameAllYouTubeChannels "FILTER_TRUEVIEW_PLACEMENT_ID" -> Right FilterTrueviewPlacementId "FILTER_PATH_PATTERN_ID" -> Right FilterPathPatternId "FILTER_PATH_EVENT_INDEX" -> Right FilterPathEventIndex "FILTER_EVENT_TYPE" -> Right FilterEventType "FILTER_CHANNEL_GROUPING" -> Right FilterChannelGrouping "FILTER_OM_SDK_AVAILABLE" -> Right FilterOmSdkAvailable "FILTER_DATA_SOURCE" -> Right FilterDataSource "FILTER_CM360_PLACEMENT_ID" -> Right FilterCM360PlacementId "FILTER_TRUEVIEW_CLICK_TYPE_NAME" -> Right FilterTrueviewClickTypeName "FILTER_TRUEVIEW_AD_TYPE_NAME" -> Right FilterTrueviewAdTypeName "FILTER_VIDEO_CONTENT_DURATION" -> Right FilterVideoContentDuration "FILTER_MATCHED_GENRE_TARGET" -> Right FilterMatchedGenreTarget "FILTER_VIDEO_CONTENT_LIVE_STREAM" -> Right FilterVideoContentLiveStream "FILTER_BUDGET_SEGMENT_TYPE" -> Right FilterBudgetSegmentType "FILTER_BUDGET_SEGMENT_BUDGET" -> Right FilterBudgetSegmentBudget "FILTER_BUDGET_SEGMENT_START_DATE" -> Right FilterBudgetSegmentStartDate "FILTER_BUDGET_SEGMENT_END_DATE" -> Right FilterBudgetSegmentEndDate "FILTER_BUDGET_SEGMENT_PACING_PERCENTAGE" -> Right FilterBudgetSegmentPacingPercentage "FILTER_LINE_ITEM_BUDGET" -> Right FilterLineItemBudget "FILTER_LINE_ITEM_START_DATE" -> Right FilterLineItemStartDate "FILTER_LINE_ITEM_END_DATE" -> Right FilterLineItemEndDate "FILTER_INSERTION_ORDER_GOAL_TYPE" -> Right FilterInsertionOrderGoalType "FILTER_LINE_ITEM_PACING_PERCENTAGE" -> Right FilterLineItemPacingPercentage "FILTER_INSERTION_ORDER_GOAL_VALUE" -> Right FilterInsertionOrderGoalValue "FILTER_OMID_CAPABLE" -> Right FilterOmidCapable "FILTER_VENDOR_MEASUREMENT_MODE" -> Right FilterVendorMeasurementMode x -> Left ("Unable to parse PathQueryOptionsFilterFilter from: " <> x) instance ToHttpApiData PathQueryOptionsFilterFilter where toQueryParam = \case FilterUnknown -> "FILTER_UNKNOWN" FilterDate -> "FILTER_DATE" FilterDayOfWeek -> "FILTER_DAY_OF_WEEK" FilterWeek -> "FILTER_WEEK" FilterMonth -> "FILTER_MONTH" FilterYear -> "FILTER_YEAR" FilterTimeOfDay -> "FILTER_TIME_OF_DAY" FilterConversionDelay -> "FILTER_CONVERSION_DELAY" FilterCreativeId -> "FILTER_CREATIVE_ID" FilterCreativeSize -> "FILTER_CREATIVE_SIZE" FilterCreativeType -> "FILTER_CREATIVE_TYPE" FilterExchangeId -> "FILTER_EXCHANGE_ID" FilterAdPosition -> "FILTER_AD_POSITION" FilterPublicInventory -> 
"FILTER_PUBLIC_INVENTORY" FilterInventorySource -> "FILTER_INVENTORY_SOURCE" FilterCity -> "FILTER_CITY" FilterRegion -> "FILTER_REGION" FilterDma -> "FILTER_DMA" FilterCountry -> "FILTER_COUNTRY" FilterSiteId -> "FILTER_SITE_ID" FilterChannelId -> "FILTER_CHANNEL_ID" FilterPartner -> "FILTER_PARTNER" FilterAdvertiser -> "FILTER_ADVERTISER" FilterInsertionOrder -> "FILTER_INSERTION_ORDER" FilterLineItem -> "FILTER_LINE_ITEM" FilterPartnerCurrency -> "FILTER_PARTNER_CURRENCY" FilterAdvertiserCurrency -> "FILTER_ADVERTISER_CURRENCY" FilterAdvertiserTimezone -> "FILTER_ADVERTISER_TIMEZONE" FilterLineItemType -> "FILTER_LINE_ITEM_TYPE" FilterUserList -> "FILTER_USER_LIST" FilterUserListFirstParty -> "FILTER_USER_LIST_FIRST_PARTY" FilterUserListThirdParty -> "FILTER_USER_LIST_THIRD_PARTY" FilterTargetedUserList -> "FILTER_TARGETED_USER_LIST" FilterDataProvider -> "FILTER_DATA_PROVIDER" FilterOrderId -> "FILTER_ORDER_ID" FilterVideoPlayerSize -> "FILTER_VIDEO_PLAYER_SIZE" FilterVideoDurationSeconds -> "FILTER_VIDEO_DURATION_SECONDS" FilterKeyword -> "FILTER_KEYWORD" FilterPageCategory -> "FILTER_PAGE_CATEGORY" FilterCampaignDailyFrequency -> "FILTER_CAMPAIGN_DAILY_FREQUENCY" FilterLineItemDailyFrequency -> "FILTER_LINE_ITEM_DAILY_FREQUENCY" FilterLineItemLifetimeFrequency -> "FILTER_LINE_ITEM_LIFETIME_FREQUENCY" FilterOS -> "FILTER_OS" FilterBrowser -> "FILTER_BROWSER" FilterCarrier -> "FILTER_CARRIER" FilterSiteLanguage -> "FILTER_SITE_LANGUAGE" FilterInventoryFormat -> "FILTER_INVENTORY_FORMAT" FilterZipCode -> "FILTER_ZIP_CODE" FilterVideoRatingTier -> "FILTER_VIDEO_RATING_TIER" FilterVideoFormatSupport -> "FILTER_VIDEO_FORMAT_SUPPORT" FilterVideoSkippableSupport -> "FILTER_VIDEO_SKIPPABLE_SUPPORT" FilterVideoCreativeDuration -> "FILTER_VIDEO_CREATIVE_DURATION" FilterPageLayout -> "FILTER_PAGE_LAYOUT" FilterVideoAdPositionInStream -> "FILTER_VIDEO_AD_POSITION_IN_STREAM" FilterAge -> "FILTER_AGE" FilterGender -> "FILTER_GENDER" FilterQuarter -> "FILTER_QUARTER" FilterTrueviewConversionType -> "FILTER_TRUEVIEW_CONVERSION_TYPE" FilterMobileGeo -> "FILTER_MOBILE_GEO" FilterMraidSupport -> "FILTER_MRAID_SUPPORT" FilterActiveViewExpectedViewability -> "FILTER_ACTIVE_VIEW_EXPECTED_VIEWABILITY" FilterVideoCreativeDurationSkippable -> "FILTER_VIDEO_CREATIVE_DURATION_SKIPPABLE" FilterNielsenCountryCode -> "FILTER_NIELSEN_COUNTRY_CODE" FilterNielsenDeviceId -> "FILTER_NIELSEN_DEVICE_ID" FilterNielsenGender -> "FILTER_NIELSEN_GENDER" FilterNielsenAge -> "FILTER_NIELSEN_AGE" FilterInventorySourceType -> "FILTER_INVENTORY_SOURCE_TYPE" FilterCreativeWidth -> "FILTER_CREATIVE_WIDTH" FilterCreativeHeight -> "FILTER_CREATIVE_HEIGHT" FilterDfpOrderId -> "FILTER_DFP_ORDER_ID" FilterTrueviewAge -> "FILTER_TRUEVIEW_AGE" FilterTrueviewGender -> "FILTER_TRUEVIEW_GENDER" FilterTrueviewParentalStatus -> "FILTER_TRUEVIEW_PARENTAL_STATUS" FilterTrueviewRemarketingList -> "FILTER_TRUEVIEW_REMARKETING_LIST" FilterTrueviewInterest -> "FILTER_TRUEVIEW_INTEREST" FilterTrueviewAdGroupId -> "FILTER_TRUEVIEW_AD_GROUP_ID" FilterTrueviewAdGroupAdId -> "FILTER_TRUEVIEW_AD_GROUP_AD_ID" FilterTrueviewIarLanguage -> "FILTER_TRUEVIEW_IAR_LANGUAGE" FilterTrueviewIarGender -> "FILTER_TRUEVIEW_IAR_GENDER" FilterTrueviewIarAge -> "FILTER_TRUEVIEW_IAR_AGE" FilterTrueviewIarCategory -> "FILTER_TRUEVIEW_IAR_CATEGORY" FilterTrueviewIarCountry -> "FILTER_TRUEVIEW_IAR_COUNTRY" FilterTrueviewIarCity -> "FILTER_TRUEVIEW_IAR_CITY" FilterTrueviewIarRegion -> "FILTER_TRUEVIEW_IAR_REGION" FilterTrueviewIarZipcode -> "FILTER_TRUEVIEW_IAR_ZIPCODE" 
FilterTrueviewIarRemarketingList -> "FILTER_TRUEVIEW_IAR_REMARKETING_LIST" FilterTrueviewIarInterest -> "FILTER_TRUEVIEW_IAR_INTEREST" FilterTrueviewIarParentalStatus -> "FILTER_TRUEVIEW_IAR_PARENTAL_STATUS" FilterTrueviewIarTimeOfDay -> "FILTER_TRUEVIEW_IAR_TIME_OF_DAY" FilterTrueviewCustomAffinity -> "FILTER_TRUEVIEW_CUSTOM_AFFINITY" FilterTrueviewCategory -> "FILTER_TRUEVIEW_CATEGORY" FilterTrueviewKeyword -> "FILTER_TRUEVIEW_KEYWORD" FilterTrueviewPlacement -> "FILTER_TRUEVIEW_PLACEMENT" FilterTrueviewURL -> "FILTER_TRUEVIEW_URL" FilterTrueviewCountry -> "FILTER_TRUEVIEW_COUNTRY" FilterTrueviewRegion -> "FILTER_TRUEVIEW_REGION" FilterTrueviewCity -> "FILTER_TRUEVIEW_CITY" FilterTrueviewDma -> "FILTER_TRUEVIEW_DMA" FilterTrueviewZipcode -> "FILTER_TRUEVIEW_ZIPCODE" FilterNotSupported -> "FILTER_NOT_SUPPORTED" FilterMediaPlan -> "FILTER_MEDIA_PLAN" FilterTrueviewIarYouTubeChannel -> "FILTER_TRUEVIEW_IAR_YOUTUBE_CHANNEL" FilterTrueviewIarYouTubeVideo -> "FILTER_TRUEVIEW_IAR_YOUTUBE_VIDEO" FilterSkippableSupport -> "FILTER_SKIPPABLE_SUPPORT" FilterCompanionCreativeId -> "FILTER_COMPANION_CREATIVE_ID" FilterBudgetSegmentDescription -> "FILTER_BUDGET_SEGMENT_DESCRIPTION" FilterFloodlightActivityId -> "FILTER_FLOODLIGHT_ACTIVITY_ID" FilterDeviceModel -> "FILTER_DEVICE_MODEL" FilterDeviceMake -> "FILTER_DEVICE_MAKE" FilterDeviceType -> "FILTER_DEVICE_TYPE" FilterCreativeAttribute -> "FILTER_CREATIVE_ATTRIBUTE" FilterInventoryCommitmentType -> "FILTER_INVENTORY_COMMITMENT_TYPE" FilterInventoryRateType -> "FILTER_INVENTORY_RATE_TYPE" FilterInventoryDeliveryMethod -> "FILTER_INVENTORY_DELIVERY_METHOD" FilterInventorySourceExternalId -> "FILTER_INVENTORY_SOURCE_EXTERNAL_ID" FilterAuthorizedSellerState -> "FILTER_AUTHORIZED_SELLER_STATE" FilterVideoDurationSecondsRange -> "FILTER_VIDEO_DURATION_SECONDS_RANGE" FilterPartnerName -> "FILTER_PARTNER_NAME" FilterPartnerStatus -> "FILTER_PARTNER_STATUS" FilterAdvertiserName -> "FILTER_ADVERTISER_NAME" FilterAdvertiserIntegrationCode -> "FILTER_ADVERTISER_INTEGRATION_CODE" FilterAdvertiserIntegrationStatus -> "FILTER_ADVERTISER_INTEGRATION_STATUS" FilterCarrierName -> "FILTER_CARRIER_NAME" FilterChannelName -> "FILTER_CHANNEL_NAME" FilterCityName -> "FILTER_CITY_NAME" FilterCompanionCreativeName -> "FILTER_COMPANION_CREATIVE_NAME" FilterUserListFirstPartyName -> "FILTER_USER_LIST_FIRST_PARTY_NAME" FilterUserListThirdPartyName -> "FILTER_USER_LIST_THIRD_PARTY_NAME" FilterNielsenReStatementDate -> "FILTER_NIELSEN_RESTATEMENT_DATE" FilterNielsenDateRange -> "FILTER_NIELSEN_DATE_RANGE" FilterInsertionOrderName -> "FILTER_INSERTION_ORDER_NAME" FilterRegionName -> "FILTER_REGION_NAME" FilterDmaName -> "FILTER_DMA_NAME" FilterTrueviewIarRegionName -> "FILTER_TRUEVIEW_IAR_REGION_NAME" FilterTrueviewDmaName -> "FILTER_TRUEVIEW_DMA_NAME" FilterTrueviewRegionName -> "FILTER_TRUEVIEW_REGION_NAME" FilterActiveViewCustomMetricId -> "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_ID" FilterActiveViewCustomMetricName -> "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_NAME" FilterAdType -> "FILTER_AD_TYPE" FilterAlgorithm -> "FILTER_ALGORITHM" FilterAlgorithmId -> "FILTER_ALGORITHM_ID" FilterAmpPageRequest -> "FILTER_AMP_PAGE_REQUEST" FilterAnonymousInventoryModeling -> "FILTER_ANONYMOUS_INVENTORY_MODELING" FilterAppURL -> "FILTER_APP_URL" FilterAppURLExcluded -> "FILTER_APP_URL_EXCLUDED" FilterAttributedUserList -> "FILTER_ATTRIBUTED_USERLIST" FilterAttributedUserListCost -> "FILTER_ATTRIBUTED_USERLIST_COST" FilterAttributedUserListType -> "FILTER_ATTRIBUTED_USERLIST_TYPE" FilterAttributionModel -> 
"FILTER_ATTRIBUTION_MODEL" FilterAudienceList -> "FILTER_AUDIENCE_LIST" FilterAudienceListCost -> "FILTER_AUDIENCE_LIST_COST" FilterAudienceListType -> "FILTER_AUDIENCE_LIST_TYPE" FilterAudienceName -> "FILTER_AUDIENCE_NAME" FilterAudienceType -> "FILTER_AUDIENCE_TYPE" FilterBillableOutcome -> "FILTER_BILLABLE_OUTCOME" FilterBrandLiftType -> "FILTER_BRAND_LIFT_TYPE" FilterChannelType -> "FILTER_CHANNEL_TYPE" FilterCmPlacementId -> "FILTER_CM_PLACEMENT_ID" FilterConversionSource -> "FILTER_CONVERSION_SOURCE" FilterConversionSourceId -> "FILTER_CONVERSION_SOURCE_ID" FilterCountryId -> "FILTER_COUNTRY_ID" FilterCreative -> "FILTER_CREATIVE" FilterCreativeAsset -> "FILTER_CREATIVE_ASSET" FilterCreativeIntegrationCode -> "FILTER_CREATIVE_INTEGRATION_CODE" FilterCreativeRenderedInAmp -> "FILTER_CREATIVE_RENDERED_IN_AMP" FilterCreativeSource -> "FILTER_CREATIVE_SOURCE" FilterCreativeStatus -> "FILTER_CREATIVE_STATUS" FilterDataProviderName -> "FILTER_DATA_PROVIDER_NAME" FilterDetailedDemographics -> "FILTER_DETAILED_DEMOGRAPHICS" FilterDetailedDemographicsId -> "FILTER_DETAILED_DEMOGRAPHICS_ID" FilterDevice -> "FILTER_DEVICE" FilterGamInsertionOrder -> "FILTER_GAM_INSERTION_ORDER" FilterGamLineItem -> "FILTER_GAM_LINE_ITEM" FilterGamLineItemId -> "FILTER_GAM_LINE_ITEM_ID" FilterDigitalContentLabel -> "FILTER_DIGITAL_CONTENT_LABEL" FilterDomain -> "FILTER_DOMAIN" FilterEligibleCookiesOnFirstPartyAudienceList -> "FILTER_ELIGIBLE_COOKIES_ON_FIRST_PARTY_AUDIENCE_LIST" FilterEligibleCookiesOnThirdPartyAudienceListAndInterest -> "FILTER_ELIGIBLE_COOKIES_ON_THIRD_PARTY_AUDIENCE_LIST_AND_INTEREST" FilterExchange -> "FILTER_EXCHANGE" FilterExchangeCode -> "FILTER_EXCHANGE_CODE" FilterExtension -> "FILTER_EXTENSION" FilterExtensionStatus -> "FILTER_EXTENSION_STATUS" FilterExtensionType -> "FILTER_EXTENSION_TYPE" FilterFirstPartyAudienceListCost -> "FILTER_FIRST_PARTY_AUDIENCE_LIST_COST" FilterFirstPartyAudienceListType -> "FILTER_FIRST_PARTY_AUDIENCE_LIST_TYPE" FilterFloodlightActivity -> "FILTER_FLOODLIGHT_ACTIVITY" FilterFormat -> "FILTER_FORMAT" FilterGmailAge -> "FILTER_GMAIL_AGE" FilterGmailCity -> "FILTER_GMAIL_CITY" FilterGmailCountry -> "FILTER_GMAIL_COUNTRY" FilterGmailCountryName -> "FILTER_GMAIL_COUNTRY_NAME" FilterGmailDeviceType -> "FILTER_GMAIL_DEVICE_TYPE" FilterGmailDeviceTypeName -> "FILTER_GMAIL_DEVICE_TYPE_NAME" FilterGmailGender -> "FILTER_GMAIL_GENDER" FilterGmailRegion -> "FILTER_GMAIL_REGION" FilterGmailRemarketingList -> "FILTER_GMAIL_REMARKETING_LIST" FilterHouseholdIncome -> "FILTER_HOUSEHOLD_INCOME" FilterImpressionCountingMethod -> "FILTER_IMPRESSION_COUNTING_METHOD" FilterYouTubeProgrammaticGuaranteedInsertionOrder -> "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_INSERTION_ORDER" FilterInsertionOrderIntegrationCode -> "FILTER_INSERTION_ORDER_INTEGRATION_CODE" FilterInsertionOrderStatus -> "FILTER_INSERTION_ORDER_STATUS" FilterInterest -> "FILTER_INTEREST" FilterInventorySourceGroup -> "FILTER_INVENTORY_SOURCE_GROUP" FilterInventorySourceGroupId -> "FILTER_INVENTORY_SOURCE_GROUP_ID" FilterInventorySourceId -> "FILTER_INVENTORY_SOURCE_ID" FilterInventorySourceName -> "FILTER_INVENTORY_SOURCE_NAME" FilterLifeEvent -> "FILTER_LIFE_EVENT" FilterLifeEvents -> "FILTER_LIFE_EVENTS" FilterLineItemIntegrationCode -> "FILTER_LINE_ITEM_INTEGRATION_CODE" FilterLineItemName -> "FILTER_LINE_ITEM_NAME" FilterLineItemStatus -> "FILTER_LINE_ITEM_STATUS" FilterMatchRatio -> "FILTER_MATCH_RATIO" FilterMeasurementSource -> "FILTER_MEASUREMENT_SOURCE" FilterMediaPlanName -> "FILTER_MEDIA_PLAN_NAME" 
FilterParentalStatus -> "FILTER_PARENTAL_STATUS" FilterPlacementAllYouTubeChannels -> "FILTER_PLACEMENT_ALL_YOUTUBE_CHANNELS" FilterPlatform -> "FILTER_PLATFORM" FilterPlaybackMethod -> "FILTER_PLAYBACK_METHOD" FilterPositionInContent -> "FILTER_POSITION_IN_CONTENT" FilterPublisherProperty -> "FILTER_PUBLISHER_PROPERTY" FilterPublisherPropertyId -> "FILTER_PUBLISHER_PROPERTY_ID" FilterPublisherPropertySection -> "FILTER_PUBLISHER_PROPERTY_SECTION" FilterPublisherPropertySectionId -> "FILTER_PUBLISHER_PROPERTY_SECTION_ID" FilterRefundReason -> "FILTER_REFUND_REASON" FilterRemarketingList -> "FILTER_REMARKETING_LIST" FilterRewarded -> "FILTER_REWARDED" FilterSensitiveCategory -> "FILTER_SENSITIVE_CATEGORY" FilterServedPixelDensity -> "FILTER_SERVED_PIXEL_DENSITY" FilterTargetedDataProviders -> "FILTER_TARGETED_DATA_PROVIDERS" FilterThirdPartyAudienceListCost -> "FILTER_THIRD_PARTY_AUDIENCE_LIST_COST" FilterThirdPartyAudienceListType -> "FILTER_THIRD_PARTY_AUDIENCE_LIST_TYPE" FilterTrueviewAd -> "FILTER_TRUEVIEW_AD" FilterTrueviewAdGroup -> "FILTER_TRUEVIEW_AD_GROUP" FilterTrueviewDetailedDemographics -> "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS" FilterTrueviewDetailedDemographicsId -> "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS_ID" FilterTrueviewHouseholdIncome -> "FILTER_TRUEVIEW_HOUSEHOLD_INCOME" FilterTrueviewIarCountryName -> "FILTER_TRUEVIEW_IAR_COUNTRY_NAME" FilterTrueviewRemarketingListName -> "FILTER_TRUEVIEW_REMARKETING_LIST_NAME" FilterVariantId -> "FILTER_VARIANT_ID" FilterVariantName -> "FILTER_VARIANT_NAME" FilterVariantVersion -> "FILTER_VARIANT_VERSION" FilterVerificationVideoPlayerSize -> "FILTER_VERIFICATION_VIDEO_PLAYER_SIZE" FilterVerificationVideoPosition -> "FILTER_VERIFICATION_VIDEO_POSITION" FilterVideoCompanionCreativeSize -> "FILTER_VIDEO_COMPANION_CREATIVE_SIZE" FilterVideoContinuousPlay -> "FILTER_VIDEO_CONTINUOUS_PLAY" FilterVideoDuration -> "FILTER_VIDEO_DURATION" FilterYouTubeAdaptedAudienceList -> "FILTER_YOUTUBE_ADAPTED_AUDIENCE_LIST" FilterYouTubeAdVideo -> "FILTER_YOUTUBE_AD_VIDEO" FilterYouTubeAdVideoId -> "FILTER_YOUTUBE_AD_VIDEO_ID" FilterYouTubeChannel -> "FILTER_YOUTUBE_CHANNEL" FilterYouTubeProgrammaticGuaranteedAdvertiser -> "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_ADVERTISER" FilterYouTubeProgrammaticGuaranteedPartner -> "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_PARTNER" FilterYouTubeVideo -> "FILTER_YOUTUBE_VIDEO" FilterZipPostalCode -> "FILTER_ZIP_POSTAL_CODE" FilterPlacementNameAllYouTubeChannels -> "FILTER_PLACEMENT_NAME_ALL_YOUTUBE_CHANNELS" FilterTrueviewPlacementId -> "FILTER_TRUEVIEW_PLACEMENT_ID" FilterPathPatternId -> "FILTER_PATH_PATTERN_ID" FilterPathEventIndex -> "FILTER_PATH_EVENT_INDEX" FilterEventType -> "FILTER_EVENT_TYPE" FilterChannelGrouping -> "FILTER_CHANNEL_GROUPING" FilterOmSdkAvailable -> "FILTER_OM_SDK_AVAILABLE" FilterDataSource -> "FILTER_DATA_SOURCE" FilterCM360PlacementId -> "FILTER_CM360_PLACEMENT_ID" FilterTrueviewClickTypeName -> "FILTER_TRUEVIEW_CLICK_TYPE_NAME" FilterTrueviewAdTypeName -> "FILTER_TRUEVIEW_AD_TYPE_NAME" FilterVideoContentDuration -> "FILTER_VIDEO_CONTENT_DURATION" FilterMatchedGenreTarget -> "FILTER_MATCHED_GENRE_TARGET" FilterVideoContentLiveStream -> "FILTER_VIDEO_CONTENT_LIVE_STREAM" FilterBudgetSegmentType -> "FILTER_BUDGET_SEGMENT_TYPE" FilterBudgetSegmentBudget -> "FILTER_BUDGET_SEGMENT_BUDGET" FilterBudgetSegmentStartDate -> "FILTER_BUDGET_SEGMENT_START_DATE" FilterBudgetSegmentEndDate -> "FILTER_BUDGET_SEGMENT_END_DATE" FilterBudgetSegmentPacingPercentage -> "FILTER_BUDGET_SEGMENT_PACING_PERCENTAGE" 
FilterLineItemBudget -> "FILTER_LINE_ITEM_BUDGET" FilterLineItemStartDate -> "FILTER_LINE_ITEM_START_DATE" FilterLineItemEndDate -> "FILTER_LINE_ITEM_END_DATE" FilterInsertionOrderGoalType -> "FILTER_INSERTION_ORDER_GOAL_TYPE" FilterLineItemPacingPercentage -> "FILTER_LINE_ITEM_PACING_PERCENTAGE" FilterInsertionOrderGoalValue -> "FILTER_INSERTION_ORDER_GOAL_VALUE" FilterOmidCapable -> "FILTER_OMID_CAPABLE" FilterVendorMeasurementMode -> "FILTER_VENDOR_MEASUREMENT_MODE" instance FromJSON PathQueryOptionsFilterFilter where parseJSON = parseJSONText "PathQueryOptionsFilterFilter" instance ToJSON PathQueryOptionsFilterFilter where toJSON = toJSONText -- | Report type. data ParametersType = TypeGeneral -- ^ @TYPE_GENERAL@ | TypeAudiencePerformance -- ^ @TYPE_AUDIENCE_PERFORMANCE@ | TypeInventoryAvailability -- ^ @TYPE_INVENTORY_AVAILABILITY@ | TypeKeyword -- ^ @TYPE_KEYWORD@ | TypePixelLoad -- ^ @TYPE_PIXEL_LOAD@ | TypeAudienceComPosition -- ^ @TYPE_AUDIENCE_COMPOSITION@ | TypeCrossPartner -- ^ @TYPE_CROSS_PARTNER@ | TypePageCategory -- ^ @TYPE_PAGE_CATEGORY@ | TypeThirdPartyDataProvider -- ^ @TYPE_THIRD_PARTY_DATA_PROVIDER@ | TypeCrossPartnerThirdPartyDataProvider -- ^ @TYPE_CROSS_PARTNER_THIRD_PARTY_DATA_PROVIDER@ | TypeClientSafe -- ^ @TYPE_CLIENT_SAFE@ | TypeOrderId -- ^ @TYPE_ORDER_ID@ | TypeFee -- ^ @TYPE_FEE@ | TypeCrossFee -- ^ @TYPE_CROSS_FEE@ | TypeActiveGrp -- ^ @TYPE_ACTIVE_GRP@ | TypeYouTubeVertical -- ^ @TYPE_YOUTUBE_VERTICAL@ | TypeComscoreVce -- ^ @TYPE_COMSCORE_VCE@ | TypeTrueview -- ^ @TYPE_TRUEVIEW@ | TypeNielsenAudienceProFile -- ^ @TYPE_NIELSEN_AUDIENCE_PROFILE@ | TypeNielsenDailyReachBuild -- ^ @TYPE_NIELSEN_DAILY_REACH_BUILD@ | TypeNielsenSite -- ^ @TYPE_NIELSEN_SITE@ | TypeReachAndFrequency -- ^ @TYPE_REACH_AND_FREQUENCY@ | TypeEstimatedConversion -- ^ @TYPE_ESTIMATED_CONVERSION@ | TypeVerification -- ^ @TYPE_VERIFICATION@ | TypeTrueviewIar -- ^ @TYPE_TRUEVIEW_IAR@ | TypeNielsenOnlineGlobalMarket -- ^ @TYPE_NIELSEN_ONLINE_GLOBAL_MARKET@ | TypePetraNielsenAudienceProFile -- ^ @TYPE_PETRA_NIELSEN_AUDIENCE_PROFILE@ | TypePetraNielsenDailyReachBuild -- ^ @TYPE_PETRA_NIELSEN_DAILY_REACH_BUILD@ | TypePetraNielsenOnlineGlobalMarket -- ^ @TYPE_PETRA_NIELSEN_ONLINE_GLOBAL_MARKET@ | TypeNotSupported -- ^ @TYPE_NOT_SUPPORTED@ | TypeReachAudience -- ^ @TYPE_REACH_AUDIENCE@ | TypeLinearTvSearchLift -- ^ @TYPE_LINEAR_TV_SEARCH_LIFT@ | TypePath -- ^ @TYPE_PATH@ | TypePathAttribution -- ^ @TYPE_PATH_ATTRIBUTION@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable ParametersType instance FromHttpApiData ParametersType where parseQueryParam = \case "TYPE_GENERAL" -> Right TypeGeneral "TYPE_AUDIENCE_PERFORMANCE" -> Right TypeAudiencePerformance "TYPE_INVENTORY_AVAILABILITY" -> Right TypeInventoryAvailability "TYPE_KEYWORD" -> Right TypeKeyword "TYPE_PIXEL_LOAD" -> Right TypePixelLoad "TYPE_AUDIENCE_COMPOSITION" -> Right TypeAudienceComPosition "TYPE_CROSS_PARTNER" -> Right TypeCrossPartner "TYPE_PAGE_CATEGORY" -> Right TypePageCategory "TYPE_THIRD_PARTY_DATA_PROVIDER" -> Right TypeThirdPartyDataProvider "TYPE_CROSS_PARTNER_THIRD_PARTY_DATA_PROVIDER" -> Right TypeCrossPartnerThirdPartyDataProvider "TYPE_CLIENT_SAFE" -> Right TypeClientSafe "TYPE_ORDER_ID" -> Right TypeOrderId "TYPE_FEE" -> Right TypeFee "TYPE_CROSS_FEE" -> Right TypeCrossFee "TYPE_ACTIVE_GRP" -> Right TypeActiveGrp "TYPE_YOUTUBE_VERTICAL" -> Right TypeYouTubeVertical "TYPE_COMSCORE_VCE" -> Right TypeComscoreVce "TYPE_TRUEVIEW" -> Right TypeTrueview "TYPE_NIELSEN_AUDIENCE_PROFILE" -> Right 
TypeNielsenAudienceProFile "TYPE_NIELSEN_DAILY_REACH_BUILD" -> Right TypeNielsenDailyReachBuild "TYPE_NIELSEN_SITE" -> Right TypeNielsenSite "TYPE_REACH_AND_FREQUENCY" -> Right TypeReachAndFrequency "TYPE_ESTIMATED_CONVERSION" -> Right TypeEstimatedConversion "TYPE_VERIFICATION" -> Right TypeVerification "TYPE_TRUEVIEW_IAR" -> Right TypeTrueviewIar "TYPE_NIELSEN_ONLINE_GLOBAL_MARKET" -> Right TypeNielsenOnlineGlobalMarket "TYPE_PETRA_NIELSEN_AUDIENCE_PROFILE" -> Right TypePetraNielsenAudienceProFile "TYPE_PETRA_NIELSEN_DAILY_REACH_BUILD" -> Right TypePetraNielsenDailyReachBuild "TYPE_PETRA_NIELSEN_ONLINE_GLOBAL_MARKET" -> Right TypePetraNielsenOnlineGlobalMarket "TYPE_NOT_SUPPORTED" -> Right TypeNotSupported "TYPE_REACH_AUDIENCE" -> Right TypeReachAudience "TYPE_LINEAR_TV_SEARCH_LIFT" -> Right TypeLinearTvSearchLift "TYPE_PATH" -> Right TypePath "TYPE_PATH_ATTRIBUTION" -> Right TypePathAttribution x -> Left ("Unable to parse ParametersType from: " <> x) instance ToHttpApiData ParametersType where toQueryParam = \case TypeGeneral -> "TYPE_GENERAL" TypeAudiencePerformance -> "TYPE_AUDIENCE_PERFORMANCE" TypeInventoryAvailability -> "TYPE_INVENTORY_AVAILABILITY" TypeKeyword -> "TYPE_KEYWORD" TypePixelLoad -> "TYPE_PIXEL_LOAD" TypeAudienceComPosition -> "TYPE_AUDIENCE_COMPOSITION" TypeCrossPartner -> "TYPE_CROSS_PARTNER" TypePageCategory -> "TYPE_PAGE_CATEGORY" TypeThirdPartyDataProvider -> "TYPE_THIRD_PARTY_DATA_PROVIDER" TypeCrossPartnerThirdPartyDataProvider -> "TYPE_CROSS_PARTNER_THIRD_PARTY_DATA_PROVIDER" TypeClientSafe -> "TYPE_CLIENT_SAFE" TypeOrderId -> "TYPE_ORDER_ID" TypeFee -> "TYPE_FEE" TypeCrossFee -> "TYPE_CROSS_FEE" TypeActiveGrp -> "TYPE_ACTIVE_GRP" TypeYouTubeVertical -> "TYPE_YOUTUBE_VERTICAL" TypeComscoreVce -> "TYPE_COMSCORE_VCE" TypeTrueview -> "TYPE_TRUEVIEW" TypeNielsenAudienceProFile -> "TYPE_NIELSEN_AUDIENCE_PROFILE" TypeNielsenDailyReachBuild -> "TYPE_NIELSEN_DAILY_REACH_BUILD" TypeNielsenSite -> "TYPE_NIELSEN_SITE" TypeReachAndFrequency -> "TYPE_REACH_AND_FREQUENCY" TypeEstimatedConversion -> "TYPE_ESTIMATED_CONVERSION" TypeVerification -> "TYPE_VERIFICATION" TypeTrueviewIar -> "TYPE_TRUEVIEW_IAR" TypeNielsenOnlineGlobalMarket -> "TYPE_NIELSEN_ONLINE_GLOBAL_MARKET" TypePetraNielsenAudienceProFile -> "TYPE_PETRA_NIELSEN_AUDIENCE_PROFILE" TypePetraNielsenDailyReachBuild -> "TYPE_PETRA_NIELSEN_DAILY_REACH_BUILD" TypePetraNielsenOnlineGlobalMarket -> "TYPE_PETRA_NIELSEN_ONLINE_GLOBAL_MARKET" TypeNotSupported -> "TYPE_NOT_SUPPORTED" TypeReachAudience -> "TYPE_REACH_AUDIENCE" TypeLinearTvSearchLift -> "TYPE_LINEAR_TV_SEARCH_LIFT" TypePath -> "TYPE_PATH" TypePathAttribution -> "TYPE_PATH_ATTRIBUTION" instance FromJSON ParametersType where parseJSON = parseJSONText "ParametersType" instance ToJSON ParametersType where toJSON = toJSONText -- | Range of report data. 
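--
-- The constructors below map one-to-one onto the wire strings via the
-- 'FromHttpApiData' / 'ToHttpApiData' instances that follow. As an
-- illustrative sketch only (not part of the generated API; it assumes
-- @OverloadedStrings@ for the 'Text' literal), the mapping round-trips:
--
-- >>> toQueryParam QMDRLast30Days
-- "LAST_30_DAYS"
-- >>> parseQueryParam "LAST_30_DAYS" :: Either Text QueryMetadataDataRange
-- Right QMDRLast30Days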
data QueryMetadataDataRange = QMDRCustomDates -- ^ @CUSTOM_DATES@ | QMDRCurrentDay -- ^ @CURRENT_DAY@ | QMDRPreviousDay -- ^ @PREVIOUS_DAY@ | QMDRWeekToDate -- ^ @WEEK_TO_DATE@ | QMDRMonthToDate -- ^ @MONTH_TO_DATE@ | QMDRQuarterToDate -- ^ @QUARTER_TO_DATE@ | QMDRYearToDate -- ^ @YEAR_TO_DATE@ | QMDRPreviousWeek -- ^ @PREVIOUS_WEEK@ | QMDRPreviousHalfMonth -- ^ @PREVIOUS_HALF_MONTH@ | QMDRPreviousMonth -- ^ @PREVIOUS_MONTH@ | QMDRPreviousQuarter -- ^ @PREVIOUS_QUARTER@ | QMDRPreviousYear -- ^ @PREVIOUS_YEAR@ | QMDRLast7Days -- ^ @LAST_7_DAYS@ | QMDRLast30Days -- ^ @LAST_30_DAYS@ | QMDRLast90Days -- ^ @LAST_90_DAYS@ | QMDRLast365Days -- ^ @LAST_365_DAYS@ | QMDRAllTime -- ^ @ALL_TIME@ | QMDRLast14Days -- ^ @LAST_14_DAYS@ | QMDRTypeNotSupported -- ^ @TYPE_NOT_SUPPORTED@ | QMDRLast60Days -- ^ @LAST_60_DAYS@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable QueryMetadataDataRange instance FromHttpApiData QueryMetadataDataRange where parseQueryParam = \case "CUSTOM_DATES" -> Right QMDRCustomDates "CURRENT_DAY" -> Right QMDRCurrentDay "PREVIOUS_DAY" -> Right QMDRPreviousDay "WEEK_TO_DATE" -> Right QMDRWeekToDate "MONTH_TO_DATE" -> Right QMDRMonthToDate "QUARTER_TO_DATE" -> Right QMDRQuarterToDate "YEAR_TO_DATE" -> Right QMDRYearToDate "PREVIOUS_WEEK" -> Right QMDRPreviousWeek "PREVIOUS_HALF_MONTH" -> Right QMDRPreviousHalfMonth "PREVIOUS_MONTH" -> Right QMDRPreviousMonth "PREVIOUS_QUARTER" -> Right QMDRPreviousQuarter "PREVIOUS_YEAR" -> Right QMDRPreviousYear "LAST_7_DAYS" -> Right QMDRLast7Days "LAST_30_DAYS" -> Right QMDRLast30Days "LAST_90_DAYS" -> Right QMDRLast90Days "LAST_365_DAYS" -> Right QMDRLast365Days "ALL_TIME" -> Right QMDRAllTime "LAST_14_DAYS" -> Right QMDRLast14Days "TYPE_NOT_SUPPORTED" -> Right QMDRTypeNotSupported "LAST_60_DAYS" -> Right QMDRLast60Days x -> Left ("Unable to parse QueryMetadataDataRange from: " <> x) instance ToHttpApiData QueryMetadataDataRange where toQueryParam = \case QMDRCustomDates -> "CUSTOM_DATES" QMDRCurrentDay -> "CURRENT_DAY" QMDRPreviousDay -> "PREVIOUS_DAY" QMDRWeekToDate -> "WEEK_TO_DATE" QMDRMonthToDate -> "MONTH_TO_DATE" QMDRQuarterToDate -> "QUARTER_TO_DATE" QMDRYearToDate -> "YEAR_TO_DATE" QMDRPreviousWeek -> "PREVIOUS_WEEK" QMDRPreviousHalfMonth -> "PREVIOUS_HALF_MONTH" QMDRPreviousMonth -> "PREVIOUS_MONTH" QMDRPreviousQuarter -> "PREVIOUS_QUARTER" QMDRPreviousYear -> "PREVIOUS_YEAR" QMDRLast7Days -> "LAST_7_DAYS" QMDRLast30Days -> "LAST_30_DAYS" QMDRLast90Days -> "LAST_90_DAYS" QMDRLast365Days -> "LAST_365_DAYS" QMDRAllTime -> "ALL_TIME" QMDRLast14Days -> "LAST_14_DAYS" QMDRTypeNotSupported -> "TYPE_NOT_SUPPORTED" QMDRLast60Days -> "LAST_60_DAYS" instance FromJSON QueryMetadataDataRange where parseJSON = parseJSONText "QueryMetadataDataRange" instance ToJSON QueryMetadataDataRange where toJSON = toJSONText data ParametersMetricsItem = MetricUnknown -- ^ @METRIC_UNKNOWN@ | MetricImpressions -- ^ @METRIC_IMPRESSIONS@ | MetricClicks -- ^ @METRIC_CLICKS@ | MetricLastImpressions -- ^ @METRIC_LAST_IMPRESSIONS@ | MetricLastClicks -- ^ @METRIC_LAST_CLICKS@ | MetricTotalConversions -- ^ @METRIC_TOTAL_CONVERSIONS@ | MetricMediaCostAdvertiser -- ^ @METRIC_MEDIA_COST_ADVERTISER@ | MetricMediaCostUsd -- ^ @METRIC_MEDIA_COST_USD@ | MetricMediaCostPartner -- ^ @METRIC_MEDIA_COST_PARTNER@ | MetricDataCostAdvertiser -- ^ @METRIC_DATA_COST_ADVERTISER@ | MetricDataCostUsd -- ^ @METRIC_DATA_COST_USD@ | MetricDataCostPartner -- ^ @METRIC_DATA_COST_PARTNER@ | MetricCpmFEE1Advertiser -- ^ @METRIC_CPM_FEE1_ADVERTISER@ | MetricCpmFEE1Usd 
-- ^ @METRIC_CPM_FEE1_USD@ | MetricCpmFEE1Partner -- ^ @METRIC_CPM_FEE1_PARTNER@ | MetricCpmFEE2Advertiser -- ^ @METRIC_CPM_FEE2_ADVERTISER@ | MetricCpmFEE2Usd -- ^ @METRIC_CPM_FEE2_USD@ | MetricCpmFEE2Partner -- ^ @METRIC_CPM_FEE2_PARTNER@ | MetricMediaFEE1Advertiser -- ^ @METRIC_MEDIA_FEE1_ADVERTISER@ | MetricMediaFEE1Usd -- ^ @METRIC_MEDIA_FEE1_USD@ | MetricMediaFEE1Partner -- ^ @METRIC_MEDIA_FEE1_PARTNER@ | MetricMediaFEE2Advertiser -- ^ @METRIC_MEDIA_FEE2_ADVERTISER@ | MetricMediaFEE2Usd -- ^ @METRIC_MEDIA_FEE2_USD@ | MetricMediaFEE2Partner -- ^ @METRIC_MEDIA_FEE2_PARTNER@ | MetricRevenueAdvertiser -- ^ @METRIC_REVENUE_ADVERTISER@ | MetricRevenueUsd -- ^ @METRIC_REVENUE_USD@ | MetricRevenuePartner -- ^ @METRIC_REVENUE_PARTNER@ | MetricProfitAdvertiser -- ^ @METRIC_PROFIT_ADVERTISER@ | MetricProfitUsd -- ^ @METRIC_PROFIT_USD@ | MetricProfitPartner -- ^ @METRIC_PROFIT_PARTNER@ | MetricProfitMargin -- ^ @METRIC_PROFIT_MARGIN@ | MetricTotalMediaCostUsd -- ^ @METRIC_TOTAL_MEDIA_COST_USD@ | MetricTotalMediaCostPartner -- ^ @METRIC_TOTAL_MEDIA_COST_PARTNER@ | MetricTotalMediaCostAdvertiser -- ^ @METRIC_TOTAL_MEDIA_COST_ADVERTISER@ | MetricBillableCostUsd -- ^ @METRIC_BILLABLE_COST_USD@ | MetricBillableCostPartner -- ^ @METRIC_BILLABLE_COST_PARTNER@ | MetricBillableCostAdvertiser -- ^ @METRIC_BILLABLE_COST_ADVERTISER@ | MetricPlatformFeeUsd -- ^ @METRIC_PLATFORM_FEE_USD@ | MetricPlatformFeePartner -- ^ @METRIC_PLATFORM_FEE_PARTNER@ | MetricPlatformFeeAdvertiser -- ^ @METRIC_PLATFORM_FEE_ADVERTISER@ | MetricVideoCompletionRate -- ^ @METRIC_VIDEO_COMPLETION_RATE@ | MetricProfitEcpmAdvertiser -- ^ @METRIC_PROFIT_ECPM_ADVERTISER@ | MetricProfitEcpmUsd -- ^ @METRIC_PROFIT_ECPM_USD@ | MetricProfitEcpmPartner -- ^ @METRIC_PROFIT_ECPM_PARTNER@ | MetricRevenueEcpmAdvertiser -- ^ @METRIC_REVENUE_ECPM_ADVERTISER@ | MetricRevenueEcpmUsd -- ^ @METRIC_REVENUE_ECPM_USD@ | MetricRevenueEcpmPartner -- ^ @METRIC_REVENUE_ECPM_PARTNER@ | MetricRevenueEcpcAdvertiser -- ^ @METRIC_REVENUE_ECPC_ADVERTISER@ | MetricRevenueEcpcUsd -- ^ @METRIC_REVENUE_ECPC_USD@ | MetricRevenueEcpcPartner -- ^ @METRIC_REVENUE_ECPC_PARTNER@ | MetricRevenueEcpaAdvertiser -- ^ @METRIC_REVENUE_ECPA_ADVERTISER@ | MetricRevenueEcpaUsd -- ^ @METRIC_REVENUE_ECPA_USD@ | MetricRevenueEcpaPartner -- ^ @METRIC_REVENUE_ECPA_PARTNER@ | MetricRevenueEcpapvAdvertiser -- ^ @METRIC_REVENUE_ECPAPV_ADVERTISER@ | MetricRevenueEcpapvUsd -- ^ @METRIC_REVENUE_ECPAPV_USD@ | MetricRevenueEcpapvPartner -- ^ @METRIC_REVENUE_ECPAPV_PARTNER@ | MetricRevenueEcpapcAdvertiser -- ^ @METRIC_REVENUE_ECPAPC_ADVERTISER@ | MetricRevenueEcpapcUsd -- ^ @METRIC_REVENUE_ECPAPC_USD@ | MetricRevenueEcpapcPartner -- ^ @METRIC_REVENUE_ECPAPC_PARTNER@ | MetricMediaCostEcpmAdvertiser -- ^ @METRIC_MEDIA_COST_ECPM_ADVERTISER@ | MetricMediaCostEcpmUsd -- ^ @METRIC_MEDIA_COST_ECPM_USD@ | MetricMediaCostEcpmPartner -- ^ @METRIC_MEDIA_COST_ECPM_PARTNER@ | MetricMediaCostEcpcAdvertiser -- ^ @METRIC_MEDIA_COST_ECPC_ADVERTISER@ | MetricMediaCostEcpcUsd -- ^ @METRIC_MEDIA_COST_ECPC_USD@ | MetricMediaCostEcpcPartner -- ^ @METRIC_MEDIA_COST_ECPC_PARTNER@ | MetricMediaCostEcpaAdvertiser -- ^ @METRIC_MEDIA_COST_ECPA_ADVERTISER@ | MetricMediaCostEcpaUsd -- ^ @METRIC_MEDIA_COST_ECPA_USD@ | MetricMediaCostEcpaPartner -- ^ @METRIC_MEDIA_COST_ECPA_PARTNER@ | MetricMediaCostEcpapvAdvertiser -- ^ @METRIC_MEDIA_COST_ECPAPV_ADVERTISER@ | MetricMediaCostEcpapvUsd -- ^ @METRIC_MEDIA_COST_ECPAPV_USD@ | MetricMediaCostEcpapvPartner -- ^ @METRIC_MEDIA_COST_ECPAPV_PARTNER@ | MetricMediaCostEcpapcAdvertiser -- ^ 
@METRIC_MEDIA_COST_ECPAPC_ADVERTISER@ | MetricMediaCostEcpapcUsd -- ^ @METRIC_MEDIA_COST_ECPAPC_USD@ | MetricMediaCostEcpapcPartner -- ^ @METRIC_MEDIA_COST_ECPAPC_PARTNER@ | MetricTotalMediaCostEcpmAdvertiser -- ^ @METRIC_TOTAL_MEDIA_COST_ECPM_ADVERTISER@ | MetricTotalMediaCostEcpmUsd -- ^ @METRIC_TOTAL_MEDIA_COST_ECPM_USD@ | MetricTotalMediaCostEcpmPartner -- ^ @METRIC_TOTAL_MEDIA_COST_ECPM_PARTNER@ | MetricTotalMediaCostEcpcAdvertiser -- ^ @METRIC_TOTAL_MEDIA_COST_ECPC_ADVERTISER@ | MetricTotalMediaCostEcpcUsd -- ^ @METRIC_TOTAL_MEDIA_COST_ECPC_USD@ | MetricTotalMediaCostEcpcPartner -- ^ @METRIC_TOTAL_MEDIA_COST_ECPC_PARTNER@ | MetricTotalMediaCostEcpaAdvertiser -- ^ @METRIC_TOTAL_MEDIA_COST_ECPA_ADVERTISER@ | MetricTotalMediaCostEcpaUsd -- ^ @METRIC_TOTAL_MEDIA_COST_ECPA_USD@ | MetricTotalMediaCostEcpaPartner -- ^ @METRIC_TOTAL_MEDIA_COST_ECPA_PARTNER@ | MetricTotalMediaCostEcpapvAdvertiser -- ^ @METRIC_TOTAL_MEDIA_COST_ECPAPV_ADVERTISER@ | MetricTotalMediaCostEcpapvUsd -- ^ @METRIC_TOTAL_MEDIA_COST_ECPAPV_USD@ | MetricTotalMediaCostEcpapvPartner -- ^ @METRIC_TOTAL_MEDIA_COST_ECPAPV_PARTNER@ | MetricTotalMediaCostEcpapcAdvertiser -- ^ @METRIC_TOTAL_MEDIA_COST_ECPAPC_ADVERTISER@ | MetricTotalMediaCostEcpapcUsd -- ^ @METRIC_TOTAL_MEDIA_COST_ECPAPC_USD@ | MetricTotalMediaCostEcpapcPartner -- ^ @METRIC_TOTAL_MEDIA_COST_ECPAPC_PARTNER@ | MetricRichMediaVideoPlays -- ^ @METRIC_RICH_MEDIA_VIDEO_PLAYS@ | MetricRichMediaVideoCompletions -- ^ @METRIC_RICH_MEDIA_VIDEO_COMPLETIONS@ | MetricRichMediaVideoPauses -- ^ @METRIC_RICH_MEDIA_VIDEO_PAUSES@ | MetricRichMediaVideoMutes -- ^ @METRIC_RICH_MEDIA_VIDEO_MUTES@ | MetricRichMediaVideoMidpoints -- ^ @METRIC_RICH_MEDIA_VIDEO_MIDPOINTS@ | MetricRichMediaVideoFullScreens -- ^ @METRIC_RICH_MEDIA_VIDEO_FULL_SCREENS@ | MetricRichMediaVideoFirstQuartileCompletes -- ^ @METRIC_RICH_MEDIA_VIDEO_FIRST_QUARTILE_COMPLETES@ | MetricRichMediaVideoThirdQuartileCompletes -- ^ @METRIC_RICH_MEDIA_VIDEO_THIRD_QUARTILE_COMPLETES@ | MetricClickToPostClickConversionRate -- ^ @METRIC_CLICK_TO_POST_CLICK_CONVERSION_RATE@ | MetricImpressionsToConversionRate -- ^ @METRIC_IMPRESSIONS_TO_CONVERSION_RATE@ | MetricConversionsPerMille -- ^ @METRIC_CONVERSIONS_PER_MILLE@ | MetricCtr -- ^ @METRIC_CTR@ | MetricBidRequests -- ^ @METRIC_BID_REQUESTS@ | MetricUniqueVisitorsCookies -- ^ @METRIC_UNIQUE_VISITORS_COOKIES@ | MetricRevenueEcpcvAdvertiser -- ^ @METRIC_REVENUE_ECPCV_ADVERTISER@ | MetricRevenueEcpcvUsd -- ^ @METRIC_REVENUE_ECPCV_USD@ | MetricRevenueEcpcvPartner -- ^ @METRIC_REVENUE_ECPCV_PARTNER@ | MetricMediaCostEcpcvAdvertiser -- ^ @METRIC_MEDIA_COST_ECPCV_ADVERTISER@ | MetricMediaCostEcpcvUsd -- ^ @METRIC_MEDIA_COST_ECPCV_USD@ | MetricMediaCostEcpcvPartner -- ^ @METRIC_MEDIA_COST_ECPCV_PARTNER@ | MetricTotalMediaCostEcpcvAdvertiser -- ^ @METRIC_TOTAL_MEDIA_COST_ECPCV_ADVERTISER@ | MetricTotalMediaCostEcpcvUsd -- ^ @METRIC_TOTAL_MEDIA_COST_ECPCV_USD@ | MetricTotalMediaCostEcpcvPartner -- ^ @METRIC_TOTAL_MEDIA_COST_ECPCV_PARTNER@ | MetricRichMediaVideoSkips -- ^ @METRIC_RICH_MEDIA_VIDEO_SKIPS@ | MetricFEE2Advertiser -- ^ @METRIC_FEE2_ADVERTISER@ | MetricFEE2Usd -- ^ @METRIC_FEE2_USD@ | MetricFEE2Partner -- ^ @METRIC_FEE2_PARTNER@ | MetricFEE3Advertiser -- ^ @METRIC_FEE3_ADVERTISER@ | MetricFEE3Usd -- ^ @METRIC_FEE3_USD@ | MetricFEE3Partner -- ^ @METRIC_FEE3_PARTNER@ | MetricFEE4Advertiser -- ^ @METRIC_FEE4_ADVERTISER@ | MetricFEE4Usd -- ^ @METRIC_FEE4_USD@ | MetricFEE4Partner -- ^ @METRIC_FEE4_PARTNER@ | MetricFEE5Advertiser -- ^ @METRIC_FEE5_ADVERTISER@ | MetricFEE5Usd -- ^ 
@METRIC_FEE5_USD@ | MetricFEE5Partner -- ^ @METRIC_FEE5_PARTNER@ | MetricFEE6Advertiser -- ^ @METRIC_FEE6_ADVERTISER@ | MetricFEE6Usd -- ^ @METRIC_FEE6_USD@ | MetricFEE6Partner -- ^ @METRIC_FEE6_PARTNER@ | MetricFEE7Advertiser -- ^ @METRIC_FEE7_ADVERTISER@ | MetricFEE7Usd -- ^ @METRIC_FEE7_USD@ | MetricFEE7Partner -- ^ @METRIC_FEE7_PARTNER@ | MetricFEE8Advertiser -- ^ @METRIC_FEE8_ADVERTISER@ | MetricFEE8Usd -- ^ @METRIC_FEE8_USD@ | MetricFEE8Partner -- ^ @METRIC_FEE8_PARTNER@ | MetricFEE9Advertiser -- ^ @METRIC_FEE9_ADVERTISER@ | MetricFEE9Usd -- ^ @METRIC_FEE9_USD@ | MetricFEE9Partner -- ^ @METRIC_FEE9_PARTNER@ | MetricFEE10Advertiser -- ^ @METRIC_FEE10_ADVERTISER@ | MetricFEE10Usd -- ^ @METRIC_FEE10_USD@ | MetricFEE10Partner -- ^ @METRIC_FEE10_PARTNER@ | MetricFEE11Advertiser -- ^ @METRIC_FEE11_ADVERTISER@ | MetricFEE11Usd -- ^ @METRIC_FEE11_USD@ | MetricFEE11Partner -- ^ @METRIC_FEE11_PARTNER@ | MetricFEE12Advertiser -- ^ @METRIC_FEE12_ADVERTISER@ | MetricFEE12Usd -- ^ @METRIC_FEE12_USD@ | MetricFEE12Partner -- ^ @METRIC_FEE12_PARTNER@ | MetricFEE13Advertiser -- ^ @METRIC_FEE13_ADVERTISER@ | MetricFEE13Usd -- ^ @METRIC_FEE13_USD@ | MetricFEE13Partner -- ^ @METRIC_FEE13_PARTNER@ | MetricFEE14Advertiser -- ^ @METRIC_FEE14_ADVERTISER@ | MetricFEE14Usd -- ^ @METRIC_FEE14_USD@ | MetricFEE14Partner -- ^ @METRIC_FEE14_PARTNER@ | MetricFEE15Advertiser -- ^ @METRIC_FEE15_ADVERTISER@ | MetricFEE15Usd -- ^ @METRIC_FEE15_USD@ | MetricFEE15Partner -- ^ @METRIC_FEE15_PARTNER@ | MetricCpmFEE3Advertiser -- ^ @METRIC_CPM_FEE3_ADVERTISER@ | MetricCpmFEE3Usd -- ^ @METRIC_CPM_FEE3_USD@ | MetricCpmFEE3Partner -- ^ @METRIC_CPM_FEE3_PARTNER@ | MetricCpmFEE4Advertiser -- ^ @METRIC_CPM_FEE4_ADVERTISER@ | MetricCpmFEE4Usd -- ^ @METRIC_CPM_FEE4_USD@ | MetricCpmFEE4Partner -- ^ @METRIC_CPM_FEE4_PARTNER@ | MetricCpmFEE5Advertiser -- ^ @METRIC_CPM_FEE5_ADVERTISER@ | MetricCpmFEE5Usd -- ^ @METRIC_CPM_FEE5_USD@ | MetricCpmFEE5Partner -- ^ @METRIC_CPM_FEE5_PARTNER@ | MetricMediaFEE3Advertiser -- ^ @METRIC_MEDIA_FEE3_ADVERTISER@ | MetricMediaFEE3Usd -- ^ @METRIC_MEDIA_FEE3_USD@ | MetricMediaFEE3Partner -- ^ @METRIC_MEDIA_FEE3_PARTNER@ | MetricMediaFEE4Advertiser -- ^ @METRIC_MEDIA_FEE4_ADVERTISER@ | MetricMediaFEE4Usd -- ^ @METRIC_MEDIA_FEE4_USD@ | MetricMediaFEE4Partner -- ^ @METRIC_MEDIA_FEE4_PARTNER@ | MetricMediaFEE5Advertiser -- ^ @METRIC_MEDIA_FEE5_ADVERTISER@ | MetricMediaFEE5Usd -- ^ @METRIC_MEDIA_FEE5_USD@ | MetricMediaFEE5Partner -- ^ @METRIC_MEDIA_FEE5_PARTNER@ | MetricVideoCompanionImpressions -- ^ @METRIC_VIDEO_COMPANION_IMPRESSIONS@ | MetricVideoCompanionClicks -- ^ @METRIC_VIDEO_COMPANION_CLICKS@ | MetricFEE16Advertiser -- ^ @METRIC_FEE16_ADVERTISER@ | MetricFEE16Usd -- ^ @METRIC_FEE16_USD@ | MetricFEE16Partner -- ^ @METRIC_FEE16_PARTNER@ | MetricFEE17Advertiser -- ^ @METRIC_FEE17_ADVERTISER@ | MetricFEE17Usd -- ^ @METRIC_FEE17_USD@ | MetricFEE17Partner -- ^ @METRIC_FEE17_PARTNER@ | MetricFEE18Advertiser -- ^ @METRIC_FEE18_ADVERTISER@ | MetricFEE18Usd -- ^ @METRIC_FEE18_USD@ | MetricFEE18Partner -- ^ @METRIC_FEE18_PARTNER@ | MetricTrueviewViews -- ^ @METRIC_TRUEVIEW_VIEWS@ | MetricTrueviewUniqueViewers -- ^ @METRIC_TRUEVIEW_UNIQUE_VIEWERS@ | MetricTrueviewEarnedViews -- ^ @METRIC_TRUEVIEW_EARNED_VIEWS@ | MetricTrueviewEarnedSubscribers -- ^ @METRIC_TRUEVIEW_EARNED_SUBSCRIBERS@ | MetricTrueviewEarnedPlayListAdditions -- ^ @METRIC_TRUEVIEW_EARNED_PLAYLIST_ADDITIONS@ | MetricTrueviewEarnedLikes -- ^ @METRIC_TRUEVIEW_EARNED_LIKES@ | MetricTrueviewEarnedShares -- ^ @METRIC_TRUEVIEW_EARNED_SHARES@ | 
MetricTrueviewImpressionShare -- ^ @METRIC_TRUEVIEW_IMPRESSION_SHARE@ | MetricTrueviewLostIsBudget -- ^ @METRIC_TRUEVIEW_LOST_IS_BUDGET@ | MetricTrueviewLostIsRank -- ^ @METRIC_TRUEVIEW_LOST_IS_RANK@ | MetricTrueviewViewThroughConversion -- ^ @METRIC_TRUEVIEW_VIEW_THROUGH_CONVERSION@ | MetricTrueviewConversionManyPerView -- ^ @METRIC_TRUEVIEW_CONVERSION_MANY_PER_VIEW@ | MetricTrueviewViewRate -- ^ @METRIC_TRUEVIEW_VIEW_RATE@ | MetricTrueviewConversionRateOnePerView -- ^ @METRIC_TRUEVIEW_CONVERSION_RATE_ONE_PER_VIEW@ | MetricTrueviewCpvAdvertiser -- ^ @METRIC_TRUEVIEW_CPV_ADVERTISER@ | MetricTrueviewCpvUsd -- ^ @METRIC_TRUEVIEW_CPV_USD@ | MetricTrueviewCpvPartner -- ^ @METRIC_TRUEVIEW_CPV_PARTNER@ | MetricFEE19Advertiser -- ^ @METRIC_FEE19_ADVERTISER@ | MetricFEE19Usd -- ^ @METRIC_FEE19_USD@ | MetricFEE19Partner -- ^ @METRIC_FEE19_PARTNER@ | MetricTeaTrueviewImpressions -- ^ @METRIC_TEA_TRUEVIEW_IMPRESSIONS@ | MetricTeaTrueviewUniqueCookies -- ^ @METRIC_TEA_TRUEVIEW_UNIQUE_COOKIES@ | MetricFEE20Advertiser -- ^ @METRIC_FEE20_ADVERTISER@ | MetricFEE20Usd -- ^ @METRIC_FEE20_USD@ | MetricFEE20Partner -- ^ @METRIC_FEE20_PARTNER@ | MetricFEE21Advertiser -- ^ @METRIC_FEE21_ADVERTISER@ | MetricFEE21Usd -- ^ @METRIC_FEE21_USD@ | MetricFEE21Partner -- ^ @METRIC_FEE21_PARTNER@ | MetricFEE22Advertiser -- ^ @METRIC_FEE22_ADVERTISER@ | MetricFEE22Usd -- ^ @METRIC_FEE22_USD@ | MetricFEE22Partner -- ^ @METRIC_FEE22_PARTNER@ | MetricTrueviewTotalConversionValuesAdvertiser -- ^ @METRIC_TRUEVIEW_TOTAL_CONVERSION_VALUES_ADVERTISER@ | MetricTrueviewTotalConversionValuesUsd -- ^ @METRIC_TRUEVIEW_TOTAL_CONVERSION_VALUES_USD@ | MetricTrueviewTotalConversionValuesPartner -- ^ @METRIC_TRUEVIEW_TOTAL_CONVERSION_VALUES_PARTNER@ | MetricTrueviewConversionCostManyPerViewAdvertiser -- ^ @METRIC_TRUEVIEW_CONVERSION_COST_MANY_PER_VIEW_ADVERTISER@ | MetricTrueviewConversionCostManyPerViewUsd -- ^ @METRIC_TRUEVIEW_CONVERSION_COST_MANY_PER_VIEW_USD@ | MetricTrueviewConversionCostManyPerViewPartner -- ^ @METRIC_TRUEVIEW_CONVERSION_COST_MANY_PER_VIEW_PARTNER@ | MetricProfitViewableEcpmAdvertiser -- ^ @METRIC_PROFIT_VIEWABLE_ECPM_ADVERTISER@ | MetricProfitViewableEcpmUsd -- ^ @METRIC_PROFIT_VIEWABLE_ECPM_USD@ | MetricProfitViewableEcpmPartner -- ^ @METRIC_PROFIT_VIEWABLE_ECPM_PARTNER@ | MetricRevenueViewableEcpmAdvertiser -- ^ @METRIC_REVENUE_VIEWABLE_ECPM_ADVERTISER@ | MetricRevenueViewableEcpmUsd -- ^ @METRIC_REVENUE_VIEWABLE_ECPM_USD@ | MetricRevenueViewableEcpmPartner -- ^ @METRIC_REVENUE_VIEWABLE_ECPM_PARTNER@ | MetricMediaCostViewableEcpmAdvertiser -- ^ @METRIC_MEDIA_COST_VIEWABLE_ECPM_ADVERTISER@ | MetricMediaCostViewableEcpmUsd -- ^ @METRIC_MEDIA_COST_VIEWABLE_ECPM_USD@ | MetricMediaCostViewableEcpmPartner -- ^ @METRIC_MEDIA_COST_VIEWABLE_ECPM_PARTNER@ | MetricTotalMediaCostViewableEcpmAdvertiser -- ^ @METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_ADVERTISER@ | MetricTotalMediaCostViewableEcpmUsd -- ^ @METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_USD@ | MetricTotalMediaCostViewableEcpmPartner -- ^ @METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_PARTNER@ | MetricTrueviewEngagements -- ^ @METRIC_TRUEVIEW_ENGAGEMENTS@ | MetricTrueviewEngagementRate -- ^ @METRIC_TRUEVIEW_ENGAGEMENT_RATE@ | MetricTrueviewAverageCpeAdvertiser -- ^ @METRIC_TRUEVIEW_AVERAGE_CPE_ADVERTISER@ | MetricTrueviewAverageCpeUsd -- ^ @METRIC_TRUEVIEW_AVERAGE_CPE_USD@ | MetricTrueviewAverageCpePartner -- ^ @METRIC_TRUEVIEW_AVERAGE_CPE_PARTNER@ | MetricActiveViewViewableImpressions -- ^ @METRIC_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS@ | MetricActiveViewEligibleImpressions -- ^ 
@METRIC_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS@ | MetricActiveViewMeasurableImpressions -- ^ @METRIC_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS@ | MetricActiveViewPctMeasurableImpressions -- ^ @METRIC_ACTIVE_VIEW_PCT_MEASURABLE_IMPRESSIONS@ | MetricActiveViewPctViewableImpressions -- ^ @METRIC_ACTIVE_VIEW_PCT_VIEWABLE_IMPRESSIONS@ | MetricActiveViewAverageViewableTime -- ^ @METRIC_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME@ | MetricActiveViewUnmeasurableImpressions -- ^ @METRIC_ACTIVE_VIEW_UNMEASURABLE_IMPRESSIONS@ | MetricActiveViewUnviewableImpressions -- ^ @METRIC_ACTIVE_VIEW_UNVIEWABLE_IMPRESSIONS@ | MetricActiveViewDistributionUnmeasurable -- ^ @METRIC_ACTIVE_VIEW_DISTRIBUTION_UNMEASURABLE@ | MetricActiveViewDistributionUnviewable -- ^ @METRIC_ACTIVE_VIEW_DISTRIBUTION_UNVIEWABLE@ | MetricActiveViewDistributionViewable -- ^ @METRIC_ACTIVE_VIEW_DISTRIBUTION_VIEWABLE@ | MetricActiveViewPercentViewableForTimeThreshold -- ^ @METRIC_ACTIVE_VIEW_PERCENT_VIEWABLE_FOR_TIME_THRESHOLD@ | MetricActiveViewViewableForTimeThreshold -- ^ @METRIC_ACTIVE_VIEW_VIEWABLE_FOR_TIME_THRESHOLD@ | MetricActiveViewPercentVisibleAtStart -- ^ @METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_AT_START@ | MetricActiveViewPercentVisibleFirstQuar -- ^ @METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_FIRST_QUAR@ | MetricActiveViewPercentVisibleSecondQuar -- ^ @METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_SECOND_QUAR@ | MetricActiveViewPercentVisibleThirdQuar -- ^ @METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_THIRD_QUAR@ | MetricActiveViewPercentVisibleOnComplete -- ^ @METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_ON_COMPLETE@ | MetricActiveViewPercentAudibleVisibleAtStart -- ^ @METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_AT_START@ | MetricActiveViewPercentAudibleVisibleFirstQuar -- ^ @METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_FIRST_QUAR@ | MetricActiveViewPercentAudibleVisibleSecondQuar -- ^ @METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_SECOND_QUAR@ | MetricActiveViewPercentAudibleVisibleThirdQuar -- ^ @METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_THIRD_QUAR@ | MetricActiveViewPercentAudibleVisibleOnComplete -- ^ @METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_ON_COMPLETE@ | MetricActiveViewAudibleVisibleOnCompleteImpressions -- ^ @METRIC_ACTIVE_VIEW_AUDIBLE_VISIBLE_ON_COMPLETE_IMPRESSIONS@ | MetricViewableBidRequests -- ^ @METRIC_VIEWABLE_BID_REQUESTS@ | MetricCookieReachImpressionReach -- ^ @METRIC_COOKIE_REACH_IMPRESSION_REACH@ | MetricCookieReachAverageImpressionFrequency -- ^ @METRIC_COOKIE_REACH_AVERAGE_IMPRESSION_FREQUENCY@ | MetricDBmEngagementRate -- ^ @METRIC_DBM_ENGAGEMENT_RATE@ | MetricRichMediaScrolls -- ^ @METRIC_RICH_MEDIA_SCROLLS@ | MetricCmPostViewRevenue -- ^ @METRIC_CM_POST_VIEW_REVENUE@ | MetricCmPostClickRevenue -- ^ @METRIC_CM_POST_CLICK_REVENUE@ | MetricFloodlightImpressions -- ^ @METRIC_FLOODLIGHT_IMPRESSIONS@ | MetricBillableImpressions -- ^ @METRIC_BILLABLE_IMPRESSIONS@ | MetricNielsenAverageFrequency -- ^ @METRIC_NIELSEN_AVERAGE_FREQUENCY@ | MetricNielsenImpressions -- ^ @METRIC_NIELSEN_IMPRESSIONS@ | MetricNielsenUniqueAudience -- ^ @METRIC_NIELSEN_UNIQUE_AUDIENCE@ | MetricNielsenGrp -- ^ @METRIC_NIELSEN_GRP@ | MetricNielsenImpressionIndex -- ^ @METRIC_NIELSEN_IMPRESSION_INDEX@ | MetricNielsenImpressionsShare -- ^ @METRIC_NIELSEN_IMPRESSIONS_SHARE@ | MetricNielsenPopulation -- ^ @METRIC_NIELSEN_POPULATION@ | MetricNielsenPopulationReach -- ^ @METRIC_NIELSEN_POPULATION_REACH@ | MetricNielsenPopulationShare -- ^ @METRIC_NIELSEN_POPULATION_SHARE@ | MetricNielsenReachIndex -- ^ @METRIC_NIELSEN_REACH_INDEX@ | MetricNielsenReachShare -- ^ @METRIC_NIELSEN_REACH_SHARE@ | 
MetricActiveViewAudibleFullyOnScreenHalfOfDurationImpressions -- ^ @METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_IMPRESSIONS@ | MetricActiveViewAudibleFullyOnScreenHalfOfDurationMeasurableImpressions -- ^ @METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_MEASURABLE_IMPRESSIONS@ | MetricActiveViewAudibleFullyOnScreenHalfOfDurationRate -- ^ @METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_RATE@ | MetricActiveViewAudibleFullyOnScreenHalfOfDurationTrueviewImpressions -- ^ @METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_IMPRESSIONS@ | MetricActiveViewAudibleFullyOnScreenHalfOfDurationTrueviewMeasurableImpressions -- ^ @METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_MEASURABLE_IMPRESSIONS@ | MetricActiveViewAudibleFullyOnScreenHalfOfDurationTrueviewRate -- ^ @METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_RATE@ | MetricActiveViewCustomMetricMeasurableImpressions -- ^ @METRIC_ACTIVE_VIEW_CUSTOM_METRIC_MEASURABLE_IMPRESSIONS@ | MetricActiveViewCustomMetricViewableImpressions -- ^ @METRIC_ACTIVE_VIEW_CUSTOM_METRIC_VIEWABLE_IMPRESSIONS@ | MetricActiveViewCustomMetricViewableRate -- ^ @METRIC_ACTIVE_VIEW_CUSTOM_METRIC_VIEWABLE_RATE@ | MetricActiveViewPercentAudibleImpressions -- ^ @METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_IMPRESSIONS@ | MetricActiveViewPercentFullyOnScreen2Sec -- ^ @METRIC_ACTIVE_VIEW_PERCENT_FULLY_ON_SCREEN_2_SEC@ | MetricActiveViewPercentFullScreen -- ^ @METRIC_ACTIVE_VIEW_PERCENT_FULL_SCREEN@ | MetricActiveViewPercentInBackgRound -- ^ @METRIC_ACTIVE_VIEW_PERCENT_IN_BACKGROUND@ | MetricActiveViewPercentOfAdPlayed -- ^ @METRIC_ACTIVE_VIEW_PERCENT_OF_AD_PLAYED@ | MetricActiveViewPercentOfCompletedImpressionsAudibleAndVisible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_OF_COMPLETED_IMPRESSIONS_AUDIBLE_AND_VISIBLE@ | MetricActiveViewPercentOfCompletedImpressionsVisible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_OF_COMPLETED_IMPRESSIONS_VISIBLE@ | MetricActiveViewPercentOfFirstQuartileImpressionsAudibleAndVisible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_OF_FIRST_QUARTILE_IMPRESSIONS_AUDIBLE_AND_VISIBLE@ | MetricActiveViewPercentOfFirstQuartileImpressionsVisible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_OF_FIRST_QUARTILE_IMPRESSIONS_VISIBLE@ | MetricActiveViewPercentOfMidpointImpressionsAudibleAndVisible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_OF_MIDPOINT_IMPRESSIONS_AUDIBLE_AND_VISIBLE@ | MetricActiveViewPercentOfMidpointImpressionsVisible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_OF_MIDPOINT_IMPRESSIONS_VISIBLE@ | MetricActiveViewPercentOfThirdQuartileImpressionsAudibleAndVisible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_OF_THIRD_QUARTILE_IMPRESSIONS_AUDIBLE_AND_VISIBLE@ | MetricActiveViewPercentOfThirdQuartileImpressionsVisible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_OF_THIRD_QUARTILE_IMPRESSIONS_VISIBLE@ | MetricActiveViewPercentPlayTimeAudible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_AUDIBLE@ | MetricActiveViewPercentPlayTimeAudibleAndVisible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_AUDIBLE_AND_VISIBLE@ | MetricActiveViewPercentPlayTimeVisible -- ^ @METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_VISIBLE@ | MetricAdaptedAudienceFrequency -- ^ @METRIC_ADAPTED_AUDIENCE_FREQUENCY@ | MetricAdlingoFeeAdvertiserCurrency -- ^ @METRIC_ADLINGO_FEE_ADVERTISER_CURRENCY@ | MetricAudioClientCostEcpclAdvertiserCurrency -- ^ @METRIC_AUDIO_CLIENT_COST_ECPCL_ADVERTISER_CURRENCY@ | MetricAudioMediaCostEcpclAdvertiserCurrency -- ^ @METRIC_AUDIO_MEDIA_COST_ECPCL_ADVERTISER_CURRENCY@ | MetricAudioMutesAudio -- ^ @METRIC_AUDIO_MUTES_AUDIO@ | MetricAudioRevenueEcpclAdvertiserCurrency 
-- ^ @METRIC_AUDIO_REVENUE_ECPCL_ADVERTISER_CURRENCY@ | MetricAudioUnmutesAudio -- ^ @METRIC_AUDIO_UNMUTES_AUDIO@ | MetricAudioUnmutesVideo -- ^ @METRIC_AUDIO_UNMUTES_VIDEO@ | MetricAverageDisplayTime -- ^ @METRIC_AVERAGE_DISPLAY_TIME@ | MetricAverageImpressionFrequencyPerUser -- ^ @METRIC_AVERAGE_IMPRESSION_FREQUENCY_PER_USER@ | MetricAverageInteractionTime -- ^ @METRIC_AVERAGE_INTERACTION_TIME@ | MetricAverageWatchTimePerImpression -- ^ @METRIC_AVERAGE_WATCH_TIME_PER_IMPRESSION@ | MetricBeginToRenderEligibleImpressions -- ^ @METRIC_BEGIN_TO_RENDER_ELIGIBLE_IMPRESSIONS@ | MetricBeginToRenderImpressions -- ^ @METRIC_BEGIN_TO_RENDER_IMPRESSIONS@ | MetricBenchmarkFrequency -- ^ @METRIC_BENCHMARK_FREQUENCY@ | MetricBrandLiftAbsoluteBrandLift -- ^ @METRIC_BRAND_LIFT_ABSOLUTE_BRAND_LIFT@ | MetricBrandLiftAllSurveyResponses -- ^ @METRIC_BRAND_LIFT_ALL_SURVEY_RESPONSES@ | MetricBrandLiftBaselinePositiveResponseRate -- ^ @METRIC_BRAND_LIFT_BASELINE_POSITIVE_RESPONSE_RATE@ | MetricBrandLiftBaselineSurveyResponses -- ^ @METRIC_BRAND_LIFT_BASELINE_SURVEY_RESPONSES@ | MetricBrandLiftCostPerLiftedUser -- ^ @METRIC_BRAND_LIFT_COST_PER_LIFTED_USER@ | MetricBrandLiftExposedSurveyResponses -- ^ @METRIC_BRAND_LIFT_EXPOSED_SURVEY_RESPONSES@ | MetricBrandLiftHeadroomBrandLift -- ^ @METRIC_BRAND_LIFT_HEADROOM_BRAND_LIFT@ | MetricBrandLiftRelativeBrandLift -- ^ @METRIC_BRAND_LIFT_RELATIVE_BRAND_LIFT@ | MetricBrandLiftUsers -- ^ @METRIC_BRAND_LIFT_USERS@ | MetricCardClicks -- ^ @METRIC_CARD_CLICKS@ | MetricClientCostAdvertiserCurrency -- ^ @METRIC_CLIENT_COST_ADVERTISER_CURRENCY@ | MetricClientCostEcpaAdvertiserCurrency -- ^ @METRIC_CLIENT_COST_ECPA_ADVERTISER_CURRENCY@ | MetricClientCostEcpaPcAdvertiserCurrency -- ^ @METRIC_CLIENT_COST_ECPA_PC_ADVERTISER_CURRENCY@ | MetricClientCostEcpaPvAdvertiserCurrency -- ^ @METRIC_CLIENT_COST_ECPA_PV_ADVERTISER_CURRENCY@ | MetricClientCostEcpcAdvertiserCurrency -- ^ @METRIC_CLIENT_COST_ECPC_ADVERTISER_CURRENCY@ | MetricClientCostEcpmAdvertiserCurrency -- ^ @METRIC_CLIENT_COST_ECPM_ADVERTISER_CURRENCY@ | MetricClientCostViewableEcpmAdvertiserCurrency -- ^ @METRIC_CLIENT_COST_VIEWABLE_ECPM_ADVERTISER_CURRENCY@ | MetricCmPostClickRevenueCrossEnvironment -- ^ @METRIC_CM_POST_CLICK_REVENUE_CROSS_ENVIRONMENT@ | MetricCmPostViewRevenueCrossEnvironment -- ^ @METRIC_CM_POST_VIEW_REVENUE_CROSS_ENVIRONMENT@ | MetricCompanionClicksAudio -- ^ @METRIC_COMPANION_CLICKS_AUDIO@ | MetricCompanionImpressionsAudio -- ^ @METRIC_COMPANION_IMPRESSIONS_AUDIO@ | MetricCompleteListensAudio -- ^ @METRIC_COMPLETE_LISTENS_AUDIO@ | MetricCompletionRateAudio -- ^ @METRIC_COMPLETION_RATE_AUDIO@ | MetricCounters -- ^ @METRIC_COUNTERS@ | MetricCustomFee1AdvertiserCurrency -- ^ @METRIC_CUSTOM_FEE_1_ADVERTISER_CURRENCY@ | MetricCustomFee2AdvertiserCurrency -- ^ @METRIC_CUSTOM_FEE_2_ADVERTISER_CURRENCY@ | MetricCustomFee3AdvertiserCurrency -- ^ @METRIC_CUSTOM_FEE_3_ADVERTISER_CURRENCY@ | MetricCustomFee4AdvertiserCurrency -- ^ @METRIC_CUSTOM_FEE_4_ADVERTISER_CURRENCY@ | MetricCustomFee5AdvertiserCurrency -- ^ @METRIC_CUSTOM_FEE_5_ADVERTISER_CURRENCY@ | MetricCustomValuePer1000Impressions -- ^ @METRIC_CUSTOM_VALUE_PER_1000_IMPRESSIONS@ | MetricEngagements -- ^ @METRIC_ENGAGEMENTS@ | MetricEstimatedCpmForImpressionsWithCustomValueAdvertiserCurrency -- ^ @METRIC_ESTIMATED_CPM_FOR_IMPRESSIONS_WITH_CUSTOM_VALUE_ADVERTISER_CURRENCY@ | MetricEstimatedTotalCostForImpressionsWithCustomValueAdvertiserCurrency -- ^ @METRIC_ESTIMATED_TOTAL_COST_FOR_IMPRESSIONS_WITH_CUSTOM_VALUE_ADVERTISER_CURRENCY@ | MetricExits -- ^ 
@METRIC_EXITS@ | MetricExpansions -- ^ @METRIC_EXPANSIONS@ | MetricFirstQuartileAudio -- ^ @METRIC_FIRST_QUARTILE_AUDIO@ | MetricGeneralInvalidTrafficGivtImpressions -- ^ @METRIC_GENERAL_INVALID_TRAFFIC_GIVT_IMPRESSIONS@ | MetricGeneralInvalidTrafficGivtTrackedAds -- ^ @METRIC_GENERAL_INVALID_TRAFFIC_GIVT_TRACKED_ADS@ | MetricGivtActiveViewEligibleImpressions -- ^ @METRIC_GIVT_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS@ | MetricGivtActiveViewMeasurableImpressions -- ^ @METRIC_GIVT_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS@ | MetricGivtActiveViewViewableImpressions -- ^ @METRIC_GIVT_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS@ | MetricGivtBeginToRenderImpressions -- ^ @METRIC_GIVT_BEGIN_TO_RENDER_IMPRESSIONS@ | MetricGivtClicks -- ^ @METRIC_GIVT_CLICKS@ | MetricGmailConversions -- ^ @METRIC_GMAIL_CONVERSIONS@ | MetricGmailPostClickConversions -- ^ @METRIC_GMAIL_POST_CLICK_CONVERSIONS@ | MetricGmailPostViewConversions -- ^ @METRIC_GMAIL_POST_VIEW_CONVERSIONS@ | MetricGmailPotentialViews -- ^ @METRIC_GMAIL_POTENTIAL_VIEWS@ | MetricImpressionsWithCustomValue -- ^ @METRIC_IMPRESSIONS_WITH_CUSTOM_VALUE@ | MetricImpressionsWithPositiveCustomValue -- ^ @METRIC_IMPRESSIONS_WITH_POSITIVE_CUSTOM_VALUE@ | MetricImpressionCustomValueCost -- ^ @METRIC_IMPRESSION_CUSTOM_VALUE_COST@ | MetricInteractiveImpressions -- ^ @METRIC_INTERACTIVE_IMPRESSIONS@ | MetricInvalidActiveViewEligibleImpressions -- ^ @METRIC_INVALID_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS@ | MetricInvalidActiveViewMeasurableImpressions -- ^ @METRIC_INVALID_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS@ | MetricInvalidActiveViewViewableImpressions -- ^ @METRIC_INVALID_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS@ | MetricInvalidBeginToRenderImpressions -- ^ @METRIC_INVALID_BEGIN_TO_RENDER_IMPRESSIONS@ | MetricInvalidClicks -- ^ @METRIC_INVALID_CLICKS@ | MetricInvalidImpressions -- ^ @METRIC_INVALID_IMPRESSIONS@ | MetricInvalidTrackedAds -- ^ @METRIC_INVALID_TRACKED_ADS@ | MetricMediaCostAdvertiserCurrencyPerStoreVisitAdxOnly -- ^ @METRIC_MEDIA_COST_ADVERTISER_CURRENCY_PER_STORE_VISIT_ADX_ONLY@ | MetricMidpointAudio -- ^ @METRIC_MIDPOINT_AUDIO@ | MetricOriginalAudienceFrequency -- ^ @METRIC_ORIGINAL_AUDIENCE_FREQUENCY@ | MetricPausesAudio -- ^ @METRIC_PAUSES_AUDIO@ | MetricPercentImpressionsWithPositiveCustomValue -- ^ @METRIC_PERCENT_IMPRESSIONS_WITH_POSITIVE_CUSTOM_VALUE@ | MetricPlatformFeeRate -- ^ @METRIC_PLATFORM_FEE_RATE@ | MetricPostClickConversionsCrossEnvironment -- ^ @METRIC_POST_CLICK_CONVERSIONS_CROSS_ENVIRONMENT@ | MetricPostViewConversionsCrossEnvironment -- ^ @METRIC_POST_VIEW_CONVERSIONS_CROSS_ENVIRONMENT@ | MetricPotentialImpressions -- ^ @METRIC_POTENTIAL_IMPRESSIONS@ | MetricPotentialViews -- ^ @METRIC_POTENTIAL_VIEWS@ | MetricPremiumFeeAdvertiserCurrency -- ^ @METRIC_PREMIUM_FEE_ADVERTISER_CURRENCY@ | MetricProgrammaticGuaranteedImpressionsPassedDueToFrequency -- ^ @METRIC_PROGRAMMATIC_GUARANTEED_IMPRESSIONS_PASSED_DUE_TO_FREQUENCY@ | MetricProgrammaticGuaranteedSavingsReInvestedDueToFrequencyAdvertiserCurrency -- ^ @METRIC_PROGRAMMATIC_GUARANTEED_SAVINGS_RE_INVESTED_DUE_TO_FREQUENCY_ADVERTISER_CURRENCY@ | MetricRefundBillableCostAdvertiserCurrency -- ^ @METRIC_REFUND_BILLABLE_COST_ADVERTISER_CURRENCY@ | MetricRefundMediaCostAdvertiserCurrency -- ^ @METRIC_REFUND_MEDIA_COST_ADVERTISER_CURRENCY@ | MetricRefundPlatformFeeAdvertiserCurrency -- ^ @METRIC_REFUND_PLATFORM_FEE_ADVERTISER_CURRENCY@ | MetricRevenueAdvertiserCurrencyPerStoreVisitAdxOnly -- ^ @METRIC_REVENUE_ADVERTISER_CURRENCY_PER_STORE_VISIT_ADX_ONLY@ | MetricRichMediaEngagements -- ^ @METRIC_RICH_MEDIA_ENGAGEMENTS@ | 
MetricStartsAudio -- ^ @METRIC_STARTS_AUDIO@ | MetricStopsAudio -- ^ @METRIC_STOPS_AUDIO@ | MetricStoreVisitsAdxOnly -- ^ @METRIC_STORE_VISITS_ADX_ONLY@ | MetricStoreVisitConversions -- ^ @METRIC_STORE_VISIT_CONVERSIONS@ | MetricThirdQuartileAudio -- ^ @METRIC_THIRD_QUARTILE_AUDIO@ | MetricTimers -- ^ @METRIC_TIMERS@ | MetricTotalAudioMediaCostEcpclAdvertiserCurrency -- ^ @METRIC_TOTAL_AUDIO_MEDIA_COST_ECPCL_ADVERTISER_CURRENCY@ | MetricTotalConversionsCrossEnvironment -- ^ @METRIC_TOTAL_CONVERSIONS_CROSS_ENVIRONMENT@ | MetricTotalDisplayTime -- ^ @METRIC_TOTAL_DISPLAY_TIME@ | MetricTotalImpressionCustomValue -- ^ @METRIC_TOTAL_IMPRESSION_CUSTOM_VALUE@ | MetricTotalInteractionTime -- ^ @METRIC_TOTAL_INTERACTION_TIME@ | MetricTotalMediaCostAdvertiserCurrencyPerStoreVisitAdxOnly -- ^ @METRIC_TOTAL_MEDIA_COST_ADVERTISER_CURRENCY_PER_STORE_VISIT_ADX_ONLY@ | MetricTotalUsers -- ^ @METRIC_TOTAL_USERS@ | MetricTrackedAds -- ^ @METRIC_TRACKED_ADS@ | MetricTrueviewGeneralInvalidTrafficGivtViews -- ^ @METRIC_TRUEVIEW_GENERAL_INVALID_TRAFFIC_GIVT_VIEWS@ | MetricTrueviewInvalidViews -- ^ @METRIC_TRUEVIEW_INVALID_VIEWS@ | MetricUniqueCookiesWithImpressions -- ^ @METRIC_UNIQUE_COOKIES_WITH_IMPRESSIONS@ | MetricUniqueReachAverageImpressionFrequency -- ^ @METRIC_UNIQUE_REACH_AVERAGE_IMPRESSION_FREQUENCY@ | MetricUniqueReachClickReach -- ^ @METRIC_UNIQUE_REACH_CLICK_REACH@ | MetricUniqueReachImpressionReach -- ^ @METRIC_UNIQUE_REACH_IMPRESSION_REACH@ | MetricUniqueReachTotalReach -- ^ @METRIC_UNIQUE_REACH_TOTAL_REACH@ | MetricVerifiableImpressions -- ^ @METRIC_VERIFIABLE_IMPRESSIONS@ | MetricVideoClientCostEcpcvAdvertiserCurrency -- ^ @METRIC_VIDEO_CLIENT_COST_ECPCV_ADVERTISER_CURRENCY@ | MetricWatchTime -- ^ @METRIC_WATCH_TIME@ | MetricLastTouchTotalConversions -- ^ @METRIC_LAST_TOUCH_TOTAL_CONVERSIONS@ | MetricLastTouchClickThroughConversions -- ^ @METRIC_LAST_TOUCH_CLICK_THROUGH_CONVERSIONS@ | MetricLastTouchViewThroughConversions -- ^ @METRIC_LAST_TOUCH_VIEW_THROUGH_CONVERSIONS@ | MetricTotalPaths -- ^ @METRIC_TOTAL_PATHS@ | MetricTotalExposures -- ^ @METRIC_TOTAL_EXPOSURES@ | MetricPathConversionRate -- ^ @METRIC_PATH_CONVERSION_RATE@ | MetricConvertingPaths -- ^ @METRIC_CONVERTING_PATHS@ | MetricActivityRevenue -- ^ @METRIC_ACTIVITY_REVENUE@ | MetricPercentInvalidImpressionsPreBid -- ^ @METRIC_PERCENT_INVALID_IMPRESSIONS_PREBID@ | MetricGrpCorrectedImpressions -- ^ @METRIC_GRP_CORRECTED_IMPRESSIONS@ | MetricDemoCorrectedClicks -- ^ @METRIC_DEMO_CORRECTED_CLICKS@ | MetricVirtualPeopleImpressionReachByDemo -- ^ @METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_BY_DEMO@ | MetricVirtualPeopleClickReachByDemo -- ^ @METRIC_VIRTUAL_PEOPLE_CLICK_REACH_BY_DEMO@ | MetricVirtualPeopleAverageImpressionFrequencyByDemo -- ^ @METRIC_VIRTUAL_PEOPLE_AVERAGE_IMPRESSION_FREQUENCY_BY_DEMO@ | MetricDemoComPositionImpression -- ^ @METRIC_DEMO_COMPOSITION_IMPRESSION@ | MetricVirtualPeopleImpressionReachSharePercent -- ^ @METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_SHARE_PERCENT@ | MetricDemoPopulation -- ^ @METRIC_DEMO_POPULATION@ | MetricVirtualPeopleImpressionReachPercent -- ^ @METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_PERCENT@ | MetricTargetRatingPoints -- ^ @METRIC_TARGET_RATING_POINTS@ | MetricProvisionalImpressions -- ^ @METRIC_PROVISIONAL_IMPRESSIONS@ | MetricVendorBlockedAds -- ^ @METRIC_VENDOR_BLOCKED_ADS@ | MetricGrpCorrectedViewableImpressions -- ^ @METRIC_GRP_CORRECTED_VIEWABLE_IMPRESSIONS@ | MetricGrpCorrectedViewableImpressionsSharePercent -- ^ @METRIC_GRP_CORRECTED_VIEWABLE_IMPRESSIONS_SHARE_PERCENT@ | 
MetricViewableGrossRatingPoints -- ^ @METRIC_VIEWABLE_GROSS_RATING_POINTS@ | MetricVirtualPeopleAverageViewableImpressionFrequencyByDemo -- ^ @METRIC_VIRTUAL_PEOPLE_AVERAGE_VIEWABLE_IMPRESSION_FREQUENCY_BY_DEMO@ | MetricVirtualPeopleViewableImpressionReachByDemo -- ^ @METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_BY_DEMO@ | MetricVirtualPeopleViewableImpressionReachPercent -- ^ @METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_PERCENT@ | MetricVirtualPeopleViewableImpressionReachSharePercent -- ^ @METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_SHARE_PERCENT@ | MetricEngagementRate -- ^ @METRIC_ENGAGEMENT_RATE@ | MetricCM360PostViewRevenue -- ^ @METRIC_CM360_POST_VIEW_REVENUE@ | MetricCM360PostClickRevenue -- ^ @METRIC_CM360_POST_CLICK_REVENUE@ | MetricCM360PostClickRevenueCrossEnvironment -- ^ @METRIC_CM360_POST_CLICK_REVENUE_CROSS_ENVIRONMENT@ | MetricCM360PostViewRevenueCrossEnvironment -- ^ @METRIC_CM360_POST_VIEW_REVENUE_CROSS_ENVIRONMENT@ | MetricPercentageFromCurrentIoGoal -- ^ @METRIC_PERCENTAGE_FROM_CURRENT_IO_GOAL@ | MetricDuplicateFloodlightImpressions -- ^ @METRIC_DUPLICATE_FLOODLIGHT_IMPRESSIONS@ | MetricCookieConsentedFloodlightImpressions -- ^ @METRIC_COOKIE_CONSENTED_FLOODLIGHT_IMPRESSIONS@ | MetricCookieUnconsentedFloodlightImpressions -- ^ @METRIC_COOKIE_UNCONSENTED_FLOODLIGHT_IMPRESSIONS@ | MetricTrackingUnconsentedClicks -- ^ @METRIC_TRACKING_UNCONSENTED_CLICKS@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable ParametersMetricsItem instance FromHttpApiData ParametersMetricsItem where parseQueryParam = \case "METRIC_UNKNOWN" -> Right MetricUnknown "METRIC_IMPRESSIONS" -> Right MetricImpressions "METRIC_CLICKS" -> Right MetricClicks "METRIC_LAST_IMPRESSIONS" -> Right MetricLastImpressions "METRIC_LAST_CLICKS" -> Right MetricLastClicks "METRIC_TOTAL_CONVERSIONS" -> Right MetricTotalConversions "METRIC_MEDIA_COST_ADVERTISER" -> Right MetricMediaCostAdvertiser "METRIC_MEDIA_COST_USD" -> Right MetricMediaCostUsd "METRIC_MEDIA_COST_PARTNER" -> Right MetricMediaCostPartner "METRIC_DATA_COST_ADVERTISER" -> Right MetricDataCostAdvertiser "METRIC_DATA_COST_USD" -> Right MetricDataCostUsd "METRIC_DATA_COST_PARTNER" -> Right MetricDataCostPartner "METRIC_CPM_FEE1_ADVERTISER" -> Right MetricCpmFEE1Advertiser "METRIC_CPM_FEE1_USD" -> Right MetricCpmFEE1Usd "METRIC_CPM_FEE1_PARTNER" -> Right MetricCpmFEE1Partner "METRIC_CPM_FEE2_ADVERTISER" -> Right MetricCpmFEE2Advertiser "METRIC_CPM_FEE2_USD" -> Right MetricCpmFEE2Usd "METRIC_CPM_FEE2_PARTNER" -> Right MetricCpmFEE2Partner "METRIC_MEDIA_FEE1_ADVERTISER" -> Right MetricMediaFEE1Advertiser "METRIC_MEDIA_FEE1_USD" -> Right MetricMediaFEE1Usd "METRIC_MEDIA_FEE1_PARTNER" -> Right MetricMediaFEE1Partner "METRIC_MEDIA_FEE2_ADVERTISER" -> Right MetricMediaFEE2Advertiser "METRIC_MEDIA_FEE2_USD" -> Right MetricMediaFEE2Usd "METRIC_MEDIA_FEE2_PARTNER" -> Right MetricMediaFEE2Partner "METRIC_REVENUE_ADVERTISER" -> Right MetricRevenueAdvertiser "METRIC_REVENUE_USD" -> Right MetricRevenueUsd "METRIC_REVENUE_PARTNER" -> Right MetricRevenuePartner "METRIC_PROFIT_ADVERTISER" -> Right MetricProfitAdvertiser "METRIC_PROFIT_USD" -> Right MetricProfitUsd "METRIC_PROFIT_PARTNER" -> Right MetricProfitPartner "METRIC_PROFIT_MARGIN" -> Right MetricProfitMargin "METRIC_TOTAL_MEDIA_COST_USD" -> Right MetricTotalMediaCostUsd "METRIC_TOTAL_MEDIA_COST_PARTNER" -> Right MetricTotalMediaCostPartner "METRIC_TOTAL_MEDIA_COST_ADVERTISER" -> Right MetricTotalMediaCostAdvertiser "METRIC_BILLABLE_COST_USD" -> Right MetricBillableCostUsd 
"METRIC_BILLABLE_COST_PARTNER" -> Right MetricBillableCostPartner "METRIC_BILLABLE_COST_ADVERTISER" -> Right MetricBillableCostAdvertiser "METRIC_PLATFORM_FEE_USD" -> Right MetricPlatformFeeUsd "METRIC_PLATFORM_FEE_PARTNER" -> Right MetricPlatformFeePartner "METRIC_PLATFORM_FEE_ADVERTISER" -> Right MetricPlatformFeeAdvertiser "METRIC_VIDEO_COMPLETION_RATE" -> Right MetricVideoCompletionRate "METRIC_PROFIT_ECPM_ADVERTISER" -> Right MetricProfitEcpmAdvertiser "METRIC_PROFIT_ECPM_USD" -> Right MetricProfitEcpmUsd "METRIC_PROFIT_ECPM_PARTNER" -> Right MetricProfitEcpmPartner "METRIC_REVENUE_ECPM_ADVERTISER" -> Right MetricRevenueEcpmAdvertiser "METRIC_REVENUE_ECPM_USD" -> Right MetricRevenueEcpmUsd "METRIC_REVENUE_ECPM_PARTNER" -> Right MetricRevenueEcpmPartner "METRIC_REVENUE_ECPC_ADVERTISER" -> Right MetricRevenueEcpcAdvertiser "METRIC_REVENUE_ECPC_USD" -> Right MetricRevenueEcpcUsd "METRIC_REVENUE_ECPC_PARTNER" -> Right MetricRevenueEcpcPartner "METRIC_REVENUE_ECPA_ADVERTISER" -> Right MetricRevenueEcpaAdvertiser "METRIC_REVENUE_ECPA_USD" -> Right MetricRevenueEcpaUsd "METRIC_REVENUE_ECPA_PARTNER" -> Right MetricRevenueEcpaPartner "METRIC_REVENUE_ECPAPV_ADVERTISER" -> Right MetricRevenueEcpapvAdvertiser "METRIC_REVENUE_ECPAPV_USD" -> Right MetricRevenueEcpapvUsd "METRIC_REVENUE_ECPAPV_PARTNER" -> Right MetricRevenueEcpapvPartner "METRIC_REVENUE_ECPAPC_ADVERTISER" -> Right MetricRevenueEcpapcAdvertiser "METRIC_REVENUE_ECPAPC_USD" -> Right MetricRevenueEcpapcUsd "METRIC_REVENUE_ECPAPC_PARTNER" -> Right MetricRevenueEcpapcPartner "METRIC_MEDIA_COST_ECPM_ADVERTISER" -> Right MetricMediaCostEcpmAdvertiser "METRIC_MEDIA_COST_ECPM_USD" -> Right MetricMediaCostEcpmUsd "METRIC_MEDIA_COST_ECPM_PARTNER" -> Right MetricMediaCostEcpmPartner "METRIC_MEDIA_COST_ECPC_ADVERTISER" -> Right MetricMediaCostEcpcAdvertiser "METRIC_MEDIA_COST_ECPC_USD" -> Right MetricMediaCostEcpcUsd "METRIC_MEDIA_COST_ECPC_PARTNER" -> Right MetricMediaCostEcpcPartner "METRIC_MEDIA_COST_ECPA_ADVERTISER" -> Right MetricMediaCostEcpaAdvertiser "METRIC_MEDIA_COST_ECPA_USD" -> Right MetricMediaCostEcpaUsd "METRIC_MEDIA_COST_ECPA_PARTNER" -> Right MetricMediaCostEcpaPartner "METRIC_MEDIA_COST_ECPAPV_ADVERTISER" -> Right MetricMediaCostEcpapvAdvertiser "METRIC_MEDIA_COST_ECPAPV_USD" -> Right MetricMediaCostEcpapvUsd "METRIC_MEDIA_COST_ECPAPV_PARTNER" -> Right MetricMediaCostEcpapvPartner "METRIC_MEDIA_COST_ECPAPC_ADVERTISER" -> Right MetricMediaCostEcpapcAdvertiser "METRIC_MEDIA_COST_ECPAPC_USD" -> Right MetricMediaCostEcpapcUsd "METRIC_MEDIA_COST_ECPAPC_PARTNER" -> Right MetricMediaCostEcpapcPartner "METRIC_TOTAL_MEDIA_COST_ECPM_ADVERTISER" -> Right MetricTotalMediaCostEcpmAdvertiser "METRIC_TOTAL_MEDIA_COST_ECPM_USD" -> Right MetricTotalMediaCostEcpmUsd "METRIC_TOTAL_MEDIA_COST_ECPM_PARTNER" -> Right MetricTotalMediaCostEcpmPartner "METRIC_TOTAL_MEDIA_COST_ECPC_ADVERTISER" -> Right MetricTotalMediaCostEcpcAdvertiser "METRIC_TOTAL_MEDIA_COST_ECPC_USD" -> Right MetricTotalMediaCostEcpcUsd "METRIC_TOTAL_MEDIA_COST_ECPC_PARTNER" -> Right MetricTotalMediaCostEcpcPartner "METRIC_TOTAL_MEDIA_COST_ECPA_ADVERTISER" -> Right MetricTotalMediaCostEcpaAdvertiser "METRIC_TOTAL_MEDIA_COST_ECPA_USD" -> Right MetricTotalMediaCostEcpaUsd "METRIC_TOTAL_MEDIA_COST_ECPA_PARTNER" -> Right MetricTotalMediaCostEcpaPartner "METRIC_TOTAL_MEDIA_COST_ECPAPV_ADVERTISER" -> Right MetricTotalMediaCostEcpapvAdvertiser "METRIC_TOTAL_MEDIA_COST_ECPAPV_USD" -> Right MetricTotalMediaCostEcpapvUsd "METRIC_TOTAL_MEDIA_COST_ECPAPV_PARTNER" -> Right 
MetricTotalMediaCostEcpapvPartner "METRIC_TOTAL_MEDIA_COST_ECPAPC_ADVERTISER" -> Right MetricTotalMediaCostEcpapcAdvertiser "METRIC_TOTAL_MEDIA_COST_ECPAPC_USD" -> Right MetricTotalMediaCostEcpapcUsd "METRIC_TOTAL_MEDIA_COST_ECPAPC_PARTNER" -> Right MetricTotalMediaCostEcpapcPartner "METRIC_RICH_MEDIA_VIDEO_PLAYS" -> Right MetricRichMediaVideoPlays "METRIC_RICH_MEDIA_VIDEO_COMPLETIONS" -> Right MetricRichMediaVideoCompletions "METRIC_RICH_MEDIA_VIDEO_PAUSES" -> Right MetricRichMediaVideoPauses "METRIC_RICH_MEDIA_VIDEO_MUTES" -> Right MetricRichMediaVideoMutes "METRIC_RICH_MEDIA_VIDEO_MIDPOINTS" -> Right MetricRichMediaVideoMidpoints "METRIC_RICH_MEDIA_VIDEO_FULL_SCREENS" -> Right MetricRichMediaVideoFullScreens "METRIC_RICH_MEDIA_VIDEO_FIRST_QUARTILE_COMPLETES" -> Right MetricRichMediaVideoFirstQuartileCompletes "METRIC_RICH_MEDIA_VIDEO_THIRD_QUARTILE_COMPLETES" -> Right MetricRichMediaVideoThirdQuartileCompletes "METRIC_CLICK_TO_POST_CLICK_CONVERSION_RATE" -> Right MetricClickToPostClickConversionRate "METRIC_IMPRESSIONS_TO_CONVERSION_RATE" -> Right MetricImpressionsToConversionRate "METRIC_CONVERSIONS_PER_MILLE" -> Right MetricConversionsPerMille "METRIC_CTR" -> Right MetricCtr "METRIC_BID_REQUESTS" -> Right MetricBidRequests "METRIC_UNIQUE_VISITORS_COOKIES" -> Right MetricUniqueVisitorsCookies "METRIC_REVENUE_ECPCV_ADVERTISER" -> Right MetricRevenueEcpcvAdvertiser "METRIC_REVENUE_ECPCV_USD" -> Right MetricRevenueEcpcvUsd "METRIC_REVENUE_ECPCV_PARTNER" -> Right MetricRevenueEcpcvPartner "METRIC_MEDIA_COST_ECPCV_ADVERTISER" -> Right MetricMediaCostEcpcvAdvertiser "METRIC_MEDIA_COST_ECPCV_USD" -> Right MetricMediaCostEcpcvUsd "METRIC_MEDIA_COST_ECPCV_PARTNER" -> Right MetricMediaCostEcpcvPartner "METRIC_TOTAL_MEDIA_COST_ECPCV_ADVERTISER" -> Right MetricTotalMediaCostEcpcvAdvertiser "METRIC_TOTAL_MEDIA_COST_ECPCV_USD" -> Right MetricTotalMediaCostEcpcvUsd "METRIC_TOTAL_MEDIA_COST_ECPCV_PARTNER" -> Right MetricTotalMediaCostEcpcvPartner "METRIC_RICH_MEDIA_VIDEO_SKIPS" -> Right MetricRichMediaVideoSkips "METRIC_FEE2_ADVERTISER" -> Right MetricFEE2Advertiser "METRIC_FEE2_USD" -> Right MetricFEE2Usd "METRIC_FEE2_PARTNER" -> Right MetricFEE2Partner "METRIC_FEE3_ADVERTISER" -> Right MetricFEE3Advertiser "METRIC_FEE3_USD" -> Right MetricFEE3Usd "METRIC_FEE3_PARTNER" -> Right MetricFEE3Partner "METRIC_FEE4_ADVERTISER" -> Right MetricFEE4Advertiser "METRIC_FEE4_USD" -> Right MetricFEE4Usd "METRIC_FEE4_PARTNER" -> Right MetricFEE4Partner "METRIC_FEE5_ADVERTISER" -> Right MetricFEE5Advertiser "METRIC_FEE5_USD" -> Right MetricFEE5Usd "METRIC_FEE5_PARTNER" -> Right MetricFEE5Partner "METRIC_FEE6_ADVERTISER" -> Right MetricFEE6Advertiser "METRIC_FEE6_USD" -> Right MetricFEE6Usd "METRIC_FEE6_PARTNER" -> Right MetricFEE6Partner "METRIC_FEE7_ADVERTISER" -> Right MetricFEE7Advertiser "METRIC_FEE7_USD" -> Right MetricFEE7Usd "METRIC_FEE7_PARTNER" -> Right MetricFEE7Partner "METRIC_FEE8_ADVERTISER" -> Right MetricFEE8Advertiser "METRIC_FEE8_USD" -> Right MetricFEE8Usd "METRIC_FEE8_PARTNER" -> Right MetricFEE8Partner "METRIC_FEE9_ADVERTISER" -> Right MetricFEE9Advertiser "METRIC_FEE9_USD" -> Right MetricFEE9Usd "METRIC_FEE9_PARTNER" -> Right MetricFEE9Partner "METRIC_FEE10_ADVERTISER" -> Right MetricFEE10Advertiser "METRIC_FEE10_USD" -> Right MetricFEE10Usd "METRIC_FEE10_PARTNER" -> Right MetricFEE10Partner "METRIC_FEE11_ADVERTISER" -> Right MetricFEE11Advertiser "METRIC_FEE11_USD" -> Right MetricFEE11Usd "METRIC_FEE11_PARTNER" -> Right MetricFEE11Partner "METRIC_FEE12_ADVERTISER" -> Right MetricFEE12Advertiser 
"METRIC_FEE12_USD" -> Right MetricFEE12Usd "METRIC_FEE12_PARTNER" -> Right MetricFEE12Partner "METRIC_FEE13_ADVERTISER" -> Right MetricFEE13Advertiser "METRIC_FEE13_USD" -> Right MetricFEE13Usd "METRIC_FEE13_PARTNER" -> Right MetricFEE13Partner "METRIC_FEE14_ADVERTISER" -> Right MetricFEE14Advertiser "METRIC_FEE14_USD" -> Right MetricFEE14Usd "METRIC_FEE14_PARTNER" -> Right MetricFEE14Partner "METRIC_FEE15_ADVERTISER" -> Right MetricFEE15Advertiser "METRIC_FEE15_USD" -> Right MetricFEE15Usd "METRIC_FEE15_PARTNER" -> Right MetricFEE15Partner "METRIC_CPM_FEE3_ADVERTISER" -> Right MetricCpmFEE3Advertiser "METRIC_CPM_FEE3_USD" -> Right MetricCpmFEE3Usd "METRIC_CPM_FEE3_PARTNER" -> Right MetricCpmFEE3Partner "METRIC_CPM_FEE4_ADVERTISER" -> Right MetricCpmFEE4Advertiser "METRIC_CPM_FEE4_USD" -> Right MetricCpmFEE4Usd "METRIC_CPM_FEE4_PARTNER" -> Right MetricCpmFEE4Partner "METRIC_CPM_FEE5_ADVERTISER" -> Right MetricCpmFEE5Advertiser "METRIC_CPM_FEE5_USD" -> Right MetricCpmFEE5Usd "METRIC_CPM_FEE5_PARTNER" -> Right MetricCpmFEE5Partner "METRIC_MEDIA_FEE3_ADVERTISER" -> Right MetricMediaFEE3Advertiser "METRIC_MEDIA_FEE3_USD" -> Right MetricMediaFEE3Usd "METRIC_MEDIA_FEE3_PARTNER" -> Right MetricMediaFEE3Partner "METRIC_MEDIA_FEE4_ADVERTISER" -> Right MetricMediaFEE4Advertiser "METRIC_MEDIA_FEE4_USD" -> Right MetricMediaFEE4Usd "METRIC_MEDIA_FEE4_PARTNER" -> Right MetricMediaFEE4Partner "METRIC_MEDIA_FEE5_ADVERTISER" -> Right MetricMediaFEE5Advertiser "METRIC_MEDIA_FEE5_USD" -> Right MetricMediaFEE5Usd "METRIC_MEDIA_FEE5_PARTNER" -> Right MetricMediaFEE5Partner "METRIC_VIDEO_COMPANION_IMPRESSIONS" -> Right MetricVideoCompanionImpressions "METRIC_VIDEO_COMPANION_CLICKS" -> Right MetricVideoCompanionClicks "METRIC_FEE16_ADVERTISER" -> Right MetricFEE16Advertiser "METRIC_FEE16_USD" -> Right MetricFEE16Usd "METRIC_FEE16_PARTNER" -> Right MetricFEE16Partner "METRIC_FEE17_ADVERTISER" -> Right MetricFEE17Advertiser "METRIC_FEE17_USD" -> Right MetricFEE17Usd "METRIC_FEE17_PARTNER" -> Right MetricFEE17Partner "METRIC_FEE18_ADVERTISER" -> Right MetricFEE18Advertiser "METRIC_FEE18_USD" -> Right MetricFEE18Usd "METRIC_FEE18_PARTNER" -> Right MetricFEE18Partner "METRIC_TRUEVIEW_VIEWS" -> Right MetricTrueviewViews "METRIC_TRUEVIEW_UNIQUE_VIEWERS" -> Right MetricTrueviewUniqueViewers "METRIC_TRUEVIEW_EARNED_VIEWS" -> Right MetricTrueviewEarnedViews "METRIC_TRUEVIEW_EARNED_SUBSCRIBERS" -> Right MetricTrueviewEarnedSubscribers "METRIC_TRUEVIEW_EARNED_PLAYLIST_ADDITIONS" -> Right MetricTrueviewEarnedPlayListAdditions "METRIC_TRUEVIEW_EARNED_LIKES" -> Right MetricTrueviewEarnedLikes "METRIC_TRUEVIEW_EARNED_SHARES" -> Right MetricTrueviewEarnedShares "METRIC_TRUEVIEW_IMPRESSION_SHARE" -> Right MetricTrueviewImpressionShare "METRIC_TRUEVIEW_LOST_IS_BUDGET" -> Right MetricTrueviewLostIsBudget "METRIC_TRUEVIEW_LOST_IS_RANK" -> Right MetricTrueviewLostIsRank "METRIC_TRUEVIEW_VIEW_THROUGH_CONVERSION" -> Right MetricTrueviewViewThroughConversion "METRIC_TRUEVIEW_CONVERSION_MANY_PER_VIEW" -> Right MetricTrueviewConversionManyPerView "METRIC_TRUEVIEW_VIEW_RATE" -> Right MetricTrueviewViewRate "METRIC_TRUEVIEW_CONVERSION_RATE_ONE_PER_VIEW" -> Right MetricTrueviewConversionRateOnePerView "METRIC_TRUEVIEW_CPV_ADVERTISER" -> Right MetricTrueviewCpvAdvertiser "METRIC_TRUEVIEW_CPV_USD" -> Right MetricTrueviewCpvUsd "METRIC_TRUEVIEW_CPV_PARTNER" -> Right MetricTrueviewCpvPartner "METRIC_FEE19_ADVERTISER" -> Right MetricFEE19Advertiser "METRIC_FEE19_USD" -> Right MetricFEE19Usd "METRIC_FEE19_PARTNER" -> Right MetricFEE19Partner 
"METRIC_TEA_TRUEVIEW_IMPRESSIONS" -> Right MetricTeaTrueviewImpressions "METRIC_TEA_TRUEVIEW_UNIQUE_COOKIES" -> Right MetricTeaTrueviewUniqueCookies "METRIC_FEE20_ADVERTISER" -> Right MetricFEE20Advertiser "METRIC_FEE20_USD" -> Right MetricFEE20Usd "METRIC_FEE20_PARTNER" -> Right MetricFEE20Partner "METRIC_FEE21_ADVERTISER" -> Right MetricFEE21Advertiser "METRIC_FEE21_USD" -> Right MetricFEE21Usd "METRIC_FEE21_PARTNER" -> Right MetricFEE21Partner "METRIC_FEE22_ADVERTISER" -> Right MetricFEE22Advertiser "METRIC_FEE22_USD" -> Right MetricFEE22Usd "METRIC_FEE22_PARTNER" -> Right MetricFEE22Partner "METRIC_TRUEVIEW_TOTAL_CONVERSION_VALUES_ADVERTISER" -> Right MetricTrueviewTotalConversionValuesAdvertiser "METRIC_TRUEVIEW_TOTAL_CONVERSION_VALUES_USD" -> Right MetricTrueviewTotalConversionValuesUsd "METRIC_TRUEVIEW_TOTAL_CONVERSION_VALUES_PARTNER" -> Right MetricTrueviewTotalConversionValuesPartner "METRIC_TRUEVIEW_CONVERSION_COST_MANY_PER_VIEW_ADVERTISER" -> Right MetricTrueviewConversionCostManyPerViewAdvertiser "METRIC_TRUEVIEW_CONVERSION_COST_MANY_PER_VIEW_USD" -> Right MetricTrueviewConversionCostManyPerViewUsd "METRIC_TRUEVIEW_CONVERSION_COST_MANY_PER_VIEW_PARTNER" -> Right MetricTrueviewConversionCostManyPerViewPartner "METRIC_PROFIT_VIEWABLE_ECPM_ADVERTISER" -> Right MetricProfitViewableEcpmAdvertiser "METRIC_PROFIT_VIEWABLE_ECPM_USD" -> Right MetricProfitViewableEcpmUsd "METRIC_PROFIT_VIEWABLE_ECPM_PARTNER" -> Right MetricProfitViewableEcpmPartner "METRIC_REVENUE_VIEWABLE_ECPM_ADVERTISER" -> Right MetricRevenueViewableEcpmAdvertiser "METRIC_REVENUE_VIEWABLE_ECPM_USD" -> Right MetricRevenueViewableEcpmUsd "METRIC_REVENUE_VIEWABLE_ECPM_PARTNER" -> Right MetricRevenueViewableEcpmPartner "METRIC_MEDIA_COST_VIEWABLE_ECPM_ADVERTISER" -> Right MetricMediaCostViewableEcpmAdvertiser "METRIC_MEDIA_COST_VIEWABLE_ECPM_USD" -> Right MetricMediaCostViewableEcpmUsd "METRIC_MEDIA_COST_VIEWABLE_ECPM_PARTNER" -> Right MetricMediaCostViewableEcpmPartner "METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_ADVERTISER" -> Right MetricTotalMediaCostViewableEcpmAdvertiser "METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_USD" -> Right MetricTotalMediaCostViewableEcpmUsd "METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_PARTNER" -> Right MetricTotalMediaCostViewableEcpmPartner "METRIC_TRUEVIEW_ENGAGEMENTS" -> Right MetricTrueviewEngagements "METRIC_TRUEVIEW_ENGAGEMENT_RATE" -> Right MetricTrueviewEngagementRate "METRIC_TRUEVIEW_AVERAGE_CPE_ADVERTISER" -> Right MetricTrueviewAverageCpeAdvertiser "METRIC_TRUEVIEW_AVERAGE_CPE_USD" -> Right MetricTrueviewAverageCpeUsd "METRIC_TRUEVIEW_AVERAGE_CPE_PARTNER" -> Right MetricTrueviewAverageCpePartner "METRIC_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS" -> Right MetricActiveViewViewableImpressions "METRIC_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS" -> Right MetricActiveViewEligibleImpressions "METRIC_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS" -> Right MetricActiveViewMeasurableImpressions "METRIC_ACTIVE_VIEW_PCT_MEASURABLE_IMPRESSIONS" -> Right MetricActiveViewPctMeasurableImpressions "METRIC_ACTIVE_VIEW_PCT_VIEWABLE_IMPRESSIONS" -> Right MetricActiveViewPctViewableImpressions "METRIC_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME" -> Right MetricActiveViewAverageViewableTime "METRIC_ACTIVE_VIEW_UNMEASURABLE_IMPRESSIONS" -> Right MetricActiveViewUnmeasurableImpressions "METRIC_ACTIVE_VIEW_UNVIEWABLE_IMPRESSIONS" -> Right MetricActiveViewUnviewableImpressions "METRIC_ACTIVE_VIEW_DISTRIBUTION_UNMEASURABLE" -> Right MetricActiveViewDistributionUnmeasurable "METRIC_ACTIVE_VIEW_DISTRIBUTION_UNVIEWABLE" -> Right MetricActiveViewDistributionUnviewable 
"METRIC_ACTIVE_VIEW_DISTRIBUTION_VIEWABLE" -> Right MetricActiveViewDistributionViewable "METRIC_ACTIVE_VIEW_PERCENT_VIEWABLE_FOR_TIME_THRESHOLD" -> Right MetricActiveViewPercentViewableForTimeThreshold "METRIC_ACTIVE_VIEW_VIEWABLE_FOR_TIME_THRESHOLD" -> Right MetricActiveViewViewableForTimeThreshold "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_AT_START" -> Right MetricActiveViewPercentVisibleAtStart "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_FIRST_QUAR" -> Right MetricActiveViewPercentVisibleFirstQuar "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_SECOND_QUAR" -> Right MetricActiveViewPercentVisibleSecondQuar "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_THIRD_QUAR" -> Right MetricActiveViewPercentVisibleThirdQuar "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_ON_COMPLETE" -> Right MetricActiveViewPercentVisibleOnComplete "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_AT_START" -> Right MetricActiveViewPercentAudibleVisibleAtStart "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_FIRST_QUAR" -> Right MetricActiveViewPercentAudibleVisibleFirstQuar "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_SECOND_QUAR" -> Right MetricActiveViewPercentAudibleVisibleSecondQuar "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_THIRD_QUAR" -> Right MetricActiveViewPercentAudibleVisibleThirdQuar "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_ON_COMPLETE" -> Right MetricActiveViewPercentAudibleVisibleOnComplete "METRIC_ACTIVE_VIEW_AUDIBLE_VISIBLE_ON_COMPLETE_IMPRESSIONS" -> Right MetricActiveViewAudibleVisibleOnCompleteImpressions "METRIC_VIEWABLE_BID_REQUESTS" -> Right MetricViewableBidRequests "METRIC_COOKIE_REACH_IMPRESSION_REACH" -> Right MetricCookieReachImpressionReach "METRIC_COOKIE_REACH_AVERAGE_IMPRESSION_FREQUENCY" -> Right MetricCookieReachAverageImpressionFrequency "METRIC_DBM_ENGAGEMENT_RATE" -> Right MetricDBmEngagementRate "METRIC_RICH_MEDIA_SCROLLS" -> Right MetricRichMediaScrolls "METRIC_CM_POST_VIEW_REVENUE" -> Right MetricCmPostViewRevenue "METRIC_CM_POST_CLICK_REVENUE" -> Right MetricCmPostClickRevenue "METRIC_FLOODLIGHT_IMPRESSIONS" -> Right MetricFloodlightImpressions "METRIC_BILLABLE_IMPRESSIONS" -> Right MetricBillableImpressions "METRIC_NIELSEN_AVERAGE_FREQUENCY" -> Right MetricNielsenAverageFrequency "METRIC_NIELSEN_IMPRESSIONS" -> Right MetricNielsenImpressions "METRIC_NIELSEN_UNIQUE_AUDIENCE" -> Right MetricNielsenUniqueAudience "METRIC_NIELSEN_GRP" -> Right MetricNielsenGrp "METRIC_NIELSEN_IMPRESSION_INDEX" -> Right MetricNielsenImpressionIndex "METRIC_NIELSEN_IMPRESSIONS_SHARE" -> Right MetricNielsenImpressionsShare "METRIC_NIELSEN_POPULATION" -> Right MetricNielsenPopulation "METRIC_NIELSEN_POPULATION_REACH" -> Right MetricNielsenPopulationReach "METRIC_NIELSEN_POPULATION_SHARE" -> Right MetricNielsenPopulationShare "METRIC_NIELSEN_REACH_INDEX" -> Right MetricNielsenReachIndex "METRIC_NIELSEN_REACH_SHARE" -> Right MetricNielsenReachShare "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_IMPRESSIONS" -> Right MetricActiveViewAudibleFullyOnScreenHalfOfDurationImpressions "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_MEASURABLE_IMPRESSIONS" -> Right MetricActiveViewAudibleFullyOnScreenHalfOfDurationMeasurableImpressions "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_RATE" -> Right MetricActiveViewAudibleFullyOnScreenHalfOfDurationRate "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_IMPRESSIONS" -> Right MetricActiveViewAudibleFullyOnScreenHalfOfDurationTrueviewImpressions "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_MEASURABLE_IMPRESSIONS" -> Right 
MetricActiveViewAudibleFullyOnScreenHalfOfDurationTrueviewMeasurableImpressions "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_RATE" -> Right MetricActiveViewAudibleFullyOnScreenHalfOfDurationTrueviewRate "METRIC_ACTIVE_VIEW_CUSTOM_METRIC_MEASURABLE_IMPRESSIONS" -> Right MetricActiveViewCustomMetricMeasurableImpressions "METRIC_ACTIVE_VIEW_CUSTOM_METRIC_VIEWABLE_IMPRESSIONS" -> Right MetricActiveViewCustomMetricViewableImpressions "METRIC_ACTIVE_VIEW_CUSTOM_METRIC_VIEWABLE_RATE" -> Right MetricActiveViewCustomMetricViewableRate "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_IMPRESSIONS" -> Right MetricActiveViewPercentAudibleImpressions "METRIC_ACTIVE_VIEW_PERCENT_FULLY_ON_SCREEN_2_SEC" -> Right MetricActiveViewPercentFullyOnScreen2Sec "METRIC_ACTIVE_VIEW_PERCENT_FULL_SCREEN" -> Right MetricActiveViewPercentFullScreen "METRIC_ACTIVE_VIEW_PERCENT_IN_BACKGROUND" -> Right MetricActiveViewPercentInBackgRound "METRIC_ACTIVE_VIEW_PERCENT_OF_AD_PLAYED" -> Right MetricActiveViewPercentOfAdPlayed "METRIC_ACTIVE_VIEW_PERCENT_OF_COMPLETED_IMPRESSIONS_AUDIBLE_AND_VISIBLE" -> Right MetricActiveViewPercentOfCompletedImpressionsAudibleAndVisible "METRIC_ACTIVE_VIEW_PERCENT_OF_COMPLETED_IMPRESSIONS_VISIBLE" -> Right MetricActiveViewPercentOfCompletedImpressionsVisible "METRIC_ACTIVE_VIEW_PERCENT_OF_FIRST_QUARTILE_IMPRESSIONS_AUDIBLE_AND_VISIBLE" -> Right MetricActiveViewPercentOfFirstQuartileImpressionsAudibleAndVisible "METRIC_ACTIVE_VIEW_PERCENT_OF_FIRST_QUARTILE_IMPRESSIONS_VISIBLE" -> Right MetricActiveViewPercentOfFirstQuartileImpressionsVisible "METRIC_ACTIVE_VIEW_PERCENT_OF_MIDPOINT_IMPRESSIONS_AUDIBLE_AND_VISIBLE" -> Right MetricActiveViewPercentOfMidpointImpressionsAudibleAndVisible "METRIC_ACTIVE_VIEW_PERCENT_OF_MIDPOINT_IMPRESSIONS_VISIBLE" -> Right MetricActiveViewPercentOfMidpointImpressionsVisible "METRIC_ACTIVE_VIEW_PERCENT_OF_THIRD_QUARTILE_IMPRESSIONS_AUDIBLE_AND_VISIBLE" -> Right MetricActiveViewPercentOfThirdQuartileImpressionsAudibleAndVisible "METRIC_ACTIVE_VIEW_PERCENT_OF_THIRD_QUARTILE_IMPRESSIONS_VISIBLE" -> Right MetricActiveViewPercentOfThirdQuartileImpressionsVisible "METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_AUDIBLE" -> Right MetricActiveViewPercentPlayTimeAudible "METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_AUDIBLE_AND_VISIBLE" -> Right MetricActiveViewPercentPlayTimeAudibleAndVisible "METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_VISIBLE" -> Right MetricActiveViewPercentPlayTimeVisible "METRIC_ADAPTED_AUDIENCE_FREQUENCY" -> Right MetricAdaptedAudienceFrequency "METRIC_ADLINGO_FEE_ADVERTISER_CURRENCY" -> Right MetricAdlingoFeeAdvertiserCurrency "METRIC_AUDIO_CLIENT_COST_ECPCL_ADVERTISER_CURRENCY" -> Right MetricAudioClientCostEcpclAdvertiserCurrency "METRIC_AUDIO_MEDIA_COST_ECPCL_ADVERTISER_CURRENCY" -> Right MetricAudioMediaCostEcpclAdvertiserCurrency "METRIC_AUDIO_MUTES_AUDIO" -> Right MetricAudioMutesAudio "METRIC_AUDIO_REVENUE_ECPCL_ADVERTISER_CURRENCY" -> Right MetricAudioRevenueEcpclAdvertiserCurrency "METRIC_AUDIO_UNMUTES_AUDIO" -> Right MetricAudioUnmutesAudio "METRIC_AUDIO_UNMUTES_VIDEO" -> Right MetricAudioUnmutesVideo "METRIC_AVERAGE_DISPLAY_TIME" -> Right MetricAverageDisplayTime "METRIC_AVERAGE_IMPRESSION_FREQUENCY_PER_USER" -> Right MetricAverageImpressionFrequencyPerUser "METRIC_AVERAGE_INTERACTION_TIME" -> Right MetricAverageInteractionTime "METRIC_AVERAGE_WATCH_TIME_PER_IMPRESSION" -> Right MetricAverageWatchTimePerImpression "METRIC_BEGIN_TO_RENDER_ELIGIBLE_IMPRESSIONS" -> Right MetricBeginToRenderEligibleImpressions "METRIC_BEGIN_TO_RENDER_IMPRESSIONS" -> Right 
MetricBeginToRenderImpressions "METRIC_BENCHMARK_FREQUENCY" -> Right MetricBenchmarkFrequency "METRIC_BRAND_LIFT_ABSOLUTE_BRAND_LIFT" -> Right MetricBrandLiftAbsoluteBrandLift "METRIC_BRAND_LIFT_ALL_SURVEY_RESPONSES" -> Right MetricBrandLiftAllSurveyResponses "METRIC_BRAND_LIFT_BASELINE_POSITIVE_RESPONSE_RATE" -> Right MetricBrandLiftBaselinePositiveResponseRate "METRIC_BRAND_LIFT_BASELINE_SURVEY_RESPONSES" -> Right MetricBrandLiftBaselineSurveyResponses "METRIC_BRAND_LIFT_COST_PER_LIFTED_USER" -> Right MetricBrandLiftCostPerLiftedUser "METRIC_BRAND_LIFT_EXPOSED_SURVEY_RESPONSES" -> Right MetricBrandLiftExposedSurveyResponses "METRIC_BRAND_LIFT_HEADROOM_BRAND_LIFT" -> Right MetricBrandLiftHeadroomBrandLift "METRIC_BRAND_LIFT_RELATIVE_BRAND_LIFT" -> Right MetricBrandLiftRelativeBrandLift "METRIC_BRAND_LIFT_USERS" -> Right MetricBrandLiftUsers "METRIC_CARD_CLICKS" -> Right MetricCardClicks "METRIC_CLIENT_COST_ADVERTISER_CURRENCY" -> Right MetricClientCostAdvertiserCurrency "METRIC_CLIENT_COST_ECPA_ADVERTISER_CURRENCY" -> Right MetricClientCostEcpaAdvertiserCurrency "METRIC_CLIENT_COST_ECPA_PC_ADVERTISER_CURRENCY" -> Right MetricClientCostEcpaPcAdvertiserCurrency "METRIC_CLIENT_COST_ECPA_PV_ADVERTISER_CURRENCY" -> Right MetricClientCostEcpaPvAdvertiserCurrency "METRIC_CLIENT_COST_ECPC_ADVERTISER_CURRENCY" -> Right MetricClientCostEcpcAdvertiserCurrency "METRIC_CLIENT_COST_ECPM_ADVERTISER_CURRENCY" -> Right MetricClientCostEcpmAdvertiserCurrency "METRIC_CLIENT_COST_VIEWABLE_ECPM_ADVERTISER_CURRENCY" -> Right MetricClientCostViewableEcpmAdvertiserCurrency "METRIC_CM_POST_CLICK_REVENUE_CROSS_ENVIRONMENT" -> Right MetricCmPostClickRevenueCrossEnvironment "METRIC_CM_POST_VIEW_REVENUE_CROSS_ENVIRONMENT" -> Right MetricCmPostViewRevenueCrossEnvironment "METRIC_COMPANION_CLICKS_AUDIO" -> Right MetricCompanionClicksAudio "METRIC_COMPANION_IMPRESSIONS_AUDIO" -> Right MetricCompanionImpressionsAudio "METRIC_COMPLETE_LISTENS_AUDIO" -> Right MetricCompleteListensAudio "METRIC_COMPLETION_RATE_AUDIO" -> Right MetricCompletionRateAudio "METRIC_COUNTERS" -> Right MetricCounters "METRIC_CUSTOM_FEE_1_ADVERTISER_CURRENCY" -> Right MetricCustomFee1AdvertiserCurrency "METRIC_CUSTOM_FEE_2_ADVERTISER_CURRENCY" -> Right MetricCustomFee2AdvertiserCurrency "METRIC_CUSTOM_FEE_3_ADVERTISER_CURRENCY" -> Right MetricCustomFee3AdvertiserCurrency "METRIC_CUSTOM_FEE_4_ADVERTISER_CURRENCY" -> Right MetricCustomFee4AdvertiserCurrency "METRIC_CUSTOM_FEE_5_ADVERTISER_CURRENCY" -> Right MetricCustomFee5AdvertiserCurrency "METRIC_CUSTOM_VALUE_PER_1000_IMPRESSIONS" -> Right MetricCustomValuePer1000Impressions "METRIC_ENGAGEMENTS" -> Right MetricEngagements "METRIC_ESTIMATED_CPM_FOR_IMPRESSIONS_WITH_CUSTOM_VALUE_ADVERTISER_CURRENCY" -> Right MetricEstimatedCpmForImpressionsWithCustomValueAdvertiserCurrency "METRIC_ESTIMATED_TOTAL_COST_FOR_IMPRESSIONS_WITH_CUSTOM_VALUE_ADVERTISER_CURRENCY" -> Right MetricEstimatedTotalCostForImpressionsWithCustomValueAdvertiserCurrency "METRIC_EXITS" -> Right MetricExits "METRIC_EXPANSIONS" -> Right MetricExpansions "METRIC_FIRST_QUARTILE_AUDIO" -> Right MetricFirstQuartileAudio "METRIC_GENERAL_INVALID_TRAFFIC_GIVT_IMPRESSIONS" -> Right MetricGeneralInvalidTrafficGivtImpressions "METRIC_GENERAL_INVALID_TRAFFIC_GIVT_TRACKED_ADS" -> Right MetricGeneralInvalidTrafficGivtTrackedAds "METRIC_GIVT_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS" -> Right MetricGivtActiveViewEligibleImpressions "METRIC_GIVT_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS" -> Right MetricGivtActiveViewMeasurableImpressions 
"METRIC_GIVT_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS" -> Right MetricGivtActiveViewViewableImpressions "METRIC_GIVT_BEGIN_TO_RENDER_IMPRESSIONS" -> Right MetricGivtBeginToRenderImpressions "METRIC_GIVT_CLICKS" -> Right MetricGivtClicks "METRIC_GMAIL_CONVERSIONS" -> Right MetricGmailConversions "METRIC_GMAIL_POST_CLICK_CONVERSIONS" -> Right MetricGmailPostClickConversions "METRIC_GMAIL_POST_VIEW_CONVERSIONS" -> Right MetricGmailPostViewConversions "METRIC_GMAIL_POTENTIAL_VIEWS" -> Right MetricGmailPotentialViews "METRIC_IMPRESSIONS_WITH_CUSTOM_VALUE" -> Right MetricImpressionsWithCustomValue "METRIC_IMPRESSIONS_WITH_POSITIVE_CUSTOM_VALUE" -> Right MetricImpressionsWithPositiveCustomValue "METRIC_IMPRESSION_CUSTOM_VALUE_COST" -> Right MetricImpressionCustomValueCost "METRIC_INTERACTIVE_IMPRESSIONS" -> Right MetricInteractiveImpressions "METRIC_INVALID_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS" -> Right MetricInvalidActiveViewEligibleImpressions "METRIC_INVALID_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS" -> Right MetricInvalidActiveViewMeasurableImpressions "METRIC_INVALID_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS" -> Right MetricInvalidActiveViewViewableImpressions "METRIC_INVALID_BEGIN_TO_RENDER_IMPRESSIONS" -> Right MetricInvalidBeginToRenderImpressions "METRIC_INVALID_CLICKS" -> Right MetricInvalidClicks "METRIC_INVALID_IMPRESSIONS" -> Right MetricInvalidImpressions "METRIC_INVALID_TRACKED_ADS" -> Right MetricInvalidTrackedAds "METRIC_MEDIA_COST_ADVERTISER_CURRENCY_PER_STORE_VISIT_ADX_ONLY" -> Right MetricMediaCostAdvertiserCurrencyPerStoreVisitAdxOnly "METRIC_MIDPOINT_AUDIO" -> Right MetricMidpointAudio "METRIC_ORIGINAL_AUDIENCE_FREQUENCY" -> Right MetricOriginalAudienceFrequency "METRIC_PAUSES_AUDIO" -> Right MetricPausesAudio "METRIC_PERCENT_IMPRESSIONS_WITH_POSITIVE_CUSTOM_VALUE" -> Right MetricPercentImpressionsWithPositiveCustomValue "METRIC_PLATFORM_FEE_RATE" -> Right MetricPlatformFeeRate "METRIC_POST_CLICK_CONVERSIONS_CROSS_ENVIRONMENT" -> Right MetricPostClickConversionsCrossEnvironment "METRIC_POST_VIEW_CONVERSIONS_CROSS_ENVIRONMENT" -> Right MetricPostViewConversionsCrossEnvironment "METRIC_POTENTIAL_IMPRESSIONS" -> Right MetricPotentialImpressions "METRIC_POTENTIAL_VIEWS" -> Right MetricPotentialViews "METRIC_PREMIUM_FEE_ADVERTISER_CURRENCY" -> Right MetricPremiumFeeAdvertiserCurrency "METRIC_PROGRAMMATIC_GUARANTEED_IMPRESSIONS_PASSED_DUE_TO_FREQUENCY" -> Right MetricProgrammaticGuaranteedImpressionsPassedDueToFrequency "METRIC_PROGRAMMATIC_GUARANTEED_SAVINGS_RE_INVESTED_DUE_TO_FREQUENCY_ADVERTISER_CURRENCY" -> Right MetricProgrammaticGuaranteedSavingsReInvestedDueToFrequencyAdvertiserCurrency "METRIC_REFUND_BILLABLE_COST_ADVERTISER_CURRENCY" -> Right MetricRefundBillableCostAdvertiserCurrency "METRIC_REFUND_MEDIA_COST_ADVERTISER_CURRENCY" -> Right MetricRefundMediaCostAdvertiserCurrency "METRIC_REFUND_PLATFORM_FEE_ADVERTISER_CURRENCY" -> Right MetricRefundPlatformFeeAdvertiserCurrency "METRIC_REVENUE_ADVERTISER_CURRENCY_PER_STORE_VISIT_ADX_ONLY" -> Right MetricRevenueAdvertiserCurrencyPerStoreVisitAdxOnly "METRIC_RICH_MEDIA_ENGAGEMENTS" -> Right MetricRichMediaEngagements "METRIC_STARTS_AUDIO" -> Right MetricStartsAudio "METRIC_STOPS_AUDIO" -> Right MetricStopsAudio "METRIC_STORE_VISITS_ADX_ONLY" -> Right MetricStoreVisitsAdxOnly "METRIC_STORE_VISIT_CONVERSIONS" -> Right MetricStoreVisitConversions "METRIC_THIRD_QUARTILE_AUDIO" -> Right MetricThirdQuartileAudio "METRIC_TIMERS" -> Right MetricTimers "METRIC_TOTAL_AUDIO_MEDIA_COST_ECPCL_ADVERTISER_CURRENCY" -> Right 
MetricTotalAudioMediaCostEcpclAdvertiserCurrency "METRIC_TOTAL_CONVERSIONS_CROSS_ENVIRONMENT" -> Right MetricTotalConversionsCrossEnvironment "METRIC_TOTAL_DISPLAY_TIME" -> Right MetricTotalDisplayTime "METRIC_TOTAL_IMPRESSION_CUSTOM_VALUE" -> Right MetricTotalImpressionCustomValue "METRIC_TOTAL_INTERACTION_TIME" -> Right MetricTotalInteractionTime "METRIC_TOTAL_MEDIA_COST_ADVERTISER_CURRENCY_PER_STORE_VISIT_ADX_ONLY" -> Right MetricTotalMediaCostAdvertiserCurrencyPerStoreVisitAdxOnly "METRIC_TOTAL_USERS" -> Right MetricTotalUsers "METRIC_TRACKED_ADS" -> Right MetricTrackedAds "METRIC_TRUEVIEW_GENERAL_INVALID_TRAFFIC_GIVT_VIEWS" -> Right MetricTrueviewGeneralInvalidTrafficGivtViews "METRIC_TRUEVIEW_INVALID_VIEWS" -> Right MetricTrueviewInvalidViews "METRIC_UNIQUE_COOKIES_WITH_IMPRESSIONS" -> Right MetricUniqueCookiesWithImpressions "METRIC_UNIQUE_REACH_AVERAGE_IMPRESSION_FREQUENCY" -> Right MetricUniqueReachAverageImpressionFrequency "METRIC_UNIQUE_REACH_CLICK_REACH" -> Right MetricUniqueReachClickReach "METRIC_UNIQUE_REACH_IMPRESSION_REACH" -> Right MetricUniqueReachImpressionReach "METRIC_UNIQUE_REACH_TOTAL_REACH" -> Right MetricUniqueReachTotalReach "METRIC_VERIFIABLE_IMPRESSIONS" -> Right MetricVerifiableImpressions "METRIC_VIDEO_CLIENT_COST_ECPCV_ADVERTISER_CURRENCY" -> Right MetricVideoClientCostEcpcvAdvertiserCurrency "METRIC_WATCH_TIME" -> Right MetricWatchTime "METRIC_LAST_TOUCH_TOTAL_CONVERSIONS" -> Right MetricLastTouchTotalConversions "METRIC_LAST_TOUCH_CLICK_THROUGH_CONVERSIONS" -> Right MetricLastTouchClickThroughConversions "METRIC_LAST_TOUCH_VIEW_THROUGH_CONVERSIONS" -> Right MetricLastTouchViewThroughConversions "METRIC_TOTAL_PATHS" -> Right MetricTotalPaths "METRIC_TOTAL_EXPOSURES" -> Right MetricTotalExposures "METRIC_PATH_CONVERSION_RATE" -> Right MetricPathConversionRate "METRIC_CONVERTING_PATHS" -> Right MetricConvertingPaths "METRIC_ACTIVITY_REVENUE" -> Right MetricActivityRevenue "METRIC_PERCENT_INVALID_IMPRESSIONS_PREBID" -> Right MetricPercentInvalidImpressionsPreBid "METRIC_GRP_CORRECTED_IMPRESSIONS" -> Right MetricGrpCorrectedImpressions "METRIC_DEMO_CORRECTED_CLICKS" -> Right MetricDemoCorrectedClicks "METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_BY_DEMO" -> Right MetricVirtualPeopleImpressionReachByDemo "METRIC_VIRTUAL_PEOPLE_CLICK_REACH_BY_DEMO" -> Right MetricVirtualPeopleClickReachByDemo "METRIC_VIRTUAL_PEOPLE_AVERAGE_IMPRESSION_FREQUENCY_BY_DEMO" -> Right MetricVirtualPeopleAverageImpressionFrequencyByDemo "METRIC_DEMO_COMPOSITION_IMPRESSION" -> Right MetricDemoComPositionImpression "METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_SHARE_PERCENT" -> Right MetricVirtualPeopleImpressionReachSharePercent "METRIC_DEMO_POPULATION" -> Right MetricDemoPopulation "METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_PERCENT" -> Right MetricVirtualPeopleImpressionReachPercent "METRIC_TARGET_RATING_POINTS" -> Right MetricTargetRatingPoints "METRIC_PROVISIONAL_IMPRESSIONS" -> Right MetricProvisionalImpressions "METRIC_VENDOR_BLOCKED_ADS" -> Right MetricVendorBlockedAds "METRIC_GRP_CORRECTED_VIEWABLE_IMPRESSIONS" -> Right MetricGrpCorrectedViewableImpressions "METRIC_GRP_CORRECTED_VIEWABLE_IMPRESSIONS_SHARE_PERCENT" -> Right MetricGrpCorrectedViewableImpressionsSharePercent "METRIC_VIEWABLE_GROSS_RATING_POINTS" -> Right MetricViewableGrossRatingPoints "METRIC_VIRTUAL_PEOPLE_AVERAGE_VIEWABLE_IMPRESSION_FREQUENCY_BY_DEMO" -> Right MetricVirtualPeopleAverageViewableImpressionFrequencyByDemo "METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_BY_DEMO" -> Right 
MetricVirtualPeopleViewableImpressionReachByDemo "METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_PERCENT" -> Right MetricVirtualPeopleViewableImpressionReachPercent "METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_SHARE_PERCENT" -> Right MetricVirtualPeopleViewableImpressionReachSharePercent "METRIC_ENGAGEMENT_RATE" -> Right MetricEngagementRate "METRIC_CM360_POST_VIEW_REVENUE" -> Right MetricCM360PostViewRevenue "METRIC_CM360_POST_CLICK_REVENUE" -> Right MetricCM360PostClickRevenue "METRIC_CM360_POST_CLICK_REVENUE_CROSS_ENVIRONMENT" -> Right MetricCM360PostClickRevenueCrossEnvironment "METRIC_CM360_POST_VIEW_REVENUE_CROSS_ENVIRONMENT" -> Right MetricCM360PostViewRevenueCrossEnvironment "METRIC_PERCENTAGE_FROM_CURRENT_IO_GOAL" -> Right MetricPercentageFromCurrentIoGoal "METRIC_DUPLICATE_FLOODLIGHT_IMPRESSIONS" -> Right MetricDuplicateFloodlightImpressions "METRIC_COOKIE_CONSENTED_FLOODLIGHT_IMPRESSIONS" -> Right MetricCookieConsentedFloodlightImpressions "METRIC_COOKIE_UNCONSENTED_FLOODLIGHT_IMPRESSIONS" -> Right MetricCookieUnconsentedFloodlightImpressions "METRIC_TRACKING_UNCONSENTED_CLICKS" -> Right MetricTrackingUnconsentedClicks x -> Left ("Unable to parse ParametersMetricsItem from: " <> x) instance ToHttpApiData ParametersMetricsItem where toQueryParam = \case MetricUnknown -> "METRIC_UNKNOWN" MetricImpressions -> "METRIC_IMPRESSIONS" MetricClicks -> "METRIC_CLICKS" MetricLastImpressions -> "METRIC_LAST_IMPRESSIONS" MetricLastClicks -> "METRIC_LAST_CLICKS" MetricTotalConversions -> "METRIC_TOTAL_CONVERSIONS" MetricMediaCostAdvertiser -> "METRIC_MEDIA_COST_ADVERTISER" MetricMediaCostUsd -> "METRIC_MEDIA_COST_USD" MetricMediaCostPartner -> "METRIC_MEDIA_COST_PARTNER" MetricDataCostAdvertiser -> "METRIC_DATA_COST_ADVERTISER" MetricDataCostUsd -> "METRIC_DATA_COST_USD" MetricDataCostPartner -> "METRIC_DATA_COST_PARTNER" MetricCpmFEE1Advertiser -> "METRIC_CPM_FEE1_ADVERTISER" MetricCpmFEE1Usd -> "METRIC_CPM_FEE1_USD" MetricCpmFEE1Partner -> "METRIC_CPM_FEE1_PARTNER" MetricCpmFEE2Advertiser -> "METRIC_CPM_FEE2_ADVERTISER" MetricCpmFEE2Usd -> "METRIC_CPM_FEE2_USD" MetricCpmFEE2Partner -> "METRIC_CPM_FEE2_PARTNER" MetricMediaFEE1Advertiser -> "METRIC_MEDIA_FEE1_ADVERTISER" MetricMediaFEE1Usd -> "METRIC_MEDIA_FEE1_USD" MetricMediaFEE1Partner -> "METRIC_MEDIA_FEE1_PARTNER" MetricMediaFEE2Advertiser -> "METRIC_MEDIA_FEE2_ADVERTISER" MetricMediaFEE2Usd -> "METRIC_MEDIA_FEE2_USD" MetricMediaFEE2Partner -> "METRIC_MEDIA_FEE2_PARTNER" MetricRevenueAdvertiser -> "METRIC_REVENUE_ADVERTISER" MetricRevenueUsd -> "METRIC_REVENUE_USD" MetricRevenuePartner -> "METRIC_REVENUE_PARTNER" MetricProfitAdvertiser -> "METRIC_PROFIT_ADVERTISER" MetricProfitUsd -> "METRIC_PROFIT_USD" MetricProfitPartner -> "METRIC_PROFIT_PARTNER" MetricProfitMargin -> "METRIC_PROFIT_MARGIN" MetricTotalMediaCostUsd -> "METRIC_TOTAL_MEDIA_COST_USD" MetricTotalMediaCostPartner -> "METRIC_TOTAL_MEDIA_COST_PARTNER" MetricTotalMediaCostAdvertiser -> "METRIC_TOTAL_MEDIA_COST_ADVERTISER" MetricBillableCostUsd -> "METRIC_BILLABLE_COST_USD" MetricBillableCostPartner -> "METRIC_BILLABLE_COST_PARTNER" MetricBillableCostAdvertiser -> "METRIC_BILLABLE_COST_ADVERTISER" MetricPlatformFeeUsd -> "METRIC_PLATFORM_FEE_USD" MetricPlatformFeePartner -> "METRIC_PLATFORM_FEE_PARTNER" MetricPlatformFeeAdvertiser -> "METRIC_PLATFORM_FEE_ADVERTISER" MetricVideoCompletionRate -> "METRIC_VIDEO_COMPLETION_RATE" MetricProfitEcpmAdvertiser -> "METRIC_PROFIT_ECPM_ADVERTISER" MetricProfitEcpmUsd -> "METRIC_PROFIT_ECPM_USD" MetricProfitEcpmPartner -> 
"METRIC_PROFIT_ECPM_PARTNER" MetricRevenueEcpmAdvertiser -> "METRIC_REVENUE_ECPM_ADVERTISER" MetricRevenueEcpmUsd -> "METRIC_REVENUE_ECPM_USD" MetricRevenueEcpmPartner -> "METRIC_REVENUE_ECPM_PARTNER" MetricRevenueEcpcAdvertiser -> "METRIC_REVENUE_ECPC_ADVERTISER" MetricRevenueEcpcUsd -> "METRIC_REVENUE_ECPC_USD" MetricRevenueEcpcPartner -> "METRIC_REVENUE_ECPC_PARTNER" MetricRevenueEcpaAdvertiser -> "METRIC_REVENUE_ECPA_ADVERTISER" MetricRevenueEcpaUsd -> "METRIC_REVENUE_ECPA_USD" MetricRevenueEcpaPartner -> "METRIC_REVENUE_ECPA_PARTNER" MetricRevenueEcpapvAdvertiser -> "METRIC_REVENUE_ECPAPV_ADVERTISER" MetricRevenueEcpapvUsd -> "METRIC_REVENUE_ECPAPV_USD" MetricRevenueEcpapvPartner -> "METRIC_REVENUE_ECPAPV_PARTNER" MetricRevenueEcpapcAdvertiser -> "METRIC_REVENUE_ECPAPC_ADVERTISER" MetricRevenueEcpapcUsd -> "METRIC_REVENUE_ECPAPC_USD" MetricRevenueEcpapcPartner -> "METRIC_REVENUE_ECPAPC_PARTNER" MetricMediaCostEcpmAdvertiser -> "METRIC_MEDIA_COST_ECPM_ADVERTISER" MetricMediaCostEcpmUsd -> "METRIC_MEDIA_COST_ECPM_USD" MetricMediaCostEcpmPartner -> "METRIC_MEDIA_COST_ECPM_PARTNER" MetricMediaCostEcpcAdvertiser -> "METRIC_MEDIA_COST_ECPC_ADVERTISER" MetricMediaCostEcpcUsd -> "METRIC_MEDIA_COST_ECPC_USD" MetricMediaCostEcpcPartner -> "METRIC_MEDIA_COST_ECPC_PARTNER" MetricMediaCostEcpaAdvertiser -> "METRIC_MEDIA_COST_ECPA_ADVERTISER" MetricMediaCostEcpaUsd -> "METRIC_MEDIA_COST_ECPA_USD" MetricMediaCostEcpaPartner -> "METRIC_MEDIA_COST_ECPA_PARTNER" MetricMediaCostEcpapvAdvertiser -> "METRIC_MEDIA_COST_ECPAPV_ADVERTISER" MetricMediaCostEcpapvUsd -> "METRIC_MEDIA_COST_ECPAPV_USD" MetricMediaCostEcpapvPartner -> "METRIC_MEDIA_COST_ECPAPV_PARTNER" MetricMediaCostEcpapcAdvertiser -> "METRIC_MEDIA_COST_ECPAPC_ADVERTISER" MetricMediaCostEcpapcUsd -> "METRIC_MEDIA_COST_ECPAPC_USD" MetricMediaCostEcpapcPartner -> "METRIC_MEDIA_COST_ECPAPC_PARTNER" MetricTotalMediaCostEcpmAdvertiser -> "METRIC_TOTAL_MEDIA_COST_ECPM_ADVERTISER" MetricTotalMediaCostEcpmUsd -> "METRIC_TOTAL_MEDIA_COST_ECPM_USD" MetricTotalMediaCostEcpmPartner -> "METRIC_TOTAL_MEDIA_COST_ECPM_PARTNER" MetricTotalMediaCostEcpcAdvertiser -> "METRIC_TOTAL_MEDIA_COST_ECPC_ADVERTISER" MetricTotalMediaCostEcpcUsd -> "METRIC_TOTAL_MEDIA_COST_ECPC_USD" MetricTotalMediaCostEcpcPartner -> "METRIC_TOTAL_MEDIA_COST_ECPC_PARTNER" MetricTotalMediaCostEcpaAdvertiser -> "METRIC_TOTAL_MEDIA_COST_ECPA_ADVERTISER" MetricTotalMediaCostEcpaUsd -> "METRIC_TOTAL_MEDIA_COST_ECPA_USD" MetricTotalMediaCostEcpaPartner -> "METRIC_TOTAL_MEDIA_COST_ECPA_PARTNER" MetricTotalMediaCostEcpapvAdvertiser -> "METRIC_TOTAL_MEDIA_COST_ECPAPV_ADVERTISER" MetricTotalMediaCostEcpapvUsd -> "METRIC_TOTAL_MEDIA_COST_ECPAPV_USD" MetricTotalMediaCostEcpapvPartner -> "METRIC_TOTAL_MEDIA_COST_ECPAPV_PARTNER" MetricTotalMediaCostEcpapcAdvertiser -> "METRIC_TOTAL_MEDIA_COST_ECPAPC_ADVERTISER" MetricTotalMediaCostEcpapcUsd -> "METRIC_TOTAL_MEDIA_COST_ECPAPC_USD" MetricTotalMediaCostEcpapcPartner -> "METRIC_TOTAL_MEDIA_COST_ECPAPC_PARTNER" MetricRichMediaVideoPlays -> "METRIC_RICH_MEDIA_VIDEO_PLAYS" MetricRichMediaVideoCompletions -> "METRIC_RICH_MEDIA_VIDEO_COMPLETIONS" MetricRichMediaVideoPauses -> "METRIC_RICH_MEDIA_VIDEO_PAUSES" MetricRichMediaVideoMutes -> "METRIC_RICH_MEDIA_VIDEO_MUTES" MetricRichMediaVideoMidpoints -> "METRIC_RICH_MEDIA_VIDEO_MIDPOINTS" MetricRichMediaVideoFullScreens -> "METRIC_RICH_MEDIA_VIDEO_FULL_SCREENS" MetricRichMediaVideoFirstQuartileCompletes -> "METRIC_RICH_MEDIA_VIDEO_FIRST_QUARTILE_COMPLETES" MetricRichMediaVideoThirdQuartileCompletes -> 
"METRIC_RICH_MEDIA_VIDEO_THIRD_QUARTILE_COMPLETES" MetricClickToPostClickConversionRate -> "METRIC_CLICK_TO_POST_CLICK_CONVERSION_RATE" MetricImpressionsToConversionRate -> "METRIC_IMPRESSIONS_TO_CONVERSION_RATE" MetricConversionsPerMille -> "METRIC_CONVERSIONS_PER_MILLE" MetricCtr -> "METRIC_CTR" MetricBidRequests -> "METRIC_BID_REQUESTS" MetricUniqueVisitorsCookies -> "METRIC_UNIQUE_VISITORS_COOKIES" MetricRevenueEcpcvAdvertiser -> "METRIC_REVENUE_ECPCV_ADVERTISER" MetricRevenueEcpcvUsd -> "METRIC_REVENUE_ECPCV_USD" MetricRevenueEcpcvPartner -> "METRIC_REVENUE_ECPCV_PARTNER" MetricMediaCostEcpcvAdvertiser -> "METRIC_MEDIA_COST_ECPCV_ADVERTISER" MetricMediaCostEcpcvUsd -> "METRIC_MEDIA_COST_ECPCV_USD" MetricMediaCostEcpcvPartner -> "METRIC_MEDIA_COST_ECPCV_PARTNER" MetricTotalMediaCostEcpcvAdvertiser -> "METRIC_TOTAL_MEDIA_COST_ECPCV_ADVERTISER" MetricTotalMediaCostEcpcvUsd -> "METRIC_TOTAL_MEDIA_COST_ECPCV_USD" MetricTotalMediaCostEcpcvPartner -> "METRIC_TOTAL_MEDIA_COST_ECPCV_PARTNER" MetricRichMediaVideoSkips -> "METRIC_RICH_MEDIA_VIDEO_SKIPS" MetricFEE2Advertiser -> "METRIC_FEE2_ADVERTISER" MetricFEE2Usd -> "METRIC_FEE2_USD" MetricFEE2Partner -> "METRIC_FEE2_PARTNER" MetricFEE3Advertiser -> "METRIC_FEE3_ADVERTISER" MetricFEE3Usd -> "METRIC_FEE3_USD" MetricFEE3Partner -> "METRIC_FEE3_PARTNER" MetricFEE4Advertiser -> "METRIC_FEE4_ADVERTISER" MetricFEE4Usd -> "METRIC_FEE4_USD" MetricFEE4Partner -> "METRIC_FEE4_PARTNER" MetricFEE5Advertiser -> "METRIC_FEE5_ADVERTISER" MetricFEE5Usd -> "METRIC_FEE5_USD" MetricFEE5Partner -> "METRIC_FEE5_PARTNER" MetricFEE6Advertiser -> "METRIC_FEE6_ADVERTISER" MetricFEE6Usd -> "METRIC_FEE6_USD" MetricFEE6Partner -> "METRIC_FEE6_PARTNER" MetricFEE7Advertiser -> "METRIC_FEE7_ADVERTISER" MetricFEE7Usd -> "METRIC_FEE7_USD" MetricFEE7Partner -> "METRIC_FEE7_PARTNER" MetricFEE8Advertiser -> "METRIC_FEE8_ADVERTISER" MetricFEE8Usd -> "METRIC_FEE8_USD" MetricFEE8Partner -> "METRIC_FEE8_PARTNER" MetricFEE9Advertiser -> "METRIC_FEE9_ADVERTISER" MetricFEE9Usd -> "METRIC_FEE9_USD" MetricFEE9Partner -> "METRIC_FEE9_PARTNER" MetricFEE10Advertiser -> "METRIC_FEE10_ADVERTISER" MetricFEE10Usd -> "METRIC_FEE10_USD" MetricFEE10Partner -> "METRIC_FEE10_PARTNER" MetricFEE11Advertiser -> "METRIC_FEE11_ADVERTISER" MetricFEE11Usd -> "METRIC_FEE11_USD" MetricFEE11Partner -> "METRIC_FEE11_PARTNER" MetricFEE12Advertiser -> "METRIC_FEE12_ADVERTISER" MetricFEE12Usd -> "METRIC_FEE12_USD" MetricFEE12Partner -> "METRIC_FEE12_PARTNER" MetricFEE13Advertiser -> "METRIC_FEE13_ADVERTISER" MetricFEE13Usd -> "METRIC_FEE13_USD" MetricFEE13Partner -> "METRIC_FEE13_PARTNER" MetricFEE14Advertiser -> "METRIC_FEE14_ADVERTISER" MetricFEE14Usd -> "METRIC_FEE14_USD" MetricFEE14Partner -> "METRIC_FEE14_PARTNER" MetricFEE15Advertiser -> "METRIC_FEE15_ADVERTISER" MetricFEE15Usd -> "METRIC_FEE15_USD" MetricFEE15Partner -> "METRIC_FEE15_PARTNER" MetricCpmFEE3Advertiser -> "METRIC_CPM_FEE3_ADVERTISER" MetricCpmFEE3Usd -> "METRIC_CPM_FEE3_USD" MetricCpmFEE3Partner -> "METRIC_CPM_FEE3_PARTNER" MetricCpmFEE4Advertiser -> "METRIC_CPM_FEE4_ADVERTISER" MetricCpmFEE4Usd -> "METRIC_CPM_FEE4_USD" MetricCpmFEE4Partner -> "METRIC_CPM_FEE4_PARTNER" MetricCpmFEE5Advertiser -> "METRIC_CPM_FEE5_ADVERTISER" MetricCpmFEE5Usd -> "METRIC_CPM_FEE5_USD" MetricCpmFEE5Partner -> "METRIC_CPM_FEE5_PARTNER" MetricMediaFEE3Advertiser -> "METRIC_MEDIA_FEE3_ADVERTISER" MetricMediaFEE3Usd -> "METRIC_MEDIA_FEE3_USD" MetricMediaFEE3Partner -> "METRIC_MEDIA_FEE3_PARTNER" MetricMediaFEE4Advertiser -> "METRIC_MEDIA_FEE4_ADVERTISER" 
MetricMediaFEE4Usd -> "METRIC_MEDIA_FEE4_USD" MetricMediaFEE4Partner -> "METRIC_MEDIA_FEE4_PARTNER" MetricMediaFEE5Advertiser -> "METRIC_MEDIA_FEE5_ADVERTISER" MetricMediaFEE5Usd -> "METRIC_MEDIA_FEE5_USD" MetricMediaFEE5Partner -> "METRIC_MEDIA_FEE5_PARTNER" MetricVideoCompanionImpressions -> "METRIC_VIDEO_COMPANION_IMPRESSIONS" MetricVideoCompanionClicks -> "METRIC_VIDEO_COMPANION_CLICKS" MetricFEE16Advertiser -> "METRIC_FEE16_ADVERTISER" MetricFEE16Usd -> "METRIC_FEE16_USD" MetricFEE16Partner -> "METRIC_FEE16_PARTNER" MetricFEE17Advertiser -> "METRIC_FEE17_ADVERTISER" MetricFEE17Usd -> "METRIC_FEE17_USD" MetricFEE17Partner -> "METRIC_FEE17_PARTNER" MetricFEE18Advertiser -> "METRIC_FEE18_ADVERTISER" MetricFEE18Usd -> "METRIC_FEE18_USD" MetricFEE18Partner -> "METRIC_FEE18_PARTNER" MetricTrueviewViews -> "METRIC_TRUEVIEW_VIEWS" MetricTrueviewUniqueViewers -> "METRIC_TRUEVIEW_UNIQUE_VIEWERS" MetricTrueviewEarnedViews -> "METRIC_TRUEVIEW_EARNED_VIEWS" MetricTrueviewEarnedSubscribers -> "METRIC_TRUEVIEW_EARNED_SUBSCRIBERS" MetricTrueviewEarnedPlayListAdditions -> "METRIC_TRUEVIEW_EARNED_PLAYLIST_ADDITIONS" MetricTrueviewEarnedLikes -> "METRIC_TRUEVIEW_EARNED_LIKES" MetricTrueviewEarnedShares -> "METRIC_TRUEVIEW_EARNED_SHARES" MetricTrueviewImpressionShare -> "METRIC_TRUEVIEW_IMPRESSION_SHARE" MetricTrueviewLostIsBudget -> "METRIC_TRUEVIEW_LOST_IS_BUDGET" MetricTrueviewLostIsRank -> "METRIC_TRUEVIEW_LOST_IS_RANK" MetricTrueviewViewThroughConversion -> "METRIC_TRUEVIEW_VIEW_THROUGH_CONVERSION" MetricTrueviewConversionManyPerView -> "METRIC_TRUEVIEW_CONVERSION_MANY_PER_VIEW" MetricTrueviewViewRate -> "METRIC_TRUEVIEW_VIEW_RATE" MetricTrueviewConversionRateOnePerView -> "METRIC_TRUEVIEW_CONVERSION_RATE_ONE_PER_VIEW" MetricTrueviewCpvAdvertiser -> "METRIC_TRUEVIEW_CPV_ADVERTISER" MetricTrueviewCpvUsd -> "METRIC_TRUEVIEW_CPV_USD" MetricTrueviewCpvPartner -> "METRIC_TRUEVIEW_CPV_PARTNER" MetricFEE19Advertiser -> "METRIC_FEE19_ADVERTISER" MetricFEE19Usd -> "METRIC_FEE19_USD" MetricFEE19Partner -> "METRIC_FEE19_PARTNER" MetricTeaTrueviewImpressions -> "METRIC_TEA_TRUEVIEW_IMPRESSIONS" MetricTeaTrueviewUniqueCookies -> "METRIC_TEA_TRUEVIEW_UNIQUE_COOKIES" MetricFEE20Advertiser -> "METRIC_FEE20_ADVERTISER" MetricFEE20Usd -> "METRIC_FEE20_USD" MetricFEE20Partner -> "METRIC_FEE20_PARTNER" MetricFEE21Advertiser -> "METRIC_FEE21_ADVERTISER" MetricFEE21Usd -> "METRIC_FEE21_USD" MetricFEE21Partner -> "METRIC_FEE21_PARTNER" MetricFEE22Advertiser -> "METRIC_FEE22_ADVERTISER" MetricFEE22Usd -> "METRIC_FEE22_USD" MetricFEE22Partner -> "METRIC_FEE22_PARTNER" MetricTrueviewTotalConversionValuesAdvertiser -> "METRIC_TRUEVIEW_TOTAL_CONVERSION_VALUES_ADVERTISER" MetricTrueviewTotalConversionValuesUsd -> "METRIC_TRUEVIEW_TOTAL_CONVERSION_VALUES_USD" MetricTrueviewTotalConversionValuesPartner -> "METRIC_TRUEVIEW_TOTAL_CONVERSION_VALUES_PARTNER" MetricTrueviewConversionCostManyPerViewAdvertiser -> "METRIC_TRUEVIEW_CONVERSION_COST_MANY_PER_VIEW_ADVERTISER" MetricTrueviewConversionCostManyPerViewUsd -> "METRIC_TRUEVIEW_CONVERSION_COST_MANY_PER_VIEW_USD" MetricTrueviewConversionCostManyPerViewPartner -> "METRIC_TRUEVIEW_CONVERSION_COST_MANY_PER_VIEW_PARTNER" MetricProfitViewableEcpmAdvertiser -> "METRIC_PROFIT_VIEWABLE_ECPM_ADVERTISER" MetricProfitViewableEcpmUsd -> "METRIC_PROFIT_VIEWABLE_ECPM_USD" MetricProfitViewableEcpmPartner -> "METRIC_PROFIT_VIEWABLE_ECPM_PARTNER" MetricRevenueViewableEcpmAdvertiser -> "METRIC_REVENUE_VIEWABLE_ECPM_ADVERTISER" MetricRevenueViewableEcpmUsd -> "METRIC_REVENUE_VIEWABLE_ECPM_USD" 
MetricRevenueViewableEcpmPartner -> "METRIC_REVENUE_VIEWABLE_ECPM_PARTNER" MetricMediaCostViewableEcpmAdvertiser -> "METRIC_MEDIA_COST_VIEWABLE_ECPM_ADVERTISER" MetricMediaCostViewableEcpmUsd -> "METRIC_MEDIA_COST_VIEWABLE_ECPM_USD" MetricMediaCostViewableEcpmPartner -> "METRIC_MEDIA_COST_VIEWABLE_ECPM_PARTNER" MetricTotalMediaCostViewableEcpmAdvertiser -> "METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_ADVERTISER" MetricTotalMediaCostViewableEcpmUsd -> "METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_USD" MetricTotalMediaCostViewableEcpmPartner -> "METRIC_TOTAL_MEDIA_COST_VIEWABLE_ECPM_PARTNER" MetricTrueviewEngagements -> "METRIC_TRUEVIEW_ENGAGEMENTS" MetricTrueviewEngagementRate -> "METRIC_TRUEVIEW_ENGAGEMENT_RATE" MetricTrueviewAverageCpeAdvertiser -> "METRIC_TRUEVIEW_AVERAGE_CPE_ADVERTISER" MetricTrueviewAverageCpeUsd -> "METRIC_TRUEVIEW_AVERAGE_CPE_USD" MetricTrueviewAverageCpePartner -> "METRIC_TRUEVIEW_AVERAGE_CPE_PARTNER" MetricActiveViewViewableImpressions -> "METRIC_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS" MetricActiveViewEligibleImpressions -> "METRIC_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS" MetricActiveViewMeasurableImpressions -> "METRIC_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS" MetricActiveViewPctMeasurableImpressions -> "METRIC_ACTIVE_VIEW_PCT_MEASURABLE_IMPRESSIONS" MetricActiveViewPctViewableImpressions -> "METRIC_ACTIVE_VIEW_PCT_VIEWABLE_IMPRESSIONS" MetricActiveViewAverageViewableTime -> "METRIC_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME" MetricActiveViewUnmeasurableImpressions -> "METRIC_ACTIVE_VIEW_UNMEASURABLE_IMPRESSIONS" MetricActiveViewUnviewableImpressions -> "METRIC_ACTIVE_VIEW_UNVIEWABLE_IMPRESSIONS" MetricActiveViewDistributionUnmeasurable -> "METRIC_ACTIVE_VIEW_DISTRIBUTION_UNMEASURABLE" MetricActiveViewDistributionUnviewable -> "METRIC_ACTIVE_VIEW_DISTRIBUTION_UNVIEWABLE" MetricActiveViewDistributionViewable -> "METRIC_ACTIVE_VIEW_DISTRIBUTION_VIEWABLE" MetricActiveViewPercentViewableForTimeThreshold -> "METRIC_ACTIVE_VIEW_PERCENT_VIEWABLE_FOR_TIME_THRESHOLD" MetricActiveViewViewableForTimeThreshold -> "METRIC_ACTIVE_VIEW_VIEWABLE_FOR_TIME_THRESHOLD" MetricActiveViewPercentVisibleAtStart -> "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_AT_START" MetricActiveViewPercentVisibleFirstQuar -> "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_FIRST_QUAR" MetricActiveViewPercentVisibleSecondQuar -> "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_SECOND_QUAR" MetricActiveViewPercentVisibleThirdQuar -> "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_THIRD_QUAR" MetricActiveViewPercentVisibleOnComplete -> "METRIC_ACTIVE_VIEW_PERCENT_VISIBLE_ON_COMPLETE" MetricActiveViewPercentAudibleVisibleAtStart -> "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_AT_START" MetricActiveViewPercentAudibleVisibleFirstQuar -> "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_FIRST_QUAR" MetricActiveViewPercentAudibleVisibleSecondQuar -> "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_SECOND_QUAR" MetricActiveViewPercentAudibleVisibleThirdQuar -> "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_THIRD_QUAR" MetricActiveViewPercentAudibleVisibleOnComplete -> "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_VISIBLE_ON_COMPLETE" MetricActiveViewAudibleVisibleOnCompleteImpressions -> "METRIC_ACTIVE_VIEW_AUDIBLE_VISIBLE_ON_COMPLETE_IMPRESSIONS" MetricViewableBidRequests -> "METRIC_VIEWABLE_BID_REQUESTS" MetricCookieReachImpressionReach -> "METRIC_COOKIE_REACH_IMPRESSION_REACH" MetricCookieReachAverageImpressionFrequency -> "METRIC_COOKIE_REACH_AVERAGE_IMPRESSION_FREQUENCY" MetricDBmEngagementRate -> "METRIC_DBM_ENGAGEMENT_RATE" MetricRichMediaScrolls -> "METRIC_RICH_MEDIA_SCROLLS" MetricCmPostViewRevenue -> 
"METRIC_CM_POST_VIEW_REVENUE" MetricCmPostClickRevenue -> "METRIC_CM_POST_CLICK_REVENUE" MetricFloodlightImpressions -> "METRIC_FLOODLIGHT_IMPRESSIONS" MetricBillableImpressions -> "METRIC_BILLABLE_IMPRESSIONS" MetricNielsenAverageFrequency -> "METRIC_NIELSEN_AVERAGE_FREQUENCY" MetricNielsenImpressions -> "METRIC_NIELSEN_IMPRESSIONS" MetricNielsenUniqueAudience -> "METRIC_NIELSEN_UNIQUE_AUDIENCE" MetricNielsenGrp -> "METRIC_NIELSEN_GRP" MetricNielsenImpressionIndex -> "METRIC_NIELSEN_IMPRESSION_INDEX" MetricNielsenImpressionsShare -> "METRIC_NIELSEN_IMPRESSIONS_SHARE" MetricNielsenPopulation -> "METRIC_NIELSEN_POPULATION" MetricNielsenPopulationReach -> "METRIC_NIELSEN_POPULATION_REACH" MetricNielsenPopulationShare -> "METRIC_NIELSEN_POPULATION_SHARE" MetricNielsenReachIndex -> "METRIC_NIELSEN_REACH_INDEX" MetricNielsenReachShare -> "METRIC_NIELSEN_REACH_SHARE" MetricActiveViewAudibleFullyOnScreenHalfOfDurationImpressions -> "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_IMPRESSIONS" MetricActiveViewAudibleFullyOnScreenHalfOfDurationMeasurableImpressions -> "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_MEASURABLE_IMPRESSIONS" MetricActiveViewAudibleFullyOnScreenHalfOfDurationRate -> "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_RATE" MetricActiveViewAudibleFullyOnScreenHalfOfDurationTrueviewImpressions -> "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_IMPRESSIONS" MetricActiveViewAudibleFullyOnScreenHalfOfDurationTrueviewMeasurableImpressions -> "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_MEASURABLE_IMPRESSIONS" MetricActiveViewAudibleFullyOnScreenHalfOfDurationTrueviewRate -> "METRIC_ACTIVE_VIEW_AUDIBLE_FULLY_ON_SCREEN_HALF_OF_DURATION_TRUEVIEW_RATE" MetricActiveViewCustomMetricMeasurableImpressions -> "METRIC_ACTIVE_VIEW_CUSTOM_METRIC_MEASURABLE_IMPRESSIONS" MetricActiveViewCustomMetricViewableImpressions -> "METRIC_ACTIVE_VIEW_CUSTOM_METRIC_VIEWABLE_IMPRESSIONS" MetricActiveViewCustomMetricViewableRate -> "METRIC_ACTIVE_VIEW_CUSTOM_METRIC_VIEWABLE_RATE" MetricActiveViewPercentAudibleImpressions -> "METRIC_ACTIVE_VIEW_PERCENT_AUDIBLE_IMPRESSIONS" MetricActiveViewPercentFullyOnScreen2Sec -> "METRIC_ACTIVE_VIEW_PERCENT_FULLY_ON_SCREEN_2_SEC" MetricActiveViewPercentFullScreen -> "METRIC_ACTIVE_VIEW_PERCENT_FULL_SCREEN" MetricActiveViewPercentInBackgRound -> "METRIC_ACTIVE_VIEW_PERCENT_IN_BACKGROUND" MetricActiveViewPercentOfAdPlayed -> "METRIC_ACTIVE_VIEW_PERCENT_OF_AD_PLAYED" MetricActiveViewPercentOfCompletedImpressionsAudibleAndVisible -> "METRIC_ACTIVE_VIEW_PERCENT_OF_COMPLETED_IMPRESSIONS_AUDIBLE_AND_VISIBLE" MetricActiveViewPercentOfCompletedImpressionsVisible -> "METRIC_ACTIVE_VIEW_PERCENT_OF_COMPLETED_IMPRESSIONS_VISIBLE" MetricActiveViewPercentOfFirstQuartileImpressionsAudibleAndVisible -> "METRIC_ACTIVE_VIEW_PERCENT_OF_FIRST_QUARTILE_IMPRESSIONS_AUDIBLE_AND_VISIBLE" MetricActiveViewPercentOfFirstQuartileImpressionsVisible -> "METRIC_ACTIVE_VIEW_PERCENT_OF_FIRST_QUARTILE_IMPRESSIONS_VISIBLE" MetricActiveViewPercentOfMidpointImpressionsAudibleAndVisible -> "METRIC_ACTIVE_VIEW_PERCENT_OF_MIDPOINT_IMPRESSIONS_AUDIBLE_AND_VISIBLE" MetricActiveViewPercentOfMidpointImpressionsVisible -> "METRIC_ACTIVE_VIEW_PERCENT_OF_MIDPOINT_IMPRESSIONS_VISIBLE" MetricActiveViewPercentOfThirdQuartileImpressionsAudibleAndVisible -> "METRIC_ACTIVE_VIEW_PERCENT_OF_THIRD_QUARTILE_IMPRESSIONS_AUDIBLE_AND_VISIBLE" MetricActiveViewPercentOfThirdQuartileImpressionsVisible -> 
"METRIC_ACTIVE_VIEW_PERCENT_OF_THIRD_QUARTILE_IMPRESSIONS_VISIBLE" MetricActiveViewPercentPlayTimeAudible -> "METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_AUDIBLE" MetricActiveViewPercentPlayTimeAudibleAndVisible -> "METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_AUDIBLE_AND_VISIBLE" MetricActiveViewPercentPlayTimeVisible -> "METRIC_ACTIVE_VIEW_PERCENT_PLAY_TIME_VISIBLE" MetricAdaptedAudienceFrequency -> "METRIC_ADAPTED_AUDIENCE_FREQUENCY" MetricAdlingoFeeAdvertiserCurrency -> "METRIC_ADLINGO_FEE_ADVERTISER_CURRENCY" MetricAudioClientCostEcpclAdvertiserCurrency -> "METRIC_AUDIO_CLIENT_COST_ECPCL_ADVERTISER_CURRENCY" MetricAudioMediaCostEcpclAdvertiserCurrency -> "METRIC_AUDIO_MEDIA_COST_ECPCL_ADVERTISER_CURRENCY" MetricAudioMutesAudio -> "METRIC_AUDIO_MUTES_AUDIO" MetricAudioRevenueEcpclAdvertiserCurrency -> "METRIC_AUDIO_REVENUE_ECPCL_ADVERTISER_CURRENCY" MetricAudioUnmutesAudio -> "METRIC_AUDIO_UNMUTES_AUDIO" MetricAudioUnmutesVideo -> "METRIC_AUDIO_UNMUTES_VIDEO" MetricAverageDisplayTime -> "METRIC_AVERAGE_DISPLAY_TIME" MetricAverageImpressionFrequencyPerUser -> "METRIC_AVERAGE_IMPRESSION_FREQUENCY_PER_USER" MetricAverageInteractionTime -> "METRIC_AVERAGE_INTERACTION_TIME" MetricAverageWatchTimePerImpression -> "METRIC_AVERAGE_WATCH_TIME_PER_IMPRESSION" MetricBeginToRenderEligibleImpressions -> "METRIC_BEGIN_TO_RENDER_ELIGIBLE_IMPRESSIONS" MetricBeginToRenderImpressions -> "METRIC_BEGIN_TO_RENDER_IMPRESSIONS" MetricBenchmarkFrequency -> "METRIC_BENCHMARK_FREQUENCY" MetricBrandLiftAbsoluteBrandLift -> "METRIC_BRAND_LIFT_ABSOLUTE_BRAND_LIFT" MetricBrandLiftAllSurveyResponses -> "METRIC_BRAND_LIFT_ALL_SURVEY_RESPONSES" MetricBrandLiftBaselinePositiveResponseRate -> "METRIC_BRAND_LIFT_BASELINE_POSITIVE_RESPONSE_RATE" MetricBrandLiftBaselineSurveyResponses -> "METRIC_BRAND_LIFT_BASELINE_SURVEY_RESPONSES" MetricBrandLiftCostPerLiftedUser -> "METRIC_BRAND_LIFT_COST_PER_LIFTED_USER" MetricBrandLiftExposedSurveyResponses -> "METRIC_BRAND_LIFT_EXPOSED_SURVEY_RESPONSES" MetricBrandLiftHeadroomBrandLift -> "METRIC_BRAND_LIFT_HEADROOM_BRAND_LIFT" MetricBrandLiftRelativeBrandLift -> "METRIC_BRAND_LIFT_RELATIVE_BRAND_LIFT" MetricBrandLiftUsers -> "METRIC_BRAND_LIFT_USERS" MetricCardClicks -> "METRIC_CARD_CLICKS" MetricClientCostAdvertiserCurrency -> "METRIC_CLIENT_COST_ADVERTISER_CURRENCY" MetricClientCostEcpaAdvertiserCurrency -> "METRIC_CLIENT_COST_ECPA_ADVERTISER_CURRENCY" MetricClientCostEcpaPcAdvertiserCurrency -> "METRIC_CLIENT_COST_ECPA_PC_ADVERTISER_CURRENCY" MetricClientCostEcpaPvAdvertiserCurrency -> "METRIC_CLIENT_COST_ECPA_PV_ADVERTISER_CURRENCY" MetricClientCostEcpcAdvertiserCurrency -> "METRIC_CLIENT_COST_ECPC_ADVERTISER_CURRENCY" MetricClientCostEcpmAdvertiserCurrency -> "METRIC_CLIENT_COST_ECPM_ADVERTISER_CURRENCY" MetricClientCostViewableEcpmAdvertiserCurrency -> "METRIC_CLIENT_COST_VIEWABLE_ECPM_ADVERTISER_CURRENCY" MetricCmPostClickRevenueCrossEnvironment -> "METRIC_CM_POST_CLICK_REVENUE_CROSS_ENVIRONMENT" MetricCmPostViewRevenueCrossEnvironment -> "METRIC_CM_POST_VIEW_REVENUE_CROSS_ENVIRONMENT" MetricCompanionClicksAudio -> "METRIC_COMPANION_CLICKS_AUDIO" MetricCompanionImpressionsAudio -> "METRIC_COMPANION_IMPRESSIONS_AUDIO" MetricCompleteListensAudio -> "METRIC_COMPLETE_LISTENS_AUDIO" MetricCompletionRateAudio -> "METRIC_COMPLETION_RATE_AUDIO" MetricCounters -> "METRIC_COUNTERS" MetricCustomFee1AdvertiserCurrency -> "METRIC_CUSTOM_FEE_1_ADVERTISER_CURRENCY" MetricCustomFee2AdvertiserCurrency -> "METRIC_CUSTOM_FEE_2_ADVERTISER_CURRENCY" MetricCustomFee3AdvertiserCurrency -> 
"METRIC_CUSTOM_FEE_3_ADVERTISER_CURRENCY" MetricCustomFee4AdvertiserCurrency -> "METRIC_CUSTOM_FEE_4_ADVERTISER_CURRENCY" MetricCustomFee5AdvertiserCurrency -> "METRIC_CUSTOM_FEE_5_ADVERTISER_CURRENCY" MetricCustomValuePer1000Impressions -> "METRIC_CUSTOM_VALUE_PER_1000_IMPRESSIONS" MetricEngagements -> "METRIC_ENGAGEMENTS" MetricEstimatedCpmForImpressionsWithCustomValueAdvertiserCurrency -> "METRIC_ESTIMATED_CPM_FOR_IMPRESSIONS_WITH_CUSTOM_VALUE_ADVERTISER_CURRENCY" MetricEstimatedTotalCostForImpressionsWithCustomValueAdvertiserCurrency -> "METRIC_ESTIMATED_TOTAL_COST_FOR_IMPRESSIONS_WITH_CUSTOM_VALUE_ADVERTISER_CURRENCY" MetricExits -> "METRIC_EXITS" MetricExpansions -> "METRIC_EXPANSIONS" MetricFirstQuartileAudio -> "METRIC_FIRST_QUARTILE_AUDIO" MetricGeneralInvalidTrafficGivtImpressions -> "METRIC_GENERAL_INVALID_TRAFFIC_GIVT_IMPRESSIONS" MetricGeneralInvalidTrafficGivtTrackedAds -> "METRIC_GENERAL_INVALID_TRAFFIC_GIVT_TRACKED_ADS" MetricGivtActiveViewEligibleImpressions -> "METRIC_GIVT_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS" MetricGivtActiveViewMeasurableImpressions -> "METRIC_GIVT_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS" MetricGivtActiveViewViewableImpressions -> "METRIC_GIVT_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS" MetricGivtBeginToRenderImpressions -> "METRIC_GIVT_BEGIN_TO_RENDER_IMPRESSIONS" MetricGivtClicks -> "METRIC_GIVT_CLICKS" MetricGmailConversions -> "METRIC_GMAIL_CONVERSIONS" MetricGmailPostClickConversions -> "METRIC_GMAIL_POST_CLICK_CONVERSIONS" MetricGmailPostViewConversions -> "METRIC_GMAIL_POST_VIEW_CONVERSIONS" MetricGmailPotentialViews -> "METRIC_GMAIL_POTENTIAL_VIEWS" MetricImpressionsWithCustomValue -> "METRIC_IMPRESSIONS_WITH_CUSTOM_VALUE" MetricImpressionsWithPositiveCustomValue -> "METRIC_IMPRESSIONS_WITH_POSITIVE_CUSTOM_VALUE" MetricImpressionCustomValueCost -> "METRIC_IMPRESSION_CUSTOM_VALUE_COST" MetricInteractiveImpressions -> "METRIC_INTERACTIVE_IMPRESSIONS" MetricInvalidActiveViewEligibleImpressions -> "METRIC_INVALID_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS" MetricInvalidActiveViewMeasurableImpressions -> "METRIC_INVALID_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS" MetricInvalidActiveViewViewableImpressions -> "METRIC_INVALID_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS" MetricInvalidBeginToRenderImpressions -> "METRIC_INVALID_BEGIN_TO_RENDER_IMPRESSIONS" MetricInvalidClicks -> "METRIC_INVALID_CLICKS" MetricInvalidImpressions -> "METRIC_INVALID_IMPRESSIONS" MetricInvalidTrackedAds -> "METRIC_INVALID_TRACKED_ADS" MetricMediaCostAdvertiserCurrencyPerStoreVisitAdxOnly -> "METRIC_MEDIA_COST_ADVERTISER_CURRENCY_PER_STORE_VISIT_ADX_ONLY" MetricMidpointAudio -> "METRIC_MIDPOINT_AUDIO" MetricOriginalAudienceFrequency -> "METRIC_ORIGINAL_AUDIENCE_FREQUENCY" MetricPausesAudio -> "METRIC_PAUSES_AUDIO" MetricPercentImpressionsWithPositiveCustomValue -> "METRIC_PERCENT_IMPRESSIONS_WITH_POSITIVE_CUSTOM_VALUE" MetricPlatformFeeRate -> "METRIC_PLATFORM_FEE_RATE" MetricPostClickConversionsCrossEnvironment -> "METRIC_POST_CLICK_CONVERSIONS_CROSS_ENVIRONMENT" MetricPostViewConversionsCrossEnvironment -> "METRIC_POST_VIEW_CONVERSIONS_CROSS_ENVIRONMENT" MetricPotentialImpressions -> "METRIC_POTENTIAL_IMPRESSIONS" MetricPotentialViews -> "METRIC_POTENTIAL_VIEWS" MetricPremiumFeeAdvertiserCurrency -> "METRIC_PREMIUM_FEE_ADVERTISER_CURRENCY" MetricProgrammaticGuaranteedImpressionsPassedDueToFrequency -> "METRIC_PROGRAMMATIC_GUARANTEED_IMPRESSIONS_PASSED_DUE_TO_FREQUENCY" MetricProgrammaticGuaranteedSavingsReInvestedDueToFrequencyAdvertiserCurrency -> 
"METRIC_PROGRAMMATIC_GUARANTEED_SAVINGS_RE_INVESTED_DUE_TO_FREQUENCY_ADVERTISER_CURRENCY" MetricRefundBillableCostAdvertiserCurrency -> "METRIC_REFUND_BILLABLE_COST_ADVERTISER_CURRENCY" MetricRefundMediaCostAdvertiserCurrency -> "METRIC_REFUND_MEDIA_COST_ADVERTISER_CURRENCY" MetricRefundPlatformFeeAdvertiserCurrency -> "METRIC_REFUND_PLATFORM_FEE_ADVERTISER_CURRENCY" MetricRevenueAdvertiserCurrencyPerStoreVisitAdxOnly -> "METRIC_REVENUE_ADVERTISER_CURRENCY_PER_STORE_VISIT_ADX_ONLY" MetricRichMediaEngagements -> "METRIC_RICH_MEDIA_ENGAGEMENTS" MetricStartsAudio -> "METRIC_STARTS_AUDIO" MetricStopsAudio -> "METRIC_STOPS_AUDIO" MetricStoreVisitsAdxOnly -> "METRIC_STORE_VISITS_ADX_ONLY" MetricStoreVisitConversions -> "METRIC_STORE_VISIT_CONVERSIONS" MetricThirdQuartileAudio -> "METRIC_THIRD_QUARTILE_AUDIO" MetricTimers -> "METRIC_TIMERS" MetricTotalAudioMediaCostEcpclAdvertiserCurrency -> "METRIC_TOTAL_AUDIO_MEDIA_COST_ECPCL_ADVERTISER_CURRENCY" MetricTotalConversionsCrossEnvironment -> "METRIC_TOTAL_CONVERSIONS_CROSS_ENVIRONMENT" MetricTotalDisplayTime -> "METRIC_TOTAL_DISPLAY_TIME" MetricTotalImpressionCustomValue -> "METRIC_TOTAL_IMPRESSION_CUSTOM_VALUE" MetricTotalInteractionTime -> "METRIC_TOTAL_INTERACTION_TIME" MetricTotalMediaCostAdvertiserCurrencyPerStoreVisitAdxOnly -> "METRIC_TOTAL_MEDIA_COST_ADVERTISER_CURRENCY_PER_STORE_VISIT_ADX_ONLY" MetricTotalUsers -> "METRIC_TOTAL_USERS" MetricTrackedAds -> "METRIC_TRACKED_ADS" MetricTrueviewGeneralInvalidTrafficGivtViews -> "METRIC_TRUEVIEW_GENERAL_INVALID_TRAFFIC_GIVT_VIEWS" MetricTrueviewInvalidViews -> "METRIC_TRUEVIEW_INVALID_VIEWS" MetricUniqueCookiesWithImpressions -> "METRIC_UNIQUE_COOKIES_WITH_IMPRESSIONS" MetricUniqueReachAverageImpressionFrequency -> "METRIC_UNIQUE_REACH_AVERAGE_IMPRESSION_FREQUENCY" MetricUniqueReachClickReach -> "METRIC_UNIQUE_REACH_CLICK_REACH" MetricUniqueReachImpressionReach -> "METRIC_UNIQUE_REACH_IMPRESSION_REACH" MetricUniqueReachTotalReach -> "METRIC_UNIQUE_REACH_TOTAL_REACH" MetricVerifiableImpressions -> "METRIC_VERIFIABLE_IMPRESSIONS" MetricVideoClientCostEcpcvAdvertiserCurrency -> "METRIC_VIDEO_CLIENT_COST_ECPCV_ADVERTISER_CURRENCY" MetricWatchTime -> "METRIC_WATCH_TIME" MetricLastTouchTotalConversions -> "METRIC_LAST_TOUCH_TOTAL_CONVERSIONS" MetricLastTouchClickThroughConversions -> "METRIC_LAST_TOUCH_CLICK_THROUGH_CONVERSIONS" MetricLastTouchViewThroughConversions -> "METRIC_LAST_TOUCH_VIEW_THROUGH_CONVERSIONS" MetricTotalPaths -> "METRIC_TOTAL_PATHS" MetricTotalExposures -> "METRIC_TOTAL_EXPOSURES" MetricPathConversionRate -> "METRIC_PATH_CONVERSION_RATE" MetricConvertingPaths -> "METRIC_CONVERTING_PATHS" MetricActivityRevenue -> "METRIC_ACTIVITY_REVENUE" MetricPercentInvalidImpressionsPreBid -> "METRIC_PERCENT_INVALID_IMPRESSIONS_PREBID" MetricGrpCorrectedImpressions -> "METRIC_GRP_CORRECTED_IMPRESSIONS" MetricDemoCorrectedClicks -> "METRIC_DEMO_CORRECTED_CLICKS" MetricVirtualPeopleImpressionReachByDemo -> "METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_BY_DEMO" MetricVirtualPeopleClickReachByDemo -> "METRIC_VIRTUAL_PEOPLE_CLICK_REACH_BY_DEMO" MetricVirtualPeopleAverageImpressionFrequencyByDemo -> "METRIC_VIRTUAL_PEOPLE_AVERAGE_IMPRESSION_FREQUENCY_BY_DEMO" MetricDemoComPositionImpression -> "METRIC_DEMO_COMPOSITION_IMPRESSION" MetricVirtualPeopleImpressionReachSharePercent -> "METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_SHARE_PERCENT" MetricDemoPopulation -> "METRIC_DEMO_POPULATION" MetricVirtualPeopleImpressionReachPercent -> "METRIC_VIRTUAL_PEOPLE_IMPRESSION_REACH_PERCENT" MetricTargetRatingPoints -> 
"METRIC_TARGET_RATING_POINTS" MetricProvisionalImpressions -> "METRIC_PROVISIONAL_IMPRESSIONS" MetricVendorBlockedAds -> "METRIC_VENDOR_BLOCKED_ADS" MetricGrpCorrectedViewableImpressions -> "METRIC_GRP_CORRECTED_VIEWABLE_IMPRESSIONS" MetricGrpCorrectedViewableImpressionsSharePercent -> "METRIC_GRP_CORRECTED_VIEWABLE_IMPRESSIONS_SHARE_PERCENT" MetricViewableGrossRatingPoints -> "METRIC_VIEWABLE_GROSS_RATING_POINTS" MetricVirtualPeopleAverageViewableImpressionFrequencyByDemo -> "METRIC_VIRTUAL_PEOPLE_AVERAGE_VIEWABLE_IMPRESSION_FREQUENCY_BY_DEMO" MetricVirtualPeopleViewableImpressionReachByDemo -> "METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_BY_DEMO" MetricVirtualPeopleViewableImpressionReachPercent -> "METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_PERCENT" MetricVirtualPeopleViewableImpressionReachSharePercent -> "METRIC_VIRTUAL_PEOPLE_VIEWABLE_IMPRESSION_REACH_SHARE_PERCENT" MetricEngagementRate -> "METRIC_ENGAGEMENT_RATE" MetricCM360PostViewRevenue -> "METRIC_CM360_POST_VIEW_REVENUE" MetricCM360PostClickRevenue -> "METRIC_CM360_POST_CLICK_REVENUE" MetricCM360PostClickRevenueCrossEnvironment -> "METRIC_CM360_POST_CLICK_REVENUE_CROSS_ENVIRONMENT" MetricCM360PostViewRevenueCrossEnvironment -> "METRIC_CM360_POST_VIEW_REVENUE_CROSS_ENVIRONMENT" MetricPercentageFromCurrentIoGoal -> "METRIC_PERCENTAGE_FROM_CURRENT_IO_GOAL" MetricDuplicateFloodlightImpressions -> "METRIC_DUPLICATE_FLOODLIGHT_IMPRESSIONS" MetricCookieConsentedFloodlightImpressions -> "METRIC_COOKIE_CONSENTED_FLOODLIGHT_IMPRESSIONS" MetricCookieUnconsentedFloodlightImpressions -> "METRIC_COOKIE_UNCONSENTED_FLOODLIGHT_IMPRESSIONS" MetricTrackingUnconsentedClicks -> "METRIC_TRACKING_UNCONSENTED_CLICKS" instance FromJSON ParametersMetricsItem where parseJSON = parseJSONText "ParametersMetricsItem" instance ToJSON ParametersMetricsItem where toJSON = toJSONText -- | How often the query is run. data QueryScheduleFrequency = OneTime -- ^ @ONE_TIME@ | Daily -- ^ @DAILY@ | Weekly -- ^ @WEEKLY@ | SemiMonthly -- ^ @SEMI_MONTHLY@ | Monthly -- ^ @MONTHLY@ | Quarterly -- ^ @QUARTERLY@ | Yearly -- ^ @YEARLY@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable QueryScheduleFrequency instance FromHttpApiData QueryScheduleFrequency where parseQueryParam = \case "ONE_TIME" -> Right OneTime "DAILY" -> Right Daily "WEEKLY" -> Right Weekly "SEMI_MONTHLY" -> Right SemiMonthly "MONTHLY" -> Right Monthly "QUARTERLY" -> Right Quarterly "YEARLY" -> Right Yearly x -> Left ("Unable to parse QueryScheduleFrequency from: " <> x) instance ToHttpApiData QueryScheduleFrequency where toQueryParam = \case OneTime -> "ONE_TIME" Daily -> "DAILY" Weekly -> "WEEKLY" SemiMonthly -> "SEMI_MONTHLY" Monthly -> "MONTHLY" Quarterly -> "QUARTERLY" Yearly -> "YEARLY" instance FromJSON QueryScheduleFrequency where parseJSON = parseJSONText "QueryScheduleFrequency" instance ToJSON QueryScheduleFrequency where toJSON = toJSONText -- | Indicates how the filter should be matched to the value. 
data PathQueryOptionsFilterMatch
    = Unknown
      -- ^ @UNKNOWN@
    | Exact
      -- ^ @EXACT@
    | Partial
      -- ^ @PARTIAL@
    | BeginsWith
      -- ^ @BEGINS_WITH@
    | WildcardExpression
      -- ^ @WILDCARD_EXPRESSION@
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable PathQueryOptionsFilterMatch

instance FromHttpApiData PathQueryOptionsFilterMatch where
    parseQueryParam = \case
        "UNKNOWN" -> Right Unknown
        "EXACT" -> Right Exact
        "PARTIAL" -> Right Partial
        "BEGINS_WITH" -> Right BeginsWith
        "WILDCARD_EXPRESSION" -> Right WildcardExpression
        x -> Left ("Unable to parse PathQueryOptionsFilterMatch from: " <> x)

instance ToHttpApiData PathQueryOptionsFilterMatch where
    toQueryParam = \case
        Unknown -> "UNKNOWN"
        Exact -> "EXACT"
        Partial -> "PARTIAL"
        BeginsWith -> "BEGINS_WITH"
        WildcardExpression -> "WILDCARD_EXPRESSION"

instance FromJSON PathQueryOptionsFilterMatch where
    parseJSON = parseJSONText "PathQueryOptionsFilterMatch"

instance ToJSON PathQueryOptionsFilterMatch where
    toJSON = toJSONText

-- | V1 error format.
data Xgafv
    = X1
      -- ^ @1@
      -- v1 error format
    | X2
      -- ^ @2@
      -- v2 error format
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable Xgafv

instance FromHttpApiData Xgafv where
    parseQueryParam = \case
        "1" -> Right X1
        "2" -> Right X2
        x -> Left ("Unable to parse Xgafv from: " <> x)

instance ToHttpApiData Xgafv where
    toQueryParam = \case
        X1 -> "1"
        X2 -> "2"

instance FromJSON Xgafv where
    parseJSON = parseJSONText "Xgafv"

instance ToJSON Xgafv where
    toJSON = toJSONText

-- | Indicates the position of the path the filter should match to (first,
-- last, or any event in path).
data PathFilterPathMatchPosition
    = Any
      -- ^ @ANY@
    | First
      -- ^ @FIRST@
    | Last
      -- ^ @LAST@
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable PathFilterPathMatchPosition

instance FromHttpApiData PathFilterPathMatchPosition where
    parseQueryParam = \case
        "ANY" -> Right Any
        "FIRST" -> Right First
        "LAST" -> Right Last
        x -> Left ("Unable to parse PathFilterPathMatchPosition from: " <> x)

instance ToHttpApiData PathFilterPathMatchPosition where
    toQueryParam = \case
        Any -> "ANY"
        First -> "FIRST"
        Last -> "LAST"

instance FromJSON PathFilterPathMatchPosition where
    parseJSON = parseJSONText "PathFilterPathMatchPosition"

instance ToJSON PathFilterPathMatchPosition where
    toJSON = toJSONText

-- | Filter type.
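--
-- A minimal usage sketch (not part of the generated source): the
-- 'FromHttpApiData' instance further below maps the wire strings back to
-- constructors, so one would expect, e.g.
--
-- > parseQueryParam "FILTER_DATE"      == Right FPTFilterDate
-- > parseQueryParam "FILTER_LINE_ITEM" == Right FPTFilterLineItem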
data FilterPairType = FPTFilterUnknown -- ^ @FILTER_UNKNOWN@ | FPTFilterDate -- ^ @FILTER_DATE@ | FPTFilterDayOfWeek -- ^ @FILTER_DAY_OF_WEEK@ | FPTFilterWeek -- ^ @FILTER_WEEK@ | FPTFilterMonth -- ^ @FILTER_MONTH@ | FPTFilterYear -- ^ @FILTER_YEAR@ | FPTFilterTimeOfDay -- ^ @FILTER_TIME_OF_DAY@ | FPTFilterConversionDelay -- ^ @FILTER_CONVERSION_DELAY@ | FPTFilterCreativeId -- ^ @FILTER_CREATIVE_ID@ | FPTFilterCreativeSize -- ^ @FILTER_CREATIVE_SIZE@ | FPTFilterCreativeType -- ^ @FILTER_CREATIVE_TYPE@ | FPTFilterExchangeId -- ^ @FILTER_EXCHANGE_ID@ | FPTFilterAdPosition -- ^ @FILTER_AD_POSITION@ | FPTFilterPublicInventory -- ^ @FILTER_PUBLIC_INVENTORY@ | FPTFilterInventorySource -- ^ @FILTER_INVENTORY_SOURCE@ | FPTFilterCity -- ^ @FILTER_CITY@ | FPTFilterRegion -- ^ @FILTER_REGION@ | FPTFilterDma -- ^ @FILTER_DMA@ | FPTFilterCountry -- ^ @FILTER_COUNTRY@ | FPTFilterSiteId -- ^ @FILTER_SITE_ID@ | FPTFilterChannelId -- ^ @FILTER_CHANNEL_ID@ | FPTFilterPartner -- ^ @FILTER_PARTNER@ | FPTFilterAdvertiser -- ^ @FILTER_ADVERTISER@ | FPTFilterInsertionOrder -- ^ @FILTER_INSERTION_ORDER@ | FPTFilterLineItem -- ^ @FILTER_LINE_ITEM@ | FPTFilterPartnerCurrency -- ^ @FILTER_PARTNER_CURRENCY@ | FPTFilterAdvertiserCurrency -- ^ @FILTER_ADVERTISER_CURRENCY@ | FPTFilterAdvertiserTimezone -- ^ @FILTER_ADVERTISER_TIMEZONE@ | FPTFilterLineItemType -- ^ @FILTER_LINE_ITEM_TYPE@ | FPTFilterUserList -- ^ @FILTER_USER_LIST@ | FPTFilterUserListFirstParty -- ^ @FILTER_USER_LIST_FIRST_PARTY@ | FPTFilterUserListThirdParty -- ^ @FILTER_USER_LIST_THIRD_PARTY@ | FPTFilterTargetedUserList -- ^ @FILTER_TARGETED_USER_LIST@ | FPTFilterDataProvider -- ^ @FILTER_DATA_PROVIDER@ | FPTFilterOrderId -- ^ @FILTER_ORDER_ID@ | FPTFilterVideoPlayerSize -- ^ @FILTER_VIDEO_PLAYER_SIZE@ | FPTFilterVideoDurationSeconds -- ^ @FILTER_VIDEO_DURATION_SECONDS@ | FPTFilterKeyword -- ^ @FILTER_KEYWORD@ | FPTFilterPageCategory -- ^ @FILTER_PAGE_CATEGORY@ | FPTFilterCampaignDailyFrequency -- ^ @FILTER_CAMPAIGN_DAILY_FREQUENCY@ | FPTFilterLineItemDailyFrequency -- ^ @FILTER_LINE_ITEM_DAILY_FREQUENCY@ | FPTFilterLineItemLifetimeFrequency -- ^ @FILTER_LINE_ITEM_LIFETIME_FREQUENCY@ | FPTFilterOS -- ^ @FILTER_OS@ | FPTFilterBrowser -- ^ @FILTER_BROWSER@ | FPTFilterCarrier -- ^ @FILTER_CARRIER@ | FPTFilterSiteLanguage -- ^ @FILTER_SITE_LANGUAGE@ | FPTFilterInventoryFormat -- ^ @FILTER_INVENTORY_FORMAT@ | FPTFilterZipCode -- ^ @FILTER_ZIP_CODE@ | FPTFilterVideoRatingTier -- ^ @FILTER_VIDEO_RATING_TIER@ | FPTFilterVideoFormatSupport -- ^ @FILTER_VIDEO_FORMAT_SUPPORT@ | FPTFilterVideoSkippableSupport -- ^ @FILTER_VIDEO_SKIPPABLE_SUPPORT@ | FPTFilterVideoCreativeDuration -- ^ @FILTER_VIDEO_CREATIVE_DURATION@ | FPTFilterPageLayout -- ^ @FILTER_PAGE_LAYOUT@ | FPTFilterVideoAdPositionInStream -- ^ @FILTER_VIDEO_AD_POSITION_IN_STREAM@ | FPTFilterAge -- ^ @FILTER_AGE@ | FPTFilterGender -- ^ @FILTER_GENDER@ | FPTFilterQuarter -- ^ @FILTER_QUARTER@ | FPTFilterTrueviewConversionType -- ^ @FILTER_TRUEVIEW_CONVERSION_TYPE@ | FPTFilterMobileGeo -- ^ @FILTER_MOBILE_GEO@ | FPTFilterMraidSupport -- ^ @FILTER_MRAID_SUPPORT@ | FPTFilterActiveViewExpectedViewability -- ^ @FILTER_ACTIVE_VIEW_EXPECTED_VIEWABILITY@ | FPTFilterVideoCreativeDurationSkippable -- ^ @FILTER_VIDEO_CREATIVE_DURATION_SKIPPABLE@ | FPTFilterNielsenCountryCode -- ^ @FILTER_NIELSEN_COUNTRY_CODE@ | FPTFilterNielsenDeviceId -- ^ @FILTER_NIELSEN_DEVICE_ID@ | FPTFilterNielsenGender -- ^ @FILTER_NIELSEN_GENDER@ | FPTFilterNielsenAge -- ^ @FILTER_NIELSEN_AGE@ | FPTFilterInventorySourceType -- ^ 
@FILTER_INVENTORY_SOURCE_TYPE@ | FPTFilterCreativeWidth -- ^ @FILTER_CREATIVE_WIDTH@ | FPTFilterCreativeHeight -- ^ @FILTER_CREATIVE_HEIGHT@ | FPTFilterDfpOrderId -- ^ @FILTER_DFP_ORDER_ID@ | FPTFilterTrueviewAge -- ^ @FILTER_TRUEVIEW_AGE@ | FPTFilterTrueviewGender -- ^ @FILTER_TRUEVIEW_GENDER@ | FPTFilterTrueviewParentalStatus -- ^ @FILTER_TRUEVIEW_PARENTAL_STATUS@ | FPTFilterTrueviewRemarketingList -- ^ @FILTER_TRUEVIEW_REMARKETING_LIST@ | FPTFilterTrueviewInterest -- ^ @FILTER_TRUEVIEW_INTEREST@ | FPTFilterTrueviewAdGroupId -- ^ @FILTER_TRUEVIEW_AD_GROUP_ID@ | FPTFilterTrueviewAdGroupAdId -- ^ @FILTER_TRUEVIEW_AD_GROUP_AD_ID@ | FPTFilterTrueviewIarLanguage -- ^ @FILTER_TRUEVIEW_IAR_LANGUAGE@ | FPTFilterTrueviewIarGender -- ^ @FILTER_TRUEVIEW_IAR_GENDER@ | FPTFilterTrueviewIarAge -- ^ @FILTER_TRUEVIEW_IAR_AGE@ | FPTFilterTrueviewIarCategory -- ^ @FILTER_TRUEVIEW_IAR_CATEGORY@ | FPTFilterTrueviewIarCountry -- ^ @FILTER_TRUEVIEW_IAR_COUNTRY@ | FPTFilterTrueviewIarCity -- ^ @FILTER_TRUEVIEW_IAR_CITY@ | FPTFilterTrueviewIarRegion -- ^ @FILTER_TRUEVIEW_IAR_REGION@ | FPTFilterTrueviewIarZipcode -- ^ @FILTER_TRUEVIEW_IAR_ZIPCODE@ | FPTFilterTrueviewIarRemarketingList -- ^ @FILTER_TRUEVIEW_IAR_REMARKETING_LIST@ | FPTFilterTrueviewIarInterest -- ^ @FILTER_TRUEVIEW_IAR_INTEREST@ | FPTFilterTrueviewIarParentalStatus -- ^ @FILTER_TRUEVIEW_IAR_PARENTAL_STATUS@ | FPTFilterTrueviewIarTimeOfDay -- ^ @FILTER_TRUEVIEW_IAR_TIME_OF_DAY@ | FPTFilterTrueviewCustomAffinity -- ^ @FILTER_TRUEVIEW_CUSTOM_AFFINITY@ | FPTFilterTrueviewCategory -- ^ @FILTER_TRUEVIEW_CATEGORY@ | FPTFilterTrueviewKeyword -- ^ @FILTER_TRUEVIEW_KEYWORD@ | FPTFilterTrueviewPlacement -- ^ @FILTER_TRUEVIEW_PLACEMENT@ | FPTFilterTrueviewURL -- ^ @FILTER_TRUEVIEW_URL@ | FPTFilterTrueviewCountry -- ^ @FILTER_TRUEVIEW_COUNTRY@ | FPTFilterTrueviewRegion -- ^ @FILTER_TRUEVIEW_REGION@ | FPTFilterTrueviewCity -- ^ @FILTER_TRUEVIEW_CITY@ | FPTFilterTrueviewDma -- ^ @FILTER_TRUEVIEW_DMA@ | FPTFilterTrueviewZipcode -- ^ @FILTER_TRUEVIEW_ZIPCODE@ | FPTFilterNotSupported -- ^ @FILTER_NOT_SUPPORTED@ | FPTFilterMediaPlan -- ^ @FILTER_MEDIA_PLAN@ | FPTFilterTrueviewIarYouTubeChannel -- ^ @FILTER_TRUEVIEW_IAR_YOUTUBE_CHANNEL@ | FPTFilterTrueviewIarYouTubeVideo -- ^ @FILTER_TRUEVIEW_IAR_YOUTUBE_VIDEO@ | FPTFilterSkippableSupport -- ^ @FILTER_SKIPPABLE_SUPPORT@ | FPTFilterCompanionCreativeId -- ^ @FILTER_COMPANION_CREATIVE_ID@ | FPTFilterBudgetSegmentDescription -- ^ @FILTER_BUDGET_SEGMENT_DESCRIPTION@ | FPTFilterFloodlightActivityId -- ^ @FILTER_FLOODLIGHT_ACTIVITY_ID@ | FPTFilterDeviceModel -- ^ @FILTER_DEVICE_MODEL@ | FPTFilterDeviceMake -- ^ @FILTER_DEVICE_MAKE@ | FPTFilterDeviceType -- ^ @FILTER_DEVICE_TYPE@ | FPTFilterCreativeAttribute -- ^ @FILTER_CREATIVE_ATTRIBUTE@ | FPTFilterInventoryCommitmentType -- ^ @FILTER_INVENTORY_COMMITMENT_TYPE@ | FPTFilterInventoryRateType -- ^ @FILTER_INVENTORY_RATE_TYPE@ | FPTFilterInventoryDeliveryMethod -- ^ @FILTER_INVENTORY_DELIVERY_METHOD@ | FPTFilterInventorySourceExternalId -- ^ @FILTER_INVENTORY_SOURCE_EXTERNAL_ID@ | FPTFilterAuthorizedSellerState -- ^ @FILTER_AUTHORIZED_SELLER_STATE@ | FPTFilterVideoDurationSecondsRange -- ^ @FILTER_VIDEO_DURATION_SECONDS_RANGE@ | FPTFilterPartnerName -- ^ @FILTER_PARTNER_NAME@ | FPTFilterPartnerStatus -- ^ @FILTER_PARTNER_STATUS@ | FPTFilterAdvertiserName -- ^ @FILTER_ADVERTISER_NAME@ | FPTFilterAdvertiserIntegrationCode -- ^ @FILTER_ADVERTISER_INTEGRATION_CODE@ | FPTFilterAdvertiserIntegrationStatus -- ^ @FILTER_ADVERTISER_INTEGRATION_STATUS@ | FPTFilterCarrierName -- ^ 
@FILTER_CARRIER_NAME@ | FPTFilterChannelName -- ^ @FILTER_CHANNEL_NAME@ | FPTFilterCityName -- ^ @FILTER_CITY_NAME@ | FPTFilterCompanionCreativeName -- ^ @FILTER_COMPANION_CREATIVE_NAME@ | FPTFilterUserListFirstPartyName -- ^ @FILTER_USER_LIST_FIRST_PARTY_NAME@ | FPTFilterUserListThirdPartyName -- ^ @FILTER_USER_LIST_THIRD_PARTY_NAME@ | FPTFilterNielsenReStatementDate -- ^ @FILTER_NIELSEN_RESTATEMENT_DATE@ | FPTFilterNielsenDateRange -- ^ @FILTER_NIELSEN_DATE_RANGE@ | FPTFilterInsertionOrderName -- ^ @FILTER_INSERTION_ORDER_NAME@ | FPTFilterRegionName -- ^ @FILTER_REGION_NAME@ | FPTFilterDmaName -- ^ @FILTER_DMA_NAME@ | FPTFilterTrueviewIarRegionName -- ^ @FILTER_TRUEVIEW_IAR_REGION_NAME@ | FPTFilterTrueviewDmaName -- ^ @FILTER_TRUEVIEW_DMA_NAME@ | FPTFilterTrueviewRegionName -- ^ @FILTER_TRUEVIEW_REGION_NAME@ | FPTFilterActiveViewCustomMetricId -- ^ @FILTER_ACTIVE_VIEW_CUSTOM_METRIC_ID@ | FPTFilterActiveViewCustomMetricName -- ^ @FILTER_ACTIVE_VIEW_CUSTOM_METRIC_NAME@ | FPTFilterAdType -- ^ @FILTER_AD_TYPE@ | FPTFilterAlgorithm -- ^ @FILTER_ALGORITHM@ | FPTFilterAlgorithmId -- ^ @FILTER_ALGORITHM_ID@ | FPTFilterAmpPageRequest -- ^ @FILTER_AMP_PAGE_REQUEST@ | FPTFilterAnonymousInventoryModeling -- ^ @FILTER_ANONYMOUS_INVENTORY_MODELING@ | FPTFilterAppURL -- ^ @FILTER_APP_URL@ | FPTFilterAppURLExcluded -- ^ @FILTER_APP_URL_EXCLUDED@ | FPTFilterAttributedUserList -- ^ @FILTER_ATTRIBUTED_USERLIST@ | FPTFilterAttributedUserListCost -- ^ @FILTER_ATTRIBUTED_USERLIST_COST@ | FPTFilterAttributedUserListType -- ^ @FILTER_ATTRIBUTED_USERLIST_TYPE@ | FPTFilterAttributionModel -- ^ @FILTER_ATTRIBUTION_MODEL@ | FPTFilterAudienceList -- ^ @FILTER_AUDIENCE_LIST@ | FPTFilterAudienceListCost -- ^ @FILTER_AUDIENCE_LIST_COST@ | FPTFilterAudienceListType -- ^ @FILTER_AUDIENCE_LIST_TYPE@ | FPTFilterAudienceName -- ^ @FILTER_AUDIENCE_NAME@ | FPTFilterAudienceType -- ^ @FILTER_AUDIENCE_TYPE@ | FPTFilterBillableOutcome -- ^ @FILTER_BILLABLE_OUTCOME@ | FPTFilterBrandLiftType -- ^ @FILTER_BRAND_LIFT_TYPE@ | FPTFilterChannelType -- ^ @FILTER_CHANNEL_TYPE@ | FPTFilterCmPlacementId -- ^ @FILTER_CM_PLACEMENT_ID@ | FPTFilterConversionSource -- ^ @FILTER_CONVERSION_SOURCE@ | FPTFilterConversionSourceId -- ^ @FILTER_CONVERSION_SOURCE_ID@ | FPTFilterCountryId -- ^ @FILTER_COUNTRY_ID@ | FPTFilterCreative -- ^ @FILTER_CREATIVE@ | FPTFilterCreativeAsset -- ^ @FILTER_CREATIVE_ASSET@ | FPTFilterCreativeIntegrationCode -- ^ @FILTER_CREATIVE_INTEGRATION_CODE@ | FPTFilterCreativeRenderedInAmp -- ^ @FILTER_CREATIVE_RENDERED_IN_AMP@ | FPTFilterCreativeSource -- ^ @FILTER_CREATIVE_SOURCE@ | FPTFilterCreativeStatus -- ^ @FILTER_CREATIVE_STATUS@ | FPTFilterDataProviderName -- ^ @FILTER_DATA_PROVIDER_NAME@ | FPTFilterDetailedDemographics -- ^ @FILTER_DETAILED_DEMOGRAPHICS@ | FPTFilterDetailedDemographicsId -- ^ @FILTER_DETAILED_DEMOGRAPHICS_ID@ | FPTFilterDevice -- ^ @FILTER_DEVICE@ | FPTFilterGamInsertionOrder -- ^ @FILTER_GAM_INSERTION_ORDER@ | FPTFilterGamLineItem -- ^ @FILTER_GAM_LINE_ITEM@ | FPTFilterGamLineItemId -- ^ @FILTER_GAM_LINE_ITEM_ID@ | FPTFilterDigitalContentLabel -- ^ @FILTER_DIGITAL_CONTENT_LABEL@ | FPTFilterDomain -- ^ @FILTER_DOMAIN@ | FPTFilterEligibleCookiesOnFirstPartyAudienceList -- ^ @FILTER_ELIGIBLE_COOKIES_ON_FIRST_PARTY_AUDIENCE_LIST@ | FPTFilterEligibleCookiesOnThirdPartyAudienceListAndInterest -- ^ @FILTER_ELIGIBLE_COOKIES_ON_THIRD_PARTY_AUDIENCE_LIST_AND_INTEREST@ | FPTFilterExchange -- ^ @FILTER_EXCHANGE@ | FPTFilterExchangeCode -- ^ @FILTER_EXCHANGE_CODE@ | FPTFilterExtension -- ^ @FILTER_EXTENSION@ | 
FPTFilterExtensionStatus -- ^ @FILTER_EXTENSION_STATUS@ | FPTFilterExtensionType -- ^ @FILTER_EXTENSION_TYPE@ | FPTFilterFirstPartyAudienceListCost -- ^ @FILTER_FIRST_PARTY_AUDIENCE_LIST_COST@ | FPTFilterFirstPartyAudienceListType -- ^ @FILTER_FIRST_PARTY_AUDIENCE_LIST_TYPE@ | FPTFilterFloodlightActivity -- ^ @FILTER_FLOODLIGHT_ACTIVITY@ | FPTFilterFormat -- ^ @FILTER_FORMAT@ | FPTFilterGmailAge -- ^ @FILTER_GMAIL_AGE@ | FPTFilterGmailCity -- ^ @FILTER_GMAIL_CITY@ | FPTFilterGmailCountry -- ^ @FILTER_GMAIL_COUNTRY@ | FPTFilterGmailCountryName -- ^ @FILTER_GMAIL_COUNTRY_NAME@ | FPTFilterGmailDeviceType -- ^ @FILTER_GMAIL_DEVICE_TYPE@ | FPTFilterGmailDeviceTypeName -- ^ @FILTER_GMAIL_DEVICE_TYPE_NAME@ | FPTFilterGmailGender -- ^ @FILTER_GMAIL_GENDER@ | FPTFilterGmailRegion -- ^ @FILTER_GMAIL_REGION@ | FPTFilterGmailRemarketingList -- ^ @FILTER_GMAIL_REMARKETING_LIST@ | FPTFilterHouseholdIncome -- ^ @FILTER_HOUSEHOLD_INCOME@ | FPTFilterImpressionCountingMethod -- ^ @FILTER_IMPRESSION_COUNTING_METHOD@ | FPTFilterYouTubeProgrammaticGuaranteedInsertionOrder -- ^ @FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_INSERTION_ORDER@ | FPTFilterInsertionOrderIntegrationCode -- ^ @FILTER_INSERTION_ORDER_INTEGRATION_CODE@ | FPTFilterInsertionOrderStatus -- ^ @FILTER_INSERTION_ORDER_STATUS@ | FPTFilterInterest -- ^ @FILTER_INTEREST@ | FPTFilterInventorySourceGroup -- ^ @FILTER_INVENTORY_SOURCE_GROUP@ | FPTFilterInventorySourceGroupId -- ^ @FILTER_INVENTORY_SOURCE_GROUP_ID@ | FPTFilterInventorySourceId -- ^ @FILTER_INVENTORY_SOURCE_ID@ | FPTFilterInventorySourceName -- ^ @FILTER_INVENTORY_SOURCE_NAME@ | FPTFilterLifeEvent -- ^ @FILTER_LIFE_EVENT@ | FPTFilterLifeEvents -- ^ @FILTER_LIFE_EVENTS@ | FPTFilterLineItemIntegrationCode -- ^ @FILTER_LINE_ITEM_INTEGRATION_CODE@ | FPTFilterLineItemName -- ^ @FILTER_LINE_ITEM_NAME@ | FPTFilterLineItemStatus -- ^ @FILTER_LINE_ITEM_STATUS@ | FPTFilterMatchRatio -- ^ @FILTER_MATCH_RATIO@ | FPTFilterMeasurementSource -- ^ @FILTER_MEASUREMENT_SOURCE@ | FPTFilterMediaPlanName -- ^ @FILTER_MEDIA_PLAN_NAME@ | FPTFilterParentalStatus -- ^ @FILTER_PARENTAL_STATUS@ | FPTFilterPlacementAllYouTubeChannels -- ^ @FILTER_PLACEMENT_ALL_YOUTUBE_CHANNELS@ | FPTFilterPlatform -- ^ @FILTER_PLATFORM@ | FPTFilterPlaybackMethod -- ^ @FILTER_PLAYBACK_METHOD@ | FPTFilterPositionInContent -- ^ @FILTER_POSITION_IN_CONTENT@ | FPTFilterPublisherProperty -- ^ @FILTER_PUBLISHER_PROPERTY@ | FPTFilterPublisherPropertyId -- ^ @FILTER_PUBLISHER_PROPERTY_ID@ | FPTFilterPublisherPropertySection -- ^ @FILTER_PUBLISHER_PROPERTY_SECTION@ | FPTFilterPublisherPropertySectionId -- ^ @FILTER_PUBLISHER_PROPERTY_SECTION_ID@ | FPTFilterRefundReason -- ^ @FILTER_REFUND_REASON@ | FPTFilterRemarketingList -- ^ @FILTER_REMARKETING_LIST@ | FPTFilterRewarded -- ^ @FILTER_REWARDED@ | FPTFilterSensitiveCategory -- ^ @FILTER_SENSITIVE_CATEGORY@ | FPTFilterServedPixelDensity -- ^ @FILTER_SERVED_PIXEL_DENSITY@ | FPTFilterTargetedDataProviders -- ^ @FILTER_TARGETED_DATA_PROVIDERS@ | FPTFilterThirdPartyAudienceListCost -- ^ @FILTER_THIRD_PARTY_AUDIENCE_LIST_COST@ | FPTFilterThirdPartyAudienceListType -- ^ @FILTER_THIRD_PARTY_AUDIENCE_LIST_TYPE@ | FPTFilterTrueviewAd -- ^ @FILTER_TRUEVIEW_AD@ | FPTFilterTrueviewAdGroup -- ^ @FILTER_TRUEVIEW_AD_GROUP@ | FPTFilterTrueviewDetailedDemographics -- ^ @FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS@ | FPTFilterTrueviewDetailedDemographicsId -- ^ @FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS_ID@ | FPTFilterTrueviewHouseholdIncome -- ^ @FILTER_TRUEVIEW_HOUSEHOLD_INCOME@ | FPTFilterTrueviewIarCountryName -- ^ 
@FILTER_TRUEVIEW_IAR_COUNTRY_NAME@ | FPTFilterTrueviewRemarketingListName -- ^ @FILTER_TRUEVIEW_REMARKETING_LIST_NAME@ | FPTFilterVariantId -- ^ @FILTER_VARIANT_ID@ | FPTFilterVariantName -- ^ @FILTER_VARIANT_NAME@ | FPTFilterVariantVersion -- ^ @FILTER_VARIANT_VERSION@ | FPTFilterVerificationVideoPlayerSize -- ^ @FILTER_VERIFICATION_VIDEO_PLAYER_SIZE@ | FPTFilterVerificationVideoPosition -- ^ @FILTER_VERIFICATION_VIDEO_POSITION@ | FPTFilterVideoCompanionCreativeSize -- ^ @FILTER_VIDEO_COMPANION_CREATIVE_SIZE@ | FPTFilterVideoContinuousPlay -- ^ @FILTER_VIDEO_CONTINUOUS_PLAY@ | FPTFilterVideoDuration -- ^ @FILTER_VIDEO_DURATION@ | FPTFilterYouTubeAdaptedAudienceList -- ^ @FILTER_YOUTUBE_ADAPTED_AUDIENCE_LIST@ | FPTFilterYouTubeAdVideo -- ^ @FILTER_YOUTUBE_AD_VIDEO@ | FPTFilterYouTubeAdVideoId -- ^ @FILTER_YOUTUBE_AD_VIDEO_ID@ | FPTFilterYouTubeChannel -- ^ @FILTER_YOUTUBE_CHANNEL@ | FPTFilterYouTubeProgrammaticGuaranteedAdvertiser -- ^ @FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_ADVERTISER@ | FPTFilterYouTubeProgrammaticGuaranteedPartner -- ^ @FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_PARTNER@ | FPTFilterYouTubeVideo -- ^ @FILTER_YOUTUBE_VIDEO@ | FPTFilterZipPostalCode -- ^ @FILTER_ZIP_POSTAL_CODE@ | FPTFilterPlacementNameAllYouTubeChannels -- ^ @FILTER_PLACEMENT_NAME_ALL_YOUTUBE_CHANNELS@ | FPTFilterTrueviewPlacementId -- ^ @FILTER_TRUEVIEW_PLACEMENT_ID@ | FPTFilterPathPatternId -- ^ @FILTER_PATH_PATTERN_ID@ | FPTFilterPathEventIndex -- ^ @FILTER_PATH_EVENT_INDEX@ | FPTFilterEventType -- ^ @FILTER_EVENT_TYPE@ | FPTFilterChannelGrouping -- ^ @FILTER_CHANNEL_GROUPING@ | FPTFilterOmSdkAvailable -- ^ @FILTER_OM_SDK_AVAILABLE@ | FPTFilterDataSource -- ^ @FILTER_DATA_SOURCE@ | FPTFilterCM360PlacementId -- ^ @FILTER_CM360_PLACEMENT_ID@ | FPTFilterTrueviewClickTypeName -- ^ @FILTER_TRUEVIEW_CLICK_TYPE_NAME@ | FPTFilterTrueviewAdTypeName -- ^ @FILTER_TRUEVIEW_AD_TYPE_NAME@ | FPTFilterVideoContentDuration -- ^ @FILTER_VIDEO_CONTENT_DURATION@ | FPTFilterMatchedGenreTarget -- ^ @FILTER_MATCHED_GENRE_TARGET@ | FPTFilterVideoContentLiveStream -- ^ @FILTER_VIDEO_CONTENT_LIVE_STREAM@ | FPTFilterBudgetSegmentType -- ^ @FILTER_BUDGET_SEGMENT_TYPE@ | FPTFilterBudgetSegmentBudget -- ^ @FILTER_BUDGET_SEGMENT_BUDGET@ | FPTFilterBudgetSegmentStartDate -- ^ @FILTER_BUDGET_SEGMENT_START_DATE@ | FPTFilterBudgetSegmentEndDate -- ^ @FILTER_BUDGET_SEGMENT_END_DATE@ | FPTFilterBudgetSegmentPacingPercentage -- ^ @FILTER_BUDGET_SEGMENT_PACING_PERCENTAGE@ | FPTFilterLineItemBudget -- ^ @FILTER_LINE_ITEM_BUDGET@ | FPTFilterLineItemStartDate -- ^ @FILTER_LINE_ITEM_START_DATE@ | FPTFilterLineItemEndDate -- ^ @FILTER_LINE_ITEM_END_DATE@ | FPTFilterInsertionOrderGoalType -- ^ @FILTER_INSERTION_ORDER_GOAL_TYPE@ | FPTFilterLineItemPacingPercentage -- ^ @FILTER_LINE_ITEM_PACING_PERCENTAGE@ | FPTFilterInsertionOrderGoalValue -- ^ @FILTER_INSERTION_ORDER_GOAL_VALUE@ | FPTFilterOmidCapable -- ^ @FILTER_OMID_CAPABLE@ | FPTFilterVendorMeasurementMode -- ^ @FILTER_VENDOR_MEASUREMENT_MODE@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable FilterPairType instance FromHttpApiData FilterPairType where parseQueryParam = \case "FILTER_UNKNOWN" -> Right FPTFilterUnknown "FILTER_DATE" -> Right FPTFilterDate "FILTER_DAY_OF_WEEK" -> Right FPTFilterDayOfWeek "FILTER_WEEK" -> Right FPTFilterWeek "FILTER_MONTH" -> Right FPTFilterMonth "FILTER_YEAR" -> Right FPTFilterYear "FILTER_TIME_OF_DAY" -> Right FPTFilterTimeOfDay "FILTER_CONVERSION_DELAY" -> Right FPTFilterConversionDelay "FILTER_CREATIVE_ID" -> Right 
FPTFilterCreativeId "FILTER_CREATIVE_SIZE" -> Right FPTFilterCreativeSize "FILTER_CREATIVE_TYPE" -> Right FPTFilterCreativeType "FILTER_EXCHANGE_ID" -> Right FPTFilterExchangeId "FILTER_AD_POSITION" -> Right FPTFilterAdPosition "FILTER_PUBLIC_INVENTORY" -> Right FPTFilterPublicInventory "FILTER_INVENTORY_SOURCE" -> Right FPTFilterInventorySource "FILTER_CITY" -> Right FPTFilterCity "FILTER_REGION" -> Right FPTFilterRegion "FILTER_DMA" -> Right FPTFilterDma "FILTER_COUNTRY" -> Right FPTFilterCountry "FILTER_SITE_ID" -> Right FPTFilterSiteId "FILTER_CHANNEL_ID" -> Right FPTFilterChannelId "FILTER_PARTNER" -> Right FPTFilterPartner "FILTER_ADVERTISER" -> Right FPTFilterAdvertiser "FILTER_INSERTION_ORDER" -> Right FPTFilterInsertionOrder "FILTER_LINE_ITEM" -> Right FPTFilterLineItem "FILTER_PARTNER_CURRENCY" -> Right FPTFilterPartnerCurrency "FILTER_ADVERTISER_CURRENCY" -> Right FPTFilterAdvertiserCurrency "FILTER_ADVERTISER_TIMEZONE" -> Right FPTFilterAdvertiserTimezone "FILTER_LINE_ITEM_TYPE" -> Right FPTFilterLineItemType "FILTER_USER_LIST" -> Right FPTFilterUserList "FILTER_USER_LIST_FIRST_PARTY" -> Right FPTFilterUserListFirstParty "FILTER_USER_LIST_THIRD_PARTY" -> Right FPTFilterUserListThirdParty "FILTER_TARGETED_USER_LIST" -> Right FPTFilterTargetedUserList "FILTER_DATA_PROVIDER" -> Right FPTFilterDataProvider "FILTER_ORDER_ID" -> Right FPTFilterOrderId "FILTER_VIDEO_PLAYER_SIZE" -> Right FPTFilterVideoPlayerSize "FILTER_VIDEO_DURATION_SECONDS" -> Right FPTFilterVideoDurationSeconds "FILTER_KEYWORD" -> Right FPTFilterKeyword "FILTER_PAGE_CATEGORY" -> Right FPTFilterPageCategory "FILTER_CAMPAIGN_DAILY_FREQUENCY" -> Right FPTFilterCampaignDailyFrequency "FILTER_LINE_ITEM_DAILY_FREQUENCY" -> Right FPTFilterLineItemDailyFrequency "FILTER_LINE_ITEM_LIFETIME_FREQUENCY" -> Right FPTFilterLineItemLifetimeFrequency "FILTER_OS" -> Right FPTFilterOS "FILTER_BROWSER" -> Right FPTFilterBrowser "FILTER_CARRIER" -> Right FPTFilterCarrier "FILTER_SITE_LANGUAGE" -> Right FPTFilterSiteLanguage "FILTER_INVENTORY_FORMAT" -> Right FPTFilterInventoryFormat "FILTER_ZIP_CODE" -> Right FPTFilterZipCode "FILTER_VIDEO_RATING_TIER" -> Right FPTFilterVideoRatingTier "FILTER_VIDEO_FORMAT_SUPPORT" -> Right FPTFilterVideoFormatSupport "FILTER_VIDEO_SKIPPABLE_SUPPORT" -> Right FPTFilterVideoSkippableSupport "FILTER_VIDEO_CREATIVE_DURATION" -> Right FPTFilterVideoCreativeDuration "FILTER_PAGE_LAYOUT" -> Right FPTFilterPageLayout "FILTER_VIDEO_AD_POSITION_IN_STREAM" -> Right FPTFilterVideoAdPositionInStream "FILTER_AGE" -> Right FPTFilterAge "FILTER_GENDER" -> Right FPTFilterGender "FILTER_QUARTER" -> Right FPTFilterQuarter "FILTER_TRUEVIEW_CONVERSION_TYPE" -> Right FPTFilterTrueviewConversionType "FILTER_MOBILE_GEO" -> Right FPTFilterMobileGeo "FILTER_MRAID_SUPPORT" -> Right FPTFilterMraidSupport "FILTER_ACTIVE_VIEW_EXPECTED_VIEWABILITY" -> Right FPTFilterActiveViewExpectedViewability "FILTER_VIDEO_CREATIVE_DURATION_SKIPPABLE" -> Right FPTFilterVideoCreativeDurationSkippable "FILTER_NIELSEN_COUNTRY_CODE" -> Right FPTFilterNielsenCountryCode "FILTER_NIELSEN_DEVICE_ID" -> Right FPTFilterNielsenDeviceId "FILTER_NIELSEN_GENDER" -> Right FPTFilterNielsenGender "FILTER_NIELSEN_AGE" -> Right FPTFilterNielsenAge "FILTER_INVENTORY_SOURCE_TYPE" -> Right FPTFilterInventorySourceType "FILTER_CREATIVE_WIDTH" -> Right FPTFilterCreativeWidth "FILTER_CREATIVE_HEIGHT" -> Right FPTFilterCreativeHeight "FILTER_DFP_ORDER_ID" -> Right FPTFilterDfpOrderId "FILTER_TRUEVIEW_AGE" -> Right FPTFilterTrueviewAge "FILTER_TRUEVIEW_GENDER" -> Right 
FPTFilterTrueviewGender "FILTER_TRUEVIEW_PARENTAL_STATUS" -> Right FPTFilterTrueviewParentalStatus "FILTER_TRUEVIEW_REMARKETING_LIST" -> Right FPTFilterTrueviewRemarketingList "FILTER_TRUEVIEW_INTEREST" -> Right FPTFilterTrueviewInterest "FILTER_TRUEVIEW_AD_GROUP_ID" -> Right FPTFilterTrueviewAdGroupId "FILTER_TRUEVIEW_AD_GROUP_AD_ID" -> Right FPTFilterTrueviewAdGroupAdId "FILTER_TRUEVIEW_IAR_LANGUAGE" -> Right FPTFilterTrueviewIarLanguage "FILTER_TRUEVIEW_IAR_GENDER" -> Right FPTFilterTrueviewIarGender "FILTER_TRUEVIEW_IAR_AGE" -> Right FPTFilterTrueviewIarAge "FILTER_TRUEVIEW_IAR_CATEGORY" -> Right FPTFilterTrueviewIarCategory "FILTER_TRUEVIEW_IAR_COUNTRY" -> Right FPTFilterTrueviewIarCountry "FILTER_TRUEVIEW_IAR_CITY" -> Right FPTFilterTrueviewIarCity "FILTER_TRUEVIEW_IAR_REGION" -> Right FPTFilterTrueviewIarRegion "FILTER_TRUEVIEW_IAR_ZIPCODE" -> Right FPTFilterTrueviewIarZipcode "FILTER_TRUEVIEW_IAR_REMARKETING_LIST" -> Right FPTFilterTrueviewIarRemarketingList "FILTER_TRUEVIEW_IAR_INTEREST" -> Right FPTFilterTrueviewIarInterest "FILTER_TRUEVIEW_IAR_PARENTAL_STATUS" -> Right FPTFilterTrueviewIarParentalStatus "FILTER_TRUEVIEW_IAR_TIME_OF_DAY" -> Right FPTFilterTrueviewIarTimeOfDay "FILTER_TRUEVIEW_CUSTOM_AFFINITY" -> Right FPTFilterTrueviewCustomAffinity "FILTER_TRUEVIEW_CATEGORY" -> Right FPTFilterTrueviewCategory "FILTER_TRUEVIEW_KEYWORD" -> Right FPTFilterTrueviewKeyword "FILTER_TRUEVIEW_PLACEMENT" -> Right FPTFilterTrueviewPlacement "FILTER_TRUEVIEW_URL" -> Right FPTFilterTrueviewURL "FILTER_TRUEVIEW_COUNTRY" -> Right FPTFilterTrueviewCountry "FILTER_TRUEVIEW_REGION" -> Right FPTFilterTrueviewRegion "FILTER_TRUEVIEW_CITY" -> Right FPTFilterTrueviewCity "FILTER_TRUEVIEW_DMA" -> Right FPTFilterTrueviewDma "FILTER_TRUEVIEW_ZIPCODE" -> Right FPTFilterTrueviewZipcode "FILTER_NOT_SUPPORTED" -> Right FPTFilterNotSupported "FILTER_MEDIA_PLAN" -> Right FPTFilterMediaPlan "FILTER_TRUEVIEW_IAR_YOUTUBE_CHANNEL" -> Right FPTFilterTrueviewIarYouTubeChannel "FILTER_TRUEVIEW_IAR_YOUTUBE_VIDEO" -> Right FPTFilterTrueviewIarYouTubeVideo "FILTER_SKIPPABLE_SUPPORT" -> Right FPTFilterSkippableSupport "FILTER_COMPANION_CREATIVE_ID" -> Right FPTFilterCompanionCreativeId "FILTER_BUDGET_SEGMENT_DESCRIPTION" -> Right FPTFilterBudgetSegmentDescription "FILTER_FLOODLIGHT_ACTIVITY_ID" -> Right FPTFilterFloodlightActivityId "FILTER_DEVICE_MODEL" -> Right FPTFilterDeviceModel "FILTER_DEVICE_MAKE" -> Right FPTFilterDeviceMake "FILTER_DEVICE_TYPE" -> Right FPTFilterDeviceType "FILTER_CREATIVE_ATTRIBUTE" -> Right FPTFilterCreativeAttribute "FILTER_INVENTORY_COMMITMENT_TYPE" -> Right FPTFilterInventoryCommitmentType "FILTER_INVENTORY_RATE_TYPE" -> Right FPTFilterInventoryRateType "FILTER_INVENTORY_DELIVERY_METHOD" -> Right FPTFilterInventoryDeliveryMethod "FILTER_INVENTORY_SOURCE_EXTERNAL_ID" -> Right FPTFilterInventorySourceExternalId "FILTER_AUTHORIZED_SELLER_STATE" -> Right FPTFilterAuthorizedSellerState "FILTER_VIDEO_DURATION_SECONDS_RANGE" -> Right FPTFilterVideoDurationSecondsRange "FILTER_PARTNER_NAME" -> Right FPTFilterPartnerName "FILTER_PARTNER_STATUS" -> Right FPTFilterPartnerStatus "FILTER_ADVERTISER_NAME" -> Right FPTFilterAdvertiserName "FILTER_ADVERTISER_INTEGRATION_CODE" -> Right FPTFilterAdvertiserIntegrationCode "FILTER_ADVERTISER_INTEGRATION_STATUS" -> Right FPTFilterAdvertiserIntegrationStatus "FILTER_CARRIER_NAME" -> Right FPTFilterCarrierName "FILTER_CHANNEL_NAME" -> Right FPTFilterChannelName "FILTER_CITY_NAME" -> Right FPTFilterCityName "FILTER_COMPANION_CREATIVE_NAME" -> Right 
FPTFilterCompanionCreativeName "FILTER_USER_LIST_FIRST_PARTY_NAME" -> Right FPTFilterUserListFirstPartyName "FILTER_USER_LIST_THIRD_PARTY_NAME" -> Right FPTFilterUserListThirdPartyName "FILTER_NIELSEN_RESTATEMENT_DATE" -> Right FPTFilterNielsenReStatementDate "FILTER_NIELSEN_DATE_RANGE" -> Right FPTFilterNielsenDateRange "FILTER_INSERTION_ORDER_NAME" -> Right FPTFilterInsertionOrderName "FILTER_REGION_NAME" -> Right FPTFilterRegionName "FILTER_DMA_NAME" -> Right FPTFilterDmaName "FILTER_TRUEVIEW_IAR_REGION_NAME" -> Right FPTFilterTrueviewIarRegionName "FILTER_TRUEVIEW_DMA_NAME" -> Right FPTFilterTrueviewDmaName "FILTER_TRUEVIEW_REGION_NAME" -> Right FPTFilterTrueviewRegionName "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_ID" -> Right FPTFilterActiveViewCustomMetricId "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_NAME" -> Right FPTFilterActiveViewCustomMetricName "FILTER_AD_TYPE" -> Right FPTFilterAdType "FILTER_ALGORITHM" -> Right FPTFilterAlgorithm "FILTER_ALGORITHM_ID" -> Right FPTFilterAlgorithmId "FILTER_AMP_PAGE_REQUEST" -> Right FPTFilterAmpPageRequest "FILTER_ANONYMOUS_INVENTORY_MODELING" -> Right FPTFilterAnonymousInventoryModeling "FILTER_APP_URL" -> Right FPTFilterAppURL "FILTER_APP_URL_EXCLUDED" -> Right FPTFilterAppURLExcluded "FILTER_ATTRIBUTED_USERLIST" -> Right FPTFilterAttributedUserList "FILTER_ATTRIBUTED_USERLIST_COST" -> Right FPTFilterAttributedUserListCost "FILTER_ATTRIBUTED_USERLIST_TYPE" -> Right FPTFilterAttributedUserListType "FILTER_ATTRIBUTION_MODEL" -> Right FPTFilterAttributionModel "FILTER_AUDIENCE_LIST" -> Right FPTFilterAudienceList "FILTER_AUDIENCE_LIST_COST" -> Right FPTFilterAudienceListCost "FILTER_AUDIENCE_LIST_TYPE" -> Right FPTFilterAudienceListType "FILTER_AUDIENCE_NAME" -> Right FPTFilterAudienceName "FILTER_AUDIENCE_TYPE" -> Right FPTFilterAudienceType "FILTER_BILLABLE_OUTCOME" -> Right FPTFilterBillableOutcome "FILTER_BRAND_LIFT_TYPE" -> Right FPTFilterBrandLiftType "FILTER_CHANNEL_TYPE" -> Right FPTFilterChannelType "FILTER_CM_PLACEMENT_ID" -> Right FPTFilterCmPlacementId "FILTER_CONVERSION_SOURCE" -> Right FPTFilterConversionSource "FILTER_CONVERSION_SOURCE_ID" -> Right FPTFilterConversionSourceId "FILTER_COUNTRY_ID" -> Right FPTFilterCountryId "FILTER_CREATIVE" -> Right FPTFilterCreative "FILTER_CREATIVE_ASSET" -> Right FPTFilterCreativeAsset "FILTER_CREATIVE_INTEGRATION_CODE" -> Right FPTFilterCreativeIntegrationCode "FILTER_CREATIVE_RENDERED_IN_AMP" -> Right FPTFilterCreativeRenderedInAmp "FILTER_CREATIVE_SOURCE" -> Right FPTFilterCreativeSource "FILTER_CREATIVE_STATUS" -> Right FPTFilterCreativeStatus "FILTER_DATA_PROVIDER_NAME" -> Right FPTFilterDataProviderName "FILTER_DETAILED_DEMOGRAPHICS" -> Right FPTFilterDetailedDemographics "FILTER_DETAILED_DEMOGRAPHICS_ID" -> Right FPTFilterDetailedDemographicsId "FILTER_DEVICE" -> Right FPTFilterDevice "FILTER_GAM_INSERTION_ORDER" -> Right FPTFilterGamInsertionOrder "FILTER_GAM_LINE_ITEM" -> Right FPTFilterGamLineItem "FILTER_GAM_LINE_ITEM_ID" -> Right FPTFilterGamLineItemId "FILTER_DIGITAL_CONTENT_LABEL" -> Right FPTFilterDigitalContentLabel "FILTER_DOMAIN" -> Right FPTFilterDomain "FILTER_ELIGIBLE_COOKIES_ON_FIRST_PARTY_AUDIENCE_LIST" -> Right FPTFilterEligibleCookiesOnFirstPartyAudienceList "FILTER_ELIGIBLE_COOKIES_ON_THIRD_PARTY_AUDIENCE_LIST_AND_INTEREST" -> Right FPTFilterEligibleCookiesOnThirdPartyAudienceListAndInterest "FILTER_EXCHANGE" -> Right FPTFilterExchange "FILTER_EXCHANGE_CODE" -> Right FPTFilterExchangeCode "FILTER_EXTENSION" -> Right FPTFilterExtension "FILTER_EXTENSION_STATUS" -> Right 
FPTFilterExtensionStatus "FILTER_EXTENSION_TYPE" -> Right FPTFilterExtensionType "FILTER_FIRST_PARTY_AUDIENCE_LIST_COST" -> Right FPTFilterFirstPartyAudienceListCost "FILTER_FIRST_PARTY_AUDIENCE_LIST_TYPE" -> Right FPTFilterFirstPartyAudienceListType "FILTER_FLOODLIGHT_ACTIVITY" -> Right FPTFilterFloodlightActivity "FILTER_FORMAT" -> Right FPTFilterFormat "FILTER_GMAIL_AGE" -> Right FPTFilterGmailAge "FILTER_GMAIL_CITY" -> Right FPTFilterGmailCity "FILTER_GMAIL_COUNTRY" -> Right FPTFilterGmailCountry "FILTER_GMAIL_COUNTRY_NAME" -> Right FPTFilterGmailCountryName "FILTER_GMAIL_DEVICE_TYPE" -> Right FPTFilterGmailDeviceType "FILTER_GMAIL_DEVICE_TYPE_NAME" -> Right FPTFilterGmailDeviceTypeName "FILTER_GMAIL_GENDER" -> Right FPTFilterGmailGender "FILTER_GMAIL_REGION" -> Right FPTFilterGmailRegion "FILTER_GMAIL_REMARKETING_LIST" -> Right FPTFilterGmailRemarketingList "FILTER_HOUSEHOLD_INCOME" -> Right FPTFilterHouseholdIncome "FILTER_IMPRESSION_COUNTING_METHOD" -> Right FPTFilterImpressionCountingMethod "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_INSERTION_ORDER" -> Right FPTFilterYouTubeProgrammaticGuaranteedInsertionOrder "FILTER_INSERTION_ORDER_INTEGRATION_CODE" -> Right FPTFilterInsertionOrderIntegrationCode "FILTER_INSERTION_ORDER_STATUS" -> Right FPTFilterInsertionOrderStatus "FILTER_INTEREST" -> Right FPTFilterInterest "FILTER_INVENTORY_SOURCE_GROUP" -> Right FPTFilterInventorySourceGroup "FILTER_INVENTORY_SOURCE_GROUP_ID" -> Right FPTFilterInventorySourceGroupId "FILTER_INVENTORY_SOURCE_ID" -> Right FPTFilterInventorySourceId "FILTER_INVENTORY_SOURCE_NAME" -> Right FPTFilterInventorySourceName "FILTER_LIFE_EVENT" -> Right FPTFilterLifeEvent "FILTER_LIFE_EVENTS" -> Right FPTFilterLifeEvents "FILTER_LINE_ITEM_INTEGRATION_CODE" -> Right FPTFilterLineItemIntegrationCode "FILTER_LINE_ITEM_NAME" -> Right FPTFilterLineItemName "FILTER_LINE_ITEM_STATUS" -> Right FPTFilterLineItemStatus "FILTER_MATCH_RATIO" -> Right FPTFilterMatchRatio "FILTER_MEASUREMENT_SOURCE" -> Right FPTFilterMeasurementSource "FILTER_MEDIA_PLAN_NAME" -> Right FPTFilterMediaPlanName "FILTER_PARENTAL_STATUS" -> Right FPTFilterParentalStatus "FILTER_PLACEMENT_ALL_YOUTUBE_CHANNELS" -> Right FPTFilterPlacementAllYouTubeChannels "FILTER_PLATFORM" -> Right FPTFilterPlatform "FILTER_PLAYBACK_METHOD" -> Right FPTFilterPlaybackMethod "FILTER_POSITION_IN_CONTENT" -> Right FPTFilterPositionInContent "FILTER_PUBLISHER_PROPERTY" -> Right FPTFilterPublisherProperty "FILTER_PUBLISHER_PROPERTY_ID" -> Right FPTFilterPublisherPropertyId "FILTER_PUBLISHER_PROPERTY_SECTION" -> Right FPTFilterPublisherPropertySection "FILTER_PUBLISHER_PROPERTY_SECTION_ID" -> Right FPTFilterPublisherPropertySectionId "FILTER_REFUND_REASON" -> Right FPTFilterRefundReason "FILTER_REMARKETING_LIST" -> Right FPTFilterRemarketingList "FILTER_REWARDED" -> Right FPTFilterRewarded "FILTER_SENSITIVE_CATEGORY" -> Right FPTFilterSensitiveCategory "FILTER_SERVED_PIXEL_DENSITY" -> Right FPTFilterServedPixelDensity "FILTER_TARGETED_DATA_PROVIDERS" -> Right FPTFilterTargetedDataProviders "FILTER_THIRD_PARTY_AUDIENCE_LIST_COST" -> Right FPTFilterThirdPartyAudienceListCost "FILTER_THIRD_PARTY_AUDIENCE_LIST_TYPE" -> Right FPTFilterThirdPartyAudienceListType "FILTER_TRUEVIEW_AD" -> Right FPTFilterTrueviewAd "FILTER_TRUEVIEW_AD_GROUP" -> Right FPTFilterTrueviewAdGroup "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS" -> Right FPTFilterTrueviewDetailedDemographics "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS_ID" -> Right FPTFilterTrueviewDetailedDemographicsId "FILTER_TRUEVIEW_HOUSEHOLD_INCOME" -> 
Right FPTFilterTrueviewHouseholdIncome "FILTER_TRUEVIEW_IAR_COUNTRY_NAME" -> Right FPTFilterTrueviewIarCountryName "FILTER_TRUEVIEW_REMARKETING_LIST_NAME" -> Right FPTFilterTrueviewRemarketingListName "FILTER_VARIANT_ID" -> Right FPTFilterVariantId "FILTER_VARIANT_NAME" -> Right FPTFilterVariantName "FILTER_VARIANT_VERSION" -> Right FPTFilterVariantVersion "FILTER_VERIFICATION_VIDEO_PLAYER_SIZE" -> Right FPTFilterVerificationVideoPlayerSize "FILTER_VERIFICATION_VIDEO_POSITION" -> Right FPTFilterVerificationVideoPosition "FILTER_VIDEO_COMPANION_CREATIVE_SIZE" -> Right FPTFilterVideoCompanionCreativeSize "FILTER_VIDEO_CONTINUOUS_PLAY" -> Right FPTFilterVideoContinuousPlay "FILTER_VIDEO_DURATION" -> Right FPTFilterVideoDuration "FILTER_YOUTUBE_ADAPTED_AUDIENCE_LIST" -> Right FPTFilterYouTubeAdaptedAudienceList "FILTER_YOUTUBE_AD_VIDEO" -> Right FPTFilterYouTubeAdVideo "FILTER_YOUTUBE_AD_VIDEO_ID" -> Right FPTFilterYouTubeAdVideoId "FILTER_YOUTUBE_CHANNEL" -> Right FPTFilterYouTubeChannel "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_ADVERTISER" -> Right FPTFilterYouTubeProgrammaticGuaranteedAdvertiser "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_PARTNER" -> Right FPTFilterYouTubeProgrammaticGuaranteedPartner "FILTER_YOUTUBE_VIDEO" -> Right FPTFilterYouTubeVideo "FILTER_ZIP_POSTAL_CODE" -> Right FPTFilterZipPostalCode "FILTER_PLACEMENT_NAME_ALL_YOUTUBE_CHANNELS" -> Right FPTFilterPlacementNameAllYouTubeChannels "FILTER_TRUEVIEW_PLACEMENT_ID" -> Right FPTFilterTrueviewPlacementId "FILTER_PATH_PATTERN_ID" -> Right FPTFilterPathPatternId "FILTER_PATH_EVENT_INDEX" -> Right FPTFilterPathEventIndex "FILTER_EVENT_TYPE" -> Right FPTFilterEventType "FILTER_CHANNEL_GROUPING" -> Right FPTFilterChannelGrouping "FILTER_OM_SDK_AVAILABLE" -> Right FPTFilterOmSdkAvailable "FILTER_DATA_SOURCE" -> Right FPTFilterDataSource "FILTER_CM360_PLACEMENT_ID" -> Right FPTFilterCM360PlacementId "FILTER_TRUEVIEW_CLICK_TYPE_NAME" -> Right FPTFilterTrueviewClickTypeName "FILTER_TRUEVIEW_AD_TYPE_NAME" -> Right FPTFilterTrueviewAdTypeName "FILTER_VIDEO_CONTENT_DURATION" -> Right FPTFilterVideoContentDuration "FILTER_MATCHED_GENRE_TARGET" -> Right FPTFilterMatchedGenreTarget "FILTER_VIDEO_CONTENT_LIVE_STREAM" -> Right FPTFilterVideoContentLiveStream "FILTER_BUDGET_SEGMENT_TYPE" -> Right FPTFilterBudgetSegmentType "FILTER_BUDGET_SEGMENT_BUDGET" -> Right FPTFilterBudgetSegmentBudget "FILTER_BUDGET_SEGMENT_START_DATE" -> Right FPTFilterBudgetSegmentStartDate "FILTER_BUDGET_SEGMENT_END_DATE" -> Right FPTFilterBudgetSegmentEndDate "FILTER_BUDGET_SEGMENT_PACING_PERCENTAGE" -> Right FPTFilterBudgetSegmentPacingPercentage "FILTER_LINE_ITEM_BUDGET" -> Right FPTFilterLineItemBudget "FILTER_LINE_ITEM_START_DATE" -> Right FPTFilterLineItemStartDate "FILTER_LINE_ITEM_END_DATE" -> Right FPTFilterLineItemEndDate "FILTER_INSERTION_ORDER_GOAL_TYPE" -> Right FPTFilterInsertionOrderGoalType "FILTER_LINE_ITEM_PACING_PERCENTAGE" -> Right FPTFilterLineItemPacingPercentage "FILTER_INSERTION_ORDER_GOAL_VALUE" -> Right FPTFilterInsertionOrderGoalValue "FILTER_OMID_CAPABLE" -> Right FPTFilterOmidCapable "FILTER_VENDOR_MEASUREMENT_MODE" -> Right FPTFilterVendorMeasurementMode x -> Left ("Unable to parse FilterPairType from: " <> x) instance ToHttpApiData FilterPairType where toQueryParam = \case FPTFilterUnknown -> "FILTER_UNKNOWN" FPTFilterDate -> "FILTER_DATE" FPTFilterDayOfWeek -> "FILTER_DAY_OF_WEEK" FPTFilterWeek -> "FILTER_WEEK" FPTFilterMonth -> "FILTER_MONTH" FPTFilterYear -> "FILTER_YEAR" FPTFilterTimeOfDay -> "FILTER_TIME_OF_DAY" FPTFilterConversionDelay 
-> "FILTER_CONVERSION_DELAY" FPTFilterCreativeId -> "FILTER_CREATIVE_ID" FPTFilterCreativeSize -> "FILTER_CREATIVE_SIZE" FPTFilterCreativeType -> "FILTER_CREATIVE_TYPE" FPTFilterExchangeId -> "FILTER_EXCHANGE_ID" FPTFilterAdPosition -> "FILTER_AD_POSITION" FPTFilterPublicInventory -> "FILTER_PUBLIC_INVENTORY" FPTFilterInventorySource -> "FILTER_INVENTORY_SOURCE" FPTFilterCity -> "FILTER_CITY" FPTFilterRegion -> "FILTER_REGION" FPTFilterDma -> "FILTER_DMA" FPTFilterCountry -> "FILTER_COUNTRY" FPTFilterSiteId -> "FILTER_SITE_ID" FPTFilterChannelId -> "FILTER_CHANNEL_ID" FPTFilterPartner -> "FILTER_PARTNER" FPTFilterAdvertiser -> "FILTER_ADVERTISER" FPTFilterInsertionOrder -> "FILTER_INSERTION_ORDER" FPTFilterLineItem -> "FILTER_LINE_ITEM" FPTFilterPartnerCurrency -> "FILTER_PARTNER_CURRENCY" FPTFilterAdvertiserCurrency -> "FILTER_ADVERTISER_CURRENCY" FPTFilterAdvertiserTimezone -> "FILTER_ADVERTISER_TIMEZONE" FPTFilterLineItemType -> "FILTER_LINE_ITEM_TYPE" FPTFilterUserList -> "FILTER_USER_LIST" FPTFilterUserListFirstParty -> "FILTER_USER_LIST_FIRST_PARTY" FPTFilterUserListThirdParty -> "FILTER_USER_LIST_THIRD_PARTY" FPTFilterTargetedUserList -> "FILTER_TARGETED_USER_LIST" FPTFilterDataProvider -> "FILTER_DATA_PROVIDER" FPTFilterOrderId -> "FILTER_ORDER_ID" FPTFilterVideoPlayerSize -> "FILTER_VIDEO_PLAYER_SIZE" FPTFilterVideoDurationSeconds -> "FILTER_VIDEO_DURATION_SECONDS" FPTFilterKeyword -> "FILTER_KEYWORD" FPTFilterPageCategory -> "FILTER_PAGE_CATEGORY" FPTFilterCampaignDailyFrequency -> "FILTER_CAMPAIGN_DAILY_FREQUENCY" FPTFilterLineItemDailyFrequency -> "FILTER_LINE_ITEM_DAILY_FREQUENCY" FPTFilterLineItemLifetimeFrequency -> "FILTER_LINE_ITEM_LIFETIME_FREQUENCY" FPTFilterOS -> "FILTER_OS" FPTFilterBrowser -> "FILTER_BROWSER" FPTFilterCarrier -> "FILTER_CARRIER" FPTFilterSiteLanguage -> "FILTER_SITE_LANGUAGE" FPTFilterInventoryFormat -> "FILTER_INVENTORY_FORMAT" FPTFilterZipCode -> "FILTER_ZIP_CODE" FPTFilterVideoRatingTier -> "FILTER_VIDEO_RATING_TIER" FPTFilterVideoFormatSupport -> "FILTER_VIDEO_FORMAT_SUPPORT" FPTFilterVideoSkippableSupport -> "FILTER_VIDEO_SKIPPABLE_SUPPORT" FPTFilterVideoCreativeDuration -> "FILTER_VIDEO_CREATIVE_DURATION" FPTFilterPageLayout -> "FILTER_PAGE_LAYOUT" FPTFilterVideoAdPositionInStream -> "FILTER_VIDEO_AD_POSITION_IN_STREAM" FPTFilterAge -> "FILTER_AGE" FPTFilterGender -> "FILTER_GENDER" FPTFilterQuarter -> "FILTER_QUARTER" FPTFilterTrueviewConversionType -> "FILTER_TRUEVIEW_CONVERSION_TYPE" FPTFilterMobileGeo -> "FILTER_MOBILE_GEO" FPTFilterMraidSupport -> "FILTER_MRAID_SUPPORT" FPTFilterActiveViewExpectedViewability -> "FILTER_ACTIVE_VIEW_EXPECTED_VIEWABILITY" FPTFilterVideoCreativeDurationSkippable -> "FILTER_VIDEO_CREATIVE_DURATION_SKIPPABLE" FPTFilterNielsenCountryCode -> "FILTER_NIELSEN_COUNTRY_CODE" FPTFilterNielsenDeviceId -> "FILTER_NIELSEN_DEVICE_ID" FPTFilterNielsenGender -> "FILTER_NIELSEN_GENDER" FPTFilterNielsenAge -> "FILTER_NIELSEN_AGE" FPTFilterInventorySourceType -> "FILTER_INVENTORY_SOURCE_TYPE" FPTFilterCreativeWidth -> "FILTER_CREATIVE_WIDTH" FPTFilterCreativeHeight -> "FILTER_CREATIVE_HEIGHT" FPTFilterDfpOrderId -> "FILTER_DFP_ORDER_ID" FPTFilterTrueviewAge -> "FILTER_TRUEVIEW_AGE" FPTFilterTrueviewGender -> "FILTER_TRUEVIEW_GENDER" FPTFilterTrueviewParentalStatus -> "FILTER_TRUEVIEW_PARENTAL_STATUS" FPTFilterTrueviewRemarketingList -> "FILTER_TRUEVIEW_REMARKETING_LIST" FPTFilterTrueviewInterest -> "FILTER_TRUEVIEW_INTEREST" FPTFilterTrueviewAdGroupId -> "FILTER_TRUEVIEW_AD_GROUP_ID" FPTFilterTrueviewAdGroupAdId -> 
"FILTER_TRUEVIEW_AD_GROUP_AD_ID" FPTFilterTrueviewIarLanguage -> "FILTER_TRUEVIEW_IAR_LANGUAGE" FPTFilterTrueviewIarGender -> "FILTER_TRUEVIEW_IAR_GENDER" FPTFilterTrueviewIarAge -> "FILTER_TRUEVIEW_IAR_AGE" FPTFilterTrueviewIarCategory -> "FILTER_TRUEVIEW_IAR_CATEGORY" FPTFilterTrueviewIarCountry -> "FILTER_TRUEVIEW_IAR_COUNTRY" FPTFilterTrueviewIarCity -> "FILTER_TRUEVIEW_IAR_CITY" FPTFilterTrueviewIarRegion -> "FILTER_TRUEVIEW_IAR_REGION" FPTFilterTrueviewIarZipcode -> "FILTER_TRUEVIEW_IAR_ZIPCODE" FPTFilterTrueviewIarRemarketingList -> "FILTER_TRUEVIEW_IAR_REMARKETING_LIST" FPTFilterTrueviewIarInterest -> "FILTER_TRUEVIEW_IAR_INTEREST" FPTFilterTrueviewIarParentalStatus -> "FILTER_TRUEVIEW_IAR_PARENTAL_STATUS" FPTFilterTrueviewIarTimeOfDay -> "FILTER_TRUEVIEW_IAR_TIME_OF_DAY" FPTFilterTrueviewCustomAffinity -> "FILTER_TRUEVIEW_CUSTOM_AFFINITY" FPTFilterTrueviewCategory -> "FILTER_TRUEVIEW_CATEGORY" FPTFilterTrueviewKeyword -> "FILTER_TRUEVIEW_KEYWORD" FPTFilterTrueviewPlacement -> "FILTER_TRUEVIEW_PLACEMENT" FPTFilterTrueviewURL -> "FILTER_TRUEVIEW_URL" FPTFilterTrueviewCountry -> "FILTER_TRUEVIEW_COUNTRY" FPTFilterTrueviewRegion -> "FILTER_TRUEVIEW_REGION" FPTFilterTrueviewCity -> "FILTER_TRUEVIEW_CITY" FPTFilterTrueviewDma -> "FILTER_TRUEVIEW_DMA" FPTFilterTrueviewZipcode -> "FILTER_TRUEVIEW_ZIPCODE" FPTFilterNotSupported -> "FILTER_NOT_SUPPORTED" FPTFilterMediaPlan -> "FILTER_MEDIA_PLAN" FPTFilterTrueviewIarYouTubeChannel -> "FILTER_TRUEVIEW_IAR_YOUTUBE_CHANNEL" FPTFilterTrueviewIarYouTubeVideo -> "FILTER_TRUEVIEW_IAR_YOUTUBE_VIDEO" FPTFilterSkippableSupport -> "FILTER_SKIPPABLE_SUPPORT" FPTFilterCompanionCreativeId -> "FILTER_COMPANION_CREATIVE_ID" FPTFilterBudgetSegmentDescription -> "FILTER_BUDGET_SEGMENT_DESCRIPTION" FPTFilterFloodlightActivityId -> "FILTER_FLOODLIGHT_ACTIVITY_ID" FPTFilterDeviceModel -> "FILTER_DEVICE_MODEL" FPTFilterDeviceMake -> "FILTER_DEVICE_MAKE" FPTFilterDeviceType -> "FILTER_DEVICE_TYPE" FPTFilterCreativeAttribute -> "FILTER_CREATIVE_ATTRIBUTE" FPTFilterInventoryCommitmentType -> "FILTER_INVENTORY_COMMITMENT_TYPE" FPTFilterInventoryRateType -> "FILTER_INVENTORY_RATE_TYPE" FPTFilterInventoryDeliveryMethod -> "FILTER_INVENTORY_DELIVERY_METHOD" FPTFilterInventorySourceExternalId -> "FILTER_INVENTORY_SOURCE_EXTERNAL_ID" FPTFilterAuthorizedSellerState -> "FILTER_AUTHORIZED_SELLER_STATE" FPTFilterVideoDurationSecondsRange -> "FILTER_VIDEO_DURATION_SECONDS_RANGE" FPTFilterPartnerName -> "FILTER_PARTNER_NAME" FPTFilterPartnerStatus -> "FILTER_PARTNER_STATUS" FPTFilterAdvertiserName -> "FILTER_ADVERTISER_NAME" FPTFilterAdvertiserIntegrationCode -> "FILTER_ADVERTISER_INTEGRATION_CODE" FPTFilterAdvertiserIntegrationStatus -> "FILTER_ADVERTISER_INTEGRATION_STATUS" FPTFilterCarrierName -> "FILTER_CARRIER_NAME" FPTFilterChannelName -> "FILTER_CHANNEL_NAME" FPTFilterCityName -> "FILTER_CITY_NAME" FPTFilterCompanionCreativeName -> "FILTER_COMPANION_CREATIVE_NAME" FPTFilterUserListFirstPartyName -> "FILTER_USER_LIST_FIRST_PARTY_NAME" FPTFilterUserListThirdPartyName -> "FILTER_USER_LIST_THIRD_PARTY_NAME" FPTFilterNielsenReStatementDate -> "FILTER_NIELSEN_RESTATEMENT_DATE" FPTFilterNielsenDateRange -> "FILTER_NIELSEN_DATE_RANGE" FPTFilterInsertionOrderName -> "FILTER_INSERTION_ORDER_NAME" FPTFilterRegionName -> "FILTER_REGION_NAME" FPTFilterDmaName -> "FILTER_DMA_NAME" FPTFilterTrueviewIarRegionName -> "FILTER_TRUEVIEW_IAR_REGION_NAME" FPTFilterTrueviewDmaName -> "FILTER_TRUEVIEW_DMA_NAME" FPTFilterTrueviewRegionName -> "FILTER_TRUEVIEW_REGION_NAME" 
FPTFilterActiveViewCustomMetricId -> "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_ID" FPTFilterActiveViewCustomMetricName -> "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_NAME" FPTFilterAdType -> "FILTER_AD_TYPE" FPTFilterAlgorithm -> "FILTER_ALGORITHM" FPTFilterAlgorithmId -> "FILTER_ALGORITHM_ID" FPTFilterAmpPageRequest -> "FILTER_AMP_PAGE_REQUEST" FPTFilterAnonymousInventoryModeling -> "FILTER_ANONYMOUS_INVENTORY_MODELING" FPTFilterAppURL -> "FILTER_APP_URL" FPTFilterAppURLExcluded -> "FILTER_APP_URL_EXCLUDED" FPTFilterAttributedUserList -> "FILTER_ATTRIBUTED_USERLIST" FPTFilterAttributedUserListCost -> "FILTER_ATTRIBUTED_USERLIST_COST" FPTFilterAttributedUserListType -> "FILTER_ATTRIBUTED_USERLIST_TYPE" FPTFilterAttributionModel -> "FILTER_ATTRIBUTION_MODEL" FPTFilterAudienceList -> "FILTER_AUDIENCE_LIST" FPTFilterAudienceListCost -> "FILTER_AUDIENCE_LIST_COST" FPTFilterAudienceListType -> "FILTER_AUDIENCE_LIST_TYPE" FPTFilterAudienceName -> "FILTER_AUDIENCE_NAME" FPTFilterAudienceType -> "FILTER_AUDIENCE_TYPE" FPTFilterBillableOutcome -> "FILTER_BILLABLE_OUTCOME" FPTFilterBrandLiftType -> "FILTER_BRAND_LIFT_TYPE" FPTFilterChannelType -> "FILTER_CHANNEL_TYPE" FPTFilterCmPlacementId -> "FILTER_CM_PLACEMENT_ID" FPTFilterConversionSource -> "FILTER_CONVERSION_SOURCE" FPTFilterConversionSourceId -> "FILTER_CONVERSION_SOURCE_ID" FPTFilterCountryId -> "FILTER_COUNTRY_ID" FPTFilterCreative -> "FILTER_CREATIVE" FPTFilterCreativeAsset -> "FILTER_CREATIVE_ASSET" FPTFilterCreativeIntegrationCode -> "FILTER_CREATIVE_INTEGRATION_CODE" FPTFilterCreativeRenderedInAmp -> "FILTER_CREATIVE_RENDERED_IN_AMP" FPTFilterCreativeSource -> "FILTER_CREATIVE_SOURCE" FPTFilterCreativeStatus -> "FILTER_CREATIVE_STATUS" FPTFilterDataProviderName -> "FILTER_DATA_PROVIDER_NAME" FPTFilterDetailedDemographics -> "FILTER_DETAILED_DEMOGRAPHICS" FPTFilterDetailedDemographicsId -> "FILTER_DETAILED_DEMOGRAPHICS_ID" FPTFilterDevice -> "FILTER_DEVICE" FPTFilterGamInsertionOrder -> "FILTER_GAM_INSERTION_ORDER" FPTFilterGamLineItem -> "FILTER_GAM_LINE_ITEM" FPTFilterGamLineItemId -> "FILTER_GAM_LINE_ITEM_ID" FPTFilterDigitalContentLabel -> "FILTER_DIGITAL_CONTENT_LABEL" FPTFilterDomain -> "FILTER_DOMAIN" FPTFilterEligibleCookiesOnFirstPartyAudienceList -> "FILTER_ELIGIBLE_COOKIES_ON_FIRST_PARTY_AUDIENCE_LIST" FPTFilterEligibleCookiesOnThirdPartyAudienceListAndInterest -> "FILTER_ELIGIBLE_COOKIES_ON_THIRD_PARTY_AUDIENCE_LIST_AND_INTEREST" FPTFilterExchange -> "FILTER_EXCHANGE" FPTFilterExchangeCode -> "FILTER_EXCHANGE_CODE" FPTFilterExtension -> "FILTER_EXTENSION" FPTFilterExtensionStatus -> "FILTER_EXTENSION_STATUS" FPTFilterExtensionType -> "FILTER_EXTENSION_TYPE" FPTFilterFirstPartyAudienceListCost -> "FILTER_FIRST_PARTY_AUDIENCE_LIST_COST" FPTFilterFirstPartyAudienceListType -> "FILTER_FIRST_PARTY_AUDIENCE_LIST_TYPE" FPTFilterFloodlightActivity -> "FILTER_FLOODLIGHT_ACTIVITY" FPTFilterFormat -> "FILTER_FORMAT" FPTFilterGmailAge -> "FILTER_GMAIL_AGE" FPTFilterGmailCity -> "FILTER_GMAIL_CITY" FPTFilterGmailCountry -> "FILTER_GMAIL_COUNTRY" FPTFilterGmailCountryName -> "FILTER_GMAIL_COUNTRY_NAME" FPTFilterGmailDeviceType -> "FILTER_GMAIL_DEVICE_TYPE" FPTFilterGmailDeviceTypeName -> "FILTER_GMAIL_DEVICE_TYPE_NAME" FPTFilterGmailGender -> "FILTER_GMAIL_GENDER" FPTFilterGmailRegion -> "FILTER_GMAIL_REGION" FPTFilterGmailRemarketingList -> "FILTER_GMAIL_REMARKETING_LIST" FPTFilterHouseholdIncome -> "FILTER_HOUSEHOLD_INCOME" FPTFilterImpressionCountingMethod -> "FILTER_IMPRESSION_COUNTING_METHOD" FPTFilterYouTubeProgrammaticGuaranteedInsertionOrder -> 
"FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_INSERTION_ORDER" FPTFilterInsertionOrderIntegrationCode -> "FILTER_INSERTION_ORDER_INTEGRATION_CODE" FPTFilterInsertionOrderStatus -> "FILTER_INSERTION_ORDER_STATUS" FPTFilterInterest -> "FILTER_INTEREST" FPTFilterInventorySourceGroup -> "FILTER_INVENTORY_SOURCE_GROUP" FPTFilterInventorySourceGroupId -> "FILTER_INVENTORY_SOURCE_GROUP_ID" FPTFilterInventorySourceId -> "FILTER_INVENTORY_SOURCE_ID" FPTFilterInventorySourceName -> "FILTER_INVENTORY_SOURCE_NAME" FPTFilterLifeEvent -> "FILTER_LIFE_EVENT" FPTFilterLifeEvents -> "FILTER_LIFE_EVENTS" FPTFilterLineItemIntegrationCode -> "FILTER_LINE_ITEM_INTEGRATION_CODE" FPTFilterLineItemName -> "FILTER_LINE_ITEM_NAME" FPTFilterLineItemStatus -> "FILTER_LINE_ITEM_STATUS" FPTFilterMatchRatio -> "FILTER_MATCH_RATIO" FPTFilterMeasurementSource -> "FILTER_MEASUREMENT_SOURCE" FPTFilterMediaPlanName -> "FILTER_MEDIA_PLAN_NAME" FPTFilterParentalStatus -> "FILTER_PARENTAL_STATUS" FPTFilterPlacementAllYouTubeChannels -> "FILTER_PLACEMENT_ALL_YOUTUBE_CHANNELS" FPTFilterPlatform -> "FILTER_PLATFORM" FPTFilterPlaybackMethod -> "FILTER_PLAYBACK_METHOD" FPTFilterPositionInContent -> "FILTER_POSITION_IN_CONTENT" FPTFilterPublisherProperty -> "FILTER_PUBLISHER_PROPERTY" FPTFilterPublisherPropertyId -> "FILTER_PUBLISHER_PROPERTY_ID" FPTFilterPublisherPropertySection -> "FILTER_PUBLISHER_PROPERTY_SECTION" FPTFilterPublisherPropertySectionId -> "FILTER_PUBLISHER_PROPERTY_SECTION_ID" FPTFilterRefundReason -> "FILTER_REFUND_REASON" FPTFilterRemarketingList -> "FILTER_REMARKETING_LIST" FPTFilterRewarded -> "FILTER_REWARDED" FPTFilterSensitiveCategory -> "FILTER_SENSITIVE_CATEGORY" FPTFilterServedPixelDensity -> "FILTER_SERVED_PIXEL_DENSITY" FPTFilterTargetedDataProviders -> "FILTER_TARGETED_DATA_PROVIDERS" FPTFilterThirdPartyAudienceListCost -> "FILTER_THIRD_PARTY_AUDIENCE_LIST_COST" FPTFilterThirdPartyAudienceListType -> "FILTER_THIRD_PARTY_AUDIENCE_LIST_TYPE" FPTFilterTrueviewAd -> "FILTER_TRUEVIEW_AD" FPTFilterTrueviewAdGroup -> "FILTER_TRUEVIEW_AD_GROUP" FPTFilterTrueviewDetailedDemographics -> "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS" FPTFilterTrueviewDetailedDemographicsId -> "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS_ID" FPTFilterTrueviewHouseholdIncome -> "FILTER_TRUEVIEW_HOUSEHOLD_INCOME" FPTFilterTrueviewIarCountryName -> "FILTER_TRUEVIEW_IAR_COUNTRY_NAME" FPTFilterTrueviewRemarketingListName -> "FILTER_TRUEVIEW_REMARKETING_LIST_NAME" FPTFilterVariantId -> "FILTER_VARIANT_ID" FPTFilterVariantName -> "FILTER_VARIANT_NAME" FPTFilterVariantVersion -> "FILTER_VARIANT_VERSION" FPTFilterVerificationVideoPlayerSize -> "FILTER_VERIFICATION_VIDEO_PLAYER_SIZE" FPTFilterVerificationVideoPosition -> "FILTER_VERIFICATION_VIDEO_POSITION" FPTFilterVideoCompanionCreativeSize -> "FILTER_VIDEO_COMPANION_CREATIVE_SIZE" FPTFilterVideoContinuousPlay -> "FILTER_VIDEO_CONTINUOUS_PLAY" FPTFilterVideoDuration -> "FILTER_VIDEO_DURATION" FPTFilterYouTubeAdaptedAudienceList -> "FILTER_YOUTUBE_ADAPTED_AUDIENCE_LIST" FPTFilterYouTubeAdVideo -> "FILTER_YOUTUBE_AD_VIDEO" FPTFilterYouTubeAdVideoId -> "FILTER_YOUTUBE_AD_VIDEO_ID" FPTFilterYouTubeChannel -> "FILTER_YOUTUBE_CHANNEL" FPTFilterYouTubeProgrammaticGuaranteedAdvertiser -> "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_ADVERTISER" FPTFilterYouTubeProgrammaticGuaranteedPartner -> "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_PARTNER" FPTFilterYouTubeVideo -> "FILTER_YOUTUBE_VIDEO" FPTFilterZipPostalCode -> "FILTER_ZIP_POSTAL_CODE" FPTFilterPlacementNameAllYouTubeChannels -> 
"FILTER_PLACEMENT_NAME_ALL_YOUTUBE_CHANNELS" FPTFilterTrueviewPlacementId -> "FILTER_TRUEVIEW_PLACEMENT_ID" FPTFilterPathPatternId -> "FILTER_PATH_PATTERN_ID" FPTFilterPathEventIndex -> "FILTER_PATH_EVENT_INDEX" FPTFilterEventType -> "FILTER_EVENT_TYPE" FPTFilterChannelGrouping -> "FILTER_CHANNEL_GROUPING" FPTFilterOmSdkAvailable -> "FILTER_OM_SDK_AVAILABLE" FPTFilterDataSource -> "FILTER_DATA_SOURCE" FPTFilterCM360PlacementId -> "FILTER_CM360_PLACEMENT_ID" FPTFilterTrueviewClickTypeName -> "FILTER_TRUEVIEW_CLICK_TYPE_NAME" FPTFilterTrueviewAdTypeName -> "FILTER_TRUEVIEW_AD_TYPE_NAME" FPTFilterVideoContentDuration -> "FILTER_VIDEO_CONTENT_DURATION" FPTFilterMatchedGenreTarget -> "FILTER_MATCHED_GENRE_TARGET" FPTFilterVideoContentLiveStream -> "FILTER_VIDEO_CONTENT_LIVE_STREAM" FPTFilterBudgetSegmentType -> "FILTER_BUDGET_SEGMENT_TYPE" FPTFilterBudgetSegmentBudget -> "FILTER_BUDGET_SEGMENT_BUDGET" FPTFilterBudgetSegmentStartDate -> "FILTER_BUDGET_SEGMENT_START_DATE" FPTFilterBudgetSegmentEndDate -> "FILTER_BUDGET_SEGMENT_END_DATE" FPTFilterBudgetSegmentPacingPercentage -> "FILTER_BUDGET_SEGMENT_PACING_PERCENTAGE" FPTFilterLineItemBudget -> "FILTER_LINE_ITEM_BUDGET" FPTFilterLineItemStartDate -> "FILTER_LINE_ITEM_START_DATE" FPTFilterLineItemEndDate -> "FILTER_LINE_ITEM_END_DATE" FPTFilterInsertionOrderGoalType -> "FILTER_INSERTION_ORDER_GOAL_TYPE" FPTFilterLineItemPacingPercentage -> "FILTER_LINE_ITEM_PACING_PERCENTAGE" FPTFilterInsertionOrderGoalValue -> "FILTER_INSERTION_ORDER_GOAL_VALUE" FPTFilterOmidCapable -> "FILTER_OMID_CAPABLE" FPTFilterVendorMeasurementMode -> "FILTER_VENDOR_MEASUREMENT_MODE" instance FromJSON FilterPairType where parseJSON = parseJSONText "FilterPairType" instance ToJSON FilterPairType where toJSON = toJSONText -- | Report data range used to generate the report. 
data RunQueryRequestDataRange
    = RQRDRCustomDates -- ^ @CUSTOM_DATES@
    | RQRDRCurrentDay -- ^ @CURRENT_DAY@
    | RQRDRPreviousDay -- ^ @PREVIOUS_DAY@
    | RQRDRWeekToDate -- ^ @WEEK_TO_DATE@
    | RQRDRMonthToDate -- ^ @MONTH_TO_DATE@
    | RQRDRQuarterToDate -- ^ @QUARTER_TO_DATE@
    | RQRDRYearToDate -- ^ @YEAR_TO_DATE@
    | RQRDRPreviousWeek -- ^ @PREVIOUS_WEEK@
    | RQRDRPreviousHalfMonth -- ^ @PREVIOUS_HALF_MONTH@
    | RQRDRPreviousMonth -- ^ @PREVIOUS_MONTH@
    | RQRDRPreviousQuarter -- ^ @PREVIOUS_QUARTER@
    | RQRDRPreviousYear -- ^ @PREVIOUS_YEAR@
    | RQRDRLast7Days -- ^ @LAST_7_DAYS@
    | RQRDRLast30Days -- ^ @LAST_30_DAYS@
    | RQRDRLast90Days -- ^ @LAST_90_DAYS@
    | RQRDRLast365Days -- ^ @LAST_365_DAYS@
    | RQRDRAllTime -- ^ @ALL_TIME@
    | RQRDRLast14Days -- ^ @LAST_14_DAYS@
    | RQRDRTypeNotSupported -- ^ @TYPE_NOT_SUPPORTED@
    | RQRDRLast60Days -- ^ @LAST_60_DAYS@
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable RunQueryRequestDataRange

instance FromHttpApiData RunQueryRequestDataRange where
    parseQueryParam = \case
        "CUSTOM_DATES" -> Right RQRDRCustomDates
        "CURRENT_DAY" -> Right RQRDRCurrentDay
        "PREVIOUS_DAY" -> Right RQRDRPreviousDay
        "WEEK_TO_DATE" -> Right RQRDRWeekToDate
        "MONTH_TO_DATE" -> Right RQRDRMonthToDate
        "QUARTER_TO_DATE" -> Right RQRDRQuarterToDate
        "YEAR_TO_DATE" -> Right RQRDRYearToDate
        "PREVIOUS_WEEK" -> Right RQRDRPreviousWeek
        "PREVIOUS_HALF_MONTH" -> Right RQRDRPreviousHalfMonth
        "PREVIOUS_MONTH" -> Right RQRDRPreviousMonth
        "PREVIOUS_QUARTER" -> Right RQRDRPreviousQuarter
        "PREVIOUS_YEAR" -> Right RQRDRPreviousYear
        "LAST_7_DAYS" -> Right RQRDRLast7Days
        "LAST_30_DAYS" -> Right RQRDRLast30Days
        "LAST_90_DAYS" -> Right RQRDRLast90Days
        "LAST_365_DAYS" -> Right RQRDRLast365Days
        "ALL_TIME" -> Right RQRDRAllTime
        "LAST_14_DAYS" -> Right RQRDRLast14Days
        "TYPE_NOT_SUPPORTED" -> Right RQRDRTypeNotSupported
        "LAST_60_DAYS" -> Right RQRDRLast60Days
        x -> Left ("Unable to parse RunQueryRequestDataRange from: " <> x)

instance ToHttpApiData RunQueryRequestDataRange where
    toQueryParam = \case
        RQRDRCustomDates -> "CUSTOM_DATES"
        RQRDRCurrentDay -> "CURRENT_DAY"
        RQRDRPreviousDay -> "PREVIOUS_DAY"
        RQRDRWeekToDate -> "WEEK_TO_DATE"
        RQRDRMonthToDate -> "MONTH_TO_DATE"
        RQRDRQuarterToDate -> "QUARTER_TO_DATE"
        RQRDRYearToDate -> "YEAR_TO_DATE"
        RQRDRPreviousWeek -> "PREVIOUS_WEEK"
        RQRDRPreviousHalfMonth -> "PREVIOUS_HALF_MONTH"
        RQRDRPreviousMonth -> "PREVIOUS_MONTH"
        RQRDRPreviousQuarter -> "PREVIOUS_QUARTER"
        RQRDRPreviousYear -> "PREVIOUS_YEAR"
        RQRDRLast7Days -> "LAST_7_DAYS"
        RQRDRLast30Days -> "LAST_30_DAYS"
        RQRDRLast90Days -> "LAST_90_DAYS"
        RQRDRLast365Days -> "LAST_365_DAYS"
        RQRDRAllTime -> "ALL_TIME"
        RQRDRLast14Days -> "LAST_14_DAYS"
        RQRDRTypeNotSupported -> "TYPE_NOT_SUPPORTED"
        RQRDRLast60Days -> "LAST_60_DAYS"

instance FromJSON RunQueryRequestDataRange where
    parseJSON = parseJSONText "RunQueryRequestDataRange"

instance ToJSON RunQueryRequestDataRange where
    toJSON = toJSONText

-- | The state of the report.
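-- NOTE: an illustrative round-trip for 'RunQueryRequestDataRange' (defined
-- above); a doctest-style editorial sketch, not generated code:
--
-- >>> toQueryParam RQRDRLast30Days
-- "LAST_30_DAYS"
-- >>> parseQueryParam "ALL_TIME" :: Either Text RunQueryRequestDataRange
-- Right RQRDRAllTime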
data ReportStatusState
    = Running -- ^ @RUNNING@
    | Done -- ^ @DONE@
    | Failed -- ^ @FAILED@
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable ReportStatusState

instance FromHttpApiData ReportStatusState where
    parseQueryParam = \case
        "RUNNING" -> Right Running
        "DONE" -> Right Done
        "FAILED" -> Right Failed
        x -> Left ("Unable to parse ReportStatusState from: " <> x)

instance ToHttpApiData ReportStatusState where
    toQueryParam = \case
        Running -> "RUNNING"
        Done -> "DONE"
        Failed -> "FAILED"

instance FromJSON ReportStatusState where
    parseJSON = parseJSONText "ReportStatusState"

instance ToJSON ReportStatusState where
    toJSON = toJSONText

data ParametersGroupBysItem
    = PGBIFilterUnknown -- ^ @FILTER_UNKNOWN@
    | PGBIFilterDate -- ^ @FILTER_DATE@
    | PGBIFilterDayOfWeek -- ^ @FILTER_DAY_OF_WEEK@
    | PGBIFilterWeek -- ^ @FILTER_WEEK@
    | PGBIFilterMonth -- ^ @FILTER_MONTH@
    | PGBIFilterYear -- ^ @FILTER_YEAR@
    | PGBIFilterTimeOfDay -- ^ @FILTER_TIME_OF_DAY@
    | PGBIFilterConversionDelay -- ^ @FILTER_CONVERSION_DELAY@
    | PGBIFilterCreativeId -- ^ @FILTER_CREATIVE_ID@
    | PGBIFilterCreativeSize -- ^ @FILTER_CREATIVE_SIZE@
    | PGBIFilterCreativeType -- ^ @FILTER_CREATIVE_TYPE@
    | PGBIFilterExchangeId -- ^ @FILTER_EXCHANGE_ID@
    | PGBIFilterAdPosition -- ^ @FILTER_AD_POSITION@
    | PGBIFilterPublicInventory -- ^ @FILTER_PUBLIC_INVENTORY@
    | PGBIFilterInventorySource -- ^ @FILTER_INVENTORY_SOURCE@
    | PGBIFilterCity -- ^ @FILTER_CITY@
    | PGBIFilterRegion -- ^ @FILTER_REGION@
    | PGBIFilterDma -- ^ @FILTER_DMA@
    | PGBIFilterCountry -- ^ @FILTER_COUNTRY@
    | PGBIFilterSiteId -- ^ @FILTER_SITE_ID@
    | PGBIFilterChannelId -- ^ @FILTER_CHANNEL_ID@
    | PGBIFilterPartner -- ^ @FILTER_PARTNER@
    | PGBIFilterAdvertiser -- ^ @FILTER_ADVERTISER@
    | PGBIFilterInsertionOrder -- ^ @FILTER_INSERTION_ORDER@
    | PGBIFilterLineItem -- ^ @FILTER_LINE_ITEM@
    | PGBIFilterPartnerCurrency -- ^ @FILTER_PARTNER_CURRENCY@
    | PGBIFilterAdvertiserCurrency -- ^ @FILTER_ADVERTISER_CURRENCY@
    | PGBIFilterAdvertiserTimezone -- ^ @FILTER_ADVERTISER_TIMEZONE@
    | PGBIFilterLineItemType -- ^ @FILTER_LINE_ITEM_TYPE@
    | PGBIFilterUserList -- ^ @FILTER_USER_LIST@
    | PGBIFilterUserListFirstParty -- ^ @FILTER_USER_LIST_FIRST_PARTY@
    | PGBIFilterUserListThirdParty -- ^ @FILTER_USER_LIST_THIRD_PARTY@
    | PGBIFilterTargetedUserList -- ^ @FILTER_TARGETED_USER_LIST@
    | PGBIFilterDataProvider -- ^ @FILTER_DATA_PROVIDER@
    | PGBIFilterOrderId -- ^ @FILTER_ORDER_ID@
    | PGBIFilterVideoPlayerSize -- ^ @FILTER_VIDEO_PLAYER_SIZE@
    | PGBIFilterVideoDurationSeconds -- ^ @FILTER_VIDEO_DURATION_SECONDS@
    | PGBIFilterKeyword -- ^ @FILTER_KEYWORD@
    | PGBIFilterPageCategory -- ^ @FILTER_PAGE_CATEGORY@
    | PGBIFilterCampaignDailyFrequency -- ^ @FILTER_CAMPAIGN_DAILY_FREQUENCY@
    | PGBIFilterLineItemDailyFrequency -- ^ @FILTER_LINE_ITEM_DAILY_FREQUENCY@
    | PGBIFilterLineItemLifetimeFrequency -- ^ @FILTER_LINE_ITEM_LIFETIME_FREQUENCY@
    | PGBIFilterOS -- ^ @FILTER_OS@
    | PGBIFilterBrowser -- ^ @FILTER_BROWSER@
    | PGBIFilterCarrier -- ^ @FILTER_CARRIER@
    | PGBIFilterSiteLanguage -- ^ @FILTER_SITE_LANGUAGE@
    | PGBIFilterInventoryFormat -- ^ @FILTER_INVENTORY_FORMAT@
    | PGBIFilterZipCode -- ^ @FILTER_ZIP_CODE@
    | PGBIFilterVideoRatingTier -- ^ @FILTER_VIDEO_RATING_TIER@
    | PGBIFilterVideoFormatSupport -- ^ @FILTER_VIDEO_FORMAT_SUPPORT@
    | PGBIFilterVideoSkippableSupport -- ^ @FILTER_VIDEO_SKIPPABLE_SUPPORT@
    | PGBIFilterVideoCreativeDuration -- ^ @FILTER_VIDEO_CREATIVE_DURATION@
    | PGBIFilterPageLayout -- ^ @FILTER_PAGE_LAYOUT@
    | PGBIFilterVideoAdPositionInStream -- ^ @FILTER_VIDEO_AD_POSITION_IN_STREAM@
    |
PGBIFilterAge -- ^ @FILTER_AGE@ | PGBIFilterGender -- ^ @FILTER_GENDER@ | PGBIFilterQuarter -- ^ @FILTER_QUARTER@ | PGBIFilterTrueviewConversionType -- ^ @FILTER_TRUEVIEW_CONVERSION_TYPE@ | PGBIFilterMobileGeo -- ^ @FILTER_MOBILE_GEO@ | PGBIFilterMraidSupport -- ^ @FILTER_MRAID_SUPPORT@ | PGBIFilterActiveViewExpectedViewability -- ^ @FILTER_ACTIVE_VIEW_EXPECTED_VIEWABILITY@ | PGBIFilterVideoCreativeDurationSkippable -- ^ @FILTER_VIDEO_CREATIVE_DURATION_SKIPPABLE@ | PGBIFilterNielsenCountryCode -- ^ @FILTER_NIELSEN_COUNTRY_CODE@ | PGBIFilterNielsenDeviceId -- ^ @FILTER_NIELSEN_DEVICE_ID@ | PGBIFilterNielsenGender -- ^ @FILTER_NIELSEN_GENDER@ | PGBIFilterNielsenAge -- ^ @FILTER_NIELSEN_AGE@ | PGBIFilterInventorySourceType -- ^ @FILTER_INVENTORY_SOURCE_TYPE@ | PGBIFilterCreativeWidth -- ^ @FILTER_CREATIVE_WIDTH@ | PGBIFilterCreativeHeight -- ^ @FILTER_CREATIVE_HEIGHT@ | PGBIFilterDfpOrderId -- ^ @FILTER_DFP_ORDER_ID@ | PGBIFilterTrueviewAge -- ^ @FILTER_TRUEVIEW_AGE@ | PGBIFilterTrueviewGender -- ^ @FILTER_TRUEVIEW_GENDER@ | PGBIFilterTrueviewParentalStatus -- ^ @FILTER_TRUEVIEW_PARENTAL_STATUS@ | PGBIFilterTrueviewRemarketingList -- ^ @FILTER_TRUEVIEW_REMARKETING_LIST@ | PGBIFilterTrueviewInterest -- ^ @FILTER_TRUEVIEW_INTEREST@ | PGBIFilterTrueviewAdGroupId -- ^ @FILTER_TRUEVIEW_AD_GROUP_ID@ | PGBIFilterTrueviewAdGroupAdId -- ^ @FILTER_TRUEVIEW_AD_GROUP_AD_ID@ | PGBIFilterTrueviewIarLanguage -- ^ @FILTER_TRUEVIEW_IAR_LANGUAGE@ | PGBIFilterTrueviewIarGender -- ^ @FILTER_TRUEVIEW_IAR_GENDER@ | PGBIFilterTrueviewIarAge -- ^ @FILTER_TRUEVIEW_IAR_AGE@ | PGBIFilterTrueviewIarCategory -- ^ @FILTER_TRUEVIEW_IAR_CATEGORY@ | PGBIFilterTrueviewIarCountry -- ^ @FILTER_TRUEVIEW_IAR_COUNTRY@ | PGBIFilterTrueviewIarCity -- ^ @FILTER_TRUEVIEW_IAR_CITY@ | PGBIFilterTrueviewIarRegion -- ^ @FILTER_TRUEVIEW_IAR_REGION@ | PGBIFilterTrueviewIarZipcode -- ^ @FILTER_TRUEVIEW_IAR_ZIPCODE@ | PGBIFilterTrueviewIarRemarketingList -- ^ @FILTER_TRUEVIEW_IAR_REMARKETING_LIST@ | PGBIFilterTrueviewIarInterest -- ^ @FILTER_TRUEVIEW_IAR_INTEREST@ | PGBIFilterTrueviewIarParentalStatus -- ^ @FILTER_TRUEVIEW_IAR_PARENTAL_STATUS@ | PGBIFilterTrueviewIarTimeOfDay -- ^ @FILTER_TRUEVIEW_IAR_TIME_OF_DAY@ | PGBIFilterTrueviewCustomAffinity -- ^ @FILTER_TRUEVIEW_CUSTOM_AFFINITY@ | PGBIFilterTrueviewCategory -- ^ @FILTER_TRUEVIEW_CATEGORY@ | PGBIFilterTrueviewKeyword -- ^ @FILTER_TRUEVIEW_KEYWORD@ | PGBIFilterTrueviewPlacement -- ^ @FILTER_TRUEVIEW_PLACEMENT@ | PGBIFilterTrueviewURL -- ^ @FILTER_TRUEVIEW_URL@ | PGBIFilterTrueviewCountry -- ^ @FILTER_TRUEVIEW_COUNTRY@ | PGBIFilterTrueviewRegion -- ^ @FILTER_TRUEVIEW_REGION@ | PGBIFilterTrueviewCity -- ^ @FILTER_TRUEVIEW_CITY@ | PGBIFilterTrueviewDma -- ^ @FILTER_TRUEVIEW_DMA@ | PGBIFilterTrueviewZipcode -- ^ @FILTER_TRUEVIEW_ZIPCODE@ | PGBIFilterNotSupported -- ^ @FILTER_NOT_SUPPORTED@ | PGBIFilterMediaPlan -- ^ @FILTER_MEDIA_PLAN@ | PGBIFilterTrueviewIarYouTubeChannel -- ^ @FILTER_TRUEVIEW_IAR_YOUTUBE_CHANNEL@ | PGBIFilterTrueviewIarYouTubeVideo -- ^ @FILTER_TRUEVIEW_IAR_YOUTUBE_VIDEO@ | PGBIFilterSkippableSupport -- ^ @FILTER_SKIPPABLE_SUPPORT@ | PGBIFilterCompanionCreativeId -- ^ @FILTER_COMPANION_CREATIVE_ID@ | PGBIFilterBudgetSegmentDescription -- ^ @FILTER_BUDGET_SEGMENT_DESCRIPTION@ | PGBIFilterFloodlightActivityId -- ^ @FILTER_FLOODLIGHT_ACTIVITY_ID@ | PGBIFilterDeviceModel -- ^ @FILTER_DEVICE_MODEL@ | PGBIFilterDeviceMake -- ^ @FILTER_DEVICE_MAKE@ | PGBIFilterDeviceType -- ^ @FILTER_DEVICE_TYPE@ | PGBIFilterCreativeAttribute -- ^ @FILTER_CREATIVE_ATTRIBUTE@ | 
PGBIFilterInventoryCommitmentType -- ^ @FILTER_INVENTORY_COMMITMENT_TYPE@ | PGBIFilterInventoryRateType -- ^ @FILTER_INVENTORY_RATE_TYPE@ | PGBIFilterInventoryDeliveryMethod -- ^ @FILTER_INVENTORY_DELIVERY_METHOD@ | PGBIFilterInventorySourceExternalId -- ^ @FILTER_INVENTORY_SOURCE_EXTERNAL_ID@ | PGBIFilterAuthorizedSellerState -- ^ @FILTER_AUTHORIZED_SELLER_STATE@ | PGBIFilterVideoDurationSecondsRange -- ^ @FILTER_VIDEO_DURATION_SECONDS_RANGE@ | PGBIFilterPartnerName -- ^ @FILTER_PARTNER_NAME@ | PGBIFilterPartnerStatus -- ^ @FILTER_PARTNER_STATUS@ | PGBIFilterAdvertiserName -- ^ @FILTER_ADVERTISER_NAME@ | PGBIFilterAdvertiserIntegrationCode -- ^ @FILTER_ADVERTISER_INTEGRATION_CODE@ | PGBIFilterAdvertiserIntegrationStatus -- ^ @FILTER_ADVERTISER_INTEGRATION_STATUS@ | PGBIFilterCarrierName -- ^ @FILTER_CARRIER_NAME@ | PGBIFilterChannelName -- ^ @FILTER_CHANNEL_NAME@ | PGBIFilterCityName -- ^ @FILTER_CITY_NAME@ | PGBIFilterCompanionCreativeName -- ^ @FILTER_COMPANION_CREATIVE_NAME@ | PGBIFilterUserListFirstPartyName -- ^ @FILTER_USER_LIST_FIRST_PARTY_NAME@ | PGBIFilterUserListThirdPartyName -- ^ @FILTER_USER_LIST_THIRD_PARTY_NAME@ | PGBIFilterNielsenReStatementDate -- ^ @FILTER_NIELSEN_RESTATEMENT_DATE@ | PGBIFilterNielsenDateRange -- ^ @FILTER_NIELSEN_DATE_RANGE@ | PGBIFilterInsertionOrderName -- ^ @FILTER_INSERTION_ORDER_NAME@ | PGBIFilterRegionName -- ^ @FILTER_REGION_NAME@ | PGBIFilterDmaName -- ^ @FILTER_DMA_NAME@ | PGBIFilterTrueviewIarRegionName -- ^ @FILTER_TRUEVIEW_IAR_REGION_NAME@ | PGBIFilterTrueviewDmaName -- ^ @FILTER_TRUEVIEW_DMA_NAME@ | PGBIFilterTrueviewRegionName -- ^ @FILTER_TRUEVIEW_REGION_NAME@ | PGBIFilterActiveViewCustomMetricId -- ^ @FILTER_ACTIVE_VIEW_CUSTOM_METRIC_ID@ | PGBIFilterActiveViewCustomMetricName -- ^ @FILTER_ACTIVE_VIEW_CUSTOM_METRIC_NAME@ | PGBIFilterAdType -- ^ @FILTER_AD_TYPE@ | PGBIFilterAlgorithm -- ^ @FILTER_ALGORITHM@ | PGBIFilterAlgorithmId -- ^ @FILTER_ALGORITHM_ID@ | PGBIFilterAmpPageRequest -- ^ @FILTER_AMP_PAGE_REQUEST@ | PGBIFilterAnonymousInventoryModeling -- ^ @FILTER_ANONYMOUS_INVENTORY_MODELING@ | PGBIFilterAppURL -- ^ @FILTER_APP_URL@ | PGBIFilterAppURLExcluded -- ^ @FILTER_APP_URL_EXCLUDED@ | PGBIFilterAttributedUserList -- ^ @FILTER_ATTRIBUTED_USERLIST@ | PGBIFilterAttributedUserListCost -- ^ @FILTER_ATTRIBUTED_USERLIST_COST@ | PGBIFilterAttributedUserListType -- ^ @FILTER_ATTRIBUTED_USERLIST_TYPE@ | PGBIFilterAttributionModel -- ^ @FILTER_ATTRIBUTION_MODEL@ | PGBIFilterAudienceList -- ^ @FILTER_AUDIENCE_LIST@ | PGBIFilterAudienceListCost -- ^ @FILTER_AUDIENCE_LIST_COST@ | PGBIFilterAudienceListType -- ^ @FILTER_AUDIENCE_LIST_TYPE@ | PGBIFilterAudienceName -- ^ @FILTER_AUDIENCE_NAME@ | PGBIFilterAudienceType -- ^ @FILTER_AUDIENCE_TYPE@ | PGBIFilterBillableOutcome -- ^ @FILTER_BILLABLE_OUTCOME@ | PGBIFilterBrandLiftType -- ^ @FILTER_BRAND_LIFT_TYPE@ | PGBIFilterChannelType -- ^ @FILTER_CHANNEL_TYPE@ | PGBIFilterCmPlacementId -- ^ @FILTER_CM_PLACEMENT_ID@ | PGBIFilterConversionSource -- ^ @FILTER_CONVERSION_SOURCE@ | PGBIFilterConversionSourceId -- ^ @FILTER_CONVERSION_SOURCE_ID@ | PGBIFilterCountryId -- ^ @FILTER_COUNTRY_ID@ | PGBIFilterCreative -- ^ @FILTER_CREATIVE@ | PGBIFilterCreativeAsset -- ^ @FILTER_CREATIVE_ASSET@ | PGBIFilterCreativeIntegrationCode -- ^ @FILTER_CREATIVE_INTEGRATION_CODE@ | PGBIFilterCreativeRenderedInAmp -- ^ @FILTER_CREATIVE_RENDERED_IN_AMP@ | PGBIFilterCreativeSource -- ^ @FILTER_CREATIVE_SOURCE@ | PGBIFilterCreativeStatus -- ^ @FILTER_CREATIVE_STATUS@ | PGBIFilterDataProviderName -- ^ 
@FILTER_DATA_PROVIDER_NAME@ | PGBIFilterDetailedDemographics -- ^ @FILTER_DETAILED_DEMOGRAPHICS@ | PGBIFilterDetailedDemographicsId -- ^ @FILTER_DETAILED_DEMOGRAPHICS_ID@ | PGBIFilterDevice -- ^ @FILTER_DEVICE@ | PGBIFilterGamInsertionOrder -- ^ @FILTER_GAM_INSERTION_ORDER@ | PGBIFilterGamLineItem -- ^ @FILTER_GAM_LINE_ITEM@ | PGBIFilterGamLineItemId -- ^ @FILTER_GAM_LINE_ITEM_ID@ | PGBIFilterDigitalContentLabel -- ^ @FILTER_DIGITAL_CONTENT_LABEL@ | PGBIFilterDomain -- ^ @FILTER_DOMAIN@ | PGBIFilterEligibleCookiesOnFirstPartyAudienceList -- ^ @FILTER_ELIGIBLE_COOKIES_ON_FIRST_PARTY_AUDIENCE_LIST@ | PGBIFilterEligibleCookiesOnThirdPartyAudienceListAndInterest -- ^ @FILTER_ELIGIBLE_COOKIES_ON_THIRD_PARTY_AUDIENCE_LIST_AND_INTEREST@ | PGBIFilterExchange -- ^ @FILTER_EXCHANGE@ | PGBIFilterExchangeCode -- ^ @FILTER_EXCHANGE_CODE@ | PGBIFilterExtension -- ^ @FILTER_EXTENSION@ | PGBIFilterExtensionStatus -- ^ @FILTER_EXTENSION_STATUS@ | PGBIFilterExtensionType -- ^ @FILTER_EXTENSION_TYPE@ | PGBIFilterFirstPartyAudienceListCost -- ^ @FILTER_FIRST_PARTY_AUDIENCE_LIST_COST@ | PGBIFilterFirstPartyAudienceListType -- ^ @FILTER_FIRST_PARTY_AUDIENCE_LIST_TYPE@ | PGBIFilterFloodlightActivity -- ^ @FILTER_FLOODLIGHT_ACTIVITY@ | PGBIFilterFormat -- ^ @FILTER_FORMAT@ | PGBIFilterGmailAge -- ^ @FILTER_GMAIL_AGE@ | PGBIFilterGmailCity -- ^ @FILTER_GMAIL_CITY@ | PGBIFilterGmailCountry -- ^ @FILTER_GMAIL_COUNTRY@ | PGBIFilterGmailCountryName -- ^ @FILTER_GMAIL_COUNTRY_NAME@ | PGBIFilterGmailDeviceType -- ^ @FILTER_GMAIL_DEVICE_TYPE@ | PGBIFilterGmailDeviceTypeName -- ^ @FILTER_GMAIL_DEVICE_TYPE_NAME@ | PGBIFilterGmailGender -- ^ @FILTER_GMAIL_GENDER@ | PGBIFilterGmailRegion -- ^ @FILTER_GMAIL_REGION@ | PGBIFilterGmailRemarketingList -- ^ @FILTER_GMAIL_REMARKETING_LIST@ | PGBIFilterHouseholdIncome -- ^ @FILTER_HOUSEHOLD_INCOME@ | PGBIFilterImpressionCountingMethod -- ^ @FILTER_IMPRESSION_COUNTING_METHOD@ | PGBIFilterYouTubeProgrammaticGuaranteedInsertionOrder -- ^ @FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_INSERTION_ORDER@ | PGBIFilterInsertionOrderIntegrationCode -- ^ @FILTER_INSERTION_ORDER_INTEGRATION_CODE@ | PGBIFilterInsertionOrderStatus -- ^ @FILTER_INSERTION_ORDER_STATUS@ | PGBIFilterInterest -- ^ @FILTER_INTEREST@ | PGBIFilterInventorySourceGroup -- ^ @FILTER_INVENTORY_SOURCE_GROUP@ | PGBIFilterInventorySourceGroupId -- ^ @FILTER_INVENTORY_SOURCE_GROUP_ID@ | PGBIFilterInventorySourceId -- ^ @FILTER_INVENTORY_SOURCE_ID@ | PGBIFilterInventorySourceName -- ^ @FILTER_INVENTORY_SOURCE_NAME@ | PGBIFilterLifeEvent -- ^ @FILTER_LIFE_EVENT@ | PGBIFilterLifeEvents -- ^ @FILTER_LIFE_EVENTS@ | PGBIFilterLineItemIntegrationCode -- ^ @FILTER_LINE_ITEM_INTEGRATION_CODE@ | PGBIFilterLineItemName -- ^ @FILTER_LINE_ITEM_NAME@ | PGBIFilterLineItemStatus -- ^ @FILTER_LINE_ITEM_STATUS@ | PGBIFilterMatchRatio -- ^ @FILTER_MATCH_RATIO@ | PGBIFilterMeasurementSource -- ^ @FILTER_MEASUREMENT_SOURCE@ | PGBIFilterMediaPlanName -- ^ @FILTER_MEDIA_PLAN_NAME@ | PGBIFilterParentalStatus -- ^ @FILTER_PARENTAL_STATUS@ | PGBIFilterPlacementAllYouTubeChannels -- ^ @FILTER_PLACEMENT_ALL_YOUTUBE_CHANNELS@ | PGBIFilterPlatform -- ^ @FILTER_PLATFORM@ | PGBIFilterPlaybackMethod -- ^ @FILTER_PLAYBACK_METHOD@ | PGBIFilterPositionInContent -- ^ @FILTER_POSITION_IN_CONTENT@ | PGBIFilterPublisherProperty -- ^ @FILTER_PUBLISHER_PROPERTY@ | PGBIFilterPublisherPropertyId -- ^ @FILTER_PUBLISHER_PROPERTY_ID@ | PGBIFilterPublisherPropertySection -- ^ @FILTER_PUBLISHER_PROPERTY_SECTION@ | PGBIFilterPublisherPropertySectionId -- ^ 
@FILTER_PUBLISHER_PROPERTY_SECTION_ID@ | PGBIFilterRefundReason -- ^ @FILTER_REFUND_REASON@ | PGBIFilterRemarketingList -- ^ @FILTER_REMARKETING_LIST@ | PGBIFilterRewarded -- ^ @FILTER_REWARDED@ | PGBIFilterSensitiveCategory -- ^ @FILTER_SENSITIVE_CATEGORY@ | PGBIFilterServedPixelDensity -- ^ @FILTER_SERVED_PIXEL_DENSITY@ | PGBIFilterTargetedDataProviders -- ^ @FILTER_TARGETED_DATA_PROVIDERS@ | PGBIFilterThirdPartyAudienceListCost -- ^ @FILTER_THIRD_PARTY_AUDIENCE_LIST_COST@ | PGBIFilterThirdPartyAudienceListType -- ^ @FILTER_THIRD_PARTY_AUDIENCE_LIST_TYPE@ | PGBIFilterTrueviewAd -- ^ @FILTER_TRUEVIEW_AD@ | PGBIFilterTrueviewAdGroup -- ^ @FILTER_TRUEVIEW_AD_GROUP@ | PGBIFilterTrueviewDetailedDemographics -- ^ @FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS@ | PGBIFilterTrueviewDetailedDemographicsId -- ^ @FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS_ID@ | PGBIFilterTrueviewHouseholdIncome -- ^ @FILTER_TRUEVIEW_HOUSEHOLD_INCOME@ | PGBIFilterTrueviewIarCountryName -- ^ @FILTER_TRUEVIEW_IAR_COUNTRY_NAME@ | PGBIFilterTrueviewRemarketingListName -- ^ @FILTER_TRUEVIEW_REMARKETING_LIST_NAME@ | PGBIFilterVariantId -- ^ @FILTER_VARIANT_ID@ | PGBIFilterVariantName -- ^ @FILTER_VARIANT_NAME@ | PGBIFilterVariantVersion -- ^ @FILTER_VARIANT_VERSION@ | PGBIFilterVerificationVideoPlayerSize -- ^ @FILTER_VERIFICATION_VIDEO_PLAYER_SIZE@ | PGBIFilterVerificationVideoPosition -- ^ @FILTER_VERIFICATION_VIDEO_POSITION@ | PGBIFilterVideoCompanionCreativeSize -- ^ @FILTER_VIDEO_COMPANION_CREATIVE_SIZE@ | PGBIFilterVideoContinuousPlay -- ^ @FILTER_VIDEO_CONTINUOUS_PLAY@ | PGBIFilterVideoDuration -- ^ @FILTER_VIDEO_DURATION@ | PGBIFilterYouTubeAdaptedAudienceList -- ^ @FILTER_YOUTUBE_ADAPTED_AUDIENCE_LIST@ | PGBIFilterYouTubeAdVideo -- ^ @FILTER_YOUTUBE_AD_VIDEO@ | PGBIFilterYouTubeAdVideoId -- ^ @FILTER_YOUTUBE_AD_VIDEO_ID@ | PGBIFilterYouTubeChannel -- ^ @FILTER_YOUTUBE_CHANNEL@ | PGBIFilterYouTubeProgrammaticGuaranteedAdvertiser -- ^ @FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_ADVERTISER@ | PGBIFilterYouTubeProgrammaticGuaranteedPartner -- ^ @FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_PARTNER@ | PGBIFilterYouTubeVideo -- ^ @FILTER_YOUTUBE_VIDEO@ | PGBIFilterZipPostalCode -- ^ @FILTER_ZIP_POSTAL_CODE@ | PGBIFilterPlacementNameAllYouTubeChannels -- ^ @FILTER_PLACEMENT_NAME_ALL_YOUTUBE_CHANNELS@ | PGBIFilterTrueviewPlacementId -- ^ @FILTER_TRUEVIEW_PLACEMENT_ID@ | PGBIFilterPathPatternId -- ^ @FILTER_PATH_PATTERN_ID@ | PGBIFilterPathEventIndex -- ^ @FILTER_PATH_EVENT_INDEX@ | PGBIFilterEventType -- ^ @FILTER_EVENT_TYPE@ | PGBIFilterChannelGrouping -- ^ @FILTER_CHANNEL_GROUPING@ | PGBIFilterOmSdkAvailable -- ^ @FILTER_OM_SDK_AVAILABLE@ | PGBIFilterDataSource -- ^ @FILTER_DATA_SOURCE@ | PGBIFilterCM360PlacementId -- ^ @FILTER_CM360_PLACEMENT_ID@ | PGBIFilterTrueviewClickTypeName -- ^ @FILTER_TRUEVIEW_CLICK_TYPE_NAME@ | PGBIFilterTrueviewAdTypeName -- ^ @FILTER_TRUEVIEW_AD_TYPE_NAME@ | PGBIFilterVideoContentDuration -- ^ @FILTER_VIDEO_CONTENT_DURATION@ | PGBIFilterMatchedGenreTarget -- ^ @FILTER_MATCHED_GENRE_TARGET@ | PGBIFilterVideoContentLiveStream -- ^ @FILTER_VIDEO_CONTENT_LIVE_STREAM@ | PGBIFilterBudgetSegmentType -- ^ @FILTER_BUDGET_SEGMENT_TYPE@ | PGBIFilterBudgetSegmentBudget -- ^ @FILTER_BUDGET_SEGMENT_BUDGET@ | PGBIFilterBudgetSegmentStartDate -- ^ @FILTER_BUDGET_SEGMENT_START_DATE@ | PGBIFilterBudgetSegmentEndDate -- ^ @FILTER_BUDGET_SEGMENT_END_DATE@ | PGBIFilterBudgetSegmentPacingPercentage -- ^ @FILTER_BUDGET_SEGMENT_PACING_PERCENTAGE@ | PGBIFilterLineItemBudget -- ^ @FILTER_LINE_ITEM_BUDGET@ | PGBIFilterLineItemStartDate -- 
^ @FILTER_LINE_ITEM_START_DATE@ | PGBIFilterLineItemEndDate -- ^ @FILTER_LINE_ITEM_END_DATE@ | PGBIFilterInsertionOrderGoalType -- ^ @FILTER_INSERTION_ORDER_GOAL_TYPE@ | PGBIFilterLineItemPacingPercentage -- ^ @FILTER_LINE_ITEM_PACING_PERCENTAGE@ | PGBIFilterInsertionOrderGoalValue -- ^ @FILTER_INSERTION_ORDER_GOAL_VALUE@ | PGBIFilterOmidCapable -- ^ @FILTER_OMID_CAPABLE@ | PGBIFilterVendorMeasurementMode -- ^ @FILTER_VENDOR_MEASUREMENT_MODE@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable ParametersGroupBysItem instance FromHttpApiData ParametersGroupBysItem where parseQueryParam = \case "FILTER_UNKNOWN" -> Right PGBIFilterUnknown "FILTER_DATE" -> Right PGBIFilterDate "FILTER_DAY_OF_WEEK" -> Right PGBIFilterDayOfWeek "FILTER_WEEK" -> Right PGBIFilterWeek "FILTER_MONTH" -> Right PGBIFilterMonth "FILTER_YEAR" -> Right PGBIFilterYear "FILTER_TIME_OF_DAY" -> Right PGBIFilterTimeOfDay "FILTER_CONVERSION_DELAY" -> Right PGBIFilterConversionDelay "FILTER_CREATIVE_ID" -> Right PGBIFilterCreativeId "FILTER_CREATIVE_SIZE" -> Right PGBIFilterCreativeSize "FILTER_CREATIVE_TYPE" -> Right PGBIFilterCreativeType "FILTER_EXCHANGE_ID" -> Right PGBIFilterExchangeId "FILTER_AD_POSITION" -> Right PGBIFilterAdPosition "FILTER_PUBLIC_INVENTORY" -> Right PGBIFilterPublicInventory "FILTER_INVENTORY_SOURCE" -> Right PGBIFilterInventorySource "FILTER_CITY" -> Right PGBIFilterCity "FILTER_REGION" -> Right PGBIFilterRegion "FILTER_DMA" -> Right PGBIFilterDma "FILTER_COUNTRY" -> Right PGBIFilterCountry "FILTER_SITE_ID" -> Right PGBIFilterSiteId "FILTER_CHANNEL_ID" -> Right PGBIFilterChannelId "FILTER_PARTNER" -> Right PGBIFilterPartner "FILTER_ADVERTISER" -> Right PGBIFilterAdvertiser "FILTER_INSERTION_ORDER" -> Right PGBIFilterInsertionOrder "FILTER_LINE_ITEM" -> Right PGBIFilterLineItem "FILTER_PARTNER_CURRENCY" -> Right PGBIFilterPartnerCurrency "FILTER_ADVERTISER_CURRENCY" -> Right PGBIFilterAdvertiserCurrency "FILTER_ADVERTISER_TIMEZONE" -> Right PGBIFilterAdvertiserTimezone "FILTER_LINE_ITEM_TYPE" -> Right PGBIFilterLineItemType "FILTER_USER_LIST" -> Right PGBIFilterUserList "FILTER_USER_LIST_FIRST_PARTY" -> Right PGBIFilterUserListFirstParty "FILTER_USER_LIST_THIRD_PARTY" -> Right PGBIFilterUserListThirdParty "FILTER_TARGETED_USER_LIST" -> Right PGBIFilterTargetedUserList "FILTER_DATA_PROVIDER" -> Right PGBIFilterDataProvider "FILTER_ORDER_ID" -> Right PGBIFilterOrderId "FILTER_VIDEO_PLAYER_SIZE" -> Right PGBIFilterVideoPlayerSize "FILTER_VIDEO_DURATION_SECONDS" -> Right PGBIFilterVideoDurationSeconds "FILTER_KEYWORD" -> Right PGBIFilterKeyword "FILTER_PAGE_CATEGORY" -> Right PGBIFilterPageCategory "FILTER_CAMPAIGN_DAILY_FREQUENCY" -> Right PGBIFilterCampaignDailyFrequency "FILTER_LINE_ITEM_DAILY_FREQUENCY" -> Right PGBIFilterLineItemDailyFrequency "FILTER_LINE_ITEM_LIFETIME_FREQUENCY" -> Right PGBIFilterLineItemLifetimeFrequency "FILTER_OS" -> Right PGBIFilterOS "FILTER_BROWSER" -> Right PGBIFilterBrowser "FILTER_CARRIER" -> Right PGBIFilterCarrier "FILTER_SITE_LANGUAGE" -> Right PGBIFilterSiteLanguage "FILTER_INVENTORY_FORMAT" -> Right PGBIFilterInventoryFormat "FILTER_ZIP_CODE" -> Right PGBIFilterZipCode "FILTER_VIDEO_RATING_TIER" -> Right PGBIFilterVideoRatingTier "FILTER_VIDEO_FORMAT_SUPPORT" -> Right PGBIFilterVideoFormatSupport "FILTER_VIDEO_SKIPPABLE_SUPPORT" -> Right PGBIFilterVideoSkippableSupport "FILTER_VIDEO_CREATIVE_DURATION" -> Right PGBIFilterVideoCreativeDuration "FILTER_PAGE_LAYOUT" -> Right PGBIFilterPageLayout "FILTER_VIDEO_AD_POSITION_IN_STREAM" -> Right 
PGBIFilterVideoAdPositionInStream "FILTER_AGE" -> Right PGBIFilterAge "FILTER_GENDER" -> Right PGBIFilterGender "FILTER_QUARTER" -> Right PGBIFilterQuarter "FILTER_TRUEVIEW_CONVERSION_TYPE" -> Right PGBIFilterTrueviewConversionType "FILTER_MOBILE_GEO" -> Right PGBIFilterMobileGeo "FILTER_MRAID_SUPPORT" -> Right PGBIFilterMraidSupport "FILTER_ACTIVE_VIEW_EXPECTED_VIEWABILITY" -> Right PGBIFilterActiveViewExpectedViewability "FILTER_VIDEO_CREATIVE_DURATION_SKIPPABLE" -> Right PGBIFilterVideoCreativeDurationSkippable "FILTER_NIELSEN_COUNTRY_CODE" -> Right PGBIFilterNielsenCountryCode "FILTER_NIELSEN_DEVICE_ID" -> Right PGBIFilterNielsenDeviceId "FILTER_NIELSEN_GENDER" -> Right PGBIFilterNielsenGender "FILTER_NIELSEN_AGE" -> Right PGBIFilterNielsenAge "FILTER_INVENTORY_SOURCE_TYPE" -> Right PGBIFilterInventorySourceType "FILTER_CREATIVE_WIDTH" -> Right PGBIFilterCreativeWidth "FILTER_CREATIVE_HEIGHT" -> Right PGBIFilterCreativeHeight "FILTER_DFP_ORDER_ID" -> Right PGBIFilterDfpOrderId "FILTER_TRUEVIEW_AGE" -> Right PGBIFilterTrueviewAge "FILTER_TRUEVIEW_GENDER" -> Right PGBIFilterTrueviewGender "FILTER_TRUEVIEW_PARENTAL_STATUS" -> Right PGBIFilterTrueviewParentalStatus "FILTER_TRUEVIEW_REMARKETING_LIST" -> Right PGBIFilterTrueviewRemarketingList "FILTER_TRUEVIEW_INTEREST" -> Right PGBIFilterTrueviewInterest "FILTER_TRUEVIEW_AD_GROUP_ID" -> Right PGBIFilterTrueviewAdGroupId "FILTER_TRUEVIEW_AD_GROUP_AD_ID" -> Right PGBIFilterTrueviewAdGroupAdId "FILTER_TRUEVIEW_IAR_LANGUAGE" -> Right PGBIFilterTrueviewIarLanguage "FILTER_TRUEVIEW_IAR_GENDER" -> Right PGBIFilterTrueviewIarGender "FILTER_TRUEVIEW_IAR_AGE" -> Right PGBIFilterTrueviewIarAge "FILTER_TRUEVIEW_IAR_CATEGORY" -> Right PGBIFilterTrueviewIarCategory "FILTER_TRUEVIEW_IAR_COUNTRY" -> Right PGBIFilterTrueviewIarCountry "FILTER_TRUEVIEW_IAR_CITY" -> Right PGBIFilterTrueviewIarCity "FILTER_TRUEVIEW_IAR_REGION" -> Right PGBIFilterTrueviewIarRegion "FILTER_TRUEVIEW_IAR_ZIPCODE" -> Right PGBIFilterTrueviewIarZipcode "FILTER_TRUEVIEW_IAR_REMARKETING_LIST" -> Right PGBIFilterTrueviewIarRemarketingList "FILTER_TRUEVIEW_IAR_INTEREST" -> Right PGBIFilterTrueviewIarInterest "FILTER_TRUEVIEW_IAR_PARENTAL_STATUS" -> Right PGBIFilterTrueviewIarParentalStatus "FILTER_TRUEVIEW_IAR_TIME_OF_DAY" -> Right PGBIFilterTrueviewIarTimeOfDay "FILTER_TRUEVIEW_CUSTOM_AFFINITY" -> Right PGBIFilterTrueviewCustomAffinity "FILTER_TRUEVIEW_CATEGORY" -> Right PGBIFilterTrueviewCategory "FILTER_TRUEVIEW_KEYWORD" -> Right PGBIFilterTrueviewKeyword "FILTER_TRUEVIEW_PLACEMENT" -> Right PGBIFilterTrueviewPlacement "FILTER_TRUEVIEW_URL" -> Right PGBIFilterTrueviewURL "FILTER_TRUEVIEW_COUNTRY" -> Right PGBIFilterTrueviewCountry "FILTER_TRUEVIEW_REGION" -> Right PGBIFilterTrueviewRegion "FILTER_TRUEVIEW_CITY" -> Right PGBIFilterTrueviewCity "FILTER_TRUEVIEW_DMA" -> Right PGBIFilterTrueviewDma "FILTER_TRUEVIEW_ZIPCODE" -> Right PGBIFilterTrueviewZipcode "FILTER_NOT_SUPPORTED" -> Right PGBIFilterNotSupported "FILTER_MEDIA_PLAN" -> Right PGBIFilterMediaPlan "FILTER_TRUEVIEW_IAR_YOUTUBE_CHANNEL" -> Right PGBIFilterTrueviewIarYouTubeChannel "FILTER_TRUEVIEW_IAR_YOUTUBE_VIDEO" -> Right PGBIFilterTrueviewIarYouTubeVideo "FILTER_SKIPPABLE_SUPPORT" -> Right PGBIFilterSkippableSupport "FILTER_COMPANION_CREATIVE_ID" -> Right PGBIFilterCompanionCreativeId "FILTER_BUDGET_SEGMENT_DESCRIPTION" -> Right PGBIFilterBudgetSegmentDescription "FILTER_FLOODLIGHT_ACTIVITY_ID" -> Right PGBIFilterFloodlightActivityId "FILTER_DEVICE_MODEL" -> Right PGBIFilterDeviceModel "FILTER_DEVICE_MAKE" -> Right 
PGBIFilterDeviceMake "FILTER_DEVICE_TYPE" -> Right PGBIFilterDeviceType "FILTER_CREATIVE_ATTRIBUTE" -> Right PGBIFilterCreativeAttribute "FILTER_INVENTORY_COMMITMENT_TYPE" -> Right PGBIFilterInventoryCommitmentType "FILTER_INVENTORY_RATE_TYPE" -> Right PGBIFilterInventoryRateType "FILTER_INVENTORY_DELIVERY_METHOD" -> Right PGBIFilterInventoryDeliveryMethod "FILTER_INVENTORY_SOURCE_EXTERNAL_ID" -> Right PGBIFilterInventorySourceExternalId "FILTER_AUTHORIZED_SELLER_STATE" -> Right PGBIFilterAuthorizedSellerState "FILTER_VIDEO_DURATION_SECONDS_RANGE" -> Right PGBIFilterVideoDurationSecondsRange "FILTER_PARTNER_NAME" -> Right PGBIFilterPartnerName "FILTER_PARTNER_STATUS" -> Right PGBIFilterPartnerStatus "FILTER_ADVERTISER_NAME" -> Right PGBIFilterAdvertiserName "FILTER_ADVERTISER_INTEGRATION_CODE" -> Right PGBIFilterAdvertiserIntegrationCode "FILTER_ADVERTISER_INTEGRATION_STATUS" -> Right PGBIFilterAdvertiserIntegrationStatus "FILTER_CARRIER_NAME" -> Right PGBIFilterCarrierName "FILTER_CHANNEL_NAME" -> Right PGBIFilterChannelName "FILTER_CITY_NAME" -> Right PGBIFilterCityName "FILTER_COMPANION_CREATIVE_NAME" -> Right PGBIFilterCompanionCreativeName "FILTER_USER_LIST_FIRST_PARTY_NAME" -> Right PGBIFilterUserListFirstPartyName "FILTER_USER_LIST_THIRD_PARTY_NAME" -> Right PGBIFilterUserListThirdPartyName "FILTER_NIELSEN_RESTATEMENT_DATE" -> Right PGBIFilterNielsenReStatementDate "FILTER_NIELSEN_DATE_RANGE" -> Right PGBIFilterNielsenDateRange "FILTER_INSERTION_ORDER_NAME" -> Right PGBIFilterInsertionOrderName "FILTER_REGION_NAME" -> Right PGBIFilterRegionName "FILTER_DMA_NAME" -> Right PGBIFilterDmaName "FILTER_TRUEVIEW_IAR_REGION_NAME" -> Right PGBIFilterTrueviewIarRegionName "FILTER_TRUEVIEW_DMA_NAME" -> Right PGBIFilterTrueviewDmaName "FILTER_TRUEVIEW_REGION_NAME" -> Right PGBIFilterTrueviewRegionName "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_ID" -> Right PGBIFilterActiveViewCustomMetricId "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_NAME" -> Right PGBIFilterActiveViewCustomMetricName "FILTER_AD_TYPE" -> Right PGBIFilterAdType "FILTER_ALGORITHM" -> Right PGBIFilterAlgorithm "FILTER_ALGORITHM_ID" -> Right PGBIFilterAlgorithmId "FILTER_AMP_PAGE_REQUEST" -> Right PGBIFilterAmpPageRequest "FILTER_ANONYMOUS_INVENTORY_MODELING" -> Right PGBIFilterAnonymousInventoryModeling "FILTER_APP_URL" -> Right PGBIFilterAppURL "FILTER_APP_URL_EXCLUDED" -> Right PGBIFilterAppURLExcluded "FILTER_ATTRIBUTED_USERLIST" -> Right PGBIFilterAttributedUserList "FILTER_ATTRIBUTED_USERLIST_COST" -> Right PGBIFilterAttributedUserListCost "FILTER_ATTRIBUTED_USERLIST_TYPE" -> Right PGBIFilterAttributedUserListType "FILTER_ATTRIBUTION_MODEL" -> Right PGBIFilterAttributionModel "FILTER_AUDIENCE_LIST" -> Right PGBIFilterAudienceList "FILTER_AUDIENCE_LIST_COST" -> Right PGBIFilterAudienceListCost "FILTER_AUDIENCE_LIST_TYPE" -> Right PGBIFilterAudienceListType "FILTER_AUDIENCE_NAME" -> Right PGBIFilterAudienceName "FILTER_AUDIENCE_TYPE" -> Right PGBIFilterAudienceType "FILTER_BILLABLE_OUTCOME" -> Right PGBIFilterBillableOutcome "FILTER_BRAND_LIFT_TYPE" -> Right PGBIFilterBrandLiftType "FILTER_CHANNEL_TYPE" -> Right PGBIFilterChannelType "FILTER_CM_PLACEMENT_ID" -> Right PGBIFilterCmPlacementId "FILTER_CONVERSION_SOURCE" -> Right PGBIFilterConversionSource "FILTER_CONVERSION_SOURCE_ID" -> Right PGBIFilterConversionSourceId "FILTER_COUNTRY_ID" -> Right PGBIFilterCountryId "FILTER_CREATIVE" -> Right PGBIFilterCreative "FILTER_CREATIVE_ASSET" -> Right PGBIFilterCreativeAsset "FILTER_CREATIVE_INTEGRATION_CODE" -> Right PGBIFilterCreativeIntegrationCode 
"FILTER_CREATIVE_RENDERED_IN_AMP" -> Right PGBIFilterCreativeRenderedInAmp "FILTER_CREATIVE_SOURCE" -> Right PGBIFilterCreativeSource "FILTER_CREATIVE_STATUS" -> Right PGBIFilterCreativeStatus "FILTER_DATA_PROVIDER_NAME" -> Right PGBIFilterDataProviderName "FILTER_DETAILED_DEMOGRAPHICS" -> Right PGBIFilterDetailedDemographics "FILTER_DETAILED_DEMOGRAPHICS_ID" -> Right PGBIFilterDetailedDemographicsId "FILTER_DEVICE" -> Right PGBIFilterDevice "FILTER_GAM_INSERTION_ORDER" -> Right PGBIFilterGamInsertionOrder "FILTER_GAM_LINE_ITEM" -> Right PGBIFilterGamLineItem "FILTER_GAM_LINE_ITEM_ID" -> Right PGBIFilterGamLineItemId "FILTER_DIGITAL_CONTENT_LABEL" -> Right PGBIFilterDigitalContentLabel "FILTER_DOMAIN" -> Right PGBIFilterDomain "FILTER_ELIGIBLE_COOKIES_ON_FIRST_PARTY_AUDIENCE_LIST" -> Right PGBIFilterEligibleCookiesOnFirstPartyAudienceList "FILTER_ELIGIBLE_COOKIES_ON_THIRD_PARTY_AUDIENCE_LIST_AND_INTEREST" -> Right PGBIFilterEligibleCookiesOnThirdPartyAudienceListAndInterest "FILTER_EXCHANGE" -> Right PGBIFilterExchange "FILTER_EXCHANGE_CODE" -> Right PGBIFilterExchangeCode "FILTER_EXTENSION" -> Right PGBIFilterExtension "FILTER_EXTENSION_STATUS" -> Right PGBIFilterExtensionStatus "FILTER_EXTENSION_TYPE" -> Right PGBIFilterExtensionType "FILTER_FIRST_PARTY_AUDIENCE_LIST_COST" -> Right PGBIFilterFirstPartyAudienceListCost "FILTER_FIRST_PARTY_AUDIENCE_LIST_TYPE" -> Right PGBIFilterFirstPartyAudienceListType "FILTER_FLOODLIGHT_ACTIVITY" -> Right PGBIFilterFloodlightActivity "FILTER_FORMAT" -> Right PGBIFilterFormat "FILTER_GMAIL_AGE" -> Right PGBIFilterGmailAge "FILTER_GMAIL_CITY" -> Right PGBIFilterGmailCity "FILTER_GMAIL_COUNTRY" -> Right PGBIFilterGmailCountry "FILTER_GMAIL_COUNTRY_NAME" -> Right PGBIFilterGmailCountryName "FILTER_GMAIL_DEVICE_TYPE" -> Right PGBIFilterGmailDeviceType "FILTER_GMAIL_DEVICE_TYPE_NAME" -> Right PGBIFilterGmailDeviceTypeName "FILTER_GMAIL_GENDER" -> Right PGBIFilterGmailGender "FILTER_GMAIL_REGION" -> Right PGBIFilterGmailRegion "FILTER_GMAIL_REMARKETING_LIST" -> Right PGBIFilterGmailRemarketingList "FILTER_HOUSEHOLD_INCOME" -> Right PGBIFilterHouseholdIncome "FILTER_IMPRESSION_COUNTING_METHOD" -> Right PGBIFilterImpressionCountingMethod "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_INSERTION_ORDER" -> Right PGBIFilterYouTubeProgrammaticGuaranteedInsertionOrder "FILTER_INSERTION_ORDER_INTEGRATION_CODE" -> Right PGBIFilterInsertionOrderIntegrationCode "FILTER_INSERTION_ORDER_STATUS" -> Right PGBIFilterInsertionOrderStatus "FILTER_INTEREST" -> Right PGBIFilterInterest "FILTER_INVENTORY_SOURCE_GROUP" -> Right PGBIFilterInventorySourceGroup "FILTER_INVENTORY_SOURCE_GROUP_ID" -> Right PGBIFilterInventorySourceGroupId "FILTER_INVENTORY_SOURCE_ID" -> Right PGBIFilterInventorySourceId "FILTER_INVENTORY_SOURCE_NAME" -> Right PGBIFilterInventorySourceName "FILTER_LIFE_EVENT" -> Right PGBIFilterLifeEvent "FILTER_LIFE_EVENTS" -> Right PGBIFilterLifeEvents "FILTER_LINE_ITEM_INTEGRATION_CODE" -> Right PGBIFilterLineItemIntegrationCode "FILTER_LINE_ITEM_NAME" -> Right PGBIFilterLineItemName "FILTER_LINE_ITEM_STATUS" -> Right PGBIFilterLineItemStatus "FILTER_MATCH_RATIO" -> Right PGBIFilterMatchRatio "FILTER_MEASUREMENT_SOURCE" -> Right PGBIFilterMeasurementSource "FILTER_MEDIA_PLAN_NAME" -> Right PGBIFilterMediaPlanName "FILTER_PARENTAL_STATUS" -> Right PGBIFilterParentalStatus "FILTER_PLACEMENT_ALL_YOUTUBE_CHANNELS" -> Right PGBIFilterPlacementAllYouTubeChannels "FILTER_PLATFORM" -> Right PGBIFilterPlatform "FILTER_PLAYBACK_METHOD" -> Right PGBIFilterPlaybackMethod 
"FILTER_POSITION_IN_CONTENT" -> Right PGBIFilterPositionInContent "FILTER_PUBLISHER_PROPERTY" -> Right PGBIFilterPublisherProperty "FILTER_PUBLISHER_PROPERTY_ID" -> Right PGBIFilterPublisherPropertyId "FILTER_PUBLISHER_PROPERTY_SECTION" -> Right PGBIFilterPublisherPropertySection "FILTER_PUBLISHER_PROPERTY_SECTION_ID" -> Right PGBIFilterPublisherPropertySectionId "FILTER_REFUND_REASON" -> Right PGBIFilterRefundReason "FILTER_REMARKETING_LIST" -> Right PGBIFilterRemarketingList "FILTER_REWARDED" -> Right PGBIFilterRewarded "FILTER_SENSITIVE_CATEGORY" -> Right PGBIFilterSensitiveCategory "FILTER_SERVED_PIXEL_DENSITY" -> Right PGBIFilterServedPixelDensity "FILTER_TARGETED_DATA_PROVIDERS" -> Right PGBIFilterTargetedDataProviders "FILTER_THIRD_PARTY_AUDIENCE_LIST_COST" -> Right PGBIFilterThirdPartyAudienceListCost "FILTER_THIRD_PARTY_AUDIENCE_LIST_TYPE" -> Right PGBIFilterThirdPartyAudienceListType "FILTER_TRUEVIEW_AD" -> Right PGBIFilterTrueviewAd "FILTER_TRUEVIEW_AD_GROUP" -> Right PGBIFilterTrueviewAdGroup "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS" -> Right PGBIFilterTrueviewDetailedDemographics "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS_ID" -> Right PGBIFilterTrueviewDetailedDemographicsId "FILTER_TRUEVIEW_HOUSEHOLD_INCOME" -> Right PGBIFilterTrueviewHouseholdIncome "FILTER_TRUEVIEW_IAR_COUNTRY_NAME" -> Right PGBIFilterTrueviewIarCountryName "FILTER_TRUEVIEW_REMARKETING_LIST_NAME" -> Right PGBIFilterTrueviewRemarketingListName "FILTER_VARIANT_ID" -> Right PGBIFilterVariantId "FILTER_VARIANT_NAME" -> Right PGBIFilterVariantName "FILTER_VARIANT_VERSION" -> Right PGBIFilterVariantVersion "FILTER_VERIFICATION_VIDEO_PLAYER_SIZE" -> Right PGBIFilterVerificationVideoPlayerSize "FILTER_VERIFICATION_VIDEO_POSITION" -> Right PGBIFilterVerificationVideoPosition "FILTER_VIDEO_COMPANION_CREATIVE_SIZE" -> Right PGBIFilterVideoCompanionCreativeSize "FILTER_VIDEO_CONTINUOUS_PLAY" -> Right PGBIFilterVideoContinuousPlay "FILTER_VIDEO_DURATION" -> Right PGBIFilterVideoDuration "FILTER_YOUTUBE_ADAPTED_AUDIENCE_LIST" -> Right PGBIFilterYouTubeAdaptedAudienceList "FILTER_YOUTUBE_AD_VIDEO" -> Right PGBIFilterYouTubeAdVideo "FILTER_YOUTUBE_AD_VIDEO_ID" -> Right PGBIFilterYouTubeAdVideoId "FILTER_YOUTUBE_CHANNEL" -> Right PGBIFilterYouTubeChannel "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_ADVERTISER" -> Right PGBIFilterYouTubeProgrammaticGuaranteedAdvertiser "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_PARTNER" -> Right PGBIFilterYouTubeProgrammaticGuaranteedPartner "FILTER_YOUTUBE_VIDEO" -> Right PGBIFilterYouTubeVideo "FILTER_ZIP_POSTAL_CODE" -> Right PGBIFilterZipPostalCode "FILTER_PLACEMENT_NAME_ALL_YOUTUBE_CHANNELS" -> Right PGBIFilterPlacementNameAllYouTubeChannels "FILTER_TRUEVIEW_PLACEMENT_ID" -> Right PGBIFilterTrueviewPlacementId "FILTER_PATH_PATTERN_ID" -> Right PGBIFilterPathPatternId "FILTER_PATH_EVENT_INDEX" -> Right PGBIFilterPathEventIndex "FILTER_EVENT_TYPE" -> Right PGBIFilterEventType "FILTER_CHANNEL_GROUPING" -> Right PGBIFilterChannelGrouping "FILTER_OM_SDK_AVAILABLE" -> Right PGBIFilterOmSdkAvailable "FILTER_DATA_SOURCE" -> Right PGBIFilterDataSource "FILTER_CM360_PLACEMENT_ID" -> Right PGBIFilterCM360PlacementId "FILTER_TRUEVIEW_CLICK_TYPE_NAME" -> Right PGBIFilterTrueviewClickTypeName "FILTER_TRUEVIEW_AD_TYPE_NAME" -> Right PGBIFilterTrueviewAdTypeName "FILTER_VIDEO_CONTENT_DURATION" -> Right PGBIFilterVideoContentDuration "FILTER_MATCHED_GENRE_TARGET" -> Right PGBIFilterMatchedGenreTarget "FILTER_VIDEO_CONTENT_LIVE_STREAM" -> Right PGBIFilterVideoContentLiveStream "FILTER_BUDGET_SEGMENT_TYPE" -> Right 
PGBIFilterBudgetSegmentType "FILTER_BUDGET_SEGMENT_BUDGET" -> Right PGBIFilterBudgetSegmentBudget "FILTER_BUDGET_SEGMENT_START_DATE" -> Right PGBIFilterBudgetSegmentStartDate "FILTER_BUDGET_SEGMENT_END_DATE" -> Right PGBIFilterBudgetSegmentEndDate "FILTER_BUDGET_SEGMENT_PACING_PERCENTAGE" -> Right PGBIFilterBudgetSegmentPacingPercentage "FILTER_LINE_ITEM_BUDGET" -> Right PGBIFilterLineItemBudget "FILTER_LINE_ITEM_START_DATE" -> Right PGBIFilterLineItemStartDate "FILTER_LINE_ITEM_END_DATE" -> Right PGBIFilterLineItemEndDate "FILTER_INSERTION_ORDER_GOAL_TYPE" -> Right PGBIFilterInsertionOrderGoalType "FILTER_LINE_ITEM_PACING_PERCENTAGE" -> Right PGBIFilterLineItemPacingPercentage "FILTER_INSERTION_ORDER_GOAL_VALUE" -> Right PGBIFilterInsertionOrderGoalValue "FILTER_OMID_CAPABLE" -> Right PGBIFilterOmidCapable "FILTER_VENDOR_MEASUREMENT_MODE" -> Right PGBIFilterVendorMeasurementMode x -> Left ("Unable to parse ParametersGroupBysItem from: " <> x) instance ToHttpApiData ParametersGroupBysItem where toQueryParam = \case PGBIFilterUnknown -> "FILTER_UNKNOWN" PGBIFilterDate -> "FILTER_DATE" PGBIFilterDayOfWeek -> "FILTER_DAY_OF_WEEK" PGBIFilterWeek -> "FILTER_WEEK" PGBIFilterMonth -> "FILTER_MONTH" PGBIFilterYear -> "FILTER_YEAR" PGBIFilterTimeOfDay -> "FILTER_TIME_OF_DAY" PGBIFilterConversionDelay -> "FILTER_CONVERSION_DELAY" PGBIFilterCreativeId -> "FILTER_CREATIVE_ID" PGBIFilterCreativeSize -> "FILTER_CREATIVE_SIZE" PGBIFilterCreativeType -> "FILTER_CREATIVE_TYPE" PGBIFilterExchangeId -> "FILTER_EXCHANGE_ID" PGBIFilterAdPosition -> "FILTER_AD_POSITION" PGBIFilterPublicInventory -> "FILTER_PUBLIC_INVENTORY" PGBIFilterInventorySource -> "FILTER_INVENTORY_SOURCE" PGBIFilterCity -> "FILTER_CITY" PGBIFilterRegion -> "FILTER_REGION" PGBIFilterDma -> "FILTER_DMA" PGBIFilterCountry -> "FILTER_COUNTRY" PGBIFilterSiteId -> "FILTER_SITE_ID" PGBIFilterChannelId -> "FILTER_CHANNEL_ID" PGBIFilterPartner -> "FILTER_PARTNER" PGBIFilterAdvertiser -> "FILTER_ADVERTISER" PGBIFilterInsertionOrder -> "FILTER_INSERTION_ORDER" PGBIFilterLineItem -> "FILTER_LINE_ITEM" PGBIFilterPartnerCurrency -> "FILTER_PARTNER_CURRENCY" PGBIFilterAdvertiserCurrency -> "FILTER_ADVERTISER_CURRENCY" PGBIFilterAdvertiserTimezone -> "FILTER_ADVERTISER_TIMEZONE" PGBIFilterLineItemType -> "FILTER_LINE_ITEM_TYPE" PGBIFilterUserList -> "FILTER_USER_LIST" PGBIFilterUserListFirstParty -> "FILTER_USER_LIST_FIRST_PARTY" PGBIFilterUserListThirdParty -> "FILTER_USER_LIST_THIRD_PARTY" PGBIFilterTargetedUserList -> "FILTER_TARGETED_USER_LIST" PGBIFilterDataProvider -> "FILTER_DATA_PROVIDER" PGBIFilterOrderId -> "FILTER_ORDER_ID" PGBIFilterVideoPlayerSize -> "FILTER_VIDEO_PLAYER_SIZE" PGBIFilterVideoDurationSeconds -> "FILTER_VIDEO_DURATION_SECONDS" PGBIFilterKeyword -> "FILTER_KEYWORD" PGBIFilterPageCategory -> "FILTER_PAGE_CATEGORY" PGBIFilterCampaignDailyFrequency -> "FILTER_CAMPAIGN_DAILY_FREQUENCY" PGBIFilterLineItemDailyFrequency -> "FILTER_LINE_ITEM_DAILY_FREQUENCY" PGBIFilterLineItemLifetimeFrequency -> "FILTER_LINE_ITEM_LIFETIME_FREQUENCY" PGBIFilterOS -> "FILTER_OS" PGBIFilterBrowser -> "FILTER_BROWSER" PGBIFilterCarrier -> "FILTER_CARRIER" PGBIFilterSiteLanguage -> "FILTER_SITE_LANGUAGE" PGBIFilterInventoryFormat -> "FILTER_INVENTORY_FORMAT" PGBIFilterZipCode -> "FILTER_ZIP_CODE" PGBIFilterVideoRatingTier -> "FILTER_VIDEO_RATING_TIER" PGBIFilterVideoFormatSupport -> "FILTER_VIDEO_FORMAT_SUPPORT" PGBIFilterVideoSkippableSupport -> "FILTER_VIDEO_SKIPPABLE_SUPPORT" PGBIFilterVideoCreativeDuration -> "FILTER_VIDEO_CREATIVE_DURATION" 
PGBIFilterPageLayout -> "FILTER_PAGE_LAYOUT" PGBIFilterVideoAdPositionInStream -> "FILTER_VIDEO_AD_POSITION_IN_STREAM" PGBIFilterAge -> "FILTER_AGE" PGBIFilterGender -> "FILTER_GENDER" PGBIFilterQuarter -> "FILTER_QUARTER" PGBIFilterTrueviewConversionType -> "FILTER_TRUEVIEW_CONVERSION_TYPE" PGBIFilterMobileGeo -> "FILTER_MOBILE_GEO" PGBIFilterMraidSupport -> "FILTER_MRAID_SUPPORT" PGBIFilterActiveViewExpectedViewability -> "FILTER_ACTIVE_VIEW_EXPECTED_VIEWABILITY" PGBIFilterVideoCreativeDurationSkippable -> "FILTER_VIDEO_CREATIVE_DURATION_SKIPPABLE" PGBIFilterNielsenCountryCode -> "FILTER_NIELSEN_COUNTRY_CODE" PGBIFilterNielsenDeviceId -> "FILTER_NIELSEN_DEVICE_ID" PGBIFilterNielsenGender -> "FILTER_NIELSEN_GENDER" PGBIFilterNielsenAge -> "FILTER_NIELSEN_AGE" PGBIFilterInventorySourceType -> "FILTER_INVENTORY_SOURCE_TYPE" PGBIFilterCreativeWidth -> "FILTER_CREATIVE_WIDTH" PGBIFilterCreativeHeight -> "FILTER_CREATIVE_HEIGHT" PGBIFilterDfpOrderId -> "FILTER_DFP_ORDER_ID" PGBIFilterTrueviewAge -> "FILTER_TRUEVIEW_AGE" PGBIFilterTrueviewGender -> "FILTER_TRUEVIEW_GENDER" PGBIFilterTrueviewParentalStatus -> "FILTER_TRUEVIEW_PARENTAL_STATUS" PGBIFilterTrueviewRemarketingList -> "FILTER_TRUEVIEW_REMARKETING_LIST" PGBIFilterTrueviewInterest -> "FILTER_TRUEVIEW_INTEREST" PGBIFilterTrueviewAdGroupId -> "FILTER_TRUEVIEW_AD_GROUP_ID" PGBIFilterTrueviewAdGroupAdId -> "FILTER_TRUEVIEW_AD_GROUP_AD_ID" PGBIFilterTrueviewIarLanguage -> "FILTER_TRUEVIEW_IAR_LANGUAGE" PGBIFilterTrueviewIarGender -> "FILTER_TRUEVIEW_IAR_GENDER" PGBIFilterTrueviewIarAge -> "FILTER_TRUEVIEW_IAR_AGE" PGBIFilterTrueviewIarCategory -> "FILTER_TRUEVIEW_IAR_CATEGORY" PGBIFilterTrueviewIarCountry -> "FILTER_TRUEVIEW_IAR_COUNTRY" PGBIFilterTrueviewIarCity -> "FILTER_TRUEVIEW_IAR_CITY" PGBIFilterTrueviewIarRegion -> "FILTER_TRUEVIEW_IAR_REGION" PGBIFilterTrueviewIarZipcode -> "FILTER_TRUEVIEW_IAR_ZIPCODE" PGBIFilterTrueviewIarRemarketingList -> "FILTER_TRUEVIEW_IAR_REMARKETING_LIST" PGBIFilterTrueviewIarInterest -> "FILTER_TRUEVIEW_IAR_INTEREST" PGBIFilterTrueviewIarParentalStatus -> "FILTER_TRUEVIEW_IAR_PARENTAL_STATUS" PGBIFilterTrueviewIarTimeOfDay -> "FILTER_TRUEVIEW_IAR_TIME_OF_DAY" PGBIFilterTrueviewCustomAffinity -> "FILTER_TRUEVIEW_CUSTOM_AFFINITY" PGBIFilterTrueviewCategory -> "FILTER_TRUEVIEW_CATEGORY" PGBIFilterTrueviewKeyword -> "FILTER_TRUEVIEW_KEYWORD" PGBIFilterTrueviewPlacement -> "FILTER_TRUEVIEW_PLACEMENT" PGBIFilterTrueviewURL -> "FILTER_TRUEVIEW_URL" PGBIFilterTrueviewCountry -> "FILTER_TRUEVIEW_COUNTRY" PGBIFilterTrueviewRegion -> "FILTER_TRUEVIEW_REGION" PGBIFilterTrueviewCity -> "FILTER_TRUEVIEW_CITY" PGBIFilterTrueviewDma -> "FILTER_TRUEVIEW_DMA" PGBIFilterTrueviewZipcode -> "FILTER_TRUEVIEW_ZIPCODE" PGBIFilterNotSupported -> "FILTER_NOT_SUPPORTED" PGBIFilterMediaPlan -> "FILTER_MEDIA_PLAN" PGBIFilterTrueviewIarYouTubeChannel -> "FILTER_TRUEVIEW_IAR_YOUTUBE_CHANNEL" PGBIFilterTrueviewIarYouTubeVideo -> "FILTER_TRUEVIEW_IAR_YOUTUBE_VIDEO" PGBIFilterSkippableSupport -> "FILTER_SKIPPABLE_SUPPORT" PGBIFilterCompanionCreativeId -> "FILTER_COMPANION_CREATIVE_ID" PGBIFilterBudgetSegmentDescription -> "FILTER_BUDGET_SEGMENT_DESCRIPTION" PGBIFilterFloodlightActivityId -> "FILTER_FLOODLIGHT_ACTIVITY_ID" PGBIFilterDeviceModel -> "FILTER_DEVICE_MODEL" PGBIFilterDeviceMake -> "FILTER_DEVICE_MAKE" PGBIFilterDeviceType -> "FILTER_DEVICE_TYPE" PGBIFilterCreativeAttribute -> "FILTER_CREATIVE_ATTRIBUTE" PGBIFilterInventoryCommitmentType -> "FILTER_INVENTORY_COMMITMENT_TYPE" PGBIFilterInventoryRateType -> 
"FILTER_INVENTORY_RATE_TYPE" PGBIFilterInventoryDeliveryMethod -> "FILTER_INVENTORY_DELIVERY_METHOD" PGBIFilterInventorySourceExternalId -> "FILTER_INVENTORY_SOURCE_EXTERNAL_ID" PGBIFilterAuthorizedSellerState -> "FILTER_AUTHORIZED_SELLER_STATE" PGBIFilterVideoDurationSecondsRange -> "FILTER_VIDEO_DURATION_SECONDS_RANGE" PGBIFilterPartnerName -> "FILTER_PARTNER_NAME" PGBIFilterPartnerStatus -> "FILTER_PARTNER_STATUS" PGBIFilterAdvertiserName -> "FILTER_ADVERTISER_NAME" PGBIFilterAdvertiserIntegrationCode -> "FILTER_ADVERTISER_INTEGRATION_CODE" PGBIFilterAdvertiserIntegrationStatus -> "FILTER_ADVERTISER_INTEGRATION_STATUS" PGBIFilterCarrierName -> "FILTER_CARRIER_NAME" PGBIFilterChannelName -> "FILTER_CHANNEL_NAME" PGBIFilterCityName -> "FILTER_CITY_NAME" PGBIFilterCompanionCreativeName -> "FILTER_COMPANION_CREATIVE_NAME" PGBIFilterUserListFirstPartyName -> "FILTER_USER_LIST_FIRST_PARTY_NAME" PGBIFilterUserListThirdPartyName -> "FILTER_USER_LIST_THIRD_PARTY_NAME" PGBIFilterNielsenReStatementDate -> "FILTER_NIELSEN_RESTATEMENT_DATE" PGBIFilterNielsenDateRange -> "FILTER_NIELSEN_DATE_RANGE" PGBIFilterInsertionOrderName -> "FILTER_INSERTION_ORDER_NAME" PGBIFilterRegionName -> "FILTER_REGION_NAME" PGBIFilterDmaName -> "FILTER_DMA_NAME" PGBIFilterTrueviewIarRegionName -> "FILTER_TRUEVIEW_IAR_REGION_NAME" PGBIFilterTrueviewDmaName -> "FILTER_TRUEVIEW_DMA_NAME" PGBIFilterTrueviewRegionName -> "FILTER_TRUEVIEW_REGION_NAME" PGBIFilterActiveViewCustomMetricId -> "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_ID" PGBIFilterActiveViewCustomMetricName -> "FILTER_ACTIVE_VIEW_CUSTOM_METRIC_NAME" PGBIFilterAdType -> "FILTER_AD_TYPE" PGBIFilterAlgorithm -> "FILTER_ALGORITHM" PGBIFilterAlgorithmId -> "FILTER_ALGORITHM_ID" PGBIFilterAmpPageRequest -> "FILTER_AMP_PAGE_REQUEST" PGBIFilterAnonymousInventoryModeling -> "FILTER_ANONYMOUS_INVENTORY_MODELING" PGBIFilterAppURL -> "FILTER_APP_URL" PGBIFilterAppURLExcluded -> "FILTER_APP_URL_EXCLUDED" PGBIFilterAttributedUserList -> "FILTER_ATTRIBUTED_USERLIST" PGBIFilterAttributedUserListCost -> "FILTER_ATTRIBUTED_USERLIST_COST" PGBIFilterAttributedUserListType -> "FILTER_ATTRIBUTED_USERLIST_TYPE" PGBIFilterAttributionModel -> "FILTER_ATTRIBUTION_MODEL" PGBIFilterAudienceList -> "FILTER_AUDIENCE_LIST" PGBIFilterAudienceListCost -> "FILTER_AUDIENCE_LIST_COST" PGBIFilterAudienceListType -> "FILTER_AUDIENCE_LIST_TYPE" PGBIFilterAudienceName -> "FILTER_AUDIENCE_NAME" PGBIFilterAudienceType -> "FILTER_AUDIENCE_TYPE" PGBIFilterBillableOutcome -> "FILTER_BILLABLE_OUTCOME" PGBIFilterBrandLiftType -> "FILTER_BRAND_LIFT_TYPE" PGBIFilterChannelType -> "FILTER_CHANNEL_TYPE" PGBIFilterCmPlacementId -> "FILTER_CM_PLACEMENT_ID" PGBIFilterConversionSource -> "FILTER_CONVERSION_SOURCE" PGBIFilterConversionSourceId -> "FILTER_CONVERSION_SOURCE_ID" PGBIFilterCountryId -> "FILTER_COUNTRY_ID" PGBIFilterCreative -> "FILTER_CREATIVE" PGBIFilterCreativeAsset -> "FILTER_CREATIVE_ASSET" PGBIFilterCreativeIntegrationCode -> "FILTER_CREATIVE_INTEGRATION_CODE" PGBIFilterCreativeRenderedInAmp -> "FILTER_CREATIVE_RENDERED_IN_AMP" PGBIFilterCreativeSource -> "FILTER_CREATIVE_SOURCE" PGBIFilterCreativeStatus -> "FILTER_CREATIVE_STATUS" PGBIFilterDataProviderName -> "FILTER_DATA_PROVIDER_NAME" PGBIFilterDetailedDemographics -> "FILTER_DETAILED_DEMOGRAPHICS" PGBIFilterDetailedDemographicsId -> "FILTER_DETAILED_DEMOGRAPHICS_ID" PGBIFilterDevice -> "FILTER_DEVICE" PGBIFilterGamInsertionOrder -> "FILTER_GAM_INSERTION_ORDER" PGBIFilterGamLineItem -> "FILTER_GAM_LINE_ITEM" PGBIFilterGamLineItemId -> 
"FILTER_GAM_LINE_ITEM_ID" PGBIFilterDigitalContentLabel -> "FILTER_DIGITAL_CONTENT_LABEL" PGBIFilterDomain -> "FILTER_DOMAIN" PGBIFilterEligibleCookiesOnFirstPartyAudienceList -> "FILTER_ELIGIBLE_COOKIES_ON_FIRST_PARTY_AUDIENCE_LIST" PGBIFilterEligibleCookiesOnThirdPartyAudienceListAndInterest -> "FILTER_ELIGIBLE_COOKIES_ON_THIRD_PARTY_AUDIENCE_LIST_AND_INTEREST" PGBIFilterExchange -> "FILTER_EXCHANGE" PGBIFilterExchangeCode -> "FILTER_EXCHANGE_CODE" PGBIFilterExtension -> "FILTER_EXTENSION" PGBIFilterExtensionStatus -> "FILTER_EXTENSION_STATUS" PGBIFilterExtensionType -> "FILTER_EXTENSION_TYPE" PGBIFilterFirstPartyAudienceListCost -> "FILTER_FIRST_PARTY_AUDIENCE_LIST_COST" PGBIFilterFirstPartyAudienceListType -> "FILTER_FIRST_PARTY_AUDIENCE_LIST_TYPE" PGBIFilterFloodlightActivity -> "FILTER_FLOODLIGHT_ACTIVITY" PGBIFilterFormat -> "FILTER_FORMAT" PGBIFilterGmailAge -> "FILTER_GMAIL_AGE" PGBIFilterGmailCity -> "FILTER_GMAIL_CITY" PGBIFilterGmailCountry -> "FILTER_GMAIL_COUNTRY" PGBIFilterGmailCountryName -> "FILTER_GMAIL_COUNTRY_NAME" PGBIFilterGmailDeviceType -> "FILTER_GMAIL_DEVICE_TYPE" PGBIFilterGmailDeviceTypeName -> "FILTER_GMAIL_DEVICE_TYPE_NAME" PGBIFilterGmailGender -> "FILTER_GMAIL_GENDER" PGBIFilterGmailRegion -> "FILTER_GMAIL_REGION" PGBIFilterGmailRemarketingList -> "FILTER_GMAIL_REMARKETING_LIST" PGBIFilterHouseholdIncome -> "FILTER_HOUSEHOLD_INCOME" PGBIFilterImpressionCountingMethod -> "FILTER_IMPRESSION_COUNTING_METHOD" PGBIFilterYouTubeProgrammaticGuaranteedInsertionOrder -> "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_INSERTION_ORDER" PGBIFilterInsertionOrderIntegrationCode -> "FILTER_INSERTION_ORDER_INTEGRATION_CODE" PGBIFilterInsertionOrderStatus -> "FILTER_INSERTION_ORDER_STATUS" PGBIFilterInterest -> "FILTER_INTEREST" PGBIFilterInventorySourceGroup -> "FILTER_INVENTORY_SOURCE_GROUP" PGBIFilterInventorySourceGroupId -> "FILTER_INVENTORY_SOURCE_GROUP_ID" PGBIFilterInventorySourceId -> "FILTER_INVENTORY_SOURCE_ID" PGBIFilterInventorySourceName -> "FILTER_INVENTORY_SOURCE_NAME" PGBIFilterLifeEvent -> "FILTER_LIFE_EVENT" PGBIFilterLifeEvents -> "FILTER_LIFE_EVENTS" PGBIFilterLineItemIntegrationCode -> "FILTER_LINE_ITEM_INTEGRATION_CODE" PGBIFilterLineItemName -> "FILTER_LINE_ITEM_NAME" PGBIFilterLineItemStatus -> "FILTER_LINE_ITEM_STATUS" PGBIFilterMatchRatio -> "FILTER_MATCH_RATIO" PGBIFilterMeasurementSource -> "FILTER_MEASUREMENT_SOURCE" PGBIFilterMediaPlanName -> "FILTER_MEDIA_PLAN_NAME" PGBIFilterParentalStatus -> "FILTER_PARENTAL_STATUS" PGBIFilterPlacementAllYouTubeChannels -> "FILTER_PLACEMENT_ALL_YOUTUBE_CHANNELS" PGBIFilterPlatform -> "FILTER_PLATFORM" PGBIFilterPlaybackMethod -> "FILTER_PLAYBACK_METHOD" PGBIFilterPositionInContent -> "FILTER_POSITION_IN_CONTENT" PGBIFilterPublisherProperty -> "FILTER_PUBLISHER_PROPERTY" PGBIFilterPublisherPropertyId -> "FILTER_PUBLISHER_PROPERTY_ID" PGBIFilterPublisherPropertySection -> "FILTER_PUBLISHER_PROPERTY_SECTION" PGBIFilterPublisherPropertySectionId -> "FILTER_PUBLISHER_PROPERTY_SECTION_ID" PGBIFilterRefundReason -> "FILTER_REFUND_REASON" PGBIFilterRemarketingList -> "FILTER_REMARKETING_LIST" PGBIFilterRewarded -> "FILTER_REWARDED" PGBIFilterSensitiveCategory -> "FILTER_SENSITIVE_CATEGORY" PGBIFilterServedPixelDensity -> "FILTER_SERVED_PIXEL_DENSITY" PGBIFilterTargetedDataProviders -> "FILTER_TARGETED_DATA_PROVIDERS" PGBIFilterThirdPartyAudienceListCost -> "FILTER_THIRD_PARTY_AUDIENCE_LIST_COST" PGBIFilterThirdPartyAudienceListType -> "FILTER_THIRD_PARTY_AUDIENCE_LIST_TYPE" PGBIFilterTrueviewAd -> "FILTER_TRUEVIEW_AD" 
PGBIFilterTrueviewAdGroup -> "FILTER_TRUEVIEW_AD_GROUP" PGBIFilterTrueviewDetailedDemographics -> "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS" PGBIFilterTrueviewDetailedDemographicsId -> "FILTER_TRUEVIEW_DETAILED_DEMOGRAPHICS_ID" PGBIFilterTrueviewHouseholdIncome -> "FILTER_TRUEVIEW_HOUSEHOLD_INCOME" PGBIFilterTrueviewIarCountryName -> "FILTER_TRUEVIEW_IAR_COUNTRY_NAME" PGBIFilterTrueviewRemarketingListName -> "FILTER_TRUEVIEW_REMARKETING_LIST_NAME" PGBIFilterVariantId -> "FILTER_VARIANT_ID" PGBIFilterVariantName -> "FILTER_VARIANT_NAME" PGBIFilterVariantVersion -> "FILTER_VARIANT_VERSION" PGBIFilterVerificationVideoPlayerSize -> "FILTER_VERIFICATION_VIDEO_PLAYER_SIZE" PGBIFilterVerificationVideoPosition -> "FILTER_VERIFICATION_VIDEO_POSITION" PGBIFilterVideoCompanionCreativeSize -> "FILTER_VIDEO_COMPANION_CREATIVE_SIZE" PGBIFilterVideoContinuousPlay -> "FILTER_VIDEO_CONTINUOUS_PLAY" PGBIFilterVideoDuration -> "FILTER_VIDEO_DURATION" PGBIFilterYouTubeAdaptedAudienceList -> "FILTER_YOUTUBE_ADAPTED_AUDIENCE_LIST" PGBIFilterYouTubeAdVideo -> "FILTER_YOUTUBE_AD_VIDEO" PGBIFilterYouTubeAdVideoId -> "FILTER_YOUTUBE_AD_VIDEO_ID" PGBIFilterYouTubeChannel -> "FILTER_YOUTUBE_CHANNEL" PGBIFilterYouTubeProgrammaticGuaranteedAdvertiser -> "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_ADVERTISER" PGBIFilterYouTubeProgrammaticGuaranteedPartner -> "FILTER_YOUTUBE_PROGRAMMATIC_GUARANTEED_PARTNER" PGBIFilterYouTubeVideo -> "FILTER_YOUTUBE_VIDEO" PGBIFilterZipPostalCode -> "FILTER_ZIP_POSTAL_CODE" PGBIFilterPlacementNameAllYouTubeChannels -> "FILTER_PLACEMENT_NAME_ALL_YOUTUBE_CHANNELS" PGBIFilterTrueviewPlacementId -> "FILTER_TRUEVIEW_PLACEMENT_ID" PGBIFilterPathPatternId -> "FILTER_PATH_PATTERN_ID" PGBIFilterPathEventIndex -> "FILTER_PATH_EVENT_INDEX" PGBIFilterEventType -> "FILTER_EVENT_TYPE" PGBIFilterChannelGrouping -> "FILTER_CHANNEL_GROUPING" PGBIFilterOmSdkAvailable -> "FILTER_OM_SDK_AVAILABLE" PGBIFilterDataSource -> "FILTER_DATA_SOURCE" PGBIFilterCM360PlacementId -> "FILTER_CM360_PLACEMENT_ID" PGBIFilterTrueviewClickTypeName -> "FILTER_TRUEVIEW_CLICK_TYPE_NAME" PGBIFilterTrueviewAdTypeName -> "FILTER_TRUEVIEW_AD_TYPE_NAME" PGBIFilterVideoContentDuration -> "FILTER_VIDEO_CONTENT_DURATION" PGBIFilterMatchedGenreTarget -> "FILTER_MATCHED_GENRE_TARGET" PGBIFilterVideoContentLiveStream -> "FILTER_VIDEO_CONTENT_LIVE_STREAM" PGBIFilterBudgetSegmentType -> "FILTER_BUDGET_SEGMENT_TYPE" PGBIFilterBudgetSegmentBudget -> "FILTER_BUDGET_SEGMENT_BUDGET" PGBIFilterBudgetSegmentStartDate -> "FILTER_BUDGET_SEGMENT_START_DATE" PGBIFilterBudgetSegmentEndDate -> "FILTER_BUDGET_SEGMENT_END_DATE" PGBIFilterBudgetSegmentPacingPercentage -> "FILTER_BUDGET_SEGMENT_PACING_PERCENTAGE" PGBIFilterLineItemBudget -> "FILTER_LINE_ITEM_BUDGET" PGBIFilterLineItemStartDate -> "FILTER_LINE_ITEM_START_DATE" PGBIFilterLineItemEndDate -> "FILTER_LINE_ITEM_END_DATE" PGBIFilterInsertionOrderGoalType -> "FILTER_INSERTION_ORDER_GOAL_TYPE" PGBIFilterLineItemPacingPercentage -> "FILTER_LINE_ITEM_PACING_PERCENTAGE" PGBIFilterInsertionOrderGoalValue -> "FILTER_INSERTION_ORDER_GOAL_VALUE" PGBIFilterOmidCapable -> "FILTER_OMID_CAPABLE" PGBIFilterVendorMeasurementMode -> "FILTER_VENDOR_MEASUREMENT_MODE" instance FromJSON ParametersGroupBysItem where parseJSON = parseJSONText "ParametersGroupBysItem" instance ToJSON ParametersGroupBysItem where toJSON = toJSONText -- | Error code that shows why the report was not created. 
data ReportFailureErrorCode = AuthenticationError -- ^ @AUTHENTICATION_ERROR@ | UnauthorizedAPIAccess -- ^ @UNAUTHORIZED_API_ACCESS@ | ServerError -- ^ @SERVER_ERROR@ | ValidationError -- ^ @VALIDATION_ERROR@ | ReportingFatalError -- ^ @REPORTING_FATAL_ERROR@ | ReportingTransientError -- ^ @REPORTING_TRANSIENT_ERROR@ | ReportingImcompatibleMetrics -- ^ @REPORTING_IMCOMPATIBLE_METRICS@ | ReportingIllegalFilename -- ^ @REPORTING_ILLEGAL_FILENAME@ | ReportingQueryNotFound -- ^ @REPORTING_QUERY_NOT_FOUND@ | ReportingBucketNotFound -- ^ @REPORTING_BUCKET_NOT_FOUND@ | ReportingCreateBucketFailed -- ^ @REPORTING_CREATE_BUCKET_FAILED@ | ReportingDeleteBucketFailed -- ^ @REPORTING_DELETE_BUCKET_FAILED@ | ReportingUpdateBucketPermissionFailed -- ^ @REPORTING_UPDATE_BUCKET_PERMISSION_FAILED@ | ReportingWriteBucketObjectFailed -- ^ @REPORTING_WRITE_BUCKET_OBJECT_FAILED@ | DeprecatedReportingInvalidQuery -- ^ @DEPRECATED_REPORTING_INVALID_QUERY@ | ReportingInvalidQueryTooManyUnfilteredLargeGroupBys -- ^ @REPORTING_INVALID_QUERY_TOO_MANY_UNFILTERED_LARGE_GROUP_BYS@ | ReportingInvalidQueryTitleMissing -- ^ @REPORTING_INVALID_QUERY_TITLE_MISSING@ | ReportingInvalidQueryMissingPartnerAndAdvertiserFilters -- ^ @REPORTING_INVALID_QUERY_MISSING_PARTNER_AND_ADVERTISER_FILTERS@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable ReportFailureErrorCode instance FromHttpApiData ReportFailureErrorCode where parseQueryParam = \case "AUTHENTICATION_ERROR" -> Right AuthenticationError "UNAUTHORIZED_API_ACCESS" -> Right UnauthorizedAPIAccess "SERVER_ERROR" -> Right ServerError "VALIDATION_ERROR" -> Right ValidationError "REPORTING_FATAL_ERROR" -> Right ReportingFatalError "REPORTING_TRANSIENT_ERROR" -> Right ReportingTransientError "REPORTING_IMCOMPATIBLE_METRICS" -> Right ReportingImcompatibleMetrics "REPORTING_ILLEGAL_FILENAME" -> Right ReportingIllegalFilename "REPORTING_QUERY_NOT_FOUND" -> Right ReportingQueryNotFound "REPORTING_BUCKET_NOT_FOUND" -> Right ReportingBucketNotFound "REPORTING_CREATE_BUCKET_FAILED" -> Right ReportingCreateBucketFailed "REPORTING_DELETE_BUCKET_FAILED" -> Right ReportingDeleteBucketFailed "REPORTING_UPDATE_BUCKET_PERMISSION_FAILED" -> Right ReportingUpdateBucketPermissionFailed "REPORTING_WRITE_BUCKET_OBJECT_FAILED" -> Right ReportingWriteBucketObjectFailed "DEPRECATED_REPORTING_INVALID_QUERY" -> Right DeprecatedReportingInvalidQuery "REPORTING_INVALID_QUERY_TOO_MANY_UNFILTERED_LARGE_GROUP_BYS" -> Right ReportingInvalidQueryTooManyUnfilteredLargeGroupBys "REPORTING_INVALID_QUERY_TITLE_MISSING" -> Right ReportingInvalidQueryTitleMissing "REPORTING_INVALID_QUERY_MISSING_PARTNER_AND_ADVERTISER_FILTERS" -> Right ReportingInvalidQueryMissingPartnerAndAdvertiserFilters x -> Left ("Unable to parse ReportFailureErrorCode from: " <> x) instance ToHttpApiData ReportFailureErrorCode where toQueryParam = \case AuthenticationError -> "AUTHENTICATION_ERROR" UnauthorizedAPIAccess -> "UNAUTHORIZED_API_ACCESS" ServerError -> "SERVER_ERROR" ValidationError -> "VALIDATION_ERROR" ReportingFatalError -> "REPORTING_FATAL_ERROR" ReportingTransientError -> "REPORTING_TRANSIENT_ERROR" ReportingImcompatibleMetrics -> "REPORTING_IMCOMPATIBLE_METRICS" ReportingIllegalFilename -> "REPORTING_ILLEGAL_FILENAME" ReportingQueryNotFound -> "REPORTING_QUERY_NOT_FOUND" ReportingBucketNotFound -> "REPORTING_BUCKET_NOT_FOUND" ReportingCreateBucketFailed -> "REPORTING_CREATE_BUCKET_FAILED" ReportingDeleteBucketFailed -> "REPORTING_DELETE_BUCKET_FAILED" ReportingUpdateBucketPermissionFailed -> 
"REPORTING_UPDATE_BUCKET_PERMISSION_FAILED" ReportingWriteBucketObjectFailed -> "REPORTING_WRITE_BUCKET_OBJECT_FAILED" DeprecatedReportingInvalidQuery -> "DEPRECATED_REPORTING_INVALID_QUERY" ReportingInvalidQueryTooManyUnfilteredLargeGroupBys -> "REPORTING_INVALID_QUERY_TOO_MANY_UNFILTERED_LARGE_GROUP_BYS" ReportingInvalidQueryTitleMissing -> "REPORTING_INVALID_QUERY_TITLE_MISSING" ReportingInvalidQueryMissingPartnerAndAdvertiserFilters -> "REPORTING_INVALID_QUERY_MISSING_PARTNER_AND_ADVERTISER_FILTERS" instance FromJSON ReportFailureErrorCode where parseJSON = parseJSONText "ReportFailureErrorCode" instance ToJSON ReportFailureErrorCode where toJSON = toJSONText -- | The file type of the report. data ReportStatusFormat = RSFCSV -- ^ @CSV@ | RSFExcelCSV -- ^ @EXCEL_CSV@ | RSFXlsx -- ^ @XLSX@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable ReportStatusFormat instance FromHttpApiData ReportStatusFormat where parseQueryParam = \case "CSV" -> Right RSFCSV "EXCEL_CSV" -> Right RSFExcelCSV "XLSX" -> Right RSFXlsx x -> Left ("Unable to parse ReportStatusFormat from: " <> x) instance ToHttpApiData ReportStatusFormat where toQueryParam = \case RSFCSV -> "CSV" RSFExcelCSV -> "EXCEL_CSV" RSFXlsx -> "XLSX" instance FromJSON ReportStatusFormat where parseJSON = parseJSONText "ReportStatusFormat" instance ToJSON ReportStatusFormat where toJSON = toJSONText
brendanhay/gogol
gogol-doubleclick-bids/gen/Network/Google/DoubleClickBids/Types/Sum.hs
mpl-2.0
313,813
0
11
55,581
28,865
15,185
13,680
4,451
0
{-# LANGUAGE DeriveDataTypeable, UnicodeSyntax, OverloadedStrings, FlexibleContexts, RankNTypes #-}
module Text.LogMerger.Logs.COM (
    comLogFormat
  , comStartLogFormat
  ) where

import Pipes.Dissect as Diss
import Control.Applicative
import Control.Monad
import Data.Attoparsec.ByteString.Char8
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy.Internal as B
import Text.LogMerger.Types
import Text.LogMerger.Logs.Types
import Text.LogMerger.Logs.Util
import Prelude hiding (takeWhile)
import Data.Time.Clock
import Data.Attoparsec.Combinator.Skip

comLogFormat = LogFormat {
    _dissector = evalStateT comLogDissector
  , _nameRegex = mkRegex "com\\.log\\.[0-9]+$"
  , _formatName = "sgsn-mme-com-log"
  , _formatDescription = "Log of COM application at SGSN-MME node"
  , _timeAs = AsLocalTime
  }

comStartLogFormat = LogFormat {
    _dissector = evalStateT comStartLogDissector
  , _nameRegex = mkRegex "com_start\\.log\\.[0-9]+$"
  , _formatName = "sgsn-mme-com-start-log"
  , _formatDescription = "Log of COM application at SGSN-MME node"
  , _timeAs = AsLocalTime
  }

comLogDissector ∷ (Monad m) ⇒ Dissector SGSNBasicEntry m (Either String ())
comLogDissector = tillEnd $ do
  prefix ← takeTill (==':') <* ":" <?> "COM log prefix"
  skipWhile isSpace
  day ← (yymmdd <* skipWhile isSpace) <?> "COM Day"
  time ← hhmmss <?> "Time"
  nanoseconds ← "." *> decimal
  (rest, _) ← (match $ (takeTill (=='\n') <* "\n")) <?> "rest"
  return BasicLogEntry {
      _basic_origin = []
    , _basic_date = UTCTime {
        utctDay = day
      , utctDayTime = time + (picosecondsToDiffTime $ 1000000*nanoseconds)
      }
    , _basic_text = B.concat [prefix, ":", rest]
    }

comStartLogDissector ∷ (Monad m) ⇒ Dissector SGSNBasicEntry m (Either String ())
comStartLogDissector = dissect
  where
    entryHead = do
      day ← (yymmdd <* skipWhile isSpace) <?> "COM-start Day"
      time ← hhmmss <?> "Time"
      microseconds ← "." *> decimal
      return UTCTime {
          utctDay = day
        , utctDayTime = time + (picosecondsToDiffTime $ 1000000000*microseconds)
        }

    entryHead' = endOfLine *> entryHead

    dissect = do
      e0 ← Diss.parse $ do
        time' ← entryHead
        txt' ← matchManyTill anyChar (() <$ entryHead' <|> endOfInput)
        return BasicLogEntry {
            _basic_origin = []
          , _basic_date = time'
          , _basic_text = txt'
          }
      case e0 of
        (Right e0') -> do
          yieldD e0'
          tillEnd $ do
            time ← entryHead'
            txt ← matchManyTill anyChar (() <$ entryHead' <|> endOfInput)
            return BasicLogEntry {
                _basic_origin = []
              , _basic_date = time
              , _basic_text = txt
              }
        (Left r) -> do
          e ← eof
          return $ if e then Right () else Left r
k32/visualsgsn
src/Text/LogMerger/Logs/COM.hs
unlicense
3,026
0
22
860
749
411
338
79
3
import Control.Concurrent

type Process a b = (a->b)->[b->IO()]->IO ()

data ProcessData a b = ProcessData (a->b) [b->IO()]

executeProcess::ProcessData a b -> a -> IO()
executeProcess (ProcessData f dependencies) a = mapM_ (forkIO . ($ (f a))) dependencies

myDependencies = [print, printDouble, printTriple, printQuad, printQuint]
myProc = ProcessData id myDependencies

main = executeProcess myProc 1

printN n x = print (n*x)
printDouble = printN 2
printTriple = printN 3
printQuad = printN 4
printQuint = printN 5
Crazycolorz5/Haskell-Code
EventHandling.hs
unlicense
520
0
10
83
233
124
109
13
1
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies, TypeOperators #-}
module Object.Types where

import Prelude hiding ((.))
import Control.Applicative
import Control.Monad

class Action object action output where
  (.) :: object -> action -> output

data a := b = a := b deriving (Show,Read,Eq)

infixl 8 .
infixl 9 :=
yokto/Object
Object/Types.hs
apache-2.0
316
0
8
50
94
56
38
10
0
#!/usr/bin/env stack
{- stack --resolver lts-11.10 --install-ghc runghc
   --package base
   --
   -hide-all-packages
-}

-- Copyright 2018 Google LLC. All Rights Reserved.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
--     http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.

data Term = Term String [Term] deriving Show

data K
  = Id
  | Choice K K
  | Seq K K
  | All K
  | AllList
  | Try
  | TopDown
  | Repeat
  | One K
  | OneList
  | OnceTopDown
  | OuterMost
  | R
  | MkJust
  | MkNothing
  | TopDownRec K
  | RepeatRec K
  | OnceTopDownRec K
  | ChoiceLeftFailed K Term K K
  | SeqLeftSucceeded K K K
  | AllSubtermsSucceeded String K
  | AllTailSucceeded Term K
  | AllHeadSucceeded K [Term] K K
  | OneSubtermSucceeded String K
  | OneHeadSucceeded [Term] K
  | OneHeadFailed K [Term] K K
  deriving Show

apply Id t sk fk = apply_t sk t
apply (Choice s1 s2) t sk fk = apply s1 t sk (ChoiceLeftFailed s2 t sk fk)
apply (Seq s1 s2) t sk fk = apply s1 t (SeqLeftSucceeded s2 sk fk) fk
apply (All s) (Term f ts) sk fk = apply_ts' AllList s ts (AllSubtermsSucceeded f sk) fk
apply (TopDownRec s) t sk fk = apply (apply_s TopDown s) t sk fk
apply (RepeatRec s) t sk fk = apply (apply_s Repeat s) t sk fk
apply (One s) (Term f ts) sk fk = apply_ts' OneList s ts (OneSubtermSucceeded f sk) fk
apply (OnceTopDownRec s) t sk fk = apply (apply_s OnceTopDown s) t sk fk
apply R (Term "plus" [Term "z" [], m]) sk fk = apply_t sk m
apply R (Term "plus" [Term "s" [n], m]) sk fk = apply_t sk (Term "s" [Term "plus" [n, m]])
apply R t sk fk = apply_u fk ()

apply_u (ChoiceLeftFailed s2 t sk fk) () = apply s2 t sk fk
apply_u MkNothing () = Nothing
apply_u (OneHeadFailed s ts sk fk) () = apply_ts' OneList s ts sk fk

apply_s Try s = Choice s Id
apply_s TopDown s = Seq s (All (TopDownRec s))
apply_s Repeat s = Choice (Seq s (RepeatRec s)) Id
apply_s OnceTopDown s = Choice s (One (OnceTopDownRec s))
apply_s OuterMost s = apply_s Repeat (apply_s OnceTopDown s)

apply_t MkJust x1 = Just x1
apply_t (OneHeadSucceeded ts sk) t = apply_ts sk (t:ts)
apply_t (SeqLeftSucceeded s2 sk fk) t' = apply s2 t' sk fk
apply_t (AllHeadSucceeded s ts sk fk) t = apply_ts' AllList s ts (AllTailSucceeded t sk) fk

apply_ts (AllSubtermsSucceeded f sk) ts = apply_t sk (Term f ts)
apply_ts (AllTailSucceeded t sk) ts = apply_ts sk (t:ts)
apply_ts (OneSubtermSucceeded f sk) ts = apply_t sk (Term f ts)

apply_ts' AllList s [] sk fk = apply_ts sk []
apply_ts' AllList s (t:ts) sk fk = apply s t (AllHeadSucceeded s ts sk fk) fk
apply_ts' OneList s [] sk fk = apply_u fk ()
apply_ts' OneList s (t:ts) sk fk = apply s t (OneHeadSucceeded ts sk) (OneHeadFailed s ts sk fk)

plus a b = Term "plus" [a, b]
z = Term "z" []
s n = Term "s" [n]

main = print $ apply (apply_s OuterMost R) (plus (s (s z)) (s z)) MkJust MkNothing
polux/snippets
sl_cps_defunc.hs
apache-2.0
3,266
0
12
692
1,276
652
624
64
1
{-# LANGUAGE ScopedTypeVariables #-}
{-|
  Module:      HaskHOL.Core.Ext
  Copyright:   (c) Evan Austin 2015
  LICENSE:     BSD3
  Maintainer:  [email protected]
  Stability:   unstable
  Portability: unknown

  This module exports HaskHOL's non-trivial extensions to the underlying HOL
  system, i.e. the compile time operations.  These operations are split into
  three categories:

  * Methods related to the Protect and Serve Mechanism for sealing and
    unsealing data against a provided theory context.

  * Methods related to quasi-quoting of 'HOLTerm's.

  * Methods related to compile time extension and caching of theory contexts.
-}
module HaskHOL.Core.Ext
    ( -- * Protected Data Methods
      -- $Protect
      module HaskHOL.Core.Ext.Protected
      -- * Quasi-Quoter Methods
      -- $QQ
    , module HaskHOL.Core.Ext.QQ
    , module Language.Haskell.TH
      {-| Re-exports 'Q', 'Dec', and 'Exp' for the purpose of writing type
          signatures external to this module.
      -}
    , module Language.Haskell.TH.Quote
      {-| Re-exports 'QuasiQuoter' for the purpose of writing type signatures
          external to this module.
      -}
    ) where

import HaskHOL.Core.Ext.Protected
import HaskHOL.Core.Ext.QQ

import Language.Haskell.TH (Q, Dec, Exp)
import Language.Haskell.TH.Quote (QuasiQuoter)
import Prelude hiding (FilePath)

-- Documentation copied from sub-modules

{-$Protect
  The basic goal behind the Protect and Serve mechanism is to recapture some
  of the efficiency lost as a result of moving from an impure, interpretted
  host language to a pure, compiled one.  We do this by forcing the evaluation
  of large computations, usually proofs, such that they are only run once.
  To maintain soundness of our proof system, we must track what information
  was used to force the computation and guarantee that information is present
  in all cases where this new value is to be used.  This is the purpose of the
  @Protected@ class and the 'liftProtectedExp' and 'liftProtected' methods.
-}

{-$QQ
  Quasi-quoting provides a way to parse 'HOLTerm's at compile time safely.
  Just as with proofs, we seal these terms against the theory context used to
  parse them with 'protect' and 'serve' to preserve soundness.  See the
  documentation for 'base' for a brief discussion on when quasi-quoting should
  be used vs. 'toHTm'.
-}
ecaustin/haskhol-core
src/HaskHOL/Core/Ext.hs
bsd-2-clause
2,374
0
5
492
103
75
28
12
0
{-# LANGUAGE TupleSections #-}

-- | Alignment and synchronization.  Currently works only with positional
-- tagsets.

module NLP.Concraft.Morphosyntax.Align
( align
, sync
) where

import Prelude hiding (Word)
import Control.Applicative ((<|>))
import Data.Maybe (fromJust)
import Data.List (find)
import qualified Data.Set as S
import qualified Data.Map as M
import qualified Data.Char as C
import qualified Data.Text as T
import qualified Data.Tagset.Positional as P

import NLP.Concraft.Morphosyntax

-- | Synchronize two datasets, taking disamb tags from the first one
-- and the rest of information form the second one.
-- In case of differences in token-level segmentation, reference segmentation
-- (token-level) is assumed.  Otherwise, it would be difficult to choose
-- correct disamb tags.
sync :: Word w => P.Tagset -> [Seg w P.Tag] -> [Seg w P.Tag] -> [Seg w P.Tag]
sync tagset xs ys = concatMap (uncurry (moveDisamb tagset)) (align xs ys)

-- | If both arguments contain only one segment, insert disamb interpretations
-- from the first segment into the second segment.  Otherwise, the first list
-- of segments will be returned unchanged.
moveDisamb :: P.Tagset -> [Seg w P.Tag] -> [Seg w P.Tag] -> [Seg w P.Tag]
moveDisamb tagset [v] [w] =
    [w {tags = mkWMap (map (,0) tagsNew ++ disambNew)}]
  where
    -- Return list of (tag, weight) pairs assigned to the segment.
    tagPairs = M.toList . unWMap . tags
    -- New tags domain.
    tagsNew = map fst (tagPairs w)
    -- Disamb list with tags mapped to the new domain.
    disambNew = [(newDom x, c) | (x, c) <- tagPairs v, c > 0]
    -- Find corresonding tag in the new tags domain.
    newDom tag = fromJust $
            find ( ==tag) tagsNew   -- Exact match
        <|> find (~==tag) tagsNew   -- Expanded tag match
        <|> Just tag                -- Controversial
      where
        x ~== y = S.size (label x `S.intersection` label y) > 0
        label = S.fromList . P.expand tagset
-- Do nothing in this case.
moveDisamb _ xs _ = xs

-- | Align two lists of segments.
align :: Word w => [Seg w t] -> [Seg w t] -> [([Seg w t], [Seg w t])]
align [] [] = []
align [] _  = error "align: null xs, not null ys"
align _ []  = error "align: not null xs, null ys"
align xs ys =
    let (x, y) = match xs ys
        rest   = align (drop (length x) xs) (drop (length y) ys)
    in  (x, y) : rest

-- | Find the shortest, length-matching prefixes in the two input lists.
match :: Word w => [Seg w t] -> [Seg w t] -> ([Seg w t], [Seg w t])
match xs' ys' =
    doIt 0 xs' 0 ys'
  where
    doIt i (x:xs) j (y:ys)
        | n == m    = ([x], [y])
        | n < m     = addL x $ doIt n xs j (y:ys)
        | otherwise = addR y $ doIt i (x:xs) m ys
      where
        n = i + size x
        m = j + size y
    doIt _ [] _ _ = error "match: the first argument is null"
    doIt _ _ _ [] = error "match: the second argument is null"
    size w = T.length . T.filter (not.C.isSpace) $ orth w
    addL x (xs, ys) = (x:xs, ys)
    addR y (xs, ys) = (xs, y:ys)
kawu/concraft
src/NLP/Concraft/Morphosyntax/Align.hs
bsd-2-clause
3,094
1
13
834
1,033
556
477
51
3
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ViewPatterns #-}

-- | Parsing a CSV file row-by-row.

module CSVParsing where

import Control.Condition
import Control.Exception
import Control.Monad.Fix
import Control.Monad.IO.Class
import Control.Monad.Trans.Resource
import Data.CSV.Conduit
import Data.Conduit
import Data.Conduit.Binary
import Data.Conduit.List
import Data.Typeable
import Text.Read

-- | A file opening condition.
data DummyCondition = DummyCondition [String] (Maybe Int)
  deriving (Typeable)
instance Show DummyCondition where show _ = "Dummy condition"
instance Exception DummyCondition
instance Condition DummyCondition (ConduitM [[Char]] Int (ResourceT IO) ())

-- | Get dummy ints from the CSV file.
getDummyInts :: Handlers => FilePath -> IO [Int]
getDummyInts fp =
  runResourceT $
  sourceFile fp $= intoCSV defCSVSettings $= loop $$ consume
  where
    loop = do
      mrow <- await
      case mrow of
        Just xs@[s,readMaybe -> Just i] -> do
          case s of
            "dummy" -> yield i
            _ -> signal (DummyCondition xs (Just i))
          loop
        Just xs -> do
          signal (DummyCondition xs Nothing)
          loop
        Nothing -> return ()

-- | Main entry point.
main :: IO ()
main =
  withConditions
    (do is <- handler
                (\(DummyCondition xs mi) -> do
                   liftIO
                     (do putStrLn ("Invalid row in CSV file: ")
                         print xs
                         putStrLn "Restarts: "
                         case mi of
                           Just{} -> putStrLn "(0) Ignore the first column."
                           _ -> return ()
                         putStrLn "(1) Skip the row."
                         putStrLn "(2) Provide a different value for this row.")
                   fix (\loop -> do
                          n <- liftIO readLn
                          case n :: Int of
                            0 -> case mi of
                                   Just i -> yield i
                                   _ -> return ()
                            1 -> return ()
                            2 -> do
                              this <-
                                liftIO
                                  (do putStrLn "Enter a value: "
                                      readLn)
                              yield this
                            _ -> loop))
                (getDummyInts "example.csv")
        print is)
chrisdone/conditions
examples/CSV.hs
bsd-3-clause
2,835
0
30
1,291
589
293
296
66
6
module TypeUtilsSpec (main, spec) where import Test.Hspec import TestUtils import qualified GHC.SYB.Utils as SYB import qualified GHC as GHC import qualified GhcMonad as GHC import qualified Name as GHC import qualified RdrName as GHC import qualified Module as GHC import Data.Maybe import Language.Haskell.GHC.ExactPrint.Types import Language.Haskell.GHC.ExactPrint.Parsers import Language.Haskell.GHC.ExactPrint.Transform import Language.Haskell.GHC.ExactPrint.Utils import Language.Haskell.Refact.Utils.Binds import Language.Haskell.Refact.Utils.GhcVersionSpecific import Language.Haskell.Refact.Utils.LocUtils import Language.Haskell.Refact.Utils.Monad import Language.Haskell.Refact.Utils.MonadFunctions import Language.Haskell.Refact.Utils.TypeUtils import Language.Haskell.Refact.Utils.Utils import Language.Haskell.Refact.Utils.Variables import qualified Data.Map as Map import Data.List main :: IO () main = do hspec spec spec :: Spec spec = do -- ------------------------------------------------------------------- describe "findAllNameOccurences" $ do it "finds all occurrences of the given name in a syntax phrase" $ do (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/S.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just ((GHC.L _l n)) = locToName (4,5) renamed (showGhcQual n) `shouldBe` "x" let res = findAllNameOccurences n renamed (showGhcQual res) `shouldBe` "[x, x]" -- NOTE: does not get the x's in line 8 (showGhcQual $ map startEndLocGhc res) `shouldBe` "[((4, 5), (4, 6)), ((4, 17), (4, 18))]" -- ------------------------------------------------------------------- describe "locToName" $ do it "returns a GHC.Name for a given source location, if it falls anywhere in an identifier #1" $ do -- ((_,renamed,_), _toks) <- parsedFileBGhc (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/B.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (res@(GHC.L l n)) = locToName (7,3) renamed showGhcQual l `shouldBe` "TypeUtils/B.hs:7:1-3" getLocatedStart res `shouldBe` (7,1) showGhcQual n `shouldBe` "TypeUtils.B.foo" -- --------------------------------- it "returns a GHC.Name for a given source location, if it falls anywhere in an identifier #2" $ do -- ((_, renamed,_),_toks) <- parsedFileBGhc (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/B.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (res@(GHC.L l n)) = locToName (25,8) renamed showGhcQual n `shouldBe` "TypeUtils.B.bob" showGhcQual l `shouldBe` "TypeUtils/B.hs:25:7-9" getLocatedStart res `shouldBe` (25,7) -- --------------------------------- it "returns Nothing for a given source location, if it does not fall in an identifier" $ do (t, _toks,_) <- ct $ parsedFileGhc "TypeUtils/B.hs" let renamed = fromJust $ GHC.tm_renamed_source t let res = locToName (7,7) renamed (showGhcQual res) `shouldBe` "Nothing" -- --------------------------------- it "gets a short name too" $ do (t, _toks,_) <- ct $ parsedFileGhc "./Demote/WhereIn2.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (res@(GHC.L l n)) = locToName (14,1) renamed showGhcQual n `shouldBe` "Demote.WhereIn2.sq" showGhcQual l `shouldBe` "Demote/WhereIn2.hs:14:1-2" getLocatedStart res `shouldBe` (14,1) -- --------------------------------- it "gets a type variable name" $ do (t, _toks,_) <- ct $ parsedFileGhc "./Renaming/ConstructorIn3.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (res@(GHC.L l n)) = locToName (9,12) renamed showGhcQual n `shouldBe` "a" -- Note: loc does not line up due to multiple matches in FunBind showGhcQual l `shouldBe` 
"Renaming/ConstructorIn3.hs:9:12" getLocatedStart res `shouldBe` (9,12) -- --------------------------------- it "gets an instance class name" $ do (t, _toks,_) <- ct $ parsedFileGhc "./Renaming/ClassIn3.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just (res@(GHC.L l n)) = locToName (16,10) renamed showGhcQual n `shouldBe` "GHC.Classes.Eq" showGhcQual l `shouldBe` "Renaming/ClassIn3.hs:16:10-11" getLocatedStart res `shouldBe` (16,10) -- ------------------------------------------------------------------- describe "locToRdrName" $ do it "returns a GHC.RdrName for a given source location, if it falls anywhere in an identifier" $ do (t, _toks,_) <- ct $ parsedFileGhc "./Renaming/D5.hs" let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (res@(GHC.L l n)) = locToRdrName (20,1) parsed -- showGhcQual l `shouldBe` "Renaming/D5.hs:20:1-10" (show $ ss2span l) `shouldBe` "((20,1),(20,11))" getLocatedStart res `shouldBe` (20,1) showGhcQual n `shouldBe` "sumSquares" it "returns a GHC.RdrName for a source location, in a MatchGroup" $ do (t, _toks,_) <- ct $ parsedFileGhc "./LocToName.hs" let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (res@(GHC.L l n)) = locToRdrName (24,2) parsed showGhcQual n `shouldBe` "sumSquares" getLocatedStart res `shouldBe` (24,1) showGhcQual l `shouldBe` "LocToName.hs:24:1-10" -- ------------------------------------------------------------------- describe "allNames" $ do it "lists all Names" $ do (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/S.hs" let renamed = fromJust $ GHC.tm_renamed_source t let res = allNames renamed -- let res' = map (\(GHC.L l n) -> (showGhcQual $ GHC.nameUnique n,showGhcQual (l, n))) res let res' = map (\(GHC.L l n) -> (showGhcQual $ GHC.nameUnique n,showGhcQual (l, GHC.getSrcSpan n, n))) res -- Map.insertWith :: Ord k => (a -> a -> a) -> k -> a -> Map k a -> Map k a let res'' = foldl' (\m (k,a) -> Map.insertWith (++) k a m) Map.empty res' (sort $ Map.elems res'') `shouldBe` ["(TypeUtils/S.hs:10:12, TypeUtils/S.hs:10:8, n)(TypeUtils/S.hs:10:8, TypeUtils/S.hs:10:8, n)" ,"(TypeUtils/S.hs:10:14, <no location info>, GHC.Num.+)" ,"(TypeUtils/S.hs:10:5-6, TypeUtils/S.hs:10:5-6, zz)(TypeUtils/S.hs:10:5-6, TypeUtils/S.hs:10:5-6, zz)(TypeUtils/S.hs:8:13-14, TypeUtils/S.hs:10:5-6, zz)" ,"(TypeUtils/S.hs:4:1-3, TypeUtils/S.hs:4:1-3, TypeUtils.S.foo)(TypeUtils/S.hs:4:1-3, TypeUtils/S.hs:4:1-3, TypeUtils.S.foo)" ,"(TypeUtils/S.hs:4:13-15, <no location info>, GHC.Real.odd)" ,"(TypeUtils/S.hs:4:17, TypeUtils/S.hs:4:5, x)(TypeUtils/S.hs:4:5, TypeUtils/S.hs:4:5, x)" ,"(TypeUtils/S.hs:6:10, TypeUtils/S.hs:6:10, TypeUtils.S.A)" ,"(TypeUtils/S.hs:6:14, TypeUtils/S.hs:6:14-21, TypeUtils.S.B)" ,"(TypeUtils/S.hs:6:25, TypeUtils/S.hs:6:25, TypeUtils.S.C)" ,"(TypeUtils/S.hs:6:6, TypeUtils/S.hs:6:1-25, TypeUtils.S.D)" ,"(TypeUtils/S.hs:8:1-7, TypeUtils/S.hs:8:1-7, TypeUtils.S.subdecl)(TypeUtils/S.hs:8:1-7, TypeUtils/S.hs:8:1-7, TypeUtils.S.subdecl)" ,"(TypeUtils/S.hs:8:16, TypeUtils/S.hs:8:9, x)(TypeUtils/S.hs:8:9, TypeUtils/S.hs:8:9, x)" ] -- ------------------------------------------------------------------- describe "getName" $ do it "gets a qualified Name at the top level" $ do (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/B.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just n = getName "TypeUtils.B.foo'" renamed (showGhcQual n) `shouldBe` "TypeUtils.B.foo'" (showGhcQual $ GHC.getSrcSpan n) `shouldBe` "TypeUtils/B.hs:14:1-4" it "gets any instance of an 
unqualified Name" $ do (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/B.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just n = getName "foo" renamed (showGhcQual n) `shouldBe` "foo" (showGhcQual $ GHC.getSrcSpan n) `shouldBe` "TypeUtils/B.hs:9:15-17" it "returns Nothing if the Name is not found" $ do (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/B.hs" let renamed = fromJust $ GHC.tm_renamed_source t let res = getName "baz" renamed (showGhcQual res) `shouldBe` "Nothing" -- ------------------------------------------------------------------- describe "definingDeclsRdrNames" $ do it "returns [] if not found" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let renamed = fromJust $ GHC.tm_renamed_source t let nameMap = initRdrNameMap t let Just ((GHC.L _ n)) = locToName (16,6) renamed let decls = GHC.hsmodDecls $ GHC.unLoc parsed let res = definingDeclsRdrNames nameMap [n] decls False False showGhcQual res `shouldBe` "[]" -- --------------------------------- it "finds declarations at the top level" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let nameMap = initRdrNameMap t let Just (GHC.L _ n) = locToName (3,3) renamed let decls = GHC.hsmodDecls $ GHC.unLoc parsed let res = definingDeclsRdrNames nameMap [n] decls False False showGhcQual res `shouldBe` "[toplevel x = c * x]" -- --------------------------------- it "finds in a patbind" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let nameMap = initRdrNameMap t let Just (GHC.L _ n) = locToName (14,1) renamed let decls = GHC.hsmodDecls $ GHC.unLoc parsed let res = definingDeclsRdrNames nameMap [n] decls False False showGhcQual res `shouldBe` "[tup@(h, t)\n = head $ zip [1 .. 10] [3 .. 
ff]\n where\n ff :: Int\n ff = 15]" -- --------------------------------- it "finds recursively in sub-binds" $ do {- modInfo@((_, _, mod@(GHC.L l (GHC.HsModule name exps imps ds _ _))), toks) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let res = definingDecls [(PN (mkRdrName "zz"))] ds False True showGhcQual res `shouldBe` "[zz n = n + 1]" -- TODO: Currently fails, will come back to it -} pending -- "Currently fails, will come back to it" -- --------------------------------- it "only finds recursively in sub-binds if asked" $ do {- modInfo@((_, _, mod@(GHC.L l (GHC.HsModule name exps imps ds _ _))), toks) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let res = definingDecls [(PN (mkRdrName "zz"))] ds False False showGhcQual res `shouldBe` "[]" -} pending -- "Convert to definingDeclsNames" -- ------------------------------------------------------------------- describe "definingDeclsNames" $ do it "returns [] if not found" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just ((GHC.L _ n)) = locToName (16,6) renamed let res = definingDeclsNames [n] (hsBinds renamed) False False showGhcQual res `shouldBe` "[]" it "finds declarations at the top level" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ n) = locToName (3,3) renamed let res = definingDeclsNames [n] (hsBinds renamed) False False showGhcQual res `shouldBe` "[DupDef.Dd1.toplevel x = DupDef.Dd1.c GHC.Num.* x]" it "finds in a patbind" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ n) = locToName (14,1) renamed let res = definingDeclsNames [n] (hsBinds renamed) False False showGhcQual res `shouldBe` "[DupDef.Dd1.tup@(DupDef.Dd1.h, DupDef.Dd1.t)\n = GHC.List.head GHC.Base.$ GHC.List.zip [1 .. 10] [3 .. 
ff]\n where\n ff :: GHC.Types.Int\n ff = 15]" it "finds recursively in sub-binds" $ do {- modInfo@((_, _, mod@(GHC.L l (GHC.HsModule name exps imps ds _ _))), toks) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let res = definingDecls [(PN (mkRdrName "zz"))] ds False True showGhcQual res `shouldBe` "[zz n = n + 1]" -- TODO: Currently fails, will come back to it -} pending -- "Currently fails, will come back to it" it "only finds recursively in sub-binds if asked" $ do {- modInfo@((_, _, mod@(GHC.L l (GHC.HsModule name exps imps ds _ _))), toks) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let res = definingDecls [(PN (mkRdrName "zz"))] ds False False showGhcQual res `shouldBe` "[]" -} pending -- "Convert to definingDeclsNames" -- ------------------------------------------------------------------- describe "definingSigsRdrNames" $ do it "returns [] if not found" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let nameMap = initRdrNameMap t let Just ((GHC.L _ n)) = locToName (21,1) renamed showGhcQual n `shouldBe` "DupDef.Dd1.ff" let res = definingSigsRdrNames nameMap [n] parsed showGhcQual res `shouldBe` "[]" -- --------------------------------- it "finds signatures at the top level" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let nameMap = initRdrNameMap t let Just ((GHC.L _ n)) = locToName (4,1) renamed showGhcQual n `shouldBe` "DupDef.Dd1.toplevel" let res = definingSigsRdrNames nameMap [n] parsed showGhcQual res `shouldBe` "[toplevel :: Integer -> Integer]" -- --------------------------------- it "returns only the single signature where there are others too" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let nameMap = initRdrNameMap t let Just ((GHC.L _ n)) = locToName (7,1) renamed showGhcQual n `shouldBe` "DupDef.Dd1.c" let res = definingSigsRdrNames nameMap [n] parsed showGhcQual res `shouldBe` "[c :: Integer]" -- --------------------------------- it "finds signatures at lower levels" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let nameMap = initRdrNameMap t let Just ((GHC.L _ n)) = locToName (16,5) renamed showGhcQual n `shouldBe` "ff" let res = definingSigsRdrNames nameMap [n] parsed showGhcQual res `shouldBe` "[ff :: Int]" -- --------------------------------- it "finds multiple signatures" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let nameMap = initRdrNameMap t let Just ((GHC.L _ n1)) = locToName (21,1) renamed showGhcQual n1 `shouldBe` "DupDef.Dd1.ff" let Just ((GHC.L _ n2)) = locToName (16,5) renamed showGhcQual n2 `shouldBe` "ff" let Just ((GHC.L _ n3)) = locToName (4,1) renamed showGhcQual n3 `shouldBe` "DupDef.Dd1.toplevel" let res = definingSigsRdrNames nameMap [n1,n2,n3] parsed showGhcQual res `shouldBe` "[toplevel :: Integer -> Integer, ff :: Int]" -- ------------------------------------------------------------------- describe "definingSigsNames" $ do it "returns [] if not found" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let 
Just ((GHC.L _ n)) = locToName (21,1) renamed showGhcQual n `shouldBe` "DupDef.Dd1.ff" let res = definingSigsNames [n] renamed showGhcQual res `shouldBe` "[]" it "finds signatures at the top level" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just ((GHC.L _ n)) = locToName (4,1) renamed showGhcQual n `shouldBe` "DupDef.Dd1.toplevel" let res = definingSigsNames [n] renamed showGhcQual res `shouldBe` "[DupDef.Dd1.toplevel ::\n GHC.Integer.Type.Integer -> GHC.Integer.Type.Integer]" it "returns only the single signature where there are others too" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just ((GHC.L _ n)) = locToName (7,1) renamed showGhcQual n `shouldBe` "DupDef.Dd1.c" let res = definingSigsNames [n] renamed showGhcQual res `shouldBe` "[DupDef.Dd1.c :: GHC.Integer.Type.Integer]" it "finds signatures at lower levels" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just ((GHC.L _ n)) = locToName (16,5) renamed showGhcQual n `shouldBe` "ff" let res = definingSigsNames [n] renamed showGhcQual res `shouldBe` "[ff :: GHC.Types.Int]" it "finds multiple signatures" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just ((GHC.L _ n1)) = locToName (21,1) renamed showGhcQual n1 `shouldBe` "DupDef.Dd1.ff" let Just ((GHC.L _ n2)) = locToName (16,5) renamed showGhcQual n2 `shouldBe` "ff" let Just ((GHC.L _ n3)) = locToName (4,1) renamed showGhcQual n3 `shouldBe` "DupDef.Dd1.toplevel" let res = definingSigsNames [n1,n2,n3] renamed showGhcQual res `shouldBe` "[ff :: GHC.Types.Int,\n DupDef.Dd1.toplevel ::\n GHC.Integer.Type.Integer -> GHC.Integer.Type.Integer]" -- ------------------------------------------------------------------- describe "definingTyClDeclsNames" $ do it "returns [] if not found" $ do (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/TyClDecls.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just ((GHC.L _ n)) = locToName (10,29) renamed let res = definingTyClDeclsNames [n] renamed showGhcQual res `shouldBe` "[]" -- --------------------------------- it "finds foreign type declarations" $ do pending -- --------------------------------- it "finds family declarations" $ do (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/TyClDecls.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just (GHC.L _ n) = locToName (7,14) renamed let res = definingTyClDeclsNames [n] renamed showGhcQual res `shouldBe` "[data family TypeUtils.TyClDEcls.XList a]" -- --------------------------------- it "finds data declarations" $ do (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/TyClDecls.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ n) = locToName (12,6) renamed let res = definingTyClDeclsNames [n] renamed (unspace $ showGhcQual res) `shouldBe` "[data TypeUtils.TyClDEcls.Foo\n = TypeUtils.TyClDEcls.Foo GHC.Types.Int]" -- --------------------------------- it "finds type declarations" $ do (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/TyClDecls.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ n) = locToName (14,6) renamed let res = definingTyClDeclsNames [n] renamed showGhcQual res `shouldBe` "[type TypeUtils.TyClDEcls.Foo2 = GHC.Base.String]" -- --------------------------------- it "finds class declarations" $ do (t, _toks,_) <- ct $ parsedFileGhc 
"./TypeUtils/TyClDecls.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ n) = locToName (16,7) renamed let res = definingTyClDeclsNames [n] renamed showGhcQual res `shouldBe` "[class TypeUtils.TyClDEcls.Bar a where\n TypeUtils.TyClDEcls.bar :: a -> GHC.Types.Bool]" -- --------------------------------- it "finds multiple declarations" $ do (t, _toks,_) <- ct $ parsedFileGhc "./TypeUtils/TyClDecls.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ n1) = locToName (14,6) renamed let Just (GHC.L _ n2) = locToName (16,7) renamed let res = definingTyClDeclsNames [n1,n2] renamed showGhcQual res `shouldBe` "[type TypeUtils.TyClDEcls.Foo2 = GHC.Base.String,\n"++ " class TypeUtils.TyClDEcls.Bar a where\n"++ " TypeUtils.TyClDEcls.bar :: a -> GHC.Types.Bool]" -- ------------------------------------------------------------------- describe "isFunBindR" $ do it "Returns False if not a function definition" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just tup = getName "DupDef.Dd1.tup" renamed let [decl] = definingDeclsNames [tup] (hsBinds renamed) False False isFunBindR decl `shouldBe` False it "Returns True if a function definition" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let (GHC.L _l (GHC.HsModule _name _exps _imps _ds _ _)) = GHC.pm_parsed_source $ GHC.tm_parsed_module t let renamed = fromJust $ GHC.tm_renamed_source t let Just toplevel = getName "DupDef.Dd1.toplevel" renamed let [decl] = definingDeclsNames [toplevel] (hsBinds renamed) False False isFunBindR decl `shouldBe` True -- ------------------------------------------------------------------- describe "isFunOrPatName" $ do it "Return True if a PName is a function/pattern name defined in t" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just tup = getName "DupDef.Dd1.tup" renamed isFunOrPatName tup renamed `shouldBe` True it "Return False if a PName is a function/pattern name defined in t" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" (t2, _toks2,_) <- ct $ parsedFileGhc "./DupDef/Dd2.hs" let renamed = fromJust $ GHC.tm_renamed_source t let renamed2 = fromJust $ GHC.tm_renamed_source t2 let Just tup = getName "DupDef.Dd1.tup" renamed isFunOrPatName tup renamed2 `shouldBe` False -- ------------------------------------------------------------------- describe "hsFreeAndDeclaredPNs" $ do it "finds declared in type class definitions" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/DeclareTypes.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" -- (SYB.showData SYB.Renamer 0 $ hsTyDecls renamed) `shouldBe` "" let comp = do let tds = nub $ concatMap getDeclaredTypes $ concat $ hsTyDecls renamed return (tds) -- ((res),_s) <- runRefactGhc comp $ initialState { rsModule = initialStateRefactModule t toks } ((res),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (res)) `shouldBe` "[(FreeAndDeclared.DeclareTypes.XList, (8, 13)),\n"++ " (FreeAndDeclared.DeclareTypes.Foo, (21, 6)),\n"++ " (FreeAndDeclared.DeclareTypes.X, (19, 6)),\n"++ " (FreeAndDeclared.DeclareTypes.Y, (19, 10)),\n"++ " (FreeAndDeclared.DeclareTypes.Z, (19, 22)),\n"++ " (FreeAndDeclared.DeclareTypes.W, (19, 26)),\n"++ " (FreeAndDeclared.DeclareTypes.Bar, (23, 7)),\n"++ " (FreeAndDeclared.DeclareTypes.doBar, (27, 3)),\n"++ " 
(FreeAndDeclared.DeclareTypes.BarVar, (24, 8)),\n"++ " (FreeAndDeclared.DeclareTypes.BarData, (25, 8))]" -- --------------------------------- it "finds declared HsVar" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/Declare.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let comp = do let r = hsFreeAndDeclaredPNsOld renamed rg <- hsFreeAndDeclaredPNs renamed let ff = map (\b -> getFreeVars [b]) $ hsBinds renamed return (r,rg,ff) -- ((res,resg,_fff),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((res,resg,_fff),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (showGhcQual _fff) `shouldBe` "" -- Free Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (fst res)) `shouldBe` "[(Data.Generics.Text.gshow, (-1, -1)),\n "++ "(System.IO.getChar, (-1, -1)), "++ "(System.IO.putStrLn, (-1, -1)),\n "++ "(GHC.Base.return, (-1, -1)), "++ "(GHC.Base.$, (-1, -1)),\n "++ "(GHC.List.head, (-1, -1)), "++ "(GHC.List.zip, (-1, -1)),\n "++ "(GHC.Num.fromInteger, (-1, -1)), "++ "(GHC.Num.*, (-1, -1))]" -- Declared Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (snd res)) `shouldBe` "[(FreeAndDeclared.Declare.ff, (36, 1)),\n "++ "(FreeAndDeclared.Declare.mkT, (34, 1)),\n "++ "(FreeAndDeclared.Declare.main, (30, 1)),\n "++ "(FreeAndDeclared.Declare.unF, (27, 1)),\n "++ "(FreeAndDeclared.Declare.unD, (21, 1)),\n "++ "(FreeAndDeclared.Declare.tup, (16, 1)),\n "++ -- ++AZ++ addition "(FreeAndDeclared.Declare.h, (16, 6)),\n "++ "(FreeAndDeclared.Declare.t, (16, 8)),\n "++ "(FreeAndDeclared.Declare.d, (10, 1)),\n "++ "(FreeAndDeclared.Declare.c, (9, 1)),\n "++ "(FreeAndDeclared.Declare.toplevel, (6, 1))]" -- GHC version -- Free Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (fst resg)) `shouldBe` "[(Data.Generics.Text.gshow, (-1, -1)),\n "++ "(System.IO.getChar, (-1, -1)), "++ "(System.IO.putStrLn, (-1, -1)),\n "++ "(GHC.List.head, (-1, -1)), "++ "(GHC.Base.$, (-1, -1)),\n "++ "(GHC.List.zip, (-1, -1)), "++ "(GHC.Num.*, (-1, -1))]" -- Declared Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (snd resg)) `shouldBe` "[(FreeAndDeclared.Declare.ff, (36, 1)),\n "++ "(FreeAndDeclared.Declare.mkT, (34, 1)),\n "++ "(FreeAndDeclared.Declare.main, (30, 1)),\n "++ "(FreeAndDeclared.Declare.unF, (27, 1)),\n "++ "(FreeAndDeclared.Declare.unD, (21, 1)),\n "++ "(FreeAndDeclared.Declare.tup, (16, 1)),\n "++ -- ++AZ++ not found by old "(FreeAndDeclared.Declare.h, (16, 6)),\n "++ "(FreeAndDeclared.Declare.t, (16, 8)),\n "++ "(FreeAndDeclared.Declare.d, (10, 1)),\n "++ "(FreeAndDeclared.Declare.c, (9, 1)),\n "++ "(FreeAndDeclared.Declare.toplevel, (6, 1)),\n "++ "(GHC.Types.Int, (-1, -1)), (GHC.Integer.Type.Integer, (-1, -1)),\n "++ "(FreeAndDeclared.Declare.D, (18, 6)),\n "++ "(FreeAndDeclared.Declare.A, (18, 10)),\n "++ "(FreeAndDeclared.Declare.B, (18, 14)),\n "++ "(FreeAndDeclared.Declare.C, (18, 25)),\n "++ "(FreeAndDeclared.Declare.F, (25, 6)),\n "++ "(FreeAndDeclared.Declare.G, (25, 10)),\n "++ "(FreeAndDeclared.Declare.:|, (25, 14))]" -- --------------------------------- it "finds free and declared in a single bind PrefixCon" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/Declare.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let comp = do let b = head $ drop 4 $ hsBinds renamed rg <- hsFreeAndDeclaredPNs [b] return (b,rg) -- ((bb,resg),_s) 
<- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((bb,resg),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual bb) `shouldBe` "FreeAndDeclared.Declare.unD (FreeAndDeclared.Declare.B y) = y" -- (SYB.showData SYB.Renamer 0 bb) `shouldBe` "" -- GHC version -- Free Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (fst resg)) `shouldBe` "[(FreeAndDeclared.Declare.B, (18, 14))]" -- Declared Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (snd resg)) `shouldBe` "[(FreeAndDeclared.Declare.unD, (21, 1))]" -- --------------------------------- it "finds free and declared in a single bind InfixCon" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/Declare.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let comp = do let b = head $ drop 3 $ hsBinds renamed rg <- hsFreeAndDeclaredPNs [b] return (b,rg) -- ((bb,resg),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((bb,resg),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual bb) `shouldBe` "FreeAndDeclared.Declare.unF (a FreeAndDeclared.Declare.:| b)\n = (a, b)" -- (SYB.showData SYB.Renamer 0 bb) `shouldBe` "" -- GHC version -- Free Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (fst resg)) `shouldBe` "[(FreeAndDeclared.Declare.:|, (25, 14))]" -- Declared Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (snd resg)) `shouldBe` "[(FreeAndDeclared.Declare.unF, (27, 1))]" -- --------------------------------- it "finds free and declared in a single bind RecCon" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/DeclareRec.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let comp = do let b = head $ drop 0 $ hsBinds renamed rg <- hsFreeAndDeclaredPNs [b] return (b,rg) -- ((bb,resg),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((bb,resg),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual bb) `shouldBe` "FreeAndDeclared.DeclareRec.unR2\n (FreeAndDeclared.DeclareRec.RCon {FreeAndDeclared.DeclareRec.r1 = a})\n = a" -- (SYB.showData SYB.Renamer 0 bb) `shouldBe` "" -- GHC version -- Free Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (fst resg)) `shouldBe` "[(FreeAndDeclared.DeclareRec.RCon, (3, 10))]" -- Declared Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (snd resg)) `shouldBe` "[(FreeAndDeclared.DeclareRec.unR2, (7, 1))]" -- ----------------------------------------------------------------- it "hsFreeAndDeclaredPNs simplest" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/DeclareS.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let comp = do r <- hsFreeAndDeclaredPNs renamed return r -- ((res),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((res),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- Free Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (fst res)) `shouldBe` "[]" -- Declared Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (snd res)) `shouldBe` "[(FreeAndDeclared.DeclareS.c, (6, 1))]" -- ----------------------------------------------------------------- it "finds free and declared in a 
single bind #2" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just tup = getName "DupDef.Dd1.ff" renamed let [decl] = definingDeclsNames [tup] (hsBinds renamed) False False let comp = do -- let r = hsFreeAndDeclaredPNs decl r <- hsFreeAndDeclaredPNs [decl] return (r,decl) -- ((res,d),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((res,d),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual d) `shouldBe` "DupDef.Dd1.ff y\n = y GHC.Num.+ zz\n where\n zz = 1" -- (SYB.showData SYB.Renamer 0 d) `shouldBe` "" -- Free Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (fst res)) `shouldBe` "[(GHC.Num.+, (-1, -1))]" -- Declared Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (snd res)) `shouldBe` "[(DupDef.Dd1.ff, (21, 1))]" -- ----------------------------------------------------------------- it "finds free and declared at the top level 1" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./LiftToToplevel/WhereIn1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let comp = do -- r <- hsFreeAndDeclaredPNs renamed r <- hsFreeAndDeclaredPNs $ hsBinds renamed return r -- ((res),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((res),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- Declared Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (snd res)) `shouldBe` "[(LiftToToplevel.WhereIn1.anotherFun, (15, 1)),\n "++ "(LiftToToplevel.WhereIn1.sumSquares, (9, 1))]" -- Free Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (fst res)) `shouldBe` "[(GHC.Real.^, (-1, -1)), (GHC.Num.+, (-1, -1))]" -- ----------------------------------------------------------------- it "finds free and declared at the top level 2" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./Renaming/IdIn3.hs" let renamed = fromJust $ GHC.tm_renamed_source t let comp = do r <- hsFreeAndDeclaredPNs renamed -- r <- hsFreeAndDeclaredPNs $ hsBinds renamed return r -- ((res),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((res),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- Declared Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (snd res)) `shouldBe` "[(IdIn3.bar, (14, 1))"++ ", (IdIn3.x, (10, 1))"++ ", (IdIn3.foo, (12, 1)),\n "++ "(IdIn3.main, (18, 1))]" -- Free Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (fst res)) `shouldBe` "[(GHC.Num.+, (-1, -1))]" -- ----------------------------------------------------------------- it "finds free and declared in a GRHSs" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./LiftOneLevel/LetIn2.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just tup = getName "LiftOneLevel.LetIn2.sumSquares" renamed let [decl] = definingDeclsNames [tup] (hsBinds renamed) False False let (GHC.L _ (GHC.FunBind _ _ (GHC.MG [match] _ _ _) _ _ _)) = decl let (GHC.L _ (GHC.Match _ _pat _ grhss)) = match -- (SYB.showData SYB.Renamer 0 grhss) `shouldBe` "" let comp = do r <- hsFreeAndDeclaredPNs grhss -- r <- hsFreeAndDeclaredPNs $ hsBinds renamed return r -- ((res),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((res),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- Declared Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (snd res)) 
`shouldBe` "[]" -- Free Vars (showGhcQual $ map (\n -> (n, getGhcLoc $ GHC.nameSrcSpan n)) (fst res)) `shouldBe` "[(GHC.Real.^, (-1, -1)), (x, (10, 12)), (GHC.Num.+, (-1, -1)),\n (y, (10, 14))]" -- ----------------------------------------------------------------- it "finds free and declared in a single bind" $ do pending -- "fix the prior test" -- --------------------------------------------------------------------- describe "hsFDsFromInsideRdr" $ do it "does something useful" $ do pending -- "Complete this" describe "hsFDsFromInside" $ do it "does something useful" $ do pending -- "Complete this" describe "hsFDNamesFromInside" $ do it "does something useful" $ do pending -- "Complete this" -- --------------------------------------------------------------------- describe "hsVisibleNames" $ do it "does something useful" $ do pending -- "Complete this" -- --------------------------------------------------------------------- describe "hsVisiblePNs" $ do -- --------------------------------- it "returns [] if e does not occur in t" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just tl1 = locToExp (4,13) (4,40) renamed :: (Maybe (GHC.Located (GHC.HsExpr GHC.Name))) let Just tup = getName "DupDef.Dd1.tup" renamed -- let [decl] = definingDeclsNames [tup] (hsBinds renamed) False False let comp = do r <- hsVisiblePNs tup tl1 return r -- ((res),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((res),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual $ res) `shouldBe` "[]" -- ----------------------------------------------------------------- it "returns visible vars if e does occur in t #1" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just tl1 = locToExp (28,4) (28,12) renamed :: (Maybe (GHC.Located (GHC.HsExpr GHC.Name))) (showGhcQual tl1) `shouldBe` "ll GHC.Num.+ z" -- (SYB.showData SYB.Renamer 0 tl1) `shouldBe` "" let Just tup = getName "DupDef.Dd1.l" renamed let [decl] = definingDeclsNames [tup] (hsBinds renamed) False False (showGhcQual decl) `shouldBe` "DupDef.Dd1.l z = let ll = 34 in ll GHC.Num.+ z" -- (SYB.showData SYB.Renamer 0 decl) `shouldBe` "" let comp = do r <- hsVisiblePNs tl1 decl -- let r2 = hsVisiblePNsOld tl1 decl return (r) -- ((res),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((res),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual res ) `shouldBe` "[z, ll]" -- (showGhcQual res2 ) `shouldBe` "[z, ll]" -- ----------------------------------------------------------------- it "returns visible vars if e does occur in t #2" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just tl1 = locToExp (28,4) (28,12) renamed :: (Maybe (GHC.Located (GHC.HsExpr GHC.Name))) (showGhcQual tl1) `shouldBe` "ll GHC.Num.+ z" let Just rhs = locToExp (26,1) (28,12) renamed :: (Maybe (GHC.Located (GHC.HsExpr GHC.Name))) (showGhcQual rhs) `shouldBe` "let ll = 34 in ll GHC.Num.+ z" let comp = do r <- hsVisiblePNs tl1 rhs return r -- ((res),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((res),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual res) `shouldBe` "[ll]" -- 
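-- The commented-out case that follows ("returns visible vars if e does occur
-- in t #3") ran hsVisiblePNs over the whole RenamedSource of
-- TypeUtils/VisiblePNs.hs; it is kept only as a reference and is not executed.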
----------------------------------------------------------------- {- it "returns visible vars if e does occur in t #3" $ do (t,toks) <- ct $ parsedFileGhc "./TypeUtils/VisiblePNs.hs" let renamed = fromJust $ GHC.tm_renamed_source t (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let comp = do renamed <- getRefactRenamed let Just tl1 = locToName (41,11) renamed -- :: (Maybe (GHC.Located (GHC.HsExpr GHC.Name))) r <- hsVisiblePNs tl1 renamed -- let r = hsVisiblePNsGhc tl1 renamed let fvs = map (\b -> (showGhcQual b,getFreeVars [b])) (hsBinds renamed) let dvs = getDeclaredVars $ hsBinds renamed return (tl1,r,fvs,dvs) -- ((tl,res,_f,d),_s) <- runRefactGhc comp $ initialState { rsModule = initRefactModule t } ((tl,res,_f,d),_s) <- runRefactGhc comp $ initialLogOnState { rsModule = initRefactModule t } (showGhcQual tl) `shouldBe` "modu" -- (showGhcQual f) `shouldBe` "" (showGhcQual d) `shouldBe` "[TypeUtils.VisiblePNs.parsedFileGhc,\n"++ " TypeUtils.VisiblePNs.parsedFileBGhc,\n"++ " TypeUtils.VisiblePNs.runRefactGhcState,"++ " TypeUtils.VisiblePNs.zz,\n"++ " TypeUtils.VisiblePNs.yy,"++ " TypeUtils.VisiblePNs.xx,\n"++ " TypeUtils.VisiblePNs.ww,"++ " TypeUtils.VisiblePNs.spec,\n"++ " TypeUtils.VisiblePNs.main]" (showGhcQual res) `shouldBe` "[TypeUtils.VisiblePNs.parsedFileGhc,\n"++ " TypeUtils.VisiblePNs.parsedFileBGhc,\n"++ " TypeUtils.VisiblePNs.runRefactGhcState,"++ " TypeUtils.VisiblePNs.zz,\n"++ " TypeUtils.VisiblePNs.yy,"++ " TypeUtils.VisiblePNs.xx,\n"++ " TypeUtils.VisiblePNs.ww,"++ " TypeUtils.VisiblePNs.spec,\n"++ " TypeUtils.VisiblePNs.main,"++ " modu,"++ " t,"++ " _toks,"++ " expr]" -} -- --------------------------------------------------------------------- describe "hsVisibleDsRdr" $ do it "Rdr:finds function arguments visible in RHS 1" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./Visible/Simple.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just e = locToExp (5,11) (5,19) parsed :: (Maybe (GHC.LHsExpr GHC.RdrName)) (showGhcQual e) `shouldBe` "a + b" -- (SYB.showData SYB.Renamer 0 e) `shouldBe` "" let Just n = getName "Visible.Simple.params" renamed let comp = do nameMap <- getRefactNameMap declsp <- liftT $ hsDecls parsed let [decl] = definingDeclsRdrNames nameMap [n] declsp False False fds' <- hsVisibleDsRdr nameMap e decl return (fds') -- ((fds),_s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions ((fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show fds) `shouldBe` "DN [a, b, GHC.Num.+]" -- ----------------------------------- it "Rdr:finds function arguments visible in RHS 2" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./Visible/Simple.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just e = locToExp (9,15) (9,17) parsed :: (Maybe (GHC.LHsExpr GHC.RdrName)) (showGhcQual e) `shouldBe` "x" let Just n = getName "Visible.Simple.param2" renamed let comp = do nameMap <- getRefactNameMap declsp <- liftT $ hsDecls parsed let [decl] = definingDeclsRdrNames nameMap [n] declsp False False fds' <- hsVisibleDsRdr nameMap e decl return (fds') -- ((fds),_s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions ((fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show fds) `shouldBe` "DN 
[x]" -- ----------------------------------- it "Rdr:finds visible vars inside a function" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./Renaming/IdIn5.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" -- putStrLn $ "parsed:" ++ SYB.showData SYB.Parser 0 parsed let Just rhsr = locToExp (14,6) (15,14) renamed :: (Maybe (GHC.LHsExpr GHC.Name)) let Just rhs = locToExp (14,6) (15,14) parsed :: (Maybe (GHC.LHsExpr GHC.RdrName)) (showGhcQual rhs) `shouldBe` "x + y + z" -- let Just er = getName "IdIn5.x" renamed let Just e = locToRdrName (10,1) parsed let comp = do nameMap <- getRefactNameMap fds' <- hsVisibleDsRdr nameMap e rhs ffds <- hsFreeAndDeclaredGhc rhsr return (fds',ffds) -- ((fds,_fds),_s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions ((fds,_fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show fds) `shouldBe` "DN [GHC.Num.+, y, z]" (show _fds) `shouldBe` "(FN [IdIn5.x, GHC.Num.+, y, z],DN [])" -- ----------------------------------- it "Rdr:finds visible vars inside a data declaration" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./Renaming/D1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" -- let Just (GHC.L _ n) = locToName (6, 6) renamed -- (showGhcQual n) `shouldBe` "Renaming.D1.Tree" let Just ln@(GHC.L _ n) = locToRdrName (6, 6) parsed (showGhcQual n) `shouldBe` "Tree" let comp = do nameMap <- getRefactNameMap fds' <- hsVisibleDsRdr nameMap ln parsed ffds <- hsFreeAndDeclaredGhc renamed return (fds',ffds) ((fds,_fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show _fds) `shouldBe` "(FN [:, GHC.Num.+, GHC.Real.^, [], Renaming.D1.Leaf,\n" ++" Renaming.D1.Branch, GHC.Base.++],DN [Renaming.D1.sumSquares," ++" Renaming.D1.fringe, Renaming.D1.Tree, a,\n" ++" Renaming.D1.SameOrNot, Renaming.D1.isSame, Renaming.D1.isNotSame])" (show fds) `shouldBe` "DN [Renaming.D1.Tree]" -- --------------------------------------------------------------------- describe "hsVisibleDs" $ do it "finds function arguments visible in RHS 1" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./Visible/Simple.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just e = locToExp (5,11) (5,19) renamed :: (Maybe (GHC.LHsExpr GHC.Name)) (showGhcQual e) `shouldBe` "a GHC.Num.+ b" let Just n = getName "Visible.Simple.params" renamed let [decl] = definingDeclsNames [n] (hsBinds renamed) False False let binds = hsValBinds [decl] -- (SYB.showData SYB.Renamer 0 $ head $ hsBinds binds) `shouldBe` "" let comp = do fds' <- hsVisibleDs e $ head $ hsBinds binds -- let fds'o = hsVisiblePNsOld e $ head $ hsBinds binds return (fds') ((fds),_s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions -- ((fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show fds) `shouldBe` "DN [a, b, GHC.Num.+]" -- ----------------------------------- it "finds function arguments visible in RHS 2" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./Visible/Simple.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just e = locToExp (9,15) (9,17) renamed :: (Maybe (GHC.LHsExpr GHC.Name)) (showGhcQual e) 
`shouldBe` "x" let Just n = getName "Visible.Simple.param2" renamed let [decl] = definingDeclsNames [n] (hsBinds renamed) False False let binds = hsValBinds [decl] let comp = do fds' <- hsVisibleDs e $ head $ hsBinds binds return (fds') -- ((fds),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show fds) `shouldBe` "DN [x]" -- ----------------------------------- it "finds visible vars inside a function" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./Renaming/IdIn5.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just rhs = locToExp (14,6) (15,14) renamed :: (Maybe (GHC.LHsExpr GHC.Name)) (showGhcQual rhs) `shouldBe` "IdIn5.x GHC.Num.+ y GHC.Num.+ z" -- (SYB.showData SYB.Renamer 0 rhs) `shouldBe` "" let Just e = getName "IdIn5.x" renamed let comp = do fds' <- hsVisibleDs e rhs ffds <- hsFreeAndDeclaredGhc rhs return (fds',ffds) -- ((fds,_fds),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((fds,_fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show _fds) `shouldBe` "(FN [IdIn5.x, GHC.Num.+, y, z],DN [])" (show fds) `shouldBe` "DN [GHC.Num.+, y, z]" -- ----------------------------------- it "finds visible vars inside a data declaration" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./Renaming/D1.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just (GHC.L _ n) = locToName (6, 6) renamed (showGhcQual n) `shouldBe` "Renaming.D1.Tree" let comp = do fds' <- hsVisibleDs n renamed ffds <- hsFreeAndDeclaredGhc renamed return (fds',ffds) ((fds,_fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show _fds) `shouldBe` "(FN [:, GHC.Num.+, GHC.Real.^, [], Renaming.D1.Leaf,\n" ++" Renaming.D1.Branch, GHC.Base.++],DN [Renaming.D1.sumSquares," ++" Renaming.D1.fringe, Renaming.D1.Tree, a,\n" ++" Renaming.D1.SameOrNot, Renaming.D1.isSame, Renaming.D1.isNotSame])" (show fds) `shouldBe` "DN [Renaming.D1.Tree]" -- --------------------------------------------------------------------- describe "hsFreeAndDeclaredGhc" $ do it "finds function arguments visible in RHS fd" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./Visible/Simple.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just e = locToExp (5,11) (5,19) renamed :: (Maybe (GHC.Located (GHC.HsExpr GHC.Name))) (showGhcQual e) `shouldBe` "a GHC.Num.+ b" let Just n = getName "Visible.Simple.params" renamed let [decl] = definingDeclsNames [n] (hsBinds renamed) False False let binds = hsValBinds [decl] let comp = do fds' <- hsFreeAndDeclaredGhc $ head $ hsBinds binds return (fds') -- ((fds),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show fds) `shouldBe` "(FN [GHC.Num.+],DN [Visible.Simple.params])" -- ----------------------------------- it "finds function arguments and free vars visible in RHS" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./Visible/Simple.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just e = locToExp (9,15) (9,17) renamed :: (Maybe (GHC.LHsExpr GHC.Name)) (showGhcQual e) `shouldBe` "x" let Just n = getName "Visible.Simple.param2" renamed 
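-- Locate the single defining FunBind for 'param2', then take the first
-- pattern of its only Match so hsFreeAndDeclaredGhc can be applied to that
-- LPat in isolation.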
let [decl] = definingDeclsNames [n] (hsBinds renamed) False False let binds = hsValBinds [decl] let (GHC.L _ (GHC.FunBind _ _ (GHC.MG matches _ _ _) _ _ _)) = head $ hsBinds binds let [(GHC.L _ (GHC.Match _ pats _ _))] = matches let lpat = head pats let comp = do fds' <- hsFreeAndDeclaredGhc $ lpat return (fds') -- ((fds),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show fds) `shouldBe` "(FN [Visible.Simple.B],DN [x])" -- ----------------------------------- it "finds imported functions used in the rhs" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/Declare.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just n = getName "FreeAndDeclared.Declare.tup" renamed let decls = definingDeclsNames [n] (hsBinds renamed) False False -- (SYB.showData SYB.Renamer 0 decls) `shouldBe` "" let comp = do fds' <- hsFreeAndDeclaredGhc $ decls return (fds') -- ((fds),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show fds) `shouldBe` "(FN [GHC.List.head, GHC.Base.$, GHC.List.zip],"++ "DN [FreeAndDeclared.Declare.tup, FreeAndDeclared.Declare.h,\n "++ "FreeAndDeclared.Declare.t])" -- ----------------------------------- it "finds free vars in HsWithBndrs" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/Binders.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just n = getName "FreeAndDeclared.Binders.findNewPName" renamed let [decl] = definingDeclsNames [n] (hsBinds renamed) False False let (GHC.L _ (GHC.FunBind _ _ (GHC.MG [match] _ _ _) _ _ _)) = decl let (GHC.L _ (GHC.Match _ _pats _rhs binds)) = match -- (SYB.showData SYB.Renamer 0 binds) `shouldBe` "" -- (showGhcQual binds) `shouldBe` "" let comp = do fds' <- hsFreeAndDeclaredGhc $ binds return (fds') -- ((fds),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show fds) `shouldBe` "(FN [FreeAndDeclared.Binders.gfromJust,"++ " FreeAndDeclared.Binders.Name,\n"++ " FreeAndDeclared.Binders.occNameString,"++ " GHC.Base.$,\n"++ " FreeAndDeclared.Binders.getOccName,"++ " GHC.Classes.==,"++ " name,\n"++ " Data.Maybe.Just,"++ " Data.Maybe.Nothing,\n"++ " FreeAndDeclared.Binders.somethingStaged,\n"++ " FreeAndDeclared.Binders.Renamer, renamed],"++ "DN [worker, res])" -- ----------------------------------- it "finds free vars in TH files" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./TH/Main.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" -- (SYB.showData SYB.Renamer 0 binds) `shouldBe` "" -- (showGhcQual binds) `shouldBe` "" let comp = do fds' <- hsFreeAndDeclaredGhc renamed return (fds') -- ((fds),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((fds),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (show fds) `shouldBe` "(FN [System.IO.putStrLn, TH.Printf.pr],"++ "DN [TH.Main.baz, TH.Main.sillyString, TH.Main.main])" -- --------------------------------------------- describe "getParsedForRenamedLPat" $ do it "gets the ParsedSource version of a RenamedSource LPat" $ do (t,_toks,_) <- ct $ parsedFileGhc 
"./Visible/Simple.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just e = locToExp (9,15) (9,17) renamed :: (Maybe (GHC.LHsExpr GHC.Name)) (showGhcQual e) `shouldBe` "x" let Just n = getName "Visible.Simple.param2" renamed let [decl] = definingDeclsNames [n] (hsBinds renamed) False False let binds = hsValBinds [decl] let (GHC.L _ (GHC.FunBind _ _ (GHC.MG matches _ _ _) _ _ _)) = head $ hsBinds binds let [(GHC.L _ (GHC.Match _ pats _ _))] = matches let pat@(GHC.L lp _) = head pats let pat' = getParsedForRenamedLPat parsed pat let (GHC.L lp' _) = pat' lp `shouldBe` lp' (SYB.showData SYB.Renamer 0 pat') `shouldBe` "\n"++ "(L {Visible/Simple.hs:9:8-12} \n"++ " (ParPat \n"++ " (L {Visible/Simple.hs:9:9-11} \n"++ " (ConPatIn \n"++ " (L {Visible/Simple.hs:9:9} \n"++ " (Unqual {OccName: B})) \n"++ " (PrefixCon \n"++ " [\n"++ " (L {Visible/Simple.hs:9:11} \n"++ " (VarPat \n"++ " (Unqual {OccName: x})))])))))" (SYB.showData SYB.Renamer 0 pat) `shouldBe` "\n"++ "(L {Visible/Simple.hs:9:8-12} \n"++ " (ParPat \n"++ " (L {Visible/Simple.hs:9:9-11} \n"++ " (ConPatIn \n"++ " (L {Visible/Simple.hs:9:9} {Name: B}) \n"++ " (PrefixCon \n"++ " [\n"++ " (L {Visible/Simple.hs:9:11} \n"++ " (VarPat {Name: x}))])))))" -- --------------------------------------------- describe "isLocalPN" $ do it "returns True if the name is local to the module" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ n) = locToName (17, 5) renamed (showGhcQual n) `shouldBe` "ff" isLocalPN n `shouldBe` True it "returns False if the name is not local to the module" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ n) = locToName (21, 1) renamed (showGhcQual n) `shouldBe` "DupDef.Dd1.ff" isLocalPN n `shouldBe` False -- --------------------------------------------- describe "isTopLevelPN" $ do it "returns False if the name is not defined at the top level of the module" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do renamed <- getRefactRenamed let Just (GHC.L _ n) = locToName (17, 5) renamed topLevel <- isTopLevelPN n return (n,topLevel) -- ((nf,tl),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((nf,tl),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual nf) `shouldBe` "ff" tl `shouldBe` False it "returns True if the name is defined at the top level of the module" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do renamed <- getRefactRenamed let Just (GHC.L _ n) = locToName (21, 1) renamed topLevel <- isTopLevelPN n return (n,topLevel) -- ((nf,tl),_s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((nf,tl),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual nf) `shouldBe` "DupDef.Dd1.ff" tl `shouldBe` True -- --------------------------------------------- describe "definedPNs" $ do it "gets the PNs defined in a declaration" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ pn) = locToName (3, 1) renamed (showGhcQual pn) `shouldBe` "DupDef.Dd1.toplevel" let origDecls = hsBinds renamed let demotedDecls'= definingDeclsNames [pn] origDecls True False let declaredPns = nub $ concatMap definedPNs 
demotedDecls' (showGhcQual declaredPns) `shouldBe` "[DupDef.Dd1.toplevel]" -- --------------------------------- it "gets the PNs defined in an as-match" $ do (t, _toks,_) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ pn) = locToName (14, 1) renamed (showGhcQual pn) `shouldBe` "DupDef.Dd1.tup" let origDecls = hsBinds renamed let demotedDecls'= definingDeclsNames [pn] origDecls True False let declaredPns = nub $ concatMap definedPNs demotedDecls' (showGhcQual declaredPns) `shouldBe` "[DupDef.Dd1.tup, DupDef.Dd1.h, DupDef.Dd1.t]" -- --------------------------------------------- describe "inScopeInfo" $ do it "returns 4 element tuples for in scope names" $ do pending -- "is this still needed?" {- ((inscopes, _renamed, _parsed), _toks) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let info = inScopeInfo inscopes (show $ head info) `shouldBe` "foo" -- (show $ info) `shouldBe` "foo" -} -- --------------------------------------------- describe "isInScopeAndUnqualified" $ do it "True if the identifier is in scope and unqualified" $ do pending -- "needed?" {- ((inscopes, _renamed, _parsed), _toks) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let info = inScopeInfo inscopes (show $ head info) `shouldBe` "foo" -} -- inScopeInfo for c is -- (\"DupDef.Dd1.c\",VarName,DupDef.Dd1,Nothing) -- --------------------------------------------- describe "isInScopeAndUnqualifiedGhc" $ do it "True if the identifier is in scope and unqualified" $ do (t,_toks,tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do ctx <- GHC.getContext res1 <- isInScopeAndUnqualifiedGhc "c" Nothing res2 <- isInScopeAndUnqualifiedGhc "DupDef.Dd1.c" Nothing res3 <- isInScopeAndUnqualifiedGhc "nonexistent" Nothing return (res1,res2,res3,ctx) -- ((r1,r2,r3,_c),_s) <- ct $ runRefactGhcState comp tgt ((r1,r2,r3,_c),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (showGhcQual c) `shouldBe` "[*DupDef.Dd1]" r1 `shouldBe` True r2 `shouldBe` True r3 `shouldBe` False -- --------------------------------- it "Requires qualification on name clash with an import" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./ScopeAndQual.hs" let comp = do -- (t,toks) <- parseSourceFileTest "./ScopeAndQual.hs" putParsedModule t renamed <- getRefactRenamed logm $ "renamed=" ++ (SYB.showData SYB.Renamer 0 renamed) -- ++AZ++ ctx <- GHC.getContext let Just sumSquares = locToName (13,15) renamed ssUnqual <- isQualifiedPN $ GHC.unLoc sumSquares names <- GHC.parseName "sum" names2 <- GHC.parseName "mySumSq" res1 <- isInScopeAndUnqualifiedGhc "sum" Nothing res2 <- isInScopeAndUnqualifiedGhc "L.sum" Nothing return (res1,res2,names,names2,sumSquares,ssUnqual,ctx) -- ((r1,r2,ns,ns2,ss,ssu,c),_s) <- runRefactGhcStateLog comp True -- ((r1,r2,ns,ns2,ss,ssu,_c),_s) <- ct $ runRefactGhcState comp ((r1,r2,ns,ns2,ss,ssu,_c),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (showGhcQual c) `shouldBe` "[*ScopeAndQual]" (prettyprint ss) `shouldBe` "sumSquares" (showGhcQual ss) `shouldBe` "ScopeAndQual.sumSquares" (show $ ssu) `shouldBe` "False" (showGhcQual ns) `shouldBe` "[ScopeAndQual.sum]" (showGhcQual ns2) `shouldBe` "[ScopeAndQual.mySumSq]" "1" ++ (show r1) `shouldBe` "1True" "2" ++ (show r2) `shouldBe` "2True" -- --------------------------------------------- describe "mkNewGhcName" $ do it "Creates a new GHC.Name" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do renamed <- getRefactRenamed let Just (GHC.L _ topLevel) = 
locToName (3,1) renamed name1 <- mkNewGhcName Nothing "foo" name2 <- mkNewGhcName Nothing "bar" name3 <- mkNewGhcName (Just (GHC.nameModule topLevel)) "baz" return (name1,name2,name3) -- ((n1,n2,n3),_s) <- ct $ runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((n1,n2,n3),_s) <- ct $ runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions GHC.getOccString n1 `shouldBe` "foo" showGhcQual n1 `shouldBe` "foo" GHC.getOccString n2 `shouldBe` "bar" showGhcQual n2 `shouldBe` "bar" (showGhcQual $ GHC.nameModule n3) `shouldBe` "main@main:DupDef.Dd1" (SYB.showData SYB.Renamer 0 n3) `shouldBe` "{Name: baz}" GHC.getOccString n3 `shouldBe` "baz" showGhcQual n3 `shouldBe` "DupDef.Dd1.baz" (showGhcQual $ GHC.nameUnique n1) `shouldBe` "H2" (showGhcQual $ GHC.nameUnique n2) `shouldBe` "H3" (showGhcQual $ GHC.nameUnique n3) `shouldBe` "H4" -- --------------------------------------------- describe "prettyprint" $ do it "Prints a GHC.Name ready for parsing into tokens" $ do -- (_t, _toks, tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do name1 <- mkNewGhcName Nothing "foo" name2 <- mkNewGhcName Nothing "bar" return (name1,name2) ((n1,n2),_s) <- ct $ runRefactGhcState comp "./DupDef/Dd1.hs" GHC.getOccString n1 `shouldBe` "foo" showGhcQual n1 `shouldBe` "foo" GHC.getOccString n2 `shouldBe` "bar" showGhcQual n2 `shouldBe` "bar" -- prettyprint n1 `shouldBe` "foo" showGhcQual n1 `shouldBe` "foo" -- --------------------------------------------- describe "duplicateDecl" $ do it "duplicates a RenamedSource bind, and updates the token stream" $ do (t, toks,tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- let declsr = hsBinds renamed let Just (GHC.L _ n) = locToName (3, 1) renamed let comp = do parsed <- getRefactParsed declsp <- liftT $ hsDecls parsed newName2 <- mkNewGhcName Nothing "bar2" newBinding <- duplicateDecl declsp renamed n newName2 return newBinding -- (nb,s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } (nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "DupDef.Dd1.toplevel" (GHC.showRichTokenStream $ toks) `shouldBe` "module DupDef.Dd1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\n" (sourceFromState s) `shouldBe` "module DupDef.Dd1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nbar2 :: Integer -> Integer\nbar2 x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\n" (showGhcQual nb) `shouldBe` "[bar2 x = DupDef.Dd1.c GHC.Num.* x]" -- --------------------------------------------- it "duplicates a bind with a signature, and an offset" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- let declsr = hsBinds renamed let Just (GHC.L _l n) = locToName (17, 6) renamed (showGhcQual n) `shouldBe` "ff" let comp = do parsed <- getRefactParsed declsp <- liftT $ hsDecls 
parsed newName2 <- mkNewGhcName Nothing "gg" nm <- getRefactNameMap let declsToDup = definingDeclsRdrNames nm [n] declsp True True funBinding = filter isFunOrPatBindP declsToDup --get the fun binding. newBinding <- duplicateDecl declsToDup parsed n newName2 -- return newBinding return (funBinding,declsToDup,newBinding) -- ((fb,dd,newb),s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((fb,dd,newb),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "ff" (showGhcQual dd) `shouldBe` "[ff = 15]" (showGhcQual fb) `shouldBe` "[ff = 15]" (show $ getStartEndLoc fb) `shouldBe` "((17,5),(17,12))" (GHC.showRichTokenStream $ toks) `shouldBe` "module DupDef.Dd1 where\n\n toplevel :: Integer -> Integer\n toplevel x = c * x\n\n c,d :: Integer\n c = 7\n d = 9\n\n -- Pattern bind\n tup :: (Int, Int)\n h :: Int\n t :: Int\n tup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\n data D = A | B String | C\n\n ff y = y + zz\n where\n zz = 1\n\n l z =\n let\n ll = 34\n in ll + z\n\n dd q = do\n let ss = 5\n return (ss + q)\n\n " (sourceFromState s) `shouldBe` "module DupDef.Dd1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\n gg :: Int\n gg = 15\n\ndata D = A | B String | C\n\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\n" (showGhcQual newb) `shouldBe` "[gg = 15]" (showGhcQual fb) `shouldBe` "[ff = 15]" -- --------------------------------------------- describe "addParamsToDecl" $ do it "adds parameters to a declaration, and updates the token stream" $ do (t, toks,tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- let declsr = hsBinds renamed let Just (GHC.L _ n) = locToName (3, 1) renamed let comp = do declsp <- liftT $ hsDecls parsed -- newName2 <- mkNewGhcName Nothing "bar2" let newName2 = mkRdrName "bar2" declsp' <- addParamsToDecls declsp n [newName2] True parsed' <- liftT $ replaceDecls parsed declsp' putRefactParsed parsed' emptyAnns return declsp' -- (nb,s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } (nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "MoveDef.Md1.toplevel" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" -- (showToks $ take 20 $ toksFromState s) `shouldBe` "" (sourceFromState s) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel bar2 x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 
1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (showGhcQual $ head $ tail nb) `shouldBe` "toplevel bar2 x = c * x" -- --------------------------------- it "adds parameters to a declaration with multiple matches" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./AddParams1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- let declsr = hsBinds renamed let Just (GHC.L _ n) = locToName (3, 1) renamed let comp = do declsp <- liftT $ hsDecls parsed let newName = mkRdrName "pow" declsp' <- addParamsToDecls declsp n [newName] True parsed' <- liftT $ replaceDecls parsed declsp' putRefactParsed parsed' emptyAnns return declsp' (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (_nb,s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions -- putStrLn $ "anntree\n" ++ showAnnDataFromState s -- putStrLn $ "_nb\n" ++ showAnnDataItemFromState s _nb (showGhcQual n) `shouldBe` "AddParams1.sq" (GHC.showRichTokenStream $ toks) `shouldBe` "module AddParams1 where\n\nsq 0 = 0\nsq z = z^2\n\nfoo = 3\n\n" (sourceFromState s) `shouldBe` "module AddParams1 where\n\nsq pow 0 = 0\nsq pow z = z^2\n\nfoo = 3\n\n" -- (showGhcQual $ last $ init nb) `shouldBe` "" -- --------------------------------- it "adds parameters to a declaration with no existing ones" $ do (t, toks,tgt) <- ct $ parsedFileGhc "./AddParams1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" -- let declsr = hsBinds renamed let Just (GHC.L _ n) = locToName (6, 1) renamed let comp = do declsp <- liftT $ hsDecls parsed let newName1 = mkRdrName "baz" let newName2 = mkRdrName "bar" declsp' <- addParamsToDecls declsp n [newName1,newName2] True parsed' <- liftT $ replaceDecls parsed declsp' putRefactParsed parsed' emptyAnns return declsp' -- (_nb,s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "AddParams1.foo" (GHC.showRichTokenStream $ toks) `shouldBe` "module AddParams1 where\n\nsq 0 = 0\nsq z = z^2\n\nfoo = 3\n\n" -- (showToks $ take 20 $ toksFromState s) `shouldBe` "" (sourceFromState s) `shouldBe` "module AddParams1 where\n\nsq 0 = 0\nsq z = z^2\n\nfoo baz bar = 3\n\n" -- (showGhcQual $ last $ init nb) `shouldBe` "" -- --------------------------------------------- describe "addActualParamsToRhs" $ do it "adds a parameter to the rhs of a declaration, and updates the token stream" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./LiftToToplevel/D1.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let declsr = hsBinds renamed -- let decl@(GHC.L _ (GHC.FunBind _ _ (GHC.MatchGroup [GHC.L _ (GHC.Match _ _ rhs) ] _) _ _ _)) = head declsr let decl = head declsr (showGhcQual decl) `shouldBe` "LiftToToplevel.D1.sumSquares (x : xs)\n = sq x GHC.Num.+ LiftToToplevel.D1.sumSquares xs\n where\n sq x = x GHC.Real.^ pow\n pow = 2\nLiftToToplevel.D1.sumSquares [] = 0" -- (SYB.showData SYB.Renamer 0 rhs) `shouldBe` "" let Just (GHC.L _ n) = locToName (6, 21) renamed let comp = do let newName2 = mkRdrName "bar2" newBinding <- 
addActualParamsToRhs True n [newName2] decl return newBinding -- (nb,s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } (nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "sq" -- (showToks $ take 20 $ toksFromState s) `shouldBe` "" (showGhcQual nb) `shouldBe` "LiftToToplevel.D1.sumSquares (x : xs)\n = (sq bar2) x GHC.Num.+ LiftToToplevel.D1.sumSquares xs\n where\n sq x = x GHC.Real.^ pow\n pow = 2\nLiftToToplevel.D1.sumSquares [] = 0" (sourceFromState s) `shouldBe` "module LiftToToplevel.D1 where\n\n{-lift 'sq' to top level. This refactoring\n affects module 'D1' and 'C1' -}\n\nsumSquares (x:xs) = (sq bar2)x + sumSquares xs\n where\n sq x = x ^ pow\n pow =2\n\nsumSquares [] = 0\n\nmain = sumSquares [1..4]\n\n\n" -- -------------------- it "adds parameters to a complex rhs of a declaration, and updates the token stream" $ do (t, _toks,tgt) <- ct $ parsedFileGhc "./LiftToToplevel/WhereIn7.hs" let renamed = fromJust $ GHC.tm_renamed_source t -- let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let declsr = hsBinds renamed -- let decl@(GHC.L _ (GHC.FunBind _ _ (GHC.MatchGroup [GHC.L _ (GHC.Match _ _ rhs) ] _) _ _ _)) = head declsr let decl = head declsr (showGhcQual decl) `shouldBe` "LiftToToplevel.WhereIn7.fun x y z\n = inc addthree\n where\n inc a = a GHC.Num.+ 1\n addthree = x GHC.Num.+ y GHC.Num.+ z" -- (SYB.showData SYB.Renamer 0 rhs) `shouldBe` "" let Just (GHC.L _ n) = locToName (10, 17) renamed let comp = do let newName1 = mkRdrName "x1" let newName2 = mkRdrName "y1" let newName3 = mkRdrName "z1" newBinding <- addActualParamsToRhs True n [newName1,newName2,newName3] decl return newBinding -- (nb,s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } (nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "addthree" -- (showToks $ take 20 $ toksFromState s) `shouldBe` "" (sourceFromState s) `shouldBe` "module LiftToToplevel.WhereIn7 where\n\n--A definition can be lifted from a where or let to the top level binding group.\n--Lifting a definition widens the scope of the definition.\n\n--In this example, lift 'addthree' defined in 'fun'.\n--This example aims to test adding parenthese.\n\n\nfun x y z =inc (addthree x1 y1 z1)\n where inc a =a +1\n addthree=x+y+z\n" (showGhcQual nb) `shouldBe` "LiftToToplevel.WhereIn7.fun x y z\n = inc (addthree x1 y1 z1)\n where\n inc a = a GHC.Num.+ 1\n addthree = x GHC.Num.+ y GHC.Num.+ z" -- --------------------------------------------- describe "rmDecl" $ do it "removes a top level declaration, leaving type signature" $ do (t, toks,tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- putStrLn $ SYB.showData SYB.Parser 0 parsed let Just (GHC.L _ n) = locToName (22, 1) renamed let comp = do (parsed',_removedDecl,_removedSig) <- rmDecl n False parsed putRefactParsed parsed' emptyAnns return parsed' -- (_nb,s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "MoveDef.Md1.ff" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip 
[1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (sourceFromState s) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" -- --------------------------------- it "removes a top level declaration, and type signature" $ do (t, toks,tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- putStrLn $ "parsed" ++ SYB.showData SYB.Parser 0 parsed let Just (GHC.L _ n) = locToName (22, 1) renamed let comp = do (newDecls,_removedDecl,_removedSig) <- rmDecl n True parsed putRefactParsed newDecls emptyAnns return newDecls -- (_nb,s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "MoveDef.Md1.ff" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (sourceFromState s) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" -- ----------------------------------- it "removes the last local decl in a let/in clause" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./LiftToToplevel/LetIn1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" -- let declsr = hsBinds renamed let declsp = hsBinds parsed let Just (GHC.L _ n) = locToName (11, 22) renamed let comp = do -- (newDecls,_removedDecl,_removedSig) <- rmDecl n True declsr (newDecls,_removedDecl,_removedSig) <- rmDecl n True declsp let parsed' = replaceBinds parsed newDecls putRefactParsed parsed' emptyAnns return newDecls (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (_nb,s) <- runRefactGhc comp tgt 
(initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "sq" (GHC.showRichTokenStream $ toks) `shouldBe` "module LiftToToplevel.LetIn1 where\n\n--A definition can be lifted from a where or let to the top level binding group.\n--Lifting a definition widens the scope of the definition.\n\n--In this example, lift 'sq' in 'sumSquares'\n--This example aims to test lifting a definition from a let clause to top level,\n--and the elimination of the keywords 'let' and 'in'\n\nsumSquares x y = let sq 0=0\n sq z=z^pow\n in sq x + sq y\n where pow=2\n\nanotherFun 0 y = sq y\n where sq x = x^2\n" -- (showToks $ take 20 $ toksFromState s) `shouldBe` "" (sourceFromState s) `shouldBe` "module LiftToToplevel.LetIn1 where\n\n--A definition can be lifted from a where or let to the top level binding group.\n--Lifting a definition widens the scope of the definition.\n\n--In this example, lift 'sq' in 'sumSquares'\n--This example aims to test lifting a definition from a let clause to top level,\n--and the elimination of the keywords 'let' and 'in'\n\nsumSquares x y = sq x + sq y\n where pow=2\n\nanotherFun 0 y = sq y\n where sq x = x^2\n" -- ----------------------------------- it "removes the last local decl in a where clause" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./RmDecl3.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" -- let declsr = hsBinds renamed -- let declsp = hsBinds parsed let Just (GHC.L _ n) = locToName (6, 5) renamed let comp = do (parsed',_removedDecl,_removedSig) <- rmDecl n True parsed putRefactParsed parsed' emptyAnns return parsed' (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (_nb,s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "zz" (GHC.showRichTokenStream $ toks) `shouldBe` "module RmDecl3 where\n\n-- Remove last declaration from a where clause, where should disappear too\nff y = y + zz\n where\n zz = 1\n\n-- EOF\n" (sourceFromState s) `shouldBe` "module RmDecl3 where\n\n-- Remove last declaration from a where clause, where should disappear too\nff y = y + zz\n\n-- EOF\n" -- ----------------------------------- it "removes the non last local decl in a let/in clause" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Demote/LetIn1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- (SYB.showData SYB.Renamer 0 renamed) `shouldBe` "" let Just (GHC.L _ n) = locToName (12, 22) renamed let comp = do (parsed',_removedDecl,_removedSig) <- rmDecl n False parsed putRefactParsed parsed' emptyAnns return parsed' -- (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (_nb,s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions -- putStrLn $ "anntree\n" ++ showAnnDataFromState s (showGhcQual n) `shouldBe` "pow" (GHC.showRichTokenStream $ toks) `shouldBe` "module Demote.LetIn1 where\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the local 'pow' to 'sq'\n--This example also aims to test the demoting a local declaration in 'let'.\n\nsumSquares x y = let sq 0=0\n sq z=z^pow\n pow=2\n in sq x + sq y\n\n\nanotherFun 
0 y = sq y\n where sq x = x^2\n\n " -- putStrLn $ showAnnDataFromState s (sourceFromState s) `shouldBe` "module Demote.LetIn1 where\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the local 'pow' to 'sq'\n--This example also aims to test the demoting a local declaration in 'let'.\n\nsumSquares x y = let sq 0=0\n sq z=z^pow\n in sq x + sq y\n\n\nanotherFun 0 y = sq y\n where sq x = x^2\n\n " -- ----------------------------------------------------------------- it "removes a decl with a trailing comment" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Demote/WhereIn3.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _ n) = locToName (14, 1) renamed let comp = do (parsed',_removedDecl,_removedSig1) <- rmDecl n False parsed putRefactParsed parsed' emptyAnns return parsed' (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (_nb,s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions -- putStrLn $ showAnnDataFromState s (showGhcQual n) `shouldBe` "Demote.WhereIn3.sq" (GHC.showRichTokenStream $ toks) `shouldBe` "module Demote.WhereIn3 where\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the top level 'sq' to 'sumSquares'\n--In this case (there are multi matches), the parameters are not folded after demoting.\n\nsumSquares x y = sq p x + sq p y\n where p=2 {-There is a comment-}\n\nsq :: Int -> Int -> Int\nsq pow 0 = 0\nsq pow z = z^pow --there is a comment\n\n{- foo bar -}\nanotherFun 0 y = sq y\n where sq x = x^2\n" (sourceFromState s) `shouldBe` "module Demote.WhereIn3 where\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the top level 'sq' to 'sumSquares'\n--In this case (there are multi matches), the parameters are not folded after demoting.\n\nsumSquares x y = sq p x + sq p y\n where p=2 {-There is a comment-}\n\nsq :: Int -> Int -> Int\n\n{- foo bar -}\nanotherFun 0 y = sq y\n where sq x = x^2\n" -- --------------------------------------------- describe "rmTypeSig" $ do it "removes a type signature from the top level 1" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- putStrLn $ "parsed:\n" ++ SYB.showData SYB.Parser 0 parsed let Just (GHC.L _ n) = locToName (22, 1) renamed let comp = do anns <- getRefactAnns logm $ "pristine\n" ++ showAnnData anns 0 parsed (renamed',sigRemoved) <- rmTypeSig n parsed putRefactParsed renamed' emptyAnns return (renamed',sigRemoved) ((_nb,os),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((_nb,os),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions -- putStrLn $ "anntree\n" ++ showAnnDataFromState s (showGhcQual n) `shouldBe` "MoveDef.Md1.ff" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> 
Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (sourceFromState s) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (showGhcQual os) `shouldBe` "Just ff :: Int -> Int" -- ----------------------------------------------------------------- it "removes a type signature from the top level, after decl removed" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Demote/WhereIn3.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _ n) = locToName (14, 1) renamed let comp = do (parsed',_removedDecl,_removedSig1) <- rmDecl n False parsed (parsed2,_removedSig2) <- rmTypeSig n parsed' -- let parsed2 = parsed' putRefactParsed parsed2 emptyAnns return parsed2 (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (_nb,s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions -- putStrLn $ showAnnDataFromState s (showGhcQual n) `shouldBe` "Demote.WhereIn3.sq" (GHC.showRichTokenStream $ toks) `shouldBe` "module Demote.WhereIn3 where\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the top level 'sq' to 'sumSquares'\n--In this case (there are multi matches), the parameters are not folded after demoting.\n\nsumSquares x y = sq p x + sq p y\n where p=2 {-There is a comment-}\n\nsq :: Int -> Int -> Int\nsq pow 0 = 0\nsq pow z = z^pow --there is a comment\n\n{- foo bar -}\nanotherFun 0 y = sq y\n where sq x = x^2\n" (sourceFromState s) `shouldBe` "module Demote.WhereIn3 where\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the top level 'sq' to 'sumSquares'\n--In this case (there are multi matches), the parameters are not folded after demoting.\n\nsumSquares x y = sq p x + sq p y\n where p=2 {-There is a comment-}\n\n{- foo bar -}\nanotherFun 0 y = sq y\n where sq x = x^2\n" -- ----------------------------------------------------------------- it "removes a type signature from non-top level" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _ n) = locToName (16, 5) renamed let comp = do -- (renamed',_removedSig) <- rmTypeSig n renamed 
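          -- rmTypeSig works on the ParsedSource here; the RenamedSource call is
          -- kept commented out above for reference. The result is stored with
          -- putRefactParsed so that sourceFromState can print the updated module.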
(renamed',_removedSig) <- rmTypeSig n parsed putRefactParsed renamed' emptyAnns return renamed' -- (_nb,s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "ff" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" -- (showToks $ take 20 $ toksFromState s) `shouldBe` "" (sourceFromState s) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" -- ----------------------------------------------------------------- it "removes a type signature within multi signatures 1" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./TypeUtils/TypeSigs.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _ b) = locToName (12, 1) renamed let comp = do (renamed',_removedSig) <- rmTypeSig b parsed putRefactParsed renamed' emptyAnns return renamed' (_nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- putStrLn $ showAnnDataFromState s (showGhcQual b) `shouldBe` "TypeSigs.b" (GHC.showRichTokenStream $ toks) `shouldBe` "module TypeSigs where\n\nsq,anotherFun :: Int -> Int\nsq 0 = 0\nsq z = z^2\n\nanotherFun x = x^2\n\na,b,c::Int->Integer->Char\n\na x y = undefined\nb x y = undefined\nc x y = undefined\n\n" (sourceFromState s) `shouldBe` "module TypeSigs where\n\nsq,anotherFun :: Int -> Int\nsq 0 = 0\nsq z = z^2\n\nanotherFun x = x^2\n\na,c::Int->Integer->Char\n\na x y = undefined\nb x y = undefined\nc x y = undefined\n\n" -- ----------------------------------------------------------------- it "removes a type signature within multi signatures 2" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./TypeUtils/TypeSigs.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _ n) = locToName (4, 1) renamed let comp = do (renamed',removedSig) <- rmTypeSig n parsed putRefactParsed renamed' emptyAnns return (renamed',removedSig) -- ((_nb,os),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions ((_nb,os),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- putStrLn $ "anntree\n" ++ showAnnDataFromState s (showGhcQual n) `shouldBe` "TypeSigs.sq" (GHC.showRichTokenStream $ toks) `shouldBe` "module TypeSigs where\n\nsq,anotherFun :: Int -> Int\nsq 0 = 0\nsq z = z^2\n\nanotherFun x = x^2\n\na,b,c::Int->Integer->Char\n\na x y = 
undefined\nb x y = undefined\nc x y = undefined\n\n" (sourceFromState s) `shouldBe` "module TypeSigs where\n\nanotherFun :: Int -> Int\nsq 0 = 0\nsq z = z^2\n\nanotherFun x = x^2\n\na,b,c::Int->Integer->Char\n\na x y = undefined\nb x y = undefined\nc x y = undefined\n\n" (showGhcQual os) `shouldBe` "Just sq :: Int -> Int" -- ----------------------------------------------------------------- {- it "removes a type signature for a pattern in a bind" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./LiftToToplevel/PatBindIn1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ n) = locToName (GHC.mkFastString "./test/testdata/LiftToToplevel/PatBindIn1.hs") (18, 7) renamed let comp = do (renamed',removedSig) <- rmTypeSig n renamed let (Just (GHC.L ss _)) = removedSig oldSigToks <- getToksForSpan ss return (renamed',removedSig,oldSigToks) -- ((nb,os,ot),s) <- runRefactGhc comp $ initialState { rsModule = initRefactModule t } ((nb,os,ot),s) <- runRefactGhc comp $ initialLogOnState { rsModule = initRefactModule t } (showGhcQual n) `shouldBe` "tup" (GHC.showRichTokenStream $ toks) `shouldBe` "module LiftToToplevel.PatBindIn1 where\n\n --A definition can be lifted from a where or let into the surrounding binding group.\n --Lifting a definition widens the scope of the definition.\n\n --In this example, lift 'tup' defined in 'foo'\n --This example aims to test renaming and the lifting of type signatures.\n\n main :: Int\n main = foo 3\n\n foo :: Int -> Int\n foo x = h + t + (snd tup)\n where\n h :: Int\n t :: Int\n tup :: (Int,Int)\n tup@(h,t) = head $ zip [1..10] [3..15]\n " -- (showToks $ take 20 $ toksFromState s) `shouldBe` "" (sourceFromState s) `shouldBe` "module LiftToToplevel.PatBindIn1 where\n\n --A definition can be lifted from a where or let into the surrounding binding group.\n --Lifting a definition widens the scope of the definition.\n\n --In this example, lift 'tup' defined in 'foo'\n --This example aims to test renaming and the lifting of type signatures.\n\n main :: Int\n main = foo 3\n\n foo :: Int -> Int\n foo x = h + t + (snd tup)\n where\n \n \n\n tup@(h,t) = head $ zip [1..10] [3..15]\n " (showGhcQual nb) `shouldBe` "" (showGhcQual os) `shouldBe` "" (GHC.showRichTokenStream ot) `shouldBe` "" -} -- --------------------------------------------- describe "addDecl" $ do it "adds a top level declaration without a type signature, in default pos" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let comp = do parsed <- getRefactParsed (decl,declAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "a" parseDecl "nn = n2") -- let declAnns' = setPrecedingLines declAnns newDecl 2 parsed' <- addDecl parsed Nothing (decl,Nothing,Just declAnns) True putRefactParsed parsed' emptyAnns return parsed' (nb,s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (nb,s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions -- (showGhcQual n) `shouldBe` "MoveDef.Md1.ff" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> 
Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" -- (pprFromState s) `shouldBe` [] (sourceFromState s) `shouldBe` "module MoveDef.Md1 where\n\nnn = n2\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (unspace $ showGhcQual nb) `shouldBe` unspace "module MoveDef.Md1 where\nnn = n2\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\nc, d :: Integer\nc = 7\nd = 9\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h, t)\n = head $ zip [1 .. 10] [3 .. ff]\n where\n ff :: Int\n ff = 15\ndata D = A | B String | C\nff :: Int -> Int\nff y\n = y + zz\n where\n zz = 1\nl z = let ll = 34 in ll + z\ndd q\n = do { let ss = 5;\n return (ss + q) }\nzz1 a = 1 + toplevel a\ntlFunc :: Integer -> Integer\ntlFunc x = c * x" -- ------------------------------------------- it "adds a top level declaration with a type signature" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let comp = do parsed <- getRefactParsed (decl,declAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "decl" parseDecl "nn = 2") ((GHC.L ls (GHC.SigD sig)), sigAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "sig" parseDecl "nn :: Int") parsed' <- addDecl parsed Nothing (decl,Just (GHC.L ls sig),Just $ mergeAnns sigAnns declAnns) True putRefactParsed parsed' emptyAnns return (sig,parsed') -- ((_hs,nb),s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((_hs,nb),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (showGhcQual n) `shouldBe` "MoveDef.Md1.ff" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (sourceFromState s) `shouldBe` "module MoveDef.Md1 where\n\nnn :: Int\nnn = 2\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (unspace $ showGhcQual nb) `shouldBe` unspace "module MoveDef.Md1 where\nnn :: Int\nnn = 2\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\nc, d :: Integer\nc = 7\nd = 9\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h, t)\n = head $ zip [1 .. 10] [3 .. 
ff]\n where\n ff :: Int\n ff = 15\ndata D = A | B String | C\nff :: Int -> Int\nff y\n = y + zz\n where\n zz = 1\nl z = let ll = 34 in ll + z\ndd q\n = do { let ss = 5;\n return (ss + q) }\nzz1 a = 1 + toplevel a\ntlFunc :: Integer -> Integer\ntlFunc x = c * x" -- ------------------------------------------- it "adds a top level declaration after a specified one" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let comp = do parsed <- getRefactParsed (decl,declAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "a" parseDecl "nn = nn2") renamed <- getRefactRenamed let Just (GHC.L _l n) = locToName (21, 1) renamed parsed' <- addDecl parsed (Just n) (decl,Nothing,Just declAnns) True putRefactParsed parsed' emptyAnns return (n,parsed') -- ((n,nb),s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((n,nb),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "MoveDef.Md1.ff" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (sourceFromState s) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nnn = nn2\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (unspace $ showGhcQual nb) `shouldBe` unspace "module MoveDef.Md1 where\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\nc, d :: Integer\nc = 7\nd = 9\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h, t)\n = head $ zip [1 .. 10] [3 .. 
ff]\n where\n ff :: Int\n ff = 15\ndata D = A | B String | C\nff :: Int -> Int\nff y\n = y + zz\n where\n zz = 1\nnn = nn2\nl z = let ll = 34 in ll + z\ndd q\n = do { let ss = 5;\n return (ss + q) }\nzz1 a = 1 + toplevel a\ntlFunc :: Integer -> Integer\ntlFunc x = c * x" -- ------------------------------------------- it "adds a top level declaration with a type signature after a specified one" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let comp = do renamed <- getRefactRenamed let Just (GHC.L _l n) = locToName (21, 1) renamed parsed <- getRefactParsed (decl,declAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "decl" parseDecl "nn = nn2") ((GHC.L ls (GHC.SigD sig)), sigAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "sig" parseDecl "nn :: Int") parsed' <- addDecl parsed (Just n) (decl,Just (GHC.L ls sig),Just $ mergeAnns sigAnns declAnns) True putRefactParsed parsed' emptyAnns return (n,parsed') ((nn,nb),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual nn) `shouldBe` "MoveDef.Md1.ff" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (sourceFromState s) `shouldBe`"module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nnn :: Int\nnn = nn2\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (unspace $ showGhcQual nb) `shouldBe` unspace "module MoveDef.Md1 where\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\nc, d :: Integer\nc = 7\nd = 9\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h, t)\n = head $ zip [1 .. 10] [3 .. 
ff]\n where\n ff :: Int\n ff = 15\ndata D = A | B String | C\nff :: Int -> Int\nff y\n = y + zz\n where\n zz = 1\nnn :: Int\nnn = nn2\nl z = let ll = 34 in ll + z\ndd q\n = do { let ss = 5;\n return (ss + q) }\nzz1 a = 1 + toplevel a\ntlFunc :: Integer -> Integer\ntlFunc x = c * x" -- ------------------------------------------- it "adds a local declaration without a type signature 1" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let comp = do parsed <- getRefactParsed renamed <- getRefactRenamed nameMap <- getRefactNameMap let Just (GHC.L _ tl) = locToName (4, 1) renamed decls <- refactRunTransform (hsDecls parsed) let [tlDecl] = definingDeclsRdrNames nameMap [tl] decls True False (decl,declAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "decl" parseDecl "nn = nn2") newDecl <- addDecl tlDecl Nothing (decl,Nothing,Just declAnns) False logm $ "test:addDecl done" return (tlDecl,newDecl) -- ((tl,nb),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions ((tl,nb),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual tl) `shouldBe` "toplevel x = c * x" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" -- putStrLn (showAnnDataItemFromState s nb) (exactPrintFromState s nb) `shouldBe` "\ntoplevel x = c * x\n where\n nn = nn2" (showGhcQual nb) `shouldBe` "toplevel x\n = c * x\n where\n nn = nn2" -- ------------------------------------------- it "adds a local declaration with a type signature 1" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let comp = do parsed <- getRefactParsed renamed <- getRefactRenamed nameMap <- getRefactNameMap let Just (GHC.L _ tl) = locToName (4, 1) renamed decls <- refactRunTransform (hsDecls parsed) let [tlDecl] = definingDeclsRdrNames nameMap [tl] decls True False (decl,declAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "decl" parseDecl "nn = nn2") ((GHC.L ls (GHC.SigD sig)), sigAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "sig" parseDecl "nn :: Int") newDecl <- addDecl tlDecl Nothing (decl,Just (GHC.L ls sig),Just $ mergeAnns sigAnns declAnns) False return (tlDecl,newDecl) ((tl,nb),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((tl,nb),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual tl) `shouldBe` "toplevel x = c * x" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md1 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- 
Comment at end\n\n\n" -- putStrLn (showAnnDataItemFromState s nb) (exactPrintFromState s nb) `shouldBe` "\ntoplevel x = c * x\n where\n nn :: Int\n nn = nn2" (showGhcQual nb) `shouldBe` "toplevel x\n = c * x\n where\n nn :: Int\n nn = nn2" -- ------------------------------------------- it "adds a local declaration with a where clause" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Demote.hs" let comp = do parsed <- getRefactParsed renamed <- getRefactRenamed nameMap <- getRefactNameMap let Just (GHC.L _ tl) = locToName (4, 1) renamed decls <- refactRunTransform (hsDecls parsed) let [tlDecl] = definingDeclsRdrNames nameMap [tl] decls True False (decl,declAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "decl" parseDecl "nn = nn2") newDecl <- addDecl tlDecl Nothing (decl,Nothing,Just declAnns) False return (tlDecl,newDecl) ((tl,nb),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual tl) `shouldBe` "toplevel x = c * x" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Demote where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x\n\n-- c,d :: Integer\nc = 7\nd = 9\n\n\n" (exactPrintFromState s nb) `shouldBe` "\ntoplevel x = c * x\n where\n nn = nn2" (showGhcQual nb) `shouldBe` "toplevel x\n = c * x\n where\n nn = nn2" -- ------------------------------------------- it "adds a local declaration to an existing one" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md2.hs" let comp = do parsed <- getRefactParsed renamed <- getRefactRenamed nameMap <- getRefactNameMap -- logParsedSource "start" let Just (GHC.L _ tl) = locToName (4, 1) renamed decls <- refactRunTransform (hsDecls parsed) let [tlDecl] = definingDeclsRdrNames nameMap [tl] decls True False (decl,declAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "decl" parseDecl "nn = nn2") newDecl <- addDecl tlDecl Nothing (decl,Nothing,Just declAnns) False return (tlDecl,newDecl) ((tl,nb),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((tl,nb),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual tl) `shouldBe` "toplevel x\n = c * x * b\n where\n b = 3" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md2 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x * b\n where\n b = 3\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" -- putStrLn (showAnnDataItemFromState s nb) (exactPrintFromState s nb) `shouldBe` "\ntoplevel x = c * x * b\n where\n nn = nn2\n b = 3" (showGhcQual nb) `shouldBe` "toplevel x\n = c * x * b\n where\n nn = nn2\n b = 3" -- ------------------------------------------- it "adds a local declaration with a type signature to an existing one" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md2.hs" let comp = do parsed <- getRefactParsed renamed <- getRefactRenamed nameMap <- getRefactNameMap let Just (GHC.L _ tl) = locToName (4, 1) renamed decls <- refactRunTransform (hsDecls parsed) let [tlDecl] = definingDeclsRdrNames nameMap [tl] decls True False (decl,declAnns) <- GHC.liftIO $ withDynFlags (\df -> 
parseToAnnotated df "decl" parseDecl "nn = nn2") ((GHC.L ls (GHC.SigD sig)), sigAnns) <- GHC.liftIO $ withDynFlags (\df -> parseToAnnotated df "sig" parseDecl "nn :: Int") newDecl <- addDecl tlDecl Nothing (decl,Just (GHC.L ls sig),Just $ mergeAnns sigAnns declAnns) False return (tlDecl,newDecl) ((tl,nb),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual tl) `shouldBe` "toplevel x\n = c * x * b\n where\n b = 3" (GHC.showRichTokenStream $ toks) `shouldBe` "module MoveDef.Md2 where\n\ntoplevel :: Integer -> Integer\ntoplevel x = c * x * b\n where\n b = 3\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff :: Int -> Int\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\nzz1 a = 1 + toplevel a\n\n-- General Comment\n-- |haddock comment\ntlFunc :: Integer -> Integer\ntlFunc x = c * x\n-- Comment at end\n\n\n" (exactPrintFromState s nb) `shouldBe` "\ntoplevel x = c * x * b\n where\n nn :: Int\n nn = nn2\n b = 3" (showGhcQual nb) `shouldBe` "toplevel x\n = c * x * b\n where\n nn :: Int\n nn = nn2\n b = 3" -- ------------------------------------------- it "adds a local decl with type signature to an existing one, with a comment" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./Demote/WhereIn3.hs" let comp = do parsed <- getRefactParsed renamed <- getRefactRenamed nameMap <- getRefactNameMap let Just (GHC.L _ tl) = locToName (10, 1) renamed decls <- refactRunTransform (hsDecls parsed) let -- decls = hsBinds parsed [tlDecl] = definingDeclsRdrNames nameMap [tl] decls True False let Just (GHC.L _ sq) = locToName (14, 1) renamed let Just (GHC.L _ af) = locToName (18, 1) renamed let [sqSig] = definingSigsRdrNames nameMap [sq] parsed [sqDecl] = definingDeclsRdrNames nameMap [sq] decls False False [afDecl] = definingDeclsRdrNames nameMap [af] decls False False refactRunTransform (balanceComments sqDecl afDecl) newDecl <- addDecl tlDecl Nothing (sqDecl,Just sqSig,Nothing) False return (sqSig,sqDecl,tlDecl,afDecl,newDecl) -- ((sigs,_sd,tl,aa,nb),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions ((sigs,_sd,tl,aa,nb),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- putStrLn $ show (annsFromState s) -- putStrLn $ showAnnDataFromState s -- putStrLn $ showAnnDataItemFromState s _sd -- putStrLn $ showAnnDataItemFromState s nb -- putStrLn $ showAnnDataItemFromState s aa (showGhcQual sigs) `shouldBe` "sq :: Int -> Int -> Int" (showGhcQual tl) `shouldBe` "sumSquares x y\n = sq p x + sq p y\n where\n p = 2" (showGhcQual aa) `shouldBe` "anotherFun 0 y\n = sq y\n where\n sq x = x ^ 2" (exactPrintFromState s nb) `shouldBe` "\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the top level 'sq' to 'sumSquares'\n--In this case (there are multi matches), the parameters are not folded after demoting.\n\nsumSquares x y = sq p x + sq p y\n where sq :: Int -> Int -> Int\n sq pow 0 = 0\n sq pow z = z^pow --there is a comment\n p=2" (showGhcQual nb) `shouldBe` "sumSquares x y\n = sq p x + sq p y\n where\n sq :: Int -> Int -> Int\n sq pow 0 = 0\n sq pow z = z ^ pow\n p = 2" -- 
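    -- The renamePN tests that follow all share one shape; a condensed sketch
    -- (the coordinates and the replacement identifier below are placeholders,
    -- the other names are the ones used in the tests themselves):
    --
    --   let Just (GHC.L _ n) = locToName (row, col) renamed
    --   let comp = do
    --         newName <- mkNewGhcName Nothing "newIdent"
    --         new     <- renamePN' n newName False parsed
    --         putRefactParsed new emptyAnns
    --         return (new, newName)
    --   ((nb, _nn), s) <- runRefactGhc comp tgt
    --                       (initialState { rsModule = initRefactModule t }) testOptions
    --   sourceFromState s `shouldBe` expectedSource
    --
    -- Each test then also checks showGhcQual nb against the pretty-printed AST.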
--------------------------------------------- describe "renamePN" $ do it "replaces a Name with another, updating tokens 1" $ do let comp = do parsed <- getRefactParsed renamed <- getRefactRenamed let Just (GHC.L _l n') = locToName (3, 1) renamed newName <- mkNewGhcName Nothing "bar2" new <- renamePN' n' newName False parsed putRefactParsed new emptyAnns return (new,newName,n') let ((nb,nn,n),s) <- ct $ runTestGhc comp "./DupDef/Dd1.hs" -- ((nb,nn,n),s) <- ct $ runLogTestGhc comp "./DupDef/Dd1.hs" (showGhcQual (n,nn)) `shouldBe` "(DupDef.Dd1.toplevel, bar2)" -- error (show $ annsFromState s) (sourceFromState s) `shouldBe` "module DupDef.Dd1 where\n\nbar2 :: Integer -> Integer\nbar2 x = c * x\n\nc,d :: Integer\nc = 7\nd = 9\n\n-- Pattern bind\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h,t) = head $ zip [1..10] [3..ff]\n where\n ff :: Int\n ff = 15\n\ndata D = A | B String | C\n\nff y = y + zz\n where\n zz = 1\n\nl z =\n let\n ll = 34\n in ll + z\n\ndd q = do\n let ss = 5\n return (ss + q)\n\n" (showGhcQual nb) `shouldBe` "module DupDef.Dd1 where\nbar2 :: Integer -> Integer\nbar2 x = c * x\nc, d :: Integer\nc = 7\nd = 9\ntup :: (Int, Int)\nh :: Int\nt :: Int\ntup@(h, t)\n = head $ zip [1 .. 10] [3 .. ff]\n where\n ff :: Int\n ff = 15\ndata D = A | B String | C\nff y\n = y + zz\n where\n zz = 1\nl z = let ll = 34 in ll + z\ndd q\n = do { let ss = 5;\n return (ss + q) }" -- ----------------------------------------------------------------- it "replaces a Name with another, updating tokens 2" $ do -- (t, toks, tgt) <- ct $ parsedFileGhc "./Demote/WhereIn4.hs" -- let renamed = fromJust $ GHC.tm_renamed_source t -- let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- let declsr = hsBinds parsed -- let decl = head $ drop 0 declsr -- let Just (GHC.L _l n) = locToName (11, 21) renamed let comp = do renamed <- getRefactRenamed parsed <- getRefactParsed let declsr = hsBinds parsed let decl = head $ drop 0 declsr let Just (GHC.L _l n') = locToName (11, 21) renamed newName <- mkNewGhcName Nothing "p_1" -- new <- renamePN n newName False decl new <- renamePN' n' newName False decl let parsed' = replaceBinds parsed (new:tail declsr) putRefactParsed parsed' emptyAnns return (new,newName,decl,n') let -- ((nb,nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions ((nb,nn,d,n),s) <- ct $ runTestGhc comp "./Demote/WhereIn4.hs" (showGhcQual d) `shouldBe` "sumSquares x y\n = sq p x + sq p y\n where\n p = 2" (showGhcQual (n,nn)) `shouldBe` "(p, p_1)" -- (GHC.showRichTokenStream $ toks) `shouldBe` "module Demote.WhereIn4 where\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the top level 'sq' to 'sumSquares'\n--In this case (there is single matches), if possible,\n--the parameters will be folded after demoting and type sigature will be removed.\n\nsumSquares x y = sq p x + sq p y\n where p=2 {-There is a comment-}\n\nsq::Int->Int->Int\nsq pow z = z^pow --there is a comment\n\nanotherFun 0 y = sq y\n where sq x = x^2\n\n" (sourceFromState s) `shouldBe` "module Demote.WhereIn4 where\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the top level 'sq' to 'sumSquares'\n--In this case (there is single matches), 
if possible,\n--the parameters will be folded after demoting and type sigature will be removed.\n\nsumSquares x y = sq p_1 x + sq p_1 y\n where p_1=2 {-There is a comment-}\n\nsq::Int->Int->Int\nsq pow z = z^pow --there is a comment\n\nanotherFun 0 y = sq y\n where sq x = x^2\n\n" (showGhcQual nb) `shouldBe` "sumSquares x y\n = sq p_1 x + sq p_1 y\n where\n p_1 = 2" -- --------------------------------- it "replaces a Name with another in limited scope, updating tokens 1" $ do (t,toks, tgt) <- ct $ parsedFileGhc "./TokenTest.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- let decls = hsBinds renamed let decls = hsBinds parsed let (GHC.L l _) = head $ drop 3 decls (showGhcQual (ss2span l)) `shouldBe` "((19, 1), (21, 14))" let Just (GHC.L _ n) = locToName (19, 1) renamed (showGhcQual n) `shouldBe` "TokenTest.foo" let comp = do newName <- mkNewGhcName Nothing "bar2" -- new <- renamePN n newName False (head decls) new <- renamePN' n newName False (head $ drop 3 decls) let parsed' = replaceBinds parsed (take 3 decls ++ [new] ++ drop 4 decls) putRefactParsed parsed' emptyAnns return (new,newName) let ((nb,nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((nb,nn),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual (n,nn)) `shouldBe` "(TokenTest.foo, bar2)" (GHC.showRichTokenStream $ toks) `shouldBe` "module TokenTest where\n\n-- Test new style token manager\n\nbob a b = x\n where x = 3\n\nbib a b = x\n where\n x = 3\n\n\nbab a b =\n let bar = 3\n in b + bar -- ^trailing comment\n\n\n-- leading comment\nfoo x y =\n do c <- getChar\n return c\n\n\n\n\n" (sourceFromState s) `shouldBe` "module TokenTest where\n\n-- Test new style token manager\n\nbob a b = x\n where x = 3\n\nbib a b = x\n where\n x = 3\n\n\nbab a b =\n let bar = 3\n in b + bar -- ^trailing comment\n\n\n-- leading comment\nbar2 x y =\n do c <- getChar\n return c\n\n\n\n\n" (showGhcQual nb) `shouldBe` "bar2 x y\n = do { c <- getChar;\n return c }" -- (showToks $ take 20 $ toksFromState s) `shouldBe` "" -- --------------------------------- it "replace a Name with another in limited scope, updating tokens 2" $ do (t,toks, tgt) <- ct $ parsedFileGhc "./TokenTest.hs" -- let forest = mkTreeFromTokens toks let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- let decls = hsBinds renamed let decls = hsBinds parsed let decl@(GHC.L l _) = head $ drop 3 decls -- (showGhcQual l) `shouldBe` "test/testdata/TokenTest.hs:(19,1)-(21,13)" (show $ ss2span l) `shouldBe` "((19,1),(21,14))" let Just (GHC.L _ n) = locToName (19, 1) renamed (showGhcQual n) `shouldBe` "TokenTest.foo" let decl' = decl let comp = do newName <- mkNewGhcName Nothing "bar2" -- toksForOp <- getToksForSpan sspan -- The new span this time -- new <- renamePN n newName False decl' new <- renamePN' n newName False decl' let parsed' = replaceBinds parsed (take 3 decls ++ [new] ++ drop 4 decls) putRefactParsed parsed' emptyAnns return (new,newName) ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (show tfo) `shouldBe` "" (showGhcQual n) `shouldBe` "TokenTest.foo" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((19,1),(19,5),\"bar2\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module TokenTest where\n\n-- Test new style token manager\n\nbob a b = x\n where x = 3\n\nbib a b = x\n where\n x = 3\n\n\nbab a b 
=\n let bar = 3\n in b + bar -- ^trailing comment\n\n\n-- leading comment\nfoo x y =\n do c <- getChar\n return c\n\n\n\n\n" (sourceFromState s) `shouldBe` "module TokenTest where\n\n-- Test new style token manager\n\nbob a b = x\n where x = 3\n\nbib a b = x\n where\n x = 3\n\n\nbab a b =\n let bar = 3\n in b + bar -- ^trailing comment\n\n\n-- leading comment\nbar2 x y =\n do c <- getChar\n return c\n\n\n\n\n" (showGhcQual nb) `shouldBe` "bar2 x y\n = do { c <- getChar;\n return c }" -- (showToks $ take 20 $ toksFromState s) `shouldBe` "" ------------------------------------ it "replaces a name in a data declaration too" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/Field1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (5, 19) renamed -- let Just (GHC.L _l n) = locToName (5, 19) parsed let comp = do newName <- mkNewGhcName Nothing "pointx1" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) let -- ((nb,nn),s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "Field1.pointx" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((5,18),(5,25),\"pointx1\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module Field1 where\n\n--Rename field name 'pointx' to 'pointx1'\n\ndata Point = Pt {pointx, pointy :: Float}\n\nabsPoint :: Point -> Float\nabsPoint p = sqrt (pointx p * pointx p +\n pointy p * pointy p)\n\n" (sourceFromState s) `shouldBe` "module Field1 where\n\n--Rename field name 'pointx' to 'pointx1'\n\ndata Point = Pt {pointx1, pointy :: Float}\n\nabsPoint :: Point -> Float\nabsPoint p = sqrt (pointx1 p * pointx1 p +\n pointy p * pointy p)\n\n" (unspace $ showGhcQual nb) `shouldBe` "module Field1 where\ndata Point = Pt {pointx1, pointy :: Float}\nabsPoint :: Point -> Float\nabsPoint p = sqrt (pointx1 p * pointx1 p + pointy p * pointy p)" ------------------------------------ it "replaces a name in a type signature too" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/Field1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (5, 6) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName Nothing "NewPoint" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) let ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((nb,nn),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "Field1.Point" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((5,6),(5,14),\"NewPoint\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module Field1 where\n\n--Rename field name 'pointx' to 'pointx1'\n\ndata Point = Pt {pointx, pointy :: Float}\n\nabsPoint :: Point -> Float\nabsPoint p = sqrt (pointx p * pointx p +\n pointy p * pointy p)\n\n" (sourceFromState s) `shouldBe` "module Field1 where\n\n--Rename field name 'pointx' to 'pointx1'\n\ndata NewPoint = Pt {pointx, pointy :: Float}\n\nabsPoint :: NewPoint -> Float\nabsPoint p = sqrt (pointx p * pointx p +\n pointy p * pointy p)\n\n" (unspace $ showGhcQual nb) `shouldBe` "module Field1 
where\ndata NewPoint = Pt {pointx, pointy :: Float}\nabsPoint :: NewPoint -> Float\nabsPoint p = sqrt (pointx p * pointx p + pointy p * pointy p)" ------------------------------------ it "replace a name in a FunBind with multiple patterns" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./LocToName.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (20, 1) renamed let comp = do newName <- mkNewGhcName Nothing "newPoint" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) let -- ((nb,_nn),s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "LocToName.sumSquares" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((20,1),(20,9),\"newPoint\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module LocToName where\n\n{-\n\n\n\n\n\n\n\n\n-}\n\n\n\n\n\n\n\nsumSquares (x:xs) = x ^2 + sumSquares xs\n -- where sq x = x ^pow \n -- pow = 2\n\nsumSquares [] = 0\n" (sourceFromState s) `shouldBe` "module LocToName where\n\n{-\n\n\n\n\n\n\n\n\n-}\n\n\n\n\n\n\n\nnewPoint (x:xs) = x ^2 + newPoint xs\n -- where sq x = x ^pow \n -- pow = 2\n\nnewPoint [] = 0\n" (unspace $ showGhcQual nb) `shouldBe` "module LocToName where\nnewPoint (x : xs) = x ^ 2 + newPoint xs\nnewPoint [] = 0" ------------------------------------ it "replaces a qualified name in a FunBind with multiple patterns" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./LocToName.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t -- putStrLn $ "original parsed:" ++ SYB.showData SYB.Parser 0 parsed let modu = GHC.mkModule (GHC.stringToPackageKey "mypackage-1.0") (GHC.mkModuleName "LocToName") let Just (GHC.L _l n) = locToName (20, 1) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName (Just modu) "newPoint" -- new <- renamePN n newName True renamed new <- renamePN' n newName True parsed putRefactParsed new emptyAnns return (new,newName) ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((nb,_nn),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions -- putStrLn $ "annotated parsed':" ++ showAnnDataFromState s -- putStrLn $ "annotated anns':" ++ showGhc (annsFromState s) (showGhcQual n) `shouldBe` "LocToName.sumSquares" (GHC.showRichTokenStream $ toks) `shouldBe` "module LocToName where\n\n{-\n\n\n\n\n\n\n\n\n-}\n\n\n\n\n\n\n\nsumSquares (x:xs) = x ^2 + sumSquares xs\n -- where sq x = x ^pow \n -- pow = 2\n\nsumSquares [] = 0\n" (unspace $ showGhcQual nb) `shouldBe` "module LocToName where\nLocToName.newPoint (x : xs) = x ^ 2 + LocToName.newPoint xs\nLocToName.newPoint [] = 0" (sourceFromState s) `shouldBe` "module LocToName where\n\n{-\n\n\n\n\n\n\n\n\n-}\n\n\n\n\n\n\n\nnewPoint (x:xs) = x ^2 + LocToName.newPoint xs\n -- where sq x = x ^pow \n -- pow = 2\n\nnewPoint [] = 0\n" ------------------------------------ it "replaces a parameter name in a FunBind" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/LayoutIn2.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _ n) = locToName (8, 7) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 
renamed) newName <- mkNewGhcName Nothing "ls" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (showGhcQual n) `shouldBe` "list" -- (showGhcQual $ retrieveTokensPpr $ fromJust $ layoutFromState s) `shouldBe` "" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((8,7),(8,9),\"ls\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module LayoutIn2 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'list' to 'ls'.\n\nsilly :: [Int] -> Int\nsilly list = case list of (1:xs) -> 1\n--There is a comment\n (2:xs)\n | x < 10 -> 4 where x = last xs\n otherwise -> 12\n\n" -- (showGhcQual $ sourceTreeFromState s) `shouldBe` "" (sourceFromState s) `shouldBe` "module LayoutIn2 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'list' to 'ls'.\n\nsilly :: [Int] -> Int\nsilly ls = case ls of (1:xs) -> 1\n--There is a comment\n (2:xs)\n | x < 10 -> 4 where x = last xs\n otherwise -> 12\n\n" (unspace $ showGhcQual nb) `shouldBe` "module LayoutIn2 where\nsilly :: [Int] -> Int\nsilly ls\n = case ls of {\n (1 : xs) -> 1\n (2 : xs)\n | x < 10 -> 4\n where\n x = last xs\n otherwise -> 12 }" ------------------------------------ it "does not qualify a name in an import hiding clause" $ do (t,toks,tgt) <- ct $ parsedFileGhc "./ScopeAndQual.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let modu = GHC.mkModule (GHC.stringToPackageKey "mypackage-1.0") (GHC.mkModuleName "LocToName") let Just (GHC.L _l n) = locToName (4, 24) renamed let comp = do newName <- mkNewGhcName (Just modu) "mySum" -- new <- renamePN n newName True renamed new <- renamePN' n newName True parsed putRefactParsed new emptyAnns return (new,newName) ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((nb,_nn),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "Data.Foldable.sum" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((4,24),(4,29),\"mySum\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module ScopeAndQual where\n\nimport qualified Data.List as L\nimport Prelude hiding (sum)\n\nmain :: IO ()\nmain = putStrLn (show $ L.sum [1,2,3])\n\nsum a b = a + b\n\nsumSquares xs = L.sum $ map (\\x -> x*x) xs\n\nmySumSq = sumSquares\n" (sourceFromState s) `shouldBe` "module ScopeAndQual where\n\nimport qualified Data.List as L\nimport Prelude hiding (mySum)\n\nmain :: IO ()\nmain = putStrLn (show $ L.mySum [1,2,3])\n\nsum a b = a + b\n\nsumSquares xs = L.mySum $ map (\\x -> x*x) xs\n\nmySumSq = sumSquares\n" (unspace $ showGhcQual nb) `shouldBe` "module ScopeAndQual where\nimport qualified Data.List as L\nimport Prelude hiding ( mySum )\nmain :: IO ()\nmain = putStrLn (show $ L.mySum [1, 2, 3])\nsum a b = a + b\nsumSquares xs = L.mySum $ map (\\ x -> x * x) xs\nmySumSq = sumSquares" ------------------------------------ it "does not qualify the subject of a type signature" $ do (t,toks, tgt) <- ct $ parsedFileGhc "./Renaming/C7.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let modu = GHC.mkModule (GHC.stringToPackageKey "mypackage-1.0") (GHC.mkModuleName "LocToName") let Just (GHC.L _l n) = locToName (5, 1) renamed let comp = do logm 
$ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName (Just modu) "myNewFringe" new <- renamePN' n newName True parsed putRefactParsed new emptyAnns return (new,newName) ((nb,_nn),s) <- ct $ runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((nb,_nn),s) <- ct $ runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "Renaming.C7.myFringe" (GHC.showRichTokenStream $ toks) `shouldBe` "module Renaming.C7(myFringe) where\n\nimport Renaming.D7\n\nmyFringe:: Tree a -> [a]\nmyFringe (Leaf x ) = [x]\nmyFringe (Branch left right) = myFringe left ++ fringe right\n\n\n\n\n" (sourceFromState s) `shouldBe` "module Renaming.C7(LocToName.myNewFringe) where\n\nimport Renaming.D7\n\nmyNewFringe:: Tree a -> [a]\nmyNewFringe (Leaf x ) = [x]\nmyNewFringe (Branch left right) = LocToName.myNewFringe left ++ fringe right\n\n\n\n\n" (unspace $ showGhcQual nb) `shouldBe` "module Renaming.C7 (\n LocToName.myNewFringe\n ) where\nimport Renaming.D7\nmyNewFringe :: Tree a -> [a]\nLocToName.myNewFringe (Leaf x) = [x]\nLocToName.myNewFringe (Branch left right)\n = LocToName.myNewFringe left ++ fringe right" ------------------------------------ it "realigns toks in a case for a shorter name" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/LayoutIn2.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (8, 7) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName Nothing "ls" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "list" (GHC.showRichTokenStream $ toks) `shouldBe` "module LayoutIn2 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'list' to 'ls'.\n\nsilly :: [Int] -> Int\nsilly list = case list of (1:xs) -> 1\n--There is a comment\n (2:xs)\n | x < 10 -> 4 where x = last xs\n otherwise -> 12\n\n" (sourceFromState s) `shouldBe` "module LayoutIn2 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'list' to 'ls'.\n\nsilly :: [Int] -> Int\nsilly ls = case ls of (1:xs) -> 1\n--There is a comment\n (2:xs)\n | x < 10 -> 4 where x = last xs\n otherwise -> 12\n\n" (unspace $ showGhcQual nb) `shouldBe` "module LayoutIn2 where\nsilly :: [Int] -> Int\nsilly ls\n = case ls of {\n (1 : xs) -> 1\n (2 : xs)\n | x < 10 -> 4\n where\n x = last xs\n otherwise -> 12 }" ------------------------------------ it "realigns toks in a case for a longer name" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/LayoutIn2.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (8, 7) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName Nothing "listlonger" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) -- ((nb,nn),s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "list" -- (showToks $ [newNameTok False l nn]) 
`shouldBe` "[((8,7),(8,17),\"listlonger\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module LayoutIn2 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'list' to 'ls'.\n\nsilly :: [Int] -> Int\nsilly list = case list of (1:xs) -> 1\n--There is a comment\n (2:xs)\n | x < 10 -> 4 where x = last xs\n otherwise -> 12\n\n" -- (showGhcQual $ linesFromState s) `shouldBe` "" -- (showGhcQual $ sourceTreeFromState s) `shouldBe` "" (sourceFromState s) `shouldBe` "module LayoutIn2 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'list' to 'ls'.\n\nsilly :: [Int] -> Int\nsilly listlonger = case listlonger of (1:xs) -> 1\n --There is a comment\n (2:xs)\n | x < 10 -> 4 where x = last xs\n otherwise -> 12\n\n" (unspace $ showGhcQual nb) `shouldBe` "module LayoutIn2 where\nsilly :: [Int] -> Int\nsilly listlonger\n = case listlonger of {\n (1 : xs) -> 1\n (2 : xs)\n | x < 10 -> 4\n where\n x = last xs\n otherwise -> 12 }" ------------------------------------ it "realigns toks in a do for a shorter name" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/LayoutIn4.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (7, 8) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName Nothing "io" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) -- ((nb,nn),s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "ioFun" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((7,8),(7,10),\"io\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module LayoutIn4 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'ioFun' to 'io'\n\nmain = ioFun \"hello\" where ioFun s= do let k = reverse s\n --There is a comment\n s <- getLine\n let q = (k ++ s)\n putStr q\n putStr \"foo\"\n\n" -- (pprFromState s) `shouldBe` [] (sourceFromState s) `shouldBe` "module LayoutIn4 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'ioFun' to 'io'\n\nmain = io \"hello\" where io s= do let k = reverse s\n--There is a comment\n s <- getLine\n let q = (k ++ s)\n putStr q\n putStr \"foo\"\n\n" (unspace $ showGhcQual nb) `shouldBe` "module LayoutIn4 where\nmain\n = io \"hello\"\n where\n io s\n = do { let k = reverse s;\n s <- getLine;\n let q = (k ++ s);\n putStr q;\n putStr \"foo\" }" ------------------------------------ it "realigns toks in a do for a longer name" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/LayoutIn4.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (7, 8) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName Nothing "ioFunLong" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) -- ((nb,nn),s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "ioFun" -- (showToks $ [newNameTok False l nn]) `shouldBe` 
"[((7,8),(7,17),\"ioFunLong\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module LayoutIn4 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'ioFun' to 'io'\n\nmain = ioFun \"hello\" where ioFun s= do let k = reverse s\n --There is a comment\n s <- getLine\n let q = (k ++ s)\n putStr q\n putStr \"foo\"\n\n" (sourceFromState s) `shouldBe` "module LayoutIn4 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'ioFun' to 'io'\n\nmain = ioFunLong \"hello\" where ioFunLong s= do let k = reverse s\n --There is a comment\n s <- getLine\n let q = (k ++ s)\n putStr q\n putStr \"foo\"\n\n" (unspace $ showGhcQual nb) `shouldBe` "module LayoutIn4 where\nmain\n = ioFunLong \"hello\"\n where\n ioFunLong s\n = do { let k = reverse s;\n s <- getLine;\n let q = (k ++ s);\n putStr q;\n putStr \"foo\" }" ------------------------------------ it "realigns toks in a where for a shorter name" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/LayoutIn1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (7, 17) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName Nothing "q" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) -- ((nb,nn),s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "sq" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((7,17),(7,18),\"q\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module LayoutIn1 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'sq' to 'square'.\n\nsumSquares x y= sq x + sq y where sq x= x^pow\n --There is a comment.\n pow=2\n" (sourceFromState s) `shouldBe` "module LayoutIn1 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'sq' to 'square'.\n\nsumSquares x y= q x + q y where q x= x^pow\n--There is a comment.\n pow=2\n" (unspace $ showGhcQual nb) `shouldBe` "module LayoutIn1 where\nsumSquares x y\n = q x + q y\n where\n q x = x ^ pow\n pow = 2" ------------------------------------ it "realigns toks in a where for a longer name" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/LayoutIn1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (7, 17) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName Nothing "square" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "sq" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((7,17),(7,23),\"square\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module LayoutIn1 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this Example: rename 'sq' to 'square'.\n\nsumSquares x y= sq x + sq y where sq x= x^pow\n --There is a comment.\n pow=2\n" -- (pprFromState s) `shouldBe` [] (sourceFromState s) `shouldBe` "module LayoutIn1 where\n\n--Layout rule applies after 'where','let','do' and 'of'\n\n--In this 
Example: rename 'sq' to 'square'.\n\nsumSquares x y= square x + square y where square x= x^pow\n --There is a comment.\n pow=2\n" (unspace $ showGhcQual nb) `shouldBe` "module LayoutIn1 where\nsumSquares x y\n = square x + square y\n where\n square x = x ^ pow\n pow = 2" ------------------------------------ it "realigns toks in a let/in for a shorter name" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./TypeUtils/LayoutLet1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (6, 6) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName Nothing "x" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) -- ((nb,nn),s) <- runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "xxx" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((6,5),(6,6),\"x\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module LayoutLet1 where\n\n-- Simple let expression, rename xxx to something longer or shorter\n-- and the let/in layout should adjust accordingly\n\nfoo xxx = let a = 1\n b = 2\n in xxx + a + b\n\n" (sourceFromState s) `shouldBe` "module LayoutLet1 where\n\n-- Simple let expression, rename xxx to something longer or shorter\n-- and the let/in layout should adjust accordingly\n\nfoo x = let a = 1\n b = 2\n in x + a + b\n\n" (unspace $ showGhcQual nb) `shouldBe` "module LayoutLet1 where\nfoo x\n = let\n a = 1\n b = 2\n in x + a + b" ------------------------------------ it "realigns toks in a let/in for a longer name 1" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./TypeUtils/LayoutLet1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (6, 6) renamed let comp = do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName Nothing "xxxlong" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) ((nb,_nn),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((nb,nn),s) <- runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "xxx" -- (showToks $ [newNameTok False l nn]) `shouldBe` "[((6,5),(6,12),\"xxxlong\")]" (GHC.showRichTokenStream $ toks) `shouldBe` "module LayoutLet1 where\n\n-- Simple let expression, rename xxx to something longer or shorter\n-- and the let/in layout should adjust accordingly\n\nfoo xxx = let a = 1\n b = 2\n in xxx + a + b\n\n" -- (pprFromState s) `shouldBe` [] (sourceFromState s) `shouldBe` "module LayoutLet1 where\n\n-- Simple let expression, rename xxx to something longer or shorter\n-- and the let/in layout should adjust accordingly\n\nfoo xxxlong = let a = 1\n b = 2\n in xxxlong + a + b\n\n" (unspace $ showGhcQual nb) `shouldBe` "module LayoutLet1 where\nfoo xxxlong\n = let\n a = 1\n b = 2\n in xxxlong + a + b" ------------------------------------ it "realigns toks in a let/in for a longer name 2" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./TypeUtils/LayoutLet2.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (7, 6) renamed let comp 
= do logm $ "renamed:" ++ (SYB.showData SYB.Renamer 0 renamed) newName <- mkNewGhcName Nothing "xxxlong" -- new <- renamePN n newName False renamed new <- renamePN' n newName False parsed putRefactParsed new emptyAnns return (new,newName) ((nb,nn),s) <- ct $ runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((nb,nn),s) <- ct $ runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual (n,nn)) `shouldBe` "(xxx, xxxlong)" (GHC.showRichTokenStream $ toks) `shouldBe` "module LayoutLet2 where\n\n-- Simple let expression, rename xxx to something longer or shorter\n-- and the let/in layout should adjust accordingly\n-- In this case the tokens for xxx + a + b should also shift out\n\nfoo xxx = let a = 1\n b = 2 in xxx + a + b\n\n" (sourceFromState s) `shouldBe` "module LayoutLet2 where\n\n-- Simple let expression, rename xxx to something longer or shorter\n-- and the let/in layout should adjust accordingly\n-- In this case the tokens for xxx + a + b should also shift out\n\nfoo xxxlong = let a = 1\n b = 2 in xxxlong + a + b\n\n" (unspace $ showGhcQual nb) `shouldBe` "module LayoutLet2 where\nfoo xxxlong\n = let\n a = 1\n b = 2\n in xxxlong + a + b" ------------------------------------ it "renames an exported data type" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/RenameInExportedType2.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let Just (GHC.L _l n) = locToName (6, 24) renamed let comp = do logm $ "parsed:" ++ (SYB.showData SYB.Parser 0 parsed) newName <- mkNewGhcName Nothing "NewType" new <- renamePN' n newName False parsed putRefactParsed new emptyAnns logm $ "parsed:after" ++ (SYB.showData SYB.Parser 0 new) return (new,newName) ((_nb,nn),s) <- ct $ runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((_nb,nn),s) <- ct $ runRefactGhc comp tgt (initialLogOnState { rsModule = initRefactModule t }) testOptions (showGhcQual (n,nn)) `shouldBe` "(Renaming.RenameInExportedType.NT, NewType)" (GHC.showRichTokenStream $ toks) `shouldBe` "module Renaming.RenameInExportedType\n (\n MyType (NT)\n ) where\n\ndata MyType = MT Int | NT\n\n\n" (sourceFromState s) `shouldBe` "module Renaming.RenameInExportedType\n (\n MyType (NewType)\n ) where\n\ndata MyType = MT Int | NewType\n\n\n" -- --------------------------------------------- describe "qualifyToplevelName" $ do it "qualifies a name at the top level, updating tokens" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/C7.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _l n) = locToName (7, 1) renamed let comp = do _new <- qualifyToplevelName n return () let -- (_,s) <- ct $ runRefactGhc comp tgt $ initialState { rsModule = initRefactModule t } (_,s) <- ct $ runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n) `shouldBe` "Renaming.C7.myFringe" (GHC.showRichTokenStream $ toks) `shouldBe` "module Renaming.C7(myFringe) where\n\n import Renaming.D7\n\n myFringe:: Tree a -> [a]\n myFringe (Leaf x ) = [x]\n myFringe (Branch left right) = myFringe left ++ fringe right\n\n\n\n\n " -- (showGhcQual $ sourceTreeFromState s) `shouldBe` "" (sourceFromState s) `shouldBe` "module Renaming.C7(Renaming.C7.myFringe) where\n\nimport Renaming.D7\n\nmyFringe:: Tree a -> [a]\nmyFringe (Leaf x ) = [x]\nmyFringe (Branch left right) = Renaming.C7.myFringe left ++ fringe right\n\n\n\n\n" -- 
--------------------------------------------- describe "findEntity" $ do it "returns true if a (Located) Name is part of a HsBind 1" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do -- (t, toks) <- parseSourceFileTest "./DupDef/Dd1.hs" -- putParsedModule t toks parentr <- getRefactRenamed let mn = locToName (4,1) parentr let (Just (ln@(GHC.L _ n))) = mn let declsr = hsBinds parentr duplicatedDecls = definingDeclsNames [n] declsr True False res = findEntity ln duplicatedDecls res2 = findEntity n duplicatedDecls -- res = findEntity' ln duplicatedDecls return (res,res2,duplicatedDecls,ln) -- ((r,r2,d,_l),_s) <- ct $ runRefactGhcState comp ((r,r2,d,_l),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual d) `shouldBe` "[DupDef.Dd1.toplevel x = DupDef.Dd1.c GHC.Num.* x]" (showGhcQual _l) `shouldBe` "DupDef.Dd1.toplevel" ("1" ++ show r) `shouldBe` "1True" ("2" ++ show r2) `shouldBe` "2True" -- --------------------------------- it "returns true if a (Located) Name is part of a HsBind 2" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do -- (t, toks) <- parseSourceFileTest "./DupDef/Dd1.hs" -- putParsedModule t toks parentr <- getRefactRenamed let mn = locToName (31,7) parentr let (Just (ln@(GHC.L _ n))) = mn let mnd = locToName (30,1) parentr let (Just ((GHC.L _ nd))) = mnd let declsr = hsBinds parentr duplicatedDecls = definingDeclsNames [nd] declsr True False res = findEntity ln duplicatedDecls res2 = findEntity n duplicatedDecls -- res = findEntity' ln duplicatedDecls return (res,res2,duplicatedDecls,ln) -- ((r,r2,d,_l),_s) <- ct $ runRefactGhcState comp ((r,r2,d,_l),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual d) `shouldBe` "[DupDef.Dd1.dd q\n = do { let ss = 5;\n"++ " GHC.Base.return (ss GHC.Num.+ q) }]" (showGhcQual _l) `shouldBe` "ss" ("1" ++ show r) `shouldBe` "1True" ("2" ++ show r2) `shouldBe` "2True" -- ----------------------------------------------------------------- it "returns false if a syntax phrase is not part of another" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do -- (t, toks) <- parseSourceFileTest "./DupDef/Dd1.hs" -- putParsedModule t toks parentr <- getRefactRenamed let mn = locToName (4,1) parentr let (Just (ln@(GHC.L _ n))) = mn let mltup = locToName (11,1) parentr let (Just ((GHC.L _ tup))) = mltup let declsr = hsBinds parentr duplicatedDecls = definingDeclsNames [n] declsr True False res = findEntity tup duplicatedDecls -- res = findEntity' ln duplicatedDecls return (res,duplicatedDecls,ln) -- ((r,d,_l),_s) <- ct $ runRefactGhcState comp ((r,d,_l),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual d) `shouldBe` "[DupDef.Dd1.toplevel x = DupDef.Dd1.c GHC.Num.* x]" -- (show l) `shouldBe` "foo" -- (show r) `shouldBe` "foo" ("1" ++ show r) `shouldBe` "1False" -- ("2" ++ show r2) `shouldBe` "2False" -- ----------------------------------------------------------------- it "Finds an entity in [HsBind Name]" $ do pending -- "write this test" -- ----------------------------------------------------------------- it "Finds an entity in (MatchGroup matches _)" $ do pending -- "write this test" -- ----------------------------------------------------------------- it "Finds an entity in (HsLet decls _)" $ do pending -- "write this test" -- ----------------------------------------------------------------- it "Finds an entity in (HsLet _ e1)" $ do 
pending -- "write this test" -- ----------------------------------------------------------------- it "Finds an entity in (HsLet decls _)" $ do pending -- "write this test" -- ----------------------------------------------------------------- it "Finds an entity in (PatBind pat rhs _ _ _)" $ do pending -- "write this test" -- ----------------------------------------------------------------- it "Finds an entity in (Match _ _ rhs)" $ do pending -- "write this test" -- ----------------------------------------------------------------- it "Finds an entity in (LetStmt binds)" $ do pending -- "write this test" -- ----------------------------------------------------------------- it "Finds an entity in (BindStmt _ rhs _ _)" $ do pending -- "write this test" -- --------------------------------------------- describe "modIsExported" $ do it "Returns True if the module is explicitly exported" $ do (t, _toks, _tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/Declare.hs" let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let renamed = fromJust $ GHC.tm_renamed_source t let (Just (modName,_)) = getModuleName parsed (modIsExported modName renamed) `shouldBe` True it "Returns True if the module is exported by default" $ do (t, _toks, _tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/Declare1.hs" let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let renamed = fromJust $ GHC.tm_renamed_source t let (Just (modName,_)) = getModuleName parsed (modIsExported modName renamed) `shouldBe` True it "Returns False if the module is explicitly not exported" $ do (t, _toks, _tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/Declare2.hs" let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let renamed = fromJust $ GHC.tm_renamed_source t let (Just (modName,_)) = getModuleName parsed (modIsExported modName renamed) `shouldBe` False -- --------------------------------------------- describe "isExported" $ do it "returns True if a GHC.Name is exported" $ do -- (t, toks, tgt) <- ct $ parsedFileGhc "./Renaming/B1.hs" let comp = do renamed <- getRefactRenamed let mn1 = locToName (11,1) renamed let (Just (GHC.L _ myFringe)) = mn1 let mn2 = locToName (15,1) renamed let (Just (GHC.L _ sumSquares)) = mn2 exMyFring <- isExported myFringe exSumSquares <- isExported sumSquares return (myFringe,exMyFring,sumSquares,exSumSquares) ((mf,emf,ss,ess),_s) <- ct $ runRefactGhc comp [Left "./Renaming/B1.hs"] initialState testOptions (showGhcQual mf) `shouldBe` "Renaming.B1.myFringe" emf `shouldBe` True (showGhcQual ss) `shouldBe` "Renaming.B1.sumSquares" ess `shouldBe` False -- --------------------------------------------- describe "addHiding" $ do it "add a hiding entry to the imports with no existing hiding" $ do (t1, _toks1, tgt1) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do -- (t1,_toks1) <- parseSourceFileTest "./DupDef/Dd1.hs" clearParsedModule (t2, toks2,_) <- parseSourceFileTest "./DupDef/Dd2.hs" -- clearParsedModule let renamed1 = fromJust $ GHC.tm_renamed_source t1 let renamed2 = fromJust $ GHC.tm_renamed_source t2 let parsed1 = GHC.pm_parsed_source $ GHC.tm_parsed_module t1 let parsed2 = GHC.pm_parsed_source $ GHC.tm_parsed_module t2 let mn = locToName (4,1) renamed1 let (Just (GHC.L _ _n)) = mn let Just (modName,_) = getModuleName parsed1 -- n1 <- mkNewGhcName Nothing "n1" -- n2 <- mkNewGhcName Nothing "n2" let n1 = mkRdrName "n1" n2 = mkRdrName "n2" -- res <- addHiding modName renamed2 [n1,n2] res <- addHiding modName parsed2 [n1,n2] putRefactParsed res emptyAnns return (res,renamed2,toks2) -- ((_r,_r2,_tk2),s) 
<- ct $ runRefactGhc comp tgt1 (initialState { rsModule = initRefactModule t1}) testOptions ((_r,_r2,_tk2),s) <- ct $ runRefactGhc comp tgt1 (initialLogOnState { rsModule = initRefactModule t1}) testOptions putStrLn $ showAnnDataFromState s putStrLn $ showGhc $ annsFromState s (sourceFromState s) `shouldBe` "module DupDef.Dd2 where\n\nimport DupDef.Dd1 hiding (n1,n2)\n\n\nf2 x = ff (x+1)\n\nmm = 5\n\n\n" ------------------------------------ it "adds a hiding entry to the imports with an existing hiding" $ do (t1, _toks1, tgt1) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do -- (t1,_toks1) <- parseSourceFileTest ".//DupDef/Dd1.hs" clearParsedModule (t2, toks2,_) <- parseSourceFileTest "./DupDef/Dd3.hs" -- clearParsedModule let renamed1 = fromJust $ GHC.tm_renamed_source t1 let renamed2 = fromJust $ GHC.tm_renamed_source t2 let parsed1 = GHC.pm_parsed_source $ GHC.tm_parsed_module t1 let parsed2 = GHC.pm_parsed_source $ GHC.tm_parsed_module t2 let mn = locToName (4,1) renamed1 let (Just (GHC.L _ _n)) = mn let Just (modName,_) = getModuleName parsed1 let n1 = mkRdrName "n1" n2 = mkRdrName "n2" res <- addHiding modName parsed2 [n1,n2] return (res,renamed2,toks2) -- ((_r,t,_r2,_tk2),_s) <- ct $ runRefactGhcState comp ((_r,_r2,_tk2),s) <- ct $ runRefactGhc comp tgt1 (initialState { rsModule = initRefactModule t1}) testOptions (sourceFromState s) `shouldBe` "module DupDef.Dd3 where\n\nimport DupDef.Dd1 hiding (dd,n1,n2)\n\n\nf2 x = ff (x+1)\n\nmm = 5\n\n\n" -- --------------------------------------------- describe "usedWithoutQualR" $ do it "Returns True if the identifier is used unqualified" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do -- (t, toks) <- parseSourceFileTest "./DupDef/Dd1.hs" -- putParsedModule t toks renamed <- getRefactRenamed parsed <- getRefactParsed let Just n@(GHC.L _ name) = locToName (14,21) renamed let res = usedWithoutQualR name parsed return (res,n,name) -- ((r,n1,n2),s) <- runRefactGhc comp $ initialState { rsTokenStream = toks } -- ((r,n1,n2),_s) <- ct $ runRefactGhcState comp ((r,n1,n2),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (GHC.getOccString n2) `shouldBe` "zip" (showGhcQual n1) `shouldBe` "GHC.List.zip" r `shouldBe` True -- --------------------------------- it "Returns False if the identifier is used qualified" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./FreeAndDeclared/Declare.hs" let comp = do -- (t, toks) <- parseSourceFileTest "./FreeAndDeclared/Declare.hs" -- putParsedModule t toks renamed <- getRefactRenamed parsed <- getRefactParsed let Just n@(GHC.L _ name) = locToName (36,12) renamed -- let PNT np@(GHC.L _ namep) = locToPNT (GHC.mkFastString "./test/testdata/FreeAndDeclared/Declare.hs") (36,12) parsed let Just (GHC.L _ namep) = locToRdrName (36,12) parsed let res = usedWithoutQualR name parsed return (res,namep,name,n) -- ((r,np,n1,n2),s) <- runRefactGhc comp $ initialState { rsTokenStream = toks } -- ((r,np,n1,n2),_s) <- ct $ runRefactGhcState comp ((r,np,n1,n2),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (myShow np) `shouldBe` "Qual:G:gshow" (myShow $ GHC.getRdrName n1) `shouldBe` "Exact:Data.Generics.Text.gshow" -- (GHC.showRdrName $ GHC.getRdrName n1) `shouldBe` "Data.Generics.Text.gshow" (showGhcQual $ GHC.getRdrName n1) `shouldBe` "Data.Generics.Text.gshow" -- (showGhcQual $ GHC.occNameFS $ GHC.getOccName name) `shouldBe` "G.gshow" -- (GHC.getOccString name) `shouldBe` "G.gshow" (showGhcQual n2) `shouldBe` 
"Data.Generics.Text.gshow" r `shouldBe` False -- --------------------------------------------- describe "isExplicitlyExported" $ do it "Returns True if the identifier is explicitly exported" $ do pending -- "write this " it "Returns False if the identifier is not explicitly exported" $ do pending -- "write this " -- --------------------------------------------- describe "causeNameClashInExports" $ do it "Returns True if there is a clash" $ do (t, _toks, _tgt) <- ct $ parsedFileGhc "./Renaming/ConflictExport.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let modu = GHC.ms_mod $ GHC.pm_mod_summary $ GHC.tm_parsed_module t -- Is this the right module? let Just (modName,_) = getModuleName parsed let Just (GHC.L _ myFringe) = locToName (9,1) renamed (showGhcQual myFringe) `shouldBe` "Renaming.ConflictExport.myFringe" -- old name is myFringe -- new name is "Renaming.ConflictExport.fringe" let newName = mkTestGhcName 1 (Just modu) "fringe" (showGhcQual modu) `shouldBe` "main@main:Renaming.ConflictExport" (showGhcQual newName) `shouldBe` "Renaming.ConflictExport.fringe" (showGhcQual $ GHC.localiseName newName) `shouldBe` "fringe" let res = causeNameClashInExports myFringe newName modName renamed res `shouldBe` True it "Returns False if there is no clash" $ do (t, _toks, _tgt) <- ct $ parsedFileGhc "./Renaming/ConflictExport.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let modu = GHC.ms_mod $ GHC.pm_mod_summary $ GHC.tm_parsed_module t -- Is this the right module? let Just (modName,_) = getModuleName parsed let Just (GHC.L _ myFringe) = locToName (9,1) renamed (showGhcQual myFringe) `shouldBe` "Renaming.ConflictExport.myFringe" -- old name is myFringe -- new name is "Renaming.ConflictExport.fringe" let newName = mkTestGhcName 1 (Just modu) "fringeOk" (showGhcQual modu) `shouldBe` "main@main:Renaming.ConflictExport" (showGhcQual newName) `shouldBe` "Renaming.ConflictExport.fringeOk" (showGhcQual $ GHC.localiseName newName) `shouldBe` "fringeOk" let res = causeNameClashInExports myFringe newName modName renamed res `shouldBe` False -- -------------------------------------- describe "getDeclAndToks" $ do it "returns a declaration and its associated tokens" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Md1.hs" let comp = do -- (t, toks) <- parseSourceFileTest "./MoveDef/Md1.hs" -- putParsedModule t toks renamed <- getRefactRenamed let Just n@(GHC.L _ name) = locToName (40,4) renamed let res = getDeclAndToks name True toks renamed return (res,n,name) -- (((d,_t),n1,_n2),_s) <- ct $ runRefactGhcState comp (((d,_t),n1,_n2),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- (((d,t),n1,n2),s) <- runRefactGhcStateLog comp Debug (showGhcQual n1) `shouldBe` "MoveDef.Md1.tlFunc" (showGhcQual d) `shouldBe` "[MoveDef.Md1.tlFunc x = MoveDef.Md1.c GHC.Num.* x]" (show $ getStartEndLoc d) `shouldBe` "((40,1),(40,17))" -- (show $ startEndLocIncComments (toksFromState s) d) `shouldBe` "((40,1),(41,18))" -- (showToks t) `shouldBe` "[((40,0),(40,0),\"\"),((40,0),(40,6),\"tlFunc\"),((40,7),(40,8),\"x\"),((40,9),(40,10),\"=\"),((40,11),(40,12),\"c\"),((40,13),(40,14),\"*\"),((40,15),(40,16),\"x\"),((41,0),(41,17),\"-- Comment at end\")]" -- -------------------------------------- {- This function is not used and has been removed describe "getDeclToks" $ do it "Returns a the tokens associated with a declaration" $ do let comp = do (t, toks) <- 
parseSourceFileTest "./Demote/D1.hs" putParsedModule t toks renamed <- getRefactRenamed let Just n@(GHC.L _ name) = locToName (GHC.mkFastString "./test/testdata/Demote/D1.hs") (9,1) renamed let res = getDeclToks name False (hsBinds renamed) toks return (res,n,name) ((dt,n1,n2),s) <- ct $ runRefactGhcState comp (showGhcQual n1) `shouldBe` "Demote.D1.sq" (showToks dt) `shouldBe` "[(((9,1),(9,1)),ITsemi,\"\"),(((9,1),(9,3)),ITvarid \"sq\",\"sq\"),(((9,4),(9,5)),ITvarid \"x\",\"x\"),(((9,6),(9,7)),ITequal,\"=\"),(((9,8),(9,9)),ITvarid \"x\",\"x\"),(((9,10),(9,11)),ITvarsym \"^\",\"^\"),(((9,11),(9,14)),ITvarid \"pow\",\"pow\")]" -} -- --------------------------------------- describe "rmQualifier" $ do it "Removes the qualifiers from a list of identifiers in a given syntax phrase" $ do (t, toks, tgt) <- ct $ parsedFileGhc "./Demote/WhereIn3.hs" let comp = do renamed <- getRefactRenamed let Just (GHC.L _ sq) = locToName (14, 1) renamed let ([sqDecl],_declToks) = getDeclAndToks sq True toks renamed res <- rmQualifier [sq] sqDecl return (res,sqDecl,sq) -- ((r,d,n1),_s) <- runRefactGhc comp $ initialState { rsModule = initRefactModule t } ((r,d,n1),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n1) `shouldBe` "Demote.WhereIn3.sq" (showGhcQual d) `shouldBe` "Demote.WhereIn3.sq pow 0 = 0\nDemote.WhereIn3.sq pow z = z GHC.Real.^ pow" (showGhcQual r) `shouldBe` "sq pow 0 = 0\nsq pow z = z GHC.Real.^ pow" it "Removes the qualifiers and updates the tokens" $ do pending -- "Is this needed?" -- --------------------------------------- describe "usedByRhs" $ do it "Returns True if a given identifier is used in the RHS of a syntax element" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./MoveDef/Demote.hs" let comp = do -- (t, toks) <- parseSourceFileTest "./MoveDef/Demote.hs" -- putParsedModule t toks renamed <- getRefactRenamed let Just (GHC.L _ tl) = locToName (4,1) renamed let Just (GHC.L _ name) = locToName (7,1) renamed let decls = (definingDeclsNames [tl] (hsBinds renamed) False False) decls' <- rmQualifier [name] decls -- let res = usedByRhs decls [name] let res = usedByRhs decls' [name] return (res,decls,tl,name) -- ((r,d,n1,n2),_s) <- ct $ runRefactGhcState comp ((r,d,n1,n2),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n1) `shouldBe` "MoveDef.Demote.toplevel" (showGhcQual n2) `shouldBe` "MoveDef.Demote.c" (showGhcQual d) `shouldBe` "[MoveDef.Demote.toplevel x = MoveDef.Demote.c GHC.Num.* x]" r `shouldBe` True -- --------------------------------------- describe "autoRenameLocalVar" $ do it "renames an identifier if it is used, no token update" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./Demote/WhereIn4.hs" let comp = do -- (t, toks) <- parseSourceFileTest "./Demote/WhereIn4.hs" -- putParsedModule t toks renamed <- getRefactRenamed let Just (GHC.L _ tl) = locToName (11,1) renamed let Just (GHC.L _ name) = locToName (11,21) renamed let decls = (definingDeclsNames [tl] (hsBinds renamed) False False) decls' <- autoRenameLocalVar False name decls return (decls',decls,tl,name) -- ((r,d,n1,n2),s) <- ct $ runRefactGhcState comp ((r,d,n1,n2),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n1) `shouldBe` "Demote.WhereIn4.sumSquares" (showGhcQual n2) `shouldBe` "p" (showGhcQual d) `shouldBe` "[Demote.WhereIn4.sumSquares x y\n = Demote.WhereIn4.sq p x GHC.Num.+ Demote.WhereIn4.sq p y\n where\n p = 2]" (showGhcQual r) `shouldBe` 
"[Demote.WhereIn4.sumSquares x y\n = Demote.WhereIn4.sq p_1 x GHC.Num.+ Demote.WhereIn4.sq p_1 y\n where\n p_1 = 2]" (sourceFromState s) `shouldBe` "module Demote.WhereIn4 where\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the top level 'sq' to 'sumSquares'\n--In this case (there is single matches), if possible,\n--the parameters will be folded after demoting and type sigature will be removed.\n\nsumSquares x y = sq p x + sq p y\n where p=2 {-There is a comment-}\n\nsq::Int->Int->Int\nsq pow z = z^pow --there is a comment\n\nanotherFun 0 y = sq y\n where sq x = x^2\n\n" -- --------------------------------- it "renames an identifier if it is used and updates tokens" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./Demote/WhereIn4.hs" let comp = do -- (t, toks) <- parseSourceFileTest "./Demote/WhereIn4.hs" -- putParsedModule t toks renamed <- getRefactRenamed let Just (GHC.L _ tl) = locToName (11,1) renamed let Just (GHC.L _ name) = locToName (11,21) renamed let decls = (definingDeclsNames [tl] (hsBinds renamed) False False) decls' <- autoRenameLocalVar True name decls return (decls',decls,tl,name) -- ((r,d,n1,n2),s) <- ct $ runRefactGhcState comp ((r,d,n1,n2),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (showGhcQual n1) `shouldBe` "Demote.WhereIn4.sumSquares" (showGhcQual n2) `shouldBe` "p" (showGhcQual d) `shouldBe` "[Demote.WhereIn4.sumSquares x y\n = Demote.WhereIn4.sq p x GHC.Num.+ Demote.WhereIn4.sq p y\n where\n p = 2]" (showGhcQual r) `shouldBe` "[Demote.WhereIn4.sumSquares x y\n = Demote.WhereIn4.sq p_1 x GHC.Num.+ Demote.WhereIn4.sq p_1 y\n where\n p_1 = 2]" (sourceFromState s) `shouldBe` "module Demote.WhereIn4 where\n\n--A definition can be demoted to the local 'where' binding of a friend declaration,\n--if it is only used by this friend declaration.\n\n--Demoting a definition narrows down the scope of the definition.\n--In this example, demote the top level 'sq' to 'sumSquares'\n--In this case (there is single matches), if possible,\n--the parameters will be folded after demoting and type sigature will be removed.\n\nsumSquares x y = sq p_1 x + sq p_1 y\n where p_1=2 {-There is a comment-}\n\nsq::Int->Int->Int\nsq pow z = z^pow --there is a comment\n\nanotherFun 0 y = sq y\n where sq x = x^2\n\n" -- --------------------------------------- describe "mkNewName" $ do it "Makes a new name that does not clash with existing ones" $ do (mkNewName "f" ["f"] 0) `shouldBe` "f_1" (mkNewName "f" ["g"] 0) `shouldBe` "f" (mkNewName "f" ["g","f_1","f"] 0) `shouldBe` "f_2" -- --------------------------------------- describe "addImportDecl" $ do it "adds an import entry to a module with already existing, non conflicting imports and other declarations" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs" let comp = do -- (_t1,_toks1) <- parseSourceFileTest ".//DupDef/Dd1.hs" clearParsedModule getModuleGhc "./DupDef/Dd2.hs" -- (t2, toks2,_) <- parsedFileGhc "./DupDef/Dd2.hs" -- clearParsedModule -- let renamed1 = fromJust $ GHC.tm_renamed_source t1 -- let renamed2 = fromJust $ GHC.tm_renamed_source t2 renamed2 <- getRefactRenamed parsed2 <- getRefactParsed -- let parsed1 = GHC.pm_parsed_source $ GHC.tm_parsed_module t1 let listModName = GHC.mkModuleName "Data.List" -- n1 <- mkNewGhcName Nothing "n1" -- n2 <- mkNewGhcName Nothing "n2" res <- addImportDecl parsed2 
listModName Nothing False False False Nothing False [] return (res,renamed2) ((_r,_r2),s) <- ct $ runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (sourceFromState s) `shouldBe` "module DupDef.Dd2 where\n\nimport DupDef.Dd1\nimport Data.List\n\nf2 x = ff (x+1)\n\nmm = 5\n\n\n" -- --------------------------------- it "adds an import entry to a module with some declaration, but no explicit imports." $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./TypeUtils/Simplest.hs" let comp = do -- (t1,_toks1) <- parseSourceFileTest "./TypeUtils/Simplest.hs" -- clearParsedModule let renamed1 = fromJust $ GHC.tm_renamed_source t parsed <- getRefactParsed let listModName = GHC.mkModuleName "Data.List" res <- addImportDecl parsed listModName Nothing False False False Nothing False [] return (res,renamed1) -- ((_r,t,_r2,_tk2),_s) <- ct $ runRefactGhcState comp ((_r,_r2),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (sourceFromState s) `shouldBe` "module Simplest where\n import Data.List\n\n simple x = x\n " -- --------------------------------- it "adds an import entry to a module with explicit imports, but no declarations." $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./TypeUtils/JustImports.hs" let comp = do -- (t1,_toks1) <- parseSourceFileTest "./TypeUtils/JustImports.hs" -- clearParsedModule let renamed1 = fromJust $ GHC.tm_renamed_source t parsed <- getRefactParsed let listModName = GHC.mkModuleName "Data.List" res <- addImportDecl parsed listModName Nothing False False False Nothing False [] return (res,renamed1) ((_r,_r2),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (sourceFromState s) `shouldBe` "module JustImports where\n\n import Data.Maybe\n import Data.List" -- --------------------------------- it "adds an import entry to a module with no declarations and no explicit imports" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./TypeUtils/Empty.hs" let comp = do -- (t1,toks1) <- parseSourceFileTest "./TypeUtils/Empty.hs" -- putParsedModule t1 toks1 renamed1 <- getRefactRenamed parsed <- getRefactParsed -- let renamed1 = fromJust $ GHC.tm_renamed_source t1 let listModName = GHC.mkModuleName "Data.List" res <- addImportDecl parsed listModName Nothing False False False Nothing False [] return (res,renamed1) ((_r,_r2),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (sourceFromState s) `shouldBe` "module Empty where\nimport Data.List" -- --------------------------------------- describe "addItemsToImport" $ do it "adds an item to an import entry with no items" $ do (t,_toks, tgt) <- ct $ parsedFileGhc "./TypeUtils/JustImports.hs" let comp = do -- (t1,_toks1) <- parseSourceFileTest "./TypeUtils/JustImports.hs" -- clearParsedModule let renamed1 = fromJust $ GHC.tm_renamed_source t parsed <- getRefactParsed let modName = GHC.mkModuleName "Data.Maybe" -- itemName <- mkNewGhcName Nothing "fromJust" let itemName = mkRdrName "fromJust" res <- addItemsToImport modName parsed [itemName] toks <- fetchToks return (res,toks,renamed1,toks) -- ((_r,_t,_r2,_tk2),s) <- ct $ runRefactGhcState comp ((_r,_t,_r2,_tk2),s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- This is the correct behavior. If the import doesn't have an import list, creating -- one for an item effectively reduces the imported interface. 
(sourceFromState s) `shouldBe` "module JustImports where\n\nimport Data.Maybe\n" -- (GHC.showRichTokenStream t) `shouldBe` "module JustImports where\n\n import Data.Maybe\n " -- Not sure if this should be a test {- it "Try adding more than one item to an existing import entry with no items, using separate calls." $ do let comp = do (t1,_toks1) <- parseSourceFileTest "./TypeUtils/JustImports.hs" -- clearParsedModule let renamed1 = fromJust $ GHC.tm_renamed_source t1 let modName = GHC.mkModuleName "Data.Maybe" itemName <- mkNewGhcName Nothing "fromJust" res <- addItemsToImport modName renamed1 [itemName] --listModName Nothing False False False Nothing False [] itemName2 <- mkNewGhcName Nothing "isJust" res2 <- addItemsToImport modName res [itemName2] toks <- fetchToks return (res2,toks,renamed,_toks1) ((_r,t,r2,tk2),s) <- ct $ runRefactGhcState comp (GHC.showRichTokenStream t) `shouldBe` "module JustImports where\n\n import Data.Maybe (fromJust,isJust)\n " -} -- --------------------------------- it "adds an item to an import entry with existing items." $ do (t, toks, tgt) <- ct $ parsedFileGhc "./TypeUtils/SelectivelyImports.hs" let comp = do -- (t1,_toks1) <- parseSourceFileTest "./TypeUtils/SelectivelyImports.hs" let renamed1 = fromJust $ GHC.tm_renamed_source t parsed <-getRefactParsed let modName = GHC.mkModuleName "Data.Maybe" -- itemName <- mkNewGhcName Nothing "isJust" let itemName = mkRdrName "fromJust" res <- addItemsToImport modName parsed [itemName] return (res,renamed1,toks) -- ((_r,_r2,_tk2),_s) <- ct $ runRefactGhcState comp ((_r,_r2,_tk2),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions (GHC.showRichTokenStream toks) `shouldBe` "module SelectivelyImports where\n\nimport Data.Maybe (fromJust,isJust)\n\n__ = id\n" {- -- test after properly inserting conditional identifier it "Add an item to an import entry with existing items, passing existing conditional identifier." 
$ do let comp = do (t1,_toks1) <- parseSourceFileTest "./TypeUtils/SelectivelyImports.hs" -- clearParsedModule let renamed1 = fromJust $ GHC.tm_renamed_source t1 let modName = GHC.mkModuleName "Data.Maybe" itemName <- mkNewGhcName Nothing "isJust" conditionalId <- mkNewGhcName Nothing "fromJust" res <- addItemsToImport modName renamed1 [itemName] (Just conditionalId) toks <- fetchToks return (res,toks,renamed1,_toks1) ((_r,t,r2,tk2),s) <- ct $ runRefactGhcState comp (GHC.showRichTokenStream t) `shouldBe` "module SelectivelyImports where\n\n import Data.Maybe (fromJust,isJust)\n\n __ = id\n " it "Add an item to an import entry with existing items, passing missing conditional identifier" $ do let comp = do (t1,_toks1) <- parseSourceFileTest "./TypeUtils/SelectivelyImports.hs" -- clearParsedModule let renamed1 = fromJust $ GHC.tm_renamed_source t1 let modName = GHC.mkModuleName "Data.Maybe" itemName <- mkNewGhcName Nothing "isJust" res <- addItemsToImport modName renamed1 [itemName] (Just itemName) toks <- fetchToks return (res,toks,renamed1,_toks1) ((_r,t,r2,tk2),s) <- ct $ runRefactGhcState comp (GHC.showRichTokenStream t) `shouldBe` "module SelectivelyImports where\n\n import Data.Maybe (fromJust)\n\n __ = id\n " -} -- --------------------------------------- describe "hsValBinds" $ do it "returns ValBinds for RenamedSource" $ do (t,_toks,_) <- ct $ parsedFileGhc "./LiftOneLevel/D1.hs" let renamed = fromJust $ GHC.tm_renamed_source t let vb = hsValBinds renamed (showGhcQual vb) `shouldBe` "LiftOneLevel.D1.sumSquares (x : xs)\n = sq x GHC.Num.+ LiftOneLevel.D1.sumSquares xs\n where\n sq x = x GHC.Real.^ pow\n pow = 2\nLiftOneLevel.D1.sumSquares [] = 0\nLiftOneLevel.D1.main = LiftOneLevel.D1.sumSquares [1 .. 4]" -- --------------------------------- it "returns ValBinds for (HsGroup Name)" $ do (t,_toks,_) <- ct $ parsedFileGhc "./LiftOneLevel/D1.hs" let (g,_,_,_) = fromJust $ GHC.tm_renamed_source t let vb = hsValBinds g (showGhcQual vb) `shouldBe` "LiftOneLevel.D1.sumSquares (x : xs)\n = sq x GHC.Num.+ LiftOneLevel.D1.sumSquares xs\n where\n sq x = x GHC.Real.^ pow\n pow = 2\nLiftOneLevel.D1.sumSquares [] = 0\nLiftOneLevel.D1.main = LiftOneLevel.D1.sumSquares [1 .. 
4]" -- --------------------------------------- describe "stripLeadingSpaces" $ do it "Strips the longest common space prefix from a list of Strings" $ do (stripLeadingSpaces []) `shouldBe` [] (stripLeadingSpaces ["a"," b"," c"]) `shouldBe` ["a"," b"," c"] (stripLeadingSpaces [" a"," b"," c"]) `shouldBe` ["a","b"," c"] -- --------------------------------------- describe "unspace" $ do it "Reduces all sequences of more than one space to a single one" $ do (unspace []) `shouldBe` [] (unspace "a") `shouldBe` "a" (unspace "a bc") `shouldBe` "a bc" (unspace "a bc") `shouldBe` "a bc" (unspace "ab c") `shouldBe` "ab c" (unspace " ab c") `shouldBe` " ab c" (unspace "abc ") `shouldBe` "abc " -- --------------------------------------- describe "isFieldName" $ do it "returns True if a Name is a field name" $ do (t,_toks,_) <- ct $ parsedFileGhc "./Renaming/Field3.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ nf) = locToName (10,21) renamed let Just (GHC.L _ n) = locToName (10,1) renamed (showGhcQual n) `shouldBe` "Field3.absPoint" (showGhcQual nf) `shouldBe` "Field3.pointx" -- (show $ isFieldName nf) `shouldBe` "True" -- (show $ isFieldName n) `shouldBe` "False" -- --------------------------------------- describe "name predicates" $ do it "classifies names" $ do (t,_toks,_) <- ct $ parsedFileGhc "./Con.hs" let renamed = fromJust $ GHC.tm_renamed_source t let Just (GHC.L _ n1) = locToName (3,6) renamed let Just (GHC.L _ n2) = locToName (3,12) renamed let Just (GHC.L _ n3) = locToName (3,16) renamed let Just (GHC.L _ n4) = locToName (5,1) renamed let Just (GHC.L _ n5) = locToName (8,5) renamed (showGhcQual n1) `shouldBe` "Main.Foo" "11" ++ (show $ GHC.isTyVarName n1) `shouldBe` "11False" "12" ++ (show $ GHC.isTyConName n1) `shouldBe` "12True" "13" ++ (show $ GHC.isDataConName n1) `shouldBe` "13False" "14" ++ (show $ GHC.isValName n1) `shouldBe` "14False" "15" ++ (show $ GHC.isVarName n1) `shouldBe` "15False" (showGhcQual n2) `shouldBe` "Main.Ff" "21" ++ (show $ GHC.isTyVarName n2) `shouldBe` "21False" "22" ++ (show $ GHC.isTyConName n2) `shouldBe` "22False" "23" ++ (show $ GHC.isDataConName n2) `shouldBe` "23True" "24" ++ (show $ GHC.isValName n2) `shouldBe` "24True" "25" ++ (show $ GHC.isVarName n2) `shouldBe` "25False" (showGhcQual n3) `shouldBe` "Main.fooA" -- field name "31" ++ (show $ GHC.isTyVarName n3) `shouldBe` "31False" "32" ++ (show $ GHC.isTyConName n3) `shouldBe` "32False" "33" ++ (show $ GHC.isDataConName n3) `shouldBe` "33False" "34" ++ (show $ GHC.isValName n3) `shouldBe` "34True" "35" ++ (show $ GHC.isVarName n3) `shouldBe` "35True" (showGhcQual n4) `shouldBe` "Main.xx" "41" ++ (show $ GHC.isTyVarName n4) `shouldBe` "41False" "42" ++ (show $ GHC.isTyConName n4) `shouldBe` "42False" "43" ++ (show $ GHC.isDataConName n4) `shouldBe` "43False" "44" ++ (show $ GHC.isValName n4) `shouldBe` "44True" "45" ++ (show $ GHC.isVarName n4) `shouldBe` "45True" (showGhcQual n5) `shouldBe` "GHC.Classes.==" "51" ++ (show $ GHC.isTyVarName n5) `shouldBe` "51False" "52" ++ (show $ GHC.isTyConName n5) `shouldBe` "52False" "53" ++ (show $ GHC.isDataConName n5) `shouldBe` "53False" "54" ++ (show $ GHC.isValName n5) `shouldBe` "54True" "55" ++ (show $ GHC.isVarName n5) `shouldBe` "55True" -- (show $ isFieldName n3) `shouldBe` "True" -- (show $ isFieldName n2) `shouldBe` "False" -- --------------------------------------------------------------------- describe "rdrName2Name" $ do it "finds a Name for a top-level RdrName" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./TokenTest.hs" let 
renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let comp = do let (Just rdr) = locToRdrName (5,1) parsed (Just name) = locToName (5,1) renamed nname <- rdrName2Name rdr return (rdr,name,nname) ((r,n,nn),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((r,n,nn),_s) <- runRefactGhc comp (initialLogOnState { rsModule = initRefactModule t }) testOptions -- (showGhcQual (r,nn,GHC.nameUnique nn)) `shouldBe` "" (showGhcQual (r,n,nn)) `shouldBe` "(bob, TokenTest.bob, TokenTest.bob)" -- --------------------------------- it "finds a Name for a local RdrName" $ do (t, _toks, tgt) <- ct $ parsedFileGhc "./TokenTest.hs" let renamed = fromJust $ GHC.tm_renamed_source t let parsed = GHC.pm_parsed_source $ GHC.tm_parsed_module t let comp = do let (Just rdr) = locToRdrName (14,7) parsed (Just name) = locToName (14,7) renamed nname <- rdrName2Name rdr return (rdr,name,nname) ((r,n,nn),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions -- ((r,n,nn),_s) <- runRefactGhc comp (initialLogOnState { rsModule = initRefactModule t }) testOptions -- (showGhcQual (r,nn,GHC.nameUnique nn)) `shouldBe` "" (showGhcQual (r,n,nn)) `shouldBe` "(bar, bar, bar)" -- --------------------------------------- myShow :: GHC.RdrName -> String myShow n = case n of GHC.Unqual on -> ("Unqual:" ++ (showGhcQual on)) GHC.Qual ms on -> ("Qual:" ++ (showGhcQual ms) ++ ":" ++ (showGhcQual on)) GHC.Orig ms on -> ("Orig:" ++ (showGhcQual ms) ++ ":" ++ (showGhcQual on)) GHC.Exact en -> ("Exact:" ++ (showGhcQual en)) -- --------------------------------------------------------------------- -- Helper functions -- ---------------------------------------------------- -- EOF
mpickering/HaRe
test/TypeUtilsSpec.hs
bsd-3-clause
205,994
0
40
50,576
39,274
19,587
19,687
-1
-1
{-# OPTIONS_GHC -XBangPatterns -fglasgow-exts -XUndecidableInstances #-}

-- | Ideals are represented as lists of tuples consisting of
-- a Poly and a Sugar.
module Yaiba.Ideal where

import Yaiba.Monomial
import Yaiba.Polynomial
import Yaiba.Sugar
import qualified Data.List as DL
import qualified Data.Vector as DV
import Control.DeepSeq
import Data.Ord
import Prelude hiding (rem)

newtype Ideal ord = I (DV.Vector (Poly ord,Sugar ord))

instance NFData (Ideal ord) where
  rnf (I a) = DV.foldl' (\_ x -> rnf x) () a

instance Ord (Mon ord) => Show (Ideal ord) where
  show = show . getPolys

getPolys :: Ideal ord -> [Poly ord]
getPolys (I a) = DL.map fst (DV.toList a)

initSugars :: Ord (Mon ord) => [Poly ord] -> [(Poly ord, Sugar ord)]
initSugars as = DL.map initSugar (DL.sort as)

initPolySugars :: Ord (Mon ord) => [Poly ord] -> [PolySug ord]
initPolySugars as = DL.map (\a -> PS (initSugar a)) (DL.sort as)

initSugarsIdeal :: Ord (Mon ord) => [Poly ord] -> DV.Vector (Poly ord, Sugar ord)
initSugarsIdeal as = DV.fromList $! DL.map initSugar (DL.sort as)

initSugar :: Ord (Mon ord) => Poly ord -> (Poly ord, Sugar ord)
initSugar a = (a, S $! totalDeg a)

tau :: Ideal ord -> Int -> Mon ord -> Mon ord
tau (I as) index mon = lcmMon (monLT $ fst $ as DV.! index) mon

ifoldl' :: (a -> Int -> (Poly t, Sugar t) -> a) -> a -> Ideal t -> a
ifoldl' f acc (I as) = DV.ifoldl' f acc as

foldl' :: (a -> (Poly t, Sugar t) -> a) -> a -> Ideal t -> a
foldl' f acc (I as) = DV.foldl' f acc as

snoc :: Ideal ord -> (Poly ord, Sugar ord) -> Ideal ord
snoc (I as) a = I $! DV.snoc as a

numGens :: Ideal t -> Int
numGens (I as) = DV.length as

null :: Ideal ord -> Bool
null (I as) = DV.null as

(!) :: Ideal t -> Int -> (Poly t, Sugar t)
(!) (I as) index = (DV.!) as index

find :: Mon ord -> Ideal ord -> [(Poly ord, Sugar ord)]
find tauk fs = [ gi | i <- [0..(numGens fs -1)],
                      let gi@(polyi,_) = fs!i,
                      let taui = monLT polyi,
                      taui `isFactor` tauk ]

totalRed :: (Ord (Mon ord)) => (Poly ord, Sugar ord) -> Ideal ord -> (Poly ord, Sugar ord)
totalRed p (I fs) = totalRed' p nullPoly where
  totalRed' polysug rem =
    let !(rem', poly', sug, divOcc) = lppRedDivOcc polysug rem False
    in if divOcc
       then totalRed' (poly',sug) rem'
       else (rem'+poly',sug)
  lppRedDivOcc (poly,S psug) rem divOcc =
    if isNull poly
    then (rem,poly, S psug, divOcc)
    else let !((tauk,ck),newpoly) = deleteFindLT poly
             !fi = DV.find (\(f,_) -> monLT f `isFactor` tauk) fs
         in case fi of
              Just (polyf,S sugf) ->
                let (taui,ci) = leadTerm polyf
                    tauki = divide tauk taui
                in --(scalePoly ci rem, scalePoly ci poly - monMult (divide tauk taui) ck polyf,
                   (rem, poly - monMult tauki (ck/ci) polyf,
                    S $ max psug (degree tauki * sugf), True)
              Nothing -> lppRedDivOcc (newpoly,S psug) (monAdd tauk ck rem) divOcc

lppRed :: (Ord (Mon ord)) => (Poly ord, Sugar ord) -> Ideal ord -> (Poly ord, Sugar ord)
lppRed p (I fs) = lppRed' p nullPoly where
  lppRed' polysug rem =
    let !(rem', poly', sug, _) = lppRedDivOcc polysug rem False
    in (rem'+poly',sug)
  lppRedDivOcc (poly,S psug) rem divOcc =
    if isNull poly
    then (rem,poly, S psug, divOcc)
    else let !((tauk,ck),newpoly) = deleteFindLT poly
             !fi = DV.find (\(f,_) -> monLT f `isFactor` tauk) fs
         in case fi of
              Just (polyf,S sugf) ->
                let (taui,ci) = leadTerm polyf
                    tauki = divide tauk taui
                in --(scalePoly ci rem, scalePoly ci poly - monMult (divide tauk taui) ck polyf,
                   (rem, poly - monMult tauki (ck/ci) polyf,
                    S $ max psug (degree tauki * sugf), True)
              Nothing -> lppRedDivOcc (newpoly,S psug) (monAdd tauk ck rem) divOcc

{-
totalSaccRed p fs = totalRed' p nullPoly where
  totalRed' polysug rem =
    let !(rem', poly', sug, divOcc) = lppRedDivOcc polysug rem False
    in if divOcc
       then totalRed' (poly',sug) rem'
            --let (red,redsug) = (poly',sug) /. fs
            --in (rem'+red,redsug)
       else (rem'+poly',sug)
  lppRedDivOcc (poly,S psug) rem divOcc =
    if isNull poly
    then (rem,poly, S psug, divOcc)
    else let !((tauk,ck),newpoly) = deleteFindLT poly
             -- !fi = DV.find (\(f,_) -> monLT f `isFactor` tauk) fs
             !fi = find tauk fs
         in if fi == []
            then lppRedDivOcc (newpoly, S psug) (monAdd tauk ck rem) divOcc
            else let gs = DL.minimumBy (\a b -> comparing
                 case fi of
                   Just (polyf,S sugf) ->
                     let (taui,ci) = leadTerm polyf
                     in --(scalePoly ci rem, scalePoly ci poly - monMult (divide tauk taui) ck polyf,
                        (rem, poly - monMult (divide tauk taui) (ck/ci) polyf,
                         S $ max psug (degree tauk * sugf), True)
                   Nothing -> lppRedDivOcc (newpoly,S psug) (monAdd tauk ck rem) divOcc
-}
jeremyong/Yaiba
Yaiba/Ideal.hs
bsd-3-clause
6,757
0
20
3,146
1,773
914
859
-1
-1
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Main where

import Control.Applicative
import qualified Data.ByteString.Lazy as BS
import Data.Csv
import Data.List (nub, sort)
import qualified Data.Map as M
import Data.Text.Lazy (Text)
import Data.Vector (Vector)
import qualified Data.Vector as V
import System.IO
import Test.Hspec

import Data.FCA

-- | Test data for translations.
names, names1, names2 :: Vector Text
names = V.fromList ["abcd", "defg", "abcd", "ghij", "abcd", "jklm"]
names1 = names
names2 = V.reverse names

-- | Test reading and parsing of input data.
formatsSpec :: Spec
formatsSpec = do
    describe "Value translation" $ do
        it "maintains lexicographic order of input values" $ do
            let (imap1, _omap1) = toTranslation names1
            let (imap2, _omap2) = toTranslation names2
            imap1 `shouldBe` imap2

        it "maintains lexicographic order of output values" $ do
            let (_imap1, omap1) = toTranslation names1
            let (_imap2, omap2) = toTranslation names2
            omap1 `shouldBe` omap2

        it "filters duplicates in the input" $ do
            let (omap, imap) = toTranslation names
            let unique = nub . sort . V.toList $ names
            unique `shouldBe` M.keys imap
            unique `shouldBe` (nub . sort . M.fold (:) [] $ omap)

    describe "Reading input formats" $ do
        it "should read EA and EAV identically" $ do
            eav <- readEAV "data/fruit.eav"
            ea <- readEA "data/fruit.ea"
            ea `shouldBe` eav

        it "should read EA and TAB identically" $ do
            ea <- readEA "data/fruit.ea"
            tab <- readTab "data/fruit.tab"
            ea `shouldBe` tab

        it "should read EAV and TAB identically" $ do
            eav <- readEAV "data/fruit.eav"
            tab <- readTab "data/fruit.tab"
            eav `shouldBe` tab

    describe "Graph generation" $ do
        it "should result in the same graph for EA and EAV input" $ do
            f1 <- readEA "data/fruit.ea"
            f2 <- readEAV "data/fruit.eav"
            graphsShouldBeEqual f1 f2

        it "should result in the same graph for EA and TAB input" $ do
            f1 <- readEA "data/fruit.ea"
            f2 <- readTab "data/fruit.tab"
            graphsShouldBeEqual f1 f2

        it "should result in the same graph for EAV and TAB input" $ do
            f1 <- readEAV "data/fruit.eav"
            f2 <- readTab "data/fruit.tab"
            graphsShouldBeEqual f1 f2

-- | Use a given parser to load a 'Frame'
loadWithParser :: FromRecord a => (Vector a -> Frame) -> FilePath -> IO Frame
loadWithParser p f = withFile f ReadMode $ \h -> do
    !csv <- either error id . decode NoHeader <$> BS.hGetContents h
    let frame = p csv
    return frame

readEA :: FilePath -> IO Frame
readEA = loadWithParser parseEA

readEAV :: FilePath -> IO Frame
readEAV = loadWithParser parseEAV

readTab :: FilePath -> IO Frame
readTab = loadWithParser parseTabular

-- | Check that the graphs generated by two frames are equal.
graphsShouldBeEqual :: Frame -> Frame -> IO ()
graphsShouldBeEqual (Frame f1C f1O f1A) (Frame f2C f2O f2A) = do
    -- Generate the attribute-extent tables.
    let f1T = buildAETable f1C
    let f2T = buildAETable f2C
    -- Generate the graphs.
    let f1G = generateGraph f1T f1O f1A
    let f2G = generateGraph f2T f2O f2A
    f1G `shouldBe` f2G

main :: IO ()
main = hspec formatsSpec
thsutton/fca
test/fca-test.hs
bsd-3-clause
3,610
0
18
1,103
932
456
476
80
1
module Input where

import Graphics.UI.GLUT
import Data.IORef
import Control.Monad

import Game

-- | handle input from keyboard
input gameRef (Char 'j') Down = modifyIORef gameRef $ go (-1)
input gameRef (Char 'j') Up   = modifyIORef gameRef $ go 0
input gameRef (Char 'k') Down = modifyIORef gameRef $ go (1)
input gameRef (Char 'k') Up   = modifyIORef gameRef $ go 0
input gameRef (Char 'f') Down = fireShot gameRef
input gameRef _ _             = return ()
flazz/tooHS
src/Input.hs
bsd-3-clause
448
0
8
82
182
91
91
11
1
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards   #-}

module Main where

import Vaultaire.Collector.Common.Process
import Vaultaire.Collector.Common.Types

import Control.Exception
import Control.Monad
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans
import Data.Bifunctor
import qualified Data.ByteString.Char8 as BSC (pack)
import qualified Data.HashMap.Strict as H (fromList)
import qualified Data.Text as T (pack, strip)
import Data.Word (Word64)
import Network.URI
import Options.Applicative
import System.ZMQ4

import Marquise.Client (hashIdentifier)
import Vaultaire.Types

helpfulParser :: ParserInfo String
helpfulParser = info (helper <*> optionsParser) fullDesc

optionsParser :: Parser String
optionsParser = parseBroker
  where
    parseBroker = strOption $
           long "broker"
        <> short 'b'
        <> metavar "BROKER"
        <> value "tcp://localhost:6660"
        <> showDefault
        <> help "Vault broker URI"

main :: IO ()
main = runCollector optionsParser initialiseExtraState cleanup collect

initialiseExtraState :: CollectorOpts String -> IO (Context,Socket Sub)
initialiseExtraState (_,broker) = do
    c <- context
    sock <- socket c Sub
    connect sock broker
    subscribe sock ""
    return (c,sock)

cleanup :: Collector String (Context,Socket a) IO ()
cleanup = return ()

makeCollectableThing :: TeleResp -> Either String (Address, SourceDict, TimeStamp, Word64)
makeCollectableThing TeleResp{..} =
    let TeleMsg{..} = _msg
        sdPairs = [ ("agent_id", show _aid)
                  , ("origin", show _origin)
                  , ("uom", show $ msgTypeUOM $ _type)
                  , ("telemetry_msg_type", show _type)
                  ]
        addr = hashIdentifier $ BSC.pack $ concatMap snd sdPairs
    in do
        sd <- makeSourceDict . H.fromList $ map mkTag sdPairs
        return (addr, sd, _timestamp, _payload)
  where
    -- Pack pairs into Texts, and strip off the whitespace padding from
    -- the values.
    mkTag = bimap T.pack (T.strip . T.pack)

collect :: Receiver a => Collector String (Context,Socket a) IO ()
collect = do
    (_, (_, sock)) <- get
    forever $ do
        datum <- liftIO $ receive sock
        case (fromWire datum :: Either SomeException TeleResp) of
            Right x -> either (liftIO . putStrLn) collectData (makeCollectableThing x)
            Left e  -> liftIO $ print e

collectData :: (Address, SourceDict, TimeStamp, Word64) -> Collector o s IO ()
collectData (addr, sd, ts, p) = do
    collectSource addr sd
    collectSimple (SimplePoint addr ts p)
anchor/vaultaire-collector-vaultaire-telemetry
src/Main.hs
bsd-3-clause
2,877
0
15
858
787
421
366
66
2
{-# LANGUAGE CPP, BangPatterns #-}

module Language.Haskell.GhcMod.Find where

import Data.Function (on)
import Data.List (groupBy, sort)
import Data.Maybe (fromMaybe)
import qualified GHC as G
import Language.Haskell.GhcMod.Browse (browseAll)
import Language.Haskell.GhcMod.Monad
import Language.Haskell.GhcMod.Convert
import Language.Haskell.GhcMod.Types

#ifndef MIN_VERSION_containers
#define MIN_VERSION_containers(x,y,z) 1
#endif

#if MIN_VERSION_containers(0,5,0)
import Control.DeepSeq (force)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
#else
import Data.Map (Map)
import qualified Data.Map as M
#endif

import Control.Applicative ((<$>))

-- | Type of key for `SymMdlDb`.
type Symbol = String

-- | Database from 'Symbol' to modules.
newtype SymMdlDb = SymMdlDb (Map Symbol [ModuleString])

-- | Finding the modules to which the symbol belongs.
findSymbol :: Symbol -> GhcMod String
findSymbol sym = convert' =<< lookupSym sym <$> getSymMdlDb

-- | Creating 'SymMdlDb'.
getSymMdlDb :: GhcMod SymMdlDb
getSymMdlDb = do
    sm <- G.getSessionDynFlags >>= browseAll
#if MIN_VERSION_containers(0,5,0)
    let !sms = force $ map tieup $ groupBy ((==) `on` fst) $ sort sm
        !m = force $ M.fromList sms
#else
    let !sms = map tieup $ groupBy ((==) `on` fst) $ sort sm
        !m = M.fromList sms
#endif
    return (SymMdlDb m)
  where
    tieup x = (head (map fst x), map snd x)

-- | Looking up 'SymMdlDb' with 'Symbol' to find modules.
lookupSym :: Symbol -> SymMdlDb -> [ModuleString]
lookupSym sym (SymMdlDb db) = fromMaybe [] (M.lookup sym db)

lookupSym' :: Options -> Symbol -> SymMdlDb -> String
lookupSym' opt sym db = convert opt $ lookupSym sym db
darthdeus/ghc-mod-ng
Language/Haskell/GhcMod/Find.hs
bsd-3-clause
1,686
0
14
279
412
234
178
29
1
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-| This module provides implementations of cryptographic utilities that only work for GHC (as opposed to GHCJS) -} module Dhall.Crypto ( SHA256Digest(..) , sha256DigestFromByteString , sha256Hash , toString ) where import Control.DeepSeq (NFData) import Data.ByteString (ByteString) import GHC.Generics (Generic) import qualified Crypto.Hash.SHA256 import qualified Data.ByteString as ByteString import qualified Data.ByteString.Base16 as Base16 import qualified Data.ByteString.Char8 as ByteString.Char8 -- | A SHA256 digest newtype SHA256Digest = SHA256Digest { unSHA256Digest :: ByteString } deriving (Eq, Generic, Ord, NFData) instance Show SHA256Digest where show = toString {-| Attempt to interpret a `ByteString` as a `SHA256Digest`, returning `Nothing` if the conversion fails -} sha256DigestFromByteString :: ByteString -> Maybe SHA256Digest sha256DigestFromByteString bytes | ByteString.length bytes == 32 = Just (SHA256Digest bytes) | otherwise = Nothing -- | Hash a `ByteString` and return the hash as a `SHA256Digest` sha256Hash :: ByteString -> SHA256Digest sha256Hash = SHA256Digest . Crypto.Hash.SHA256.hash -- | 'String' representation of a 'SHA256Digest' toString :: SHA256Digest -> String toString (SHA256Digest bytes) = ByteString.Char8.unpack $ Base16.encode bytes
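-- A minimal usage sketch, assuming GHCi with this module loaded and
-- OverloadedStrings enabled for the ByteString literal:
--
--   > toString (sha256Hash "hello")
--   -- a 64-character hex string (the hex encoding of the digest)
--
--   > sha256DigestFromByteString (unSHA256Digest (sha256Hash "hello"))
--   -- a 'Just', since a raw SHA256 digest is exactly 32 bytes long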
Gabriel439/Haskell-Dhall-Library
dhall/ghc-src/Dhall/Crypto.hs
bsd-3-clause
1,477
0
10
285
251
148
103
26
1
import Data.Char (digitToInt)

-- | Largest product of 13 adjacent digits in the puzzle input.
solve :: String -> Int
solve input = maximum $ rec 12 digits
  where
    digits = map digitToInt $ concat $ words input
    -- rec n xs: element-wise products of runs of (n + 1) adjacent digits.
    rec 0 xs = xs
    rec n xs = zipWith (*) xs $ rec (n - 1) (tail xs)

main :: IO ()
main = (print . solve) =<< readFile "input/p008_input.txt"
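-- A worked example of the sliding product, using the definitions above:
--
--   rec 2 [1,2,3,4]
--     = zipWith (*) [1,2,3,4] (rec 1 [2,3,4])
--     = zipWith (*) [1,2,3,4] (zipWith (*) [2,3,4] [3,4])
--     = zipWith (*) [1,2,3,4] [6,12]
--     = [6,24]                          -- i.e. [1*2*3, 2*3*4]
--
-- so rec 12 yields the products of all runs of 13 adjacent digits.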
foreverbell/project-euler-solutions
src/8.hs
bsd-3-clause
258
0
10
62
117
59
58
6
2
-- Copyright (c) 2016-present, Facebook, Inc. -- All rights reserved. -- -- This source code is licensed under the BSD-style license found in the -- LICENSE file in the root directory of this source tree. An additional grant -- of patent rights can be found in the PATENTS file in the same directory. {-# LANGUAGE OverloadedStrings #-} module Duckling.Ordinal.RO.Corpus ( corpus ) where import Prelude import Data.String import Duckling.Lang import Duckling.Ordinal.Types import Duckling.Resolve import Duckling.Testing.Types corpus :: Corpus corpus = (testContext {lang = RO}, allExamples) allExamples :: [Example] allExamples = concat [ examples (OrdinalData 1) [ "primul" , "prima" ] , examples (OrdinalData 2) [ "al doilea" , "al doi-lea" , "al doi lea" , "al 2-lea" , "al 2 lea" , "a 2 a" , "a 2-a" , "a doua" ] , examples (OrdinalData 3) [ "al treilea" , "al trei-lea" , "a treia" , "a 3-lea" , "a 3a" ] , examples (OrdinalData 4) [ "a patra" , "a patru-lea" ] , examples (OrdinalData 6) [ "al saselea" , "al șaselea" , "al sase-lea" , "al șase-lea" , "a sasea" , "a șase a" ] , examples (OrdinalData 9) [ "al noualea" , "al noua-lea" , "al noua lea" , "al nouălea" , "al nouă lea" , "a noua" ] ]
rfranek/duckling
Duckling/Ordinal/RO/Corpus.hs
bsd-3-clause
1,686
0
9
691
261
159
102
48
1
-- Source: https://github.com/bsl/GLFW-b-demo -- Adapted to move a basic object around the screen in 2D {-# LANGUAGE TemplateHaskell #-} module Main (main) where -------------------------------------------------------------------------------- import Control.Concurrent.STM (TQueue, atomically, newTQueueIO, tryReadTQueue) import Control.Lens import Control.Monad (unless, void, when) import Control.Monad.RWS.Strict (RWST, asks, evalRWST, liftIO) import qualified Graphics.Rendering.OpenGL as GL import qualified Graphics.UI.GLFW as GLFW import Util -------------------------------------------------------------------------------- data Env = Env { envEventsChan :: TQueue Event , envWindow :: !GLFW.Window } type Pos = GL.Vector2 GL.GLdouble data State = State { _stateWindowWidth :: Int , _stateWindowHeight :: Int , _statePlayer :: Pos } makeLenses ''State type Demo = RWST Env () State IO ----------------------------------------------------------------------------- playerSize :: GL.GLdouble playerSize = 20 playerSpeed :: Int playerSpeed = 10 -------------------------------------------------------------------------------- runDemo :: Env -> State -> IO () runDemo env state = void $ evalRWST (adjustWindow >> run) env state run :: Demo () run = do win <- asks envWindow liftIO GLFW.pollEvents processEvents l <- liftIO $ keyIsPressed win GLFW.Key'Left r <- liftIO $ keyIsPressed win GLFW.Key'Right u <- liftIO $ keyIsPressed win GLFW.Key'Up d <- liftIO $ keyIsPressed win GLFW.Key'Down let xd = (if l then (-1) else 0) + (if r then 1 else 0) yd = (if d then (-1) else 0) + (if u then 1 else 0) (GL.Vector2 xpos ypos) <- statePlayer <%= (\(GL.Vector2 x y) -> GL.Vector2 (x + (fromIntegral (xd * playerSpeed))) (y + (fromIntegral (yd * playerSpeed)))) liftIO $ do GL.clear [GL.ColorBuffer] GL.color $ GL.Color4 0 0 0 (1 :: GL.GLfloat) GL.renderPrimitive GL.Quads $ do GL.vertex $ GL.Vertex2 (xpos - playerSize/2) (ypos - playerSize/2) GL.vertex $ GL.Vertex2 (xpos + playerSize/2) (ypos - playerSize/2) GL.vertex $ GL.Vertex2 (xpos + playerSize/2) (ypos + playerSize/2) GL.vertex $ GL.Vertex2 (xpos - playerSize/2) (ypos + playerSize/2) GL.color $ GL.Color4 1 1 1 (1 :: GL.GLfloat) GL.flush GLFW.swapBuffers win q <- liftIO $ GLFW.windowShouldClose win unless q run ----------------------------------------------------------------------------- adjustWindow :: Demo () adjustWindow = do width <- use stateWindowWidth height <- use stateWindowHeight let pos = GL.Position 0 0 size = GL.Size (fromIntegral width) (fromIntegral height) liftIO $ do GL.viewport GL.$= (pos, size) GL.matrixMode GL.$= GL.Projection GL.loadIdentity ----------------------------------------------------------------------------- processEvents :: Demo () processEvents = do tc <- asks envEventsChan me <- liftIO $ atomically $ tryReadTQueue tc case me of Just e -> do processEvent e processEvents Nothing -> return () processEvent :: Event -> Demo () processEvent ev = case ev of (EventError e s) -> do printEvent "error" [show e, show s] win <- asks envWindow liftIO $ GLFW.setWindowShouldClose win True (EventWindowPos _ x y) -> printEvent "window pos" [show x, show y] (EventWindowSize _ width height) -> printEvent "window size" [show width, show height] (EventWindowClose _) -> printEvent "window close" [] (EventWindowRefresh _) -> printEvent "window refresh" [] (EventWindowFocus _ fs) -> printEvent "window focus" [show fs] (EventWindowIconify _ is) -> printEvent "window iconify" [show is] (EventFramebufferSize _ width height) -> do printEvent "framebuffer size" [show 
width, show height] stateWindowWidth .= width stateWindowHeight .= height adjustWindow (EventMouseButton _ mb mbs mk) -> printEvent "mouse button" [show mb, show mbs, showModifierKeys mk] (EventCursorPos _ x y) -> do let x' = round x :: Int y' = round y :: Int printEvent "cursor pos" [show x', show y'] (EventCursorEnter _ cs) -> printEvent "cursor enter" [show cs] (EventScroll _ x y) -> do let x' = round x :: Int y' = round y :: Int printEvent "scroll" [show x', show y'] adjustWindow (EventKey win k scancode ks mk) -> do printEvent "key" [show k, show scancode, show ks, showModifierKeys mk] when (ks == GLFW.KeyState'Pressed) $ do when (k == GLFW.Key'Q || k == GLFW.Key'Escape) $ liftIO $ GLFW.setWindowShouldClose win True when (k == GLFW.Key'Slash && GLFW.modifierKeysShift mk) $ liftIO printInstructions when (k == GLFW.Key'I) $ liftIO $ printInformation win (EventChar _ c) -> printEvent "char" [show c] printEvent :: String -> [String] -> Demo () printEvent cbname fields = liftIO $ putStrLn $ cbname ++ ": " ++ unwords fields ----------------------------------------------------------------------------- main :: IO () main = do let width = 640 height = 480 eventsChan <- newTQueueIO :: IO (TQueue Event) withWindow width height "Demo" $ \win -> do GLFW.setErrorCallback $ Just $ errorCallback eventsChan GLFW.setWindowPosCallback win $ Just $ windowPosCallback eventsChan GLFW.setWindowSizeCallback win $ Just $ windowSizeCallback eventsChan GLFW.setWindowCloseCallback win $ Just $ windowCloseCallback eventsChan GLFW.setWindowRefreshCallback win $ Just $ windowRefreshCallback eventsChan GLFW.setWindowFocusCallback win $ Just $ windowFocusCallback eventsChan GLFW.setWindowIconifyCallback win $ Just $ windowIconifyCallback eventsChan GLFW.setFramebufferSizeCallback win $ Just $ framebufferSizeCallback eventsChan GLFW.setMouseButtonCallback win $ Just $ mouseButtonCallback eventsChan GLFW.setCursorPosCallback win $ Just $ cursorPosCallback eventsChan GLFW.setCursorEnterCallback win $ Just $ cursorEnterCallback eventsChan GLFW.setScrollCallback win $ Just $ scrollCallback eventsChan GLFW.setKeyCallback win $ Just $ keyCallback eventsChan GLFW.setCharCallback win $ Just $ charCallback eventsChan GL.lineSmooth GL.$= GL.Enabled GL.blend GL.$= GL.Enabled GL.blendFunc GL.$= (GL.SrcAlpha,GL.OneMinusSrcAlpha) GL.lineWidth GL.$= 2.0 GL.clearColor GL.$= GL.Color4 1 1 1 1 GL.viewport GL.$= (GL.Position 0 0, GL.Size (fromIntegral width) (fromIntegral height)) GL.ortho 0 (fromIntegral width) 0 (fromIntegral height) (-1) 1 (fbWidth, fbHeight) <- GLFW.getFramebufferSize win let env = Env { envEventsChan = eventsChan , envWindow = win } state = State { _stateWindowWidth = fbWidth , _stateWindowHeight = fbHeight , _statePlayer = GL.Vector2 (fromIntegral $ div fbWidth 4) (fromIntegral $ div fbHeight 4) } runDemo env state putStrLn "Done" ----------------------------------------------------------------------------- -- Experiments -- GLFW.swapInterval 1 -- GL.position (GL.Light 0) GL.$= GL.Vertex4 5 5 10 0 -- GL.light (GL.Light 0) GL.$= GL.Enabled -- GL.lighting GL.$= GL.Enabled -- GL.cullFace GL.$= Just GL.Back -- GL.depthFunc GL.$= Just GL.Less -- GL.clearColor GL.$= GL.Color4 0.05 0.05 0.05 1 -- GL.normalize GL.$= GL.Enabled
cmahon/opengl-examples
executable/Move.hs
bsd-3-clause
7,879
0
20
1,986
2,343
1,155
1,188
166
14
{-# LANGUAGE MultiParamTypeClasses #-} -- | Vacuum world implementation, with agents module AIMA.Agent.Vacuum where import Prelude hiding (Left, Right) import qualified Data.Map as M import Data.Map (Map) import AIMA.Agent.Core -------------------------------- -- VacuumWorld implementation -- -------------------------------- -- Possible positions data Position = A | B deriving (Eq, Ord, Show) -- Possible status for positions data Status = Clean | Dirty deriving (Eq, Show) -- Possible actions data Action = Left | Suck | Right | NoOp deriving (Eq, Show) -- Data contained in a percept data Percept = Percept (Maybe Position) (Maybe Status) deriving (Eq, Show) -- VacuumWorld is just positions and statuses, with vacuum position data VacuumWorld m p a = VW { vwMap :: Map Position Status , vwPos :: Position , vwMeasure :: m } deriving (Eq) instance TaskEnv VacuumWorld Int Percept Action where percept (VW ps p _) = Percept (Just p) (M.lookup p ps) execute (VW ps p m) a | a == Left = VW ps A (m' - 1) -- Move left, reduce score | a == Right = VW ps B (m' - 1) -- Move right, reduce score | a == Suck = VW (M.insert p Clean ps) p m' -- Just clean position | otherwise = VW ps p m' -- Just update score where m' = m + sum (f <$> ps) -- Old score plus new score f Clean = 1 f Dirty = 0 measure = vwMeasure instance (Show m) => Show (VacuumWorld m p a) where show (VW ps p m) = M.foldMapWithKey f ps ++ "score: " ++ show m where f k v = "[ " ++ show k ++ " " ++ show v ++ noot k ++ " ] " noot x = if x == p then " noot" else "" -- | Initial state generator initialWorld :: [VacuumWorld Int Percept Action] initialWorld = [ VW m p 0 | m <- map toMap states, p <- [A, B] ] where states = [ (s1, s2) | s1 <- [Clean, Dirty] , s2 <- [Clean, Dirty] ] toMap (s1, s2) = M.fromList [ (A, s1), (B, s2) ] --------------------------- -- Agent implementations -- --------------------------- -- | Simple reflex agent data ReflexVacuumAgent p a = RVA deriving (Eq) instance Show (ReflexVacuumAgent p a) where show _ = "ReflexAgent" instance Agent ReflexVacuumAgent Percept Action where agent a (Percept _ (Just Dirty)) = (Suck, a) agent a (Percept (Just A) _) = (Right, a) agent a (Percept (Just B) _) = (Left, a) agent a _ = (NoOp, a) -- | Vacuum world agent which holds a model of the world data ModelVacuumAgent p a = MVA { mvaModel :: Model } deriving (Eq) instance Show (ModelVacuumAgent p a) where show (MVA m) = "ModelAgent: " ++ show m instance Agent ModelVacuumAgent Percept Action where agent a (Percept (Just p) (Just s)) | s == Dirty = (Suck, a') | otherwise = case M.lookup (sflip p) (mMap $ mvaModel a) of Just Dirty -> (moveFrom p, a') _ -> (NoOp, a') where sflip A = B sflip B = A moveFrom A = Right moveFrom B = Left a' = MVA . Model $ M.insert p Clean (mMap $ mvaModel a) agent a (Percept _ _) = (NoOp, a) -- | Internal model of environment data Model = Model { mMap :: Map Position Status } deriving (Eq) instance Show Model where show (Model ps) = M.foldMapWithKey f ps where f k v = "[ " ++ show k ++ " " ++ show v ++ " ] " -- | Initial model of world (assume it is all dirty!) initialModelAgent :: ModelVacuumAgent Percept Action initialModelAgent = MVA $ Model (M.fromList [ (A, Dirty), (B, Dirty) ])
tomsmalley/aima
src/AIMA/Agent/Vacuum.hs
bsd-3-clause
3,641
0
13
1,053
1,262
679
583
63
1
module Data.IGraph.Basic
  ( vCount
  , eCount
  ) where

import Data.IGraph.Internal
import Data.IGraph.Internal.Constants
import Data.IGraph.Types

import Foreign hiding (unsafePerformIO)
import Foreign.C

import System.IO.Unsafe (unsafePerformIO)

import Control.Monad (unless)

-- | 2. The basic interface

-- | 2.2. Basic Query Operations

-- | 2.2.1. igraph_vcount — The number of vertices in a graph.
vCount :: Graph d a -> Int
vCount g = fromIntegral.unsafePerformIO $ withGraph g $ \gp -> c_igraph_vcount gp

foreign import ccall "igraph_vcount"
  c_igraph_vcount :: GraphPtr -> IO CInt

-- | 2.2.2. igraph_ecount — The number of edges in a graph.
eCount :: Graph d a -> Int
eCount g = fromIntegral.unsafePerformIO $ withGraph g $ \gp -> c_igraph_ecount gp

foreign import ccall "igraph_ecount"
  c_igraph_ecount :: GraphPtr -> IO CInt
kaizhang/igraph
Data/IGraph/Basic.hs
bsd-3-clause
862
0
7
153
197
111
86
20
1
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE BangPatterns #-}
{-|
Module      : $Header$
CopyRight   : (c) 8c6794b6
License     : BSD3
Maintainer  : [email protected]
Stability   : unstable
Portability : non-portable

Simple ad hoc ixset search indexer and web UI.

The document database contains word chunks of each given file, splitting the
contents on spaces and a few punctuation characters. It may not be very
useful for documents that do not use spaces as word separators.

Since the server loads the whole set of indices, the first query after
starting the server will be slow; later queries will be faster.

When the index command is given a '.html' suffix, the body contents of the
HTML documents are stored in the database for searching.

-}
module Main where

import Prelude hiding ((.),id,div,head,span)
import Control.Category
import Control.Exception (evaluate)
import Control.Parallel
import Control.Monad
import Data.Char (toLower)
import Data.List hiding (head,insert,find,span)
import Data.Map (Map)
import Data.Ord
import Data.Time.Clock.POSIX (getPOSIXTime)
import System.FilePath
import Text.Printf (printf)

import Control.Monad.Reader (ask)
import Control.Monad.State (put)
import Control.Monad.Trans (liftIO)
import Data.Acid
import Data.Iteratee (run, stream2stream)
import Data.Iteratee.IO (enumFile)
import Data.IxSet
import Data.SafeCopy hiding (extension)
import Data.Text (Text)
import Happstack.Server hiding (body, method, port)
import System.Console.CmdArgs hiding (name)
import System.FilePath.Find
import Text.Blaze.Html5 hiding (base,map,summary)
import Text.Blaze.Html5.Attributes hiding (dir,id,title,form,style,span,size,start,summary)
import Text.HTML.TagSoup (Tag(..), (~==), innerText, parseTags, sections)

import qualified Data.Map as M
import qualified Data.Text as T
import qualified Happstack.Server as H
import qualified Text.Blaze.Html5.Attributes as A

------------------------------------------------------------------------------
-- Database

data Document = Document
  { docPath      :: DocPath
  , docWordCount :: Int
  , docWordMap   :: Map Text Int
  , docContents  :: Contents
  } deriving (Eq,Ord,Data,Typeable)

instance Show Document where
  show (Document (DocPath dp) _ _ _) = "Document " ++ dp

newtype DocPath = DocPath {unDocPath::FilePath}
  deriving (Eq,Ord,Show,Data,Typeable)

newtype Contents = Contents {unContents::Text}
  deriving (Eq,Ord,Show,Data,Typeable)

newtype Word = Word {unWord::Text}
  deriving (Eq,Ord,Show,Data,Typeable)

newtype DocDB = DocDB {docIx :: IxSet Document}
  deriving (Eq,Show,Data,Typeable)

instance Indexable Document where
  empty = ixSet
    [ ixFun (return . docPath)
    , ixFun (map Word . nub . mkChunks . unContents .
docContents) ] mkChunks :: Text -> [Text] mkChunks = T.split (`elem` " \t\n.,!?&()[]{}<>;/\"'") deriveSafeCopy 0 'base ''Document deriveSafeCopy 0 'base ''DocPath deriveSafeCopy 0 'base ''Contents deriveSafeCopy 0 'base ''Word deriveSafeCopy 0 'base ''DocDB saveDoc :: IxSet Document -> Update DocDB () saveDoc ixs = put (DocDB ixs) loadDoc :: Query DocDB DocDB loadDoc = ask makeAcidic ''DocDB ['saveDoc, 'loadDoc] getDB :: FilePath -> IO DocDB getDB filepath = do acid <- openLocalStateFrom filepath (DocDB empty) query acid LoadDoc ------------------------------------------------------------------------------ -- Indexer index :: FilePath -> Maybe String -> FilePath -> IO () index root extn outpath = do acid <- openLocalStateFrom outpath (DocDB empty) let mkIx = case extn of Just e | "html" `isSuffixOf` e -> mkHtmlDocument | otherwise -> mkDocument (fileName ~~? ('*':e)) T.pack Nothing -> mkDocument always T.pack ixs <- mkIx root putStrLn "Creating index .... " update acid (SaveDoc ixs) closeAcidState acid putStrLn "Done." mkDocument :: FilterPredicate -> (String -> Text) -> FilePath -> IO (IxSet Document) mkDocument cond f root = foldM go empty =<< find always cond root where go acc fi = do contents <- f `fmap` work fi let dp = DocPath ("static" </> drop (length root + 1) fi) dc = Contents contents ws = mkChunks contents dm = foldr (\w m -> M.insertWith (+) w 1 m) M.empty ws document = Document dp (length ws) dm dc return $! document `par` (acc `pseq` insert document acc) work :: FilePath -> IO String work filepath = do putStrLn $ "Reading: " ++ filepath run =<< enumFile 8192 filepath stream2stream htmlBody :: String -> Text htmlBody = T.pack . innerText . join . sections (~== TagOpen "body" []) . parseTags . map toLower mkHtmlDocument :: FilePath -> IO (IxSet Document) mkHtmlDocument = mkDocument (extension ==? ".html") htmlBody ------------------------------------------------------------------------------ -- Server serve :: Int -> FilePath -> DocDB -> IO () serve portnum stt docdb = do putStrLn $ "Starting server with port: " ++ show portnum simpleHTTP nullConf {H.port=portnum} $ msum [ dir "favicon.ico" $ serveFile (asContentType "http://image/vnd.microsoft.icon") "favicon.ico" , dir "static" $ serveDirectory EnableBrowsing [] stt , searchPage docdb , seeOther "" $ toResponse () ] searchPage :: DocDB -> ServerPartT IO Response searchPage docdb = do qs <- getDataFn $ look "q" case qs of Left _ -> ok $ toResponse $ do preEscapedString "<!doctype html>" html $ do headWithTitle "ixfts - home" body $ do div ! class_ (toValue "wrapper") $ (about >> inputForm "") Right qs' -> do (diff,res) <- liftIO $ do start <- getPOSIXTime ix <- evaluate $ docIx docdb @* (map Word . T.words . T.pack $ map toLower qs') end <- getPOSIXTime return (1000 * realToFrac (end-start) :: Double, ix) ok $ toResponse $ do preEscapedString "<!doctype html>" html $ do headWithTitle $ "ixfts - search result for '" ++ qs' ++ "'" body $ do div ! class_ (toValue "wrapper") $ do about inputForm qs' div ! 
class_ (toValue "summary") $ toHtml $ "search result for: \"" ++ qs' ++ "\", " ++ "hit: " ++ show (size res) ++ " (" ++ printf "%0.4f ms" diff ++ ")" ul $ mapM_ mkLink $ sortBy (comparing $ score qs') $ toList res headWithTitle :: String -> Html headWithTitle str = head $ title (toHtml str) >> favicon >> css score :: String -> Document -> Double score ws document = foldr f 0 (T.words $ T.pack ws) where f w acc = acc + (total / fromIntegral (M.findWithDefault 0 w dmap)) dmap = docWordMap document total = fromIntegral $ docWordCount document mkLink :: Document -> Html mkLink document = li $ do a ! href (toValue filepath) $ do toHtml $ dropWhile (/= '/') filepath where filepath = unDocPath $ docPath document inputForm :: String -> Html inputForm val = div ! class_ (toValue "input_form") $ do form ! enctype (toValue "multipart/form-data") ! method (toValue "GET") ! action (toValue "/") $ do input ! type_ (toValue "text") ! name (toValue "q") ! A.size (toValue "40") ! value (toValue val) input ! type_ (toValue "submit") ! value (toValue "search") a ! class_ (toValue "src") ! href (toValue "https://github.com/8c6794b6/ixfts") $ toHtml "src" css :: Html css = style ! type_ (toValue "text/css") $ toHtml "body { font-size: 15px; } \ \input { margin: 5px; border: 1px solid #868686; }\ \div.wrapper { padding: 20px; } \ \div.wrapper div.about {padding-left: 8px}\ \div.wrapper div.about span.desc {font-size: 75%}\ \div.wrapper ul { list-style: none; padding-left: 10px; } \ \div.wrapper ul li { margin: 5px 0 } \ \div.wrapper ul li a { text-decoration: none; } \ \div.wrapper a.src {font-size: 75%}\ \div.summary { font-size: 75%; padding-left: 20px; }" favicon :: Html favicon = link ! rel (toValue "shortcut icon") ! type_ (toValue "image/vnd.microsoft.icon") ! href (toValue "/favicon.ico") about :: Html about = div ! class_ (toValue "about") $ do toHtml "ixfts " span ! class_ (toValue "desc") $ toHtml "- minimalistic html search" ------------------------------------------------------------------------------ -- CLI data IxFts = Index { doc :: FilePath , ext :: Maybe String , out :: String } | Serve { port :: Int , db :: String , static :: FilePath } deriving (Eq,Show,Data,Typeable) commands :: IxFts commands = modes [ Index { doc = def &= typDir &= help "Path to directory containing target documents" , ext = def &= typ "EXTENSION" &= help "File extension to read" , out = "state" &= typDir &= help "Path to output index directory (default:state)" } &= help "Index documents under given path" , Serve { port = 8000 &= typ "PORT" &= help "Port number to serve (default:8000)" , db = "state" &= typDir &= help "Path to index database (default:state)" , static = "static" &= typDir &= help "Path to static contents directory (default:static)" } &= help "Start HTTP server" ] &= summary "ixfts: simple text search indexer and server" main :: IO () main = do arguments <- cmdArgs commands case arguments of Index inpath e o -> index inpath e o Serve pn d s -> getDB d >>= \acid -> acid `seq` serve pn s $! acid
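-- Example invocations, assuming the built executable is named "ixfts"
-- (mode and flag names follow the cmdArgs constructors and record fields
-- defined above):
--
--   ixfts index --doc=site --ext=html --out=state
--   ixfts serve --port=8000 --db=state --static=site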
8c6794b6/ixfts
ixfts.hs
bsd-3-clause
9,682
0
36
2,244
2,812
1,454
1,358
225
2
-- !!! hiding an entity T (where T is both a type and a dcon.) module M where import Mod124_A hiding (T) --x :: T x = T
FranklinChen/Hugs
tests/static/mod125.hs
bsd-3-clause
122
0
5
30
20
14
6
3
1
{-# LANGUAGE FlexibleContexts #-} -- | Variation of "Futhark.CodeGen.ImpCode" that contains the notion -- of a kernel invocation. module Futhark.CodeGen.ImpCode.Kernels ( Program , Function , FunctionT (Function) , Code , KernelCode , HostOp (..) , KernelOp (..) , CallKernel (..) , MapKernel (..) , Kernel (..) , KernelUse (..) , module Futhark.CodeGen.ImpCode -- * Utility functions , getKernels ) where import Control.Monad.Writer import Data.List import qualified Data.HashSet as HS import Data.Traversable import Prelude import Futhark.CodeGen.ImpCode hiding (Function, Code) import qualified Futhark.CodeGen.ImpCode as Imp import Futhark.Representation.AST.Attributes.Names import Futhark.Representation.AST.Pretty () import Futhark.Util.Pretty type Program = Functions HostOp type Function = Imp.Function HostOp -- | Host-level code that can call kernels. type Code = Imp.Code CallKernel -- | Code inside a kernel. type KernelCode = Imp.Code KernelOp data HostOp = CallKernel CallKernel | GetNumGroups VName | GetGroupSize VName deriving (Show) data CallKernel = Map MapKernel | AnyKernel Kernel | MapTranspose PrimType VName Exp VName Exp Exp Exp Exp deriving (Show) -- | A generic kernel containing arbitrary kernel code. data MapKernel = MapKernel { mapKernelThreadNum :: VName -- ^ Binding position - also serves as a unique -- name for the kernel. , mapKernelBody :: Imp.Code KernelOp , mapKernelUses :: [KernelUse] , mapKernelNumGroups :: DimSize , mapKernelGroupSize :: DimSize , mapKernelSize :: Imp.Exp -- ^ Do not actually execute threads past this. } deriving (Show) data Kernel = Kernel { kernelBody :: Imp.Code KernelOp , kernelLocalMemory :: [(VName, MemSize, PrimType)] -- ^ In-kernel name, per-workgroup size in bytes, and -- alignment restriction. , kernelUses :: [KernelUse] -- ^ The host variables referenced by the kernel. , kernelNumGroups :: DimSize , kernelGroupSize :: DimSize , kernelName :: VName -- ^ Unique name for the kernel. , kernelDesc :: Maybe String -- ^ An optional short descriptive name - should be -- alphanumeric and without spaces. } deriving (Show) data KernelUse = ScalarUse VName PrimType | MemoryUse VName Imp.DimSize deriving (Eq, Show) getKernels :: Program -> [CallKernel] getKernels = nubBy sameKernel . execWriter . 
traverse getFunKernels where getFunKernels (CallKernel kernel) = tell [kernel] getFunKernels _ = return () sameKernel (MapTranspose bt1 _ _ _ _ _ _ _) (MapTranspose bt2 _ _ _ _ _ _ _) = bt1 == bt2 sameKernel _ _ = False instance Pretty KernelUse where ppr (ScalarUse name t) = text "scalar_copy" <> parens (commasep [ppr name, ppr t]) ppr (MemoryUse name size) = text "mem_copy" <> parens (commasep [ppr name, ppr size]) instance Pretty HostOp where ppr (GetNumGroups dest) = ppr dest <+> text "<-" <+> text "get_num_groups()" ppr (GetGroupSize dest) = ppr dest <+> text "<-" <+> text "get_group_size()" ppr (CallKernel c) = ppr c instance Pretty CallKernel where ppr (Map k) = ppr k ppr (AnyKernel k) = ppr k ppr (MapTranspose bt dest destoffset src srcoffset num_arrays size_x size_y) = text "mapTranspose" <> parens (ppr bt <> comma </> ppMemLoc dest destoffset <> comma </> ppMemLoc src srcoffset <> comma </> ppr num_arrays <> comma <+> ppr size_x <> comma <+> ppr size_y) where ppMemLoc base offset = ppr base <+> text "+" <+> ppr offset instance Pretty MapKernel where ppr kernel = text "mapKernel" <+> brace (text "uses" <+> brace (commasep $ map ppr $ mapKernelUses kernel) </> text "body" <+> brace (ppr (mapKernelThreadNum kernel) <+> text "<- get_thread_number()" </> ppr (mapKernelBody kernel))) instance Pretty Kernel where ppr kernel = text "kernel" <+> brace (text "groups" <+> brace (ppr $ kernelNumGroups kernel) </> text "group_size" <+> brace (ppr $ kernelGroupSize kernel) </> text "local_memory" <+> brace (commasep $ map ppLocalMemory $ kernelLocalMemory kernel) </> text "uses" <+> brace (commasep $ map ppr $ kernelUses kernel) </> text "body" <+> brace (ppr $ kernelBody kernel)) where ppLocalMemory (name, size, bt) = ppr name <+> parens (ppr size <+> text "bytes" <> comma <+> text "align to" <+> ppr bt) instance FreeIn MapKernel where freeIn kernel = mapKernelThreadNum kernel `HS.delete` freeIn (mapKernelBody kernel) data KernelOp = GetGroupId VName Int | GetLocalId VName Int | GetLocalSize VName Int | GetGlobalSize VName Int | GetGlobalId VName Int | GetLockstepWidth VName | Barrier deriving (Show) instance Pretty KernelOp where ppr (GetGroupId dest i) = ppr dest <+> text "<-" <+> text "get_group_id" <> parens (ppr i) ppr (GetLocalId dest i) = ppr dest <+> text "<-" <+> text "get_local_id" <> parens (ppr i) ppr (GetLocalSize dest i) = ppr dest <+> text "<-" <+> text "get_local_size" <> parens (ppr i) ppr (GetGlobalSize dest i) = ppr dest <+> text "<-" <+> text "get_global_size" <> parens (ppr i) ppr (GetGlobalId dest i) = ppr dest <+> text "<-" <+> text "get_global_id" <> parens (ppr i) ppr (GetLockstepWidth dest) = ppr dest <+> text "<-" <+> text "get_lockstep_width()" ppr Barrier = text "barrier()" instance FreeIn KernelOp where freeIn = const mempty brace :: Doc -> Doc brace body = text " {" </> indent 2 body </> text "}"
CulpaBS/wbBach
src/Futhark/CodeGen/ImpCode/Kernels.hs
bsd-3-clause
6,354
0
21
2,012
1,684
873
811
149
3
{-# LANGUAGE OverloadedStrings #-} module Trombone.Parse ( lines , uri , method , parseRoutesFromFile ) where import Control.Monad import Data.Aeson ( decode, eitherDecode ) import Data.List ( foldl' ) import Data.List.Utils ( split, replace ) import Data.Maybe ( maybeToList, catMaybes, mapMaybe ) import Data.Text ( Text, pack, unpack ) import Data.Text.Encoding ( encodeUtf8 ) import Network.HTTP.Types.Method import Text.ParserCombinators.Parsec import Trombone.Db.Parse import Trombone.Db.Reflection import Trombone.Db.Template import Trombone.Pipeline import Trombone.Pipeline.Json import Trombone.Response import Trombone.Route import Trombone.RoutePattern import qualified Data.ByteString.Lazy.Char8 as L8 -- | Parse an HTTP method. method :: GenParser Char st Method method = try ( string "GET" >> return "GET" ) <|> try ( string "POST" >> return "POST" ) <|> try ( string "PUT" >> return "PUT" ) <|> try ( string "PATCH" >> return "PATCH" ) <|> try ( string "DELETE" >> return "DELETE" ) <|> ( string "OPTIONS" >> return "OPTIONS" ) -- | Parse a route pattern. uri :: GenParser Char st RoutePattern uri = do optional $ char '/' liftM RoutePattern $ sepEndBy (variable <|> atom) $ char '/' -- | Parse a uri variable segment. variable :: GenParser Char st RouteSegment variable = char ':' >> liftM Variable literal -- | Parse a text uri segment. atom :: GenParser Char st RouteSegment atom = liftM Atom literal -- | Parse a string made up strictly of alphanumeric characters together -- with a small subset of the special ascii characters. literal :: GenParser Char st Text literal = liftM pack $ many1 (alphaNum <|> oneOf "-_!~") -- | Parse a single line of input, which may be a comment, a blank line, or -- a valid route description. line :: GenParser Char st [Route] line = do blankspaces r <- optionMaybe routeOrBlock optional comment eol return $ concat $ maybeToList r -- | A comment may appear at the end of any line, and starts with a '#'. comment :: GenParser Char st () comment = char '#' >> skipMany (noneOf "\n\r") routeOrBlock :: GenParser Char st [Route] routeOrBlock = try route <|> dryBlock -- | Parse a route (i.e., method, uri, and action). route :: GenParser Char st [Route] route = do m <- method blankspaces u <- uri blankspaces a <- action return [Route m u a] dryBlock :: GenParser Char st [Route] dryBlock = do string "DRY" blankspaces s <- many (noneOf "\n\r") eol openingBracket xs <- sepEndBy (item s) (char ';') eol closingBracket return $ concatMap f xs where f r = case parse route "" r of Left _ -> [] Right rs -> rs alone :: Char -> GenParser Char st () alone c = do skip1 (char c) blankspaces skip1 eol openingBracket :: GenParser Char st () openingBracket = alone '{' closingBracket :: GenParser Char st () closingBracket = alone '}' item :: String -> GenParser Char st String item s = do m <- segm u <- segm a <- segm blankspaces t <- many (noneOf ";\n\r") return $ concat [ m , " " , u , " " , a , " " , replace "{{..}}" t s , ";" ] where segm = blankspaces >> many (noneOf " \n\r") -- | Any of the valid route action types. action :: GenParser Char st RouteAction action = try sqlRoute <|> try pipelineRoute <|> try inlineRoute <|> try staticRoute <|> nodeJsRoute -- | A database query route. sqlRoute :: GenParser Char st RouteAction sqlRoute = try sqlNoResult <|> try sqlItem <|> try sqlItemOk <|> try sqlCollection <|> try sqlLastInsert <|> sqlCount -- | An optional list of field names used for db routes. 
hints :: GenParser Char st [Text] hints = do char '(' r <- elements char ')' return $ map pack r -- | A comma-separated list of items. elements :: GenParser Char st [String] elements = sepBy cell $ char ',' -- | A list item. cell :: GenParser Char st String cell = do spaces s <- many (noneOf ",\n\r) ") spaces return s result :: DbResult -> GenParser Char st RouteAction result res = liftM (RouteSql . mkQuery res) (many $ noneOf "\n\r") resultFromTemplate :: DbResult -> DbTemplate -> GenParser Char st RouteAction resultFromTemplate res = return . RouteSql . DbQuery res -- | A PostgreSQL route of type that returns no result. sqlNoResult :: GenParser Char st RouteAction sqlNoResult = do symbolSqlNoResult blankspaces result NoResult -- | A PostgreSQL route of type that returns a single item. sqlItem :: GenParser Char st RouteAction sqlItem = do symbolSqlItem blankspaces h <- optionMaybe hints case h of Just hs -> result $ Item hs Nothing -> inspect Item -- | A PostgreSQL route of type that returns a single item with an 'Ok' status -- message. sqlItemOk :: GenParser Char st RouteAction sqlItemOk = do symbolSqlItemOk blankspaces h <- optionMaybe hints case h of Just hs -> result $ ItemOk hs Nothing -> inspect ItemOk -- | A PostgreSQL route of type that returns a collection. sqlCollection :: GenParser Char st RouteAction sqlCollection = do symbolSqlCollection blankspaces h <- optionMaybe hints case h of Just hs -> result $ Collection hs Nothing -> inspect Collection inspect :: ([Text] -> DbResult) -> GenParser Char st RouteAction inspect res = do q <- many $ noneOf "\n\r" let tpl = parseDbTemplate $ pack q case probeTemplate tpl of (Just tbl, Just ["*"]) -> resultFromTemplate (res ["*", tbl]) tpl (_, Just cs) -> resultFromTemplate (res cs) tpl _ -> error "Unable to extract column names from SQL template. \ \An explicit parameter list is required in the route configuration." -- | A PostgreSQL route of type that returns the last inserted id. sqlLastInsert :: GenParser Char st RouteAction sqlLastInsert = do symbolSqlLastInsert blankspaces h <- optionMaybe hints case h of Just [table, seq] -> result $ LastInsert table seq _ -> do q <- many $ noneOf "\n\r" let tpl = parseDbTemplate $ pack q case probeTemplate tpl of (Just tbl, _) -> resultFromTemplate (LastInsert tbl "id") tpl _ -> error "Unable to infer table name from SQL statement." -- | A PostgreSQL route of type that returns a row count. sqlCount :: GenParser Char st RouteAction sqlCount = symbolSqlCount >> blankspaces >> result Count -- | Parse a pipeline route. pipelineRoute :: GenParser Char st RouteAction pipelineRoute = symbolPipeline >> arg RoutePipes -- | Parse an inline route. inlineRoute :: GenParser Char st RouteAction inlineRoute = do symbolInline >> blankspaces >> eol >> firstline liftM (route . eitherDecode . L8.pack . wrap . concat) lines where route :: Either String Pipeline -> RouteAction route (Left e) = error $ "Error parsing pipeline : " ++ e route (Right p) = RouteInline p lines = many jsonLine >>= \p -> lastline >> return p firstline = char '{' >> blankspaces >> eol lastline = char '}' >> blankspaces >> eol >> blankspaces wrap x = '{':x ++ "}" jsonLine :: GenParser Char st String jsonLine = do x <- noneOf "}" xs <- many (noneOf "\n\r") eol return (x:xs) -- | Parse a static route. 
staticRoute :: GenParser Char st RouteAction staticRoute = do symbolStatic blankspaces liftM f $ many (noneOf "\n\r") where f :: String -> RouteAction f x = case decode $ L8.pack x of Just v -> RouteStatic $ RouteResponse [] 200 v Nothing -> error "Failed to parse JSON data in static route pattern." -- | Parse a nodejs route. nodeJsRoute :: GenParser Char st RouteAction nodeJsRoute = symbolNodeJs >> arg RouteNodeJs arg :: (Text -> RouteAction) -> GenParser Char st RouteAction arg t = do blankspaces r <- many (noneOf ",\n\r) ") blankspaces return $ t $ pack r mkQuery :: DbResult -> String -> DbQuery mkQuery res = DbQuery res . parseDbTemplate . pack skip1 :: GenParser Char st a -> GenParser Char st () skip1 = liftM $ const () symbol :: String -> GenParser Char st () symbol = skip1 . string -- | Symbol to indicate that the route is a PostgreSQL query template of type -- that returns no result. symbolSqlNoResult :: GenParser Char st () symbolSqlNoResult = symbol "--" -- | Symbol for PostgreSQL query of type that returns a single item. symbolSqlItem :: GenParser Char st () symbolSqlItem = symbol "~>" -- | Symbol for PostgreSQL query of type that returns a single item with -- an 'Ok' status message. symbolSqlItemOk :: GenParser Char st () symbolSqlItemOk = symbol "->" -- | Symbol for PostgreSQL query of type that returns a collection. symbolSqlCollection :: GenParser Char st () symbolSqlCollection = symbol ">>" -- | Symbol for PostgreSQL query of type that returns the last inserted id. symbolSqlLastInsert :: GenParser Char st () symbolSqlLastInsert = symbol "<>" -- | Symbol for PostgreSQL query of type that returns a row count result. symbolSqlCount :: GenParser Char st () symbolSqlCount = symbol "><" -- | Symbol which indicates that the route is a nodejs script. -- e.g., GET /resource <js> myscript symbolNodeJs :: GenParser Char st () symbolNodeJs = symbol "<js>" -- | Symbol which indicates that the route is a pipeline. -- e.g., GET /resource || some-system symbolPipeline :: GenParser Char st () symbolPipeline = symbol "||" -- | Symbol which indicates that the route is an inline pipeline. -- e.g., GET /resource |> {"processors":[...],"connections":[...]} symbolInline :: GenParser Char st () symbolInline = symbol "|>" -- | Symbol to denote a static route. -- e.g., GET /resource {..} {"hello":"is it me you're looking for?"} symbolStatic :: GenParser Char st () symbolStatic = symbol "{..}" -- | Zero or more blank spaces (unlike the default "spaces", this combinator -- accepts only "true" spaces). blankspaces :: GenParser Char st () blankspaces = skipMany (char ' ') -- | End of line. eol :: GenParser Char st String eol = try (string "\n\r") <|> try (string "\r\n") <|> string "\n" <|> string "\r" -- | Read and parse routes from a configuration file with a progress bar -- being printed to stdout. parseRoutesFromFile :: FilePath -> IO [Route] parseRoutesFromFile file = do putStr "Reading configuration\n|" chars 80 ' ' >> putStr "|" >> chars 81 '\b' r <- readFile file let ls = preprocess r x <- liftM concat $ mapM go $ zip (dots $ length ls) ls putChar '\n' return x where go :: (String, String) -> IO [Route] go (dots,x) = case parse line "" (x ++ "\n") of Left e -> error $ show e ++ '\n':x Right xs -> putStr dots >> return xs chars n = putStr . replicate n dots n = f 0 0 [] "" where f i y xs d | i == 81 = xs f i y xs d = let y' = div (i*n) 80 f' = flip f y' $ succ i in if y' == y then f' xs ('.':d) else f' (d:fill y y' ++ xs) "." 
fill x x' = replicate (x' - x - 1) "" preprocess :: String -> [String] preprocess xs = let (_, ys) = foldl' f ("", []) $ lines xs ++ ["\n"] in filter notNull $ reverse ys where f :: (String, [String]) -> String -> (String, [String]) f (x, xs) y | null y = (x' ++ y , xs) | null x = (x ++ y' , xs) | '{' == head y = (x' ++ "\n{\n" , xs) | '}' == head y = (x' ++ "\n}\n" , xs) | ind y = (x' ++ y' , xs) | otherwise = (y', trimLine x:xs) where x' = trimLine x y' = ' ':trimLine (rc y) ind y = ' ' == head y || '#' == head y rc "" = "" rc x = head (split "#" x) notNull :: String -> Bool notNull = not . null trimLeft :: String -> String trimLeft "" = "" trimLeft (x:xs) | ' ' == x = trimLeft xs | otherwise = x:xs trimRight :: String -> String trimRight = reverse . trimLeft . reverse trimLine :: String -> String trimLine = trimLeft . trimRight
johanneshilden/principle
Trombone/Parse.hs
bsd-3-clause
12,724
0
17
3,642
3,614
1,787
1,827
299
4
import Distribution.Simple import Distribution.Simple.InstallDirs import Distribution.Simple.LocalBuildInfo import Distribution.PackageDescription import System -- After Epic is built, we need a run time system. -- FIXME: This is probably all done the wrong way, I don't really understand -- Cabal properly... buildLib args flags desc local = do exit <- system "make -C evm" return () -- This is a hack. I don't know how to tell cabal that a data file needs -- installing but shouldn't be in the distribution. And it won't make the -- distribution if it's not there, so instead I just delete -- the file after configure. postConfLib args flags desc local = do exit <- system "make -C evm clean" return () addPrefix pfx var c = "export " ++ var ++ "=" ++ show pfx ++ "/" ++ c ++ ":$" ++ var postInstLib args flags desc local = do let pfx = prefix (installDirTemplates local) exit <- system $ "make -C evm install PREFIX=" ++ show pfx return () main = defaultMainWithHooks (simpleUserHooks { postBuild = buildLib, postConf = postConfLib, postInst = postInstLib })
avsm/EpiVM
Setup.hs
bsd-3-clause
1,215
0
12
332
229
117
112
19
1
module Pos.Core.Metrics.Constants ( cardanoNamespace , withCardanoNamespace ) where import Universum cardanoNamespace :: Text cardanoNamespace = "cardano" withCardanoNamespace :: Text -> Text withCardanoNamespace label = cardanoNamespace <> "." <> label
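-- For example, withCardanoNamespace "block.count" evaluates to
-- "cardano.block.count".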
input-output-hk/cardano-sl
core/src/Pos/Core/Metrics/Constants.hs
apache-2.0
317
0
6
92
54
32
22
8
1
----------------------------------------------------------------------------- -- | -- Module : Control.Comonad.Density -- Copyright : (C) 2008 Edward Kmett -- License : BSD-style (see the file LICENSE) -- -- Maintainer : Edward Kmett <[email protected]> -- Stability : experimental -- Portability : non-portable (rank-2 polymorphism) -- -- The density comonad for a functor. aka the comonad cogenerated by a functor -- The ''density'' term dates back to Dubuc''s 1974 thesis. The term -- ''monad genererated by a functor'' dates back to 1972 in Street''s -- ''Formal Theory of Monads''. ---------------------------------------------------------------------------- module Control.Comonad.Density ( Density(..) , densityToLan, lanToDensity , toDensity, fromDensity , liftDensity, lowerDensity , densityToAdjunction, adjunctionToDensity , densityToComposedAdjunction, composedAdjunctionToDensity , improveCofree ) where import Prelude hiding (abs) import Control.Comonad.Context import Control.Comonad.Cofree import Control.Comonad.Trans import Control.Comonad.Reader import Control.Functor.Adjunction import Control.Functor.Composition import Control.Functor.Extras import Control.Functor.Pointed () import Control.Functor.KanExtension import Control.Monad.Identity data Density k a = forall b. Density (k b -> a) (k b) densityToLan :: Density k a -> Lan k k a densityToLan (Density f v) = Lan f v lanToDensity :: Lan k k a -> Density k a lanToDensity (Lan f v) = Density f v -- | @Nat(k, s.k)@ is isomorphic to @Nat (Density k, s)@ (forwards) toDensity :: Functor s => (forall a. k a -> s (k a)) -> Density k :~> s toDensity s (Density f v) = fmap f $ s v -- | @Nat(k, s.k)@ is isomorphic to @Nat (Density k, s)@ (backwards) fromDensity :: (Density k :~> s) -> k a -> s (k a) fromDensity s = s . Density id instance ComonadTrans Density where colift = liftDensity instance Functor (Density f) where fmap f (Density g h) = Density (f . g) h instance Copointed (Density f) where extract (Density f a) = f a instance Comonad (Density f) where duplicate (Density f ws) = Density (Density f) ws -- | The natural isomorphism between a comonad w and the comonad generated by w (forwards). liftDensity :: Comonad w => w a -> Density w a liftDensity = Density extract -- | The natural isomorphism between a comonad w and the comonad generated by w (backwards). lowerDensity :: Comonad w => Density w a -> w a lowerDensity (Density f c) = extend f c densityToAdjunction :: Adjunction f g => Density f a -> f (g a) densityToAdjunction (Density f v) = fmap (leftAdjunct f) v adjunctionToDensity :: Adjunction f g => f (g a) -> Density f a adjunctionToDensity = Density counit densityToComposedAdjunction :: (Composition o, Adjunction f g) => Density f :~> (f `o` g) densityToComposedAdjunction (Density f v) = compose (fmap (leftAdjunct f) v) composedAdjunctionToDensity :: (Composition o, Adjunction f g) => (f `o` g) :~> Density f composedAdjunctionToDensity = Density counit . decompose instance ComonadReader e w => ComonadReader e (Density w) where askC = askC . lowerDensity instance ComonadContext e w => ComonadContext e (Density w) where getC = getC . lowerDensity modifyC f = modifyC f . lowerDensity instance ComonadCofree f w => ComonadCofree f (Density w) where outCofree (Density f c) = fmap (Density f) (outCofree c) instance RunComonadCofree f w => RunComonadCofree f (Density w) where anaCofree l r = liftDensity . anaCofree l r improveCofree :: Functor f => (forall w. ComonadCofree f w => w a) -> Cofree f a improveCofree m = lowerDensity m
urska19/MFP---Samodejno-racunanje-dvosmernih-preslikav
Control/Comonad/Density.hs
apache-2.0
3,624
7
13
629
1,047
544
503
-1
-1
{-# LANGUAGE FlexibleContexts, TypeFamilies #-} {-| An implementation of the game Tic Tac Toe. Try playing against the minimax algorithm by running in GHCi: >>> playTicTacToe Valid moves are the integers [0..8], where each integer names square of the board, starting in the upper left and proceeding in the order that you would read a book. The named square must be empty. -} module Hagl.Examples.TicTacToe where import Control.Monad.Trans (liftIO) import Data.List (elemIndices, intersperse, transpose) import Text.Printf (printf) import Hagl -- -- * Game represetation. -- -- | Each square either has an X, an O, or is empty. data Square = X | O | E deriving Eq -- | A board is a 3x3 grid of squares, represented as a 9-element list. type Board = [Square] -- | A move is indicated by a naming the index of the square to put your -- mark in, [0..8]. type Mark = Int -- | A trivial data type for Tic Tac Toe. data TicTacToe = TicTacToe -- | The initial board. start :: Board start = replicate 9 E -- | The mark corresponding to each player. xo 1 = X xo 2 = O -- | Get a list of empty squares. markable :: Board -> [Mark] markable = elemIndices E -- | The player whose turn it is. who :: Board -> PlayerID who b = if (odd . length . markable) b then 1 else 2 -- | Player p marks square m. mark :: Board -> Mark -> Board mark b m = take m b ++ xo (who b) : drop (m+1) b -- | True if player p has won. win :: PlayerID -> Board -> Bool win p b = any (all (xo p ==)) (h ++ v ++ d) where h = chunk 3 b v = transpose h d = map (map (b !!)) [[0,4,8],[2,4,6]] -- | True if the game is over. end :: Board -> Bool end b = null (markable b) || win 1 b || win 2 b -- | The payoff awarded for a final state. pay :: Board -> Payoff pay b | win 1 b = winner 2 1 | win 2 b = winner 2 2 | otherwise = tie 2 -- | Play a game against minimax! playTicTacToe = evalGame TicTacToe ["Puny Human" ::: human, "Minimax" ::: minimax] (run >> printScore) where run = printGame >> step >>= maybe run (\p -> printGame >> return p) -- Game instance instance Game TicTacToe where type TreeType TicTacToe = Discrete type Move TicTacToe = Mark type State TicTacToe = Board gameTree _ = stateTreeD who end markable mark pay start -- -- * Pretty printing -- instance Show Square where show X = "X" show O = "O" show E = " " -- | A string representation of the game board. showGame :: Board -> String showGame = concat . intersperse "\n" . intersperse line . map row . chunk 3 . map show where row [a,b,c] = printf " %s | %s | %s " a b c line = "-----------" -- | Print out the game during game execution. printGame :: GameM m TicTacToe => m () printGame = gameState >>= liftIO . putStrLn . showGame
pparkkin/Hagl
Hagl/Examples/TicTacToe.hs
bsd-3-clause
2,797
0
12
680
755
408
347
49
2
module LintSpec where

import Language.Haskell.GhcMod
import Test.Hspec
import TestUtils

spec :: Spec
spec = do
    describe "lint" $ do
        it "can detect a redundant do" $ do
            res <- runD $ lint "test/data/hlint.hs"
            res `shouldBe` "test/data/hlint.hs:4:8: Error: Redundant do\NULFound:\NUL do putStrLn \"Hello, world!\"\NULWhy not:\NUL putStrLn \"Hello, world!\"\n"

        context "when no suggestions are given" $ do
            it "doesn't output an empty line" $ do
                res <- runD $ lint "test/data/ghc-mod-check/Data/Foo.hs"
                res `shouldBe` ""
cabrera/ghc-mod
test/LintSpec.hs
bsd-3-clause
614
0
18
165
114
56
58
14
1
{-# LANGUAGE RankNTypes, NamedFieldPuns, RecordWildCards #-} module Distribution.Server.Features.Distro ( DistroFeature(..), DistroResource(..), initDistroFeature ) where import Distribution.Server.Framework import Distribution.Server.Features.Core import Distribution.Server.Features.Users import Distribution.Server.Users.Group (UserGroup(..), GroupDescription(..), nullDescription) import Distribution.Server.Features.Distro.State import Distribution.Server.Features.Distro.Types import Distribution.Server.Features.Distro.Backup (dumpBackup, restoreBackup) import Distribution.Server.Util.Parse (unpackUTF8) import Distribution.Text (display, simpleParse) import Distribution.Package import Data.List (intercalate) import Text.CSV (parseCSV) -- TODO: -- 1. write an HTML view for this module, and delete the text -- 2. use GroupResource from the Users feature -- 3. use MServerPart to support multiple views data DistroFeature = DistroFeature { distroFeatureInterface :: HackageFeature, distroResource :: DistroResource, maintainersGroup :: DynamicPath -> IO (Maybe UserGroup), queryPackageStatus :: forall m. MonadIO m => PackageName -> m [(DistroName, DistroPackageInfo)] } instance IsHackageFeature DistroFeature where getFeatureInterface = distroFeatureInterface data DistroResource = DistroResource { distroIndexPage :: Resource, distroAllPage :: Resource, distroPackages :: Resource, distroPackage :: Resource } initDistroFeature :: ServerEnv -> IO (UserFeature -> CoreFeature -> IO DistroFeature) initDistroFeature ServerEnv{serverStateDir} = do distrosState <- distrosStateComponent serverStateDir return $ \user core -> do let feature = distroFeature user core distrosState return feature distrosStateComponent :: FilePath -> IO (StateComponent AcidState Distros) distrosStateComponent stateDir = do st <- openLocalStateFrom (stateDir </> "db" </> "Distros") initialDistros return StateComponent { stateDesc = "" , stateHandle = st , getState = query st GetDistributions , putState = \(Distros dists versions) -> update st (ReplaceDistributions dists versions) , backupState = \_ -> dumpBackup , restoreState = restoreBackup , resetState = distrosStateComponent } distroFeature :: UserFeature -> CoreFeature -> StateComponent AcidState Distros -> DistroFeature distroFeature UserFeature{..} CoreFeature{coreResource=CoreResource{packageInPath}} distrosState = DistroFeature{..} where distroFeatureInterface = (emptyHackageFeature "distro") { featureResources = map ($ distroResource) [ distroIndexPage , distroAllPage , distroPackages , distroPackage ] , featureState = [abstractAcidStateComponent distrosState] } queryPackageStatus :: MonadIO m => PackageName -> m [(DistroName, DistroPackageInfo)] queryPackageStatus pkgname = queryState distrosState (PackageStatus pkgname) distroResource = DistroResource { distroIndexPage = (resourceAt "/distros/.:format") { resourceGet = [("txt", textEnumDistros)], resourcePost = [("", distroPostNew)] } , distroAllPage = (resourceAt "/distro/:distro") { resourcePut = [("", distroPutNew)], resourceDelete = [("", distroDelete)] } , distroPackages = (resourceAt "/distro/:distro/packages.:format") { resourceGet = [("txt", textDistroPkgs), ("csv", csvDistroPackageList)], resourcePut = [("csv", distroPackageListPut)] } , distroPackage = (resourceAt "/distro/:distro/package/:package.:format") { resourceGet = [("txt", textDistroPkg)], resourcePut = [("", distroPackagePut)], resourceDelete = [("", distroPackageDelete)] } } maintainersGroup = \dpath -> case simpleParse =<< lookup "distro" dpath of 
      Nothing -> return Nothing
      Just dname -> getMaintainersGroup adminGroup dname

    textEnumDistros _ = fmap (toResponse . intercalate ", " . map display) (queryState distrosState EnumerateDistros)

    textDistroPkgs dpath = withDistroPath dpath $ \dname pkgs -> do
      let pkglines = map (\(name, info) -> display name ++ " at " ++ display (distroVersion info) ++ ": " ++ distroUrl info) $ pkgs
      return $ toResponse (unlines $ ("Packages for " ++ display dname):pkglines)

    csvDistroPackageList dpath = withDistroPath dpath $ \_dname pkgs -> do
      return $ toResponse $ packageListToCSV $ pkgs

    textDistroPkg dpath = withDistroPackagePath dpath $ \_ _ info -> return . toResponse $ show info

    -- result: see-other uri, or an error: not authenticated or not found (todo)
    distroDelete dpath = withDistroNamePath dpath $ \distro -> do
      guardAuthorised_ [InGroup adminGroup] --TODO: use the per-distro maintainer groups
      -- should also check for existence of the distro here
      void $ updateState distrosState $ RemoveDistro distro
      seeOther ("/distros/") (toResponse ())

    -- result: ok response or not-found error
    distroPackageDelete dpath = withDistroPackagePath dpath $ \dname pkgname info -> do
      guardAuthorised_ [AnyKnownUser] --TODO: use the per-distro maintainer groups
      case info of
        Nothing -> notFound . toResponse $ "Package not found for " ++ display pkgname
        Just {} -> do
          void $ updateState distrosState $ DropPackage dname pkgname
          ok $ toResponse "Ok!"

    -- result: see-other response, or an error: not authenticated or not found (todo)
    distroPackagePut dpath = withDistroPackagePath dpath $ \dname pkgname _ -> lookPackageInfo $ \newPkgInfo -> do
      guardAuthorised_ [AnyKnownUser] --TODO: use the per-distro maintainer groups
      void $ updateState distrosState $ AddPackage dname pkgname newPkgInfo
      seeOther ("/distro/" ++ display dname ++ "/" ++ display pkgname) $ toResponse "Ok!"

    -- result: see-other response, or an error: not authenticated or bad request
    distroPostNew _ = lookDistroName $ \dname -> do
      guardAuthorised_ [AnyKnownUser] --TODO: use the per-distro maintainer groups
      success <- updateState distrosState $ AddDistro dname
      if success
        then seeOther ("/distro/" ++ display dname) $ toResponse "Ok!"
        else badRequest $ toResponse "Selected distribution name is already in use"

    distroPutNew dpath = withDistroNamePath dpath $ \dname -> do
      guardAuthorised_ [AnyKnownUser] --TODO: use the per-distro maintainer groups
      _success <- updateState distrosState $ AddDistro dname
      -- it doesn't matter if it exists already or not
      ok $ toResponse "Ok!"

    -- result: ok response or not-found error
    distroPackageListPut dpath = withDistroPath dpath $ \dname _pkgs -> do
      guardAuthorised_ [AnyKnownUser] --TODO: use the per-distro maintainer groups
      lookCSVFile $ \csv ->
        case csvToPackageList csv of
          Left msg -> badRequest $ toResponse $ "Could not parse CSV File to a distro package list: " ++ msg
          Right list -> do
            void $ updateState distrosState $ PutDistroPackageList dname list
            ok $ toResponse "Ok!"
withDistroNamePath :: DynamicPath -> (DistroName -> ServerPartE Response) -> ServerPartE Response withDistroNamePath dpath = require (return $ simpleParse =<< lookup "distro" dpath) withDistroPath :: DynamicPath -> (DistroName -> [(PackageName, DistroPackageInfo)] -> ServerPartE Response) -> ServerPartE Response withDistroPath dpath func = withDistroNamePath dpath $ \dname -> do isDist <- queryState distrosState (IsDistribution dname) case isDist of False -> notFound $ toResponse "Distribution does not exist" True -> do pkgs <- queryState distrosState (DistroStatus dname) func dname pkgs -- guards on the distro existing, but not the package withDistroPackagePath :: DynamicPath -> (DistroName -> PackageName -> Maybe DistroPackageInfo -> ServerPartE Response) -> ServerPartE Response withDistroPackagePath dpath func = withDistroNamePath dpath $ \dname -> do pkgname <- packageInPath dpath isDist <- queryState distrosState (IsDistribution dname) case isDist of False -> notFound $ toResponse "Distribution does not exist" True -> do pkgInfo <- queryState distrosState (DistroPackageStatus dname pkgname) func dname pkgname pkgInfo lookPackageInfo :: (DistroPackageInfo -> ServerPartE Response) -> ServerPartE Response lookPackageInfo func = do mInfo <- getDataFn $ do pVerStr <- look "version" pUriStr <- look "uri" case simpleParse pVerStr of Nothing -> mzero Just pVer -> return $ DistroPackageInfo pVer pUriStr case mInfo of (Left errs) -> ok $ toResponse $ unlines $ "Sorry, something went wrong there." : errs (Right pInfo) -> func pInfo lookDistroName :: (DistroName -> ServerPartE Response) -> ServerPartE Response lookDistroName func = withDataFn (look "distro") $ \dname -> case simpleParse dname of Just distro -> func distro _ -> badRequest $ toResponse "Not a valid distro name" getMaintainersGroup :: UserGroup -> DistroName -> IO (Maybe UserGroup) getMaintainersGroup admins dname = do isDist <- queryState distrosState (IsDistribution dname) case isDist of False -> return Nothing True -> return . Just $ UserGroup { groupDesc = maintainerGroupDescription dname , queryUserGroup = queryState distrosState $ GetDistroMaintainers dname , addUserToGroup = updateState distrosState . AddDistroMaintainer dname , removeUserFromGroup = updateState distrosState . RemoveDistroMaintainer dname , groupsAllowedToAdd = [admins] , groupsAllowedToDelete = [admins] } maintainerGroupDescription :: DistroName -> GroupDescription maintainerGroupDescription dname = nullDescription { groupTitle = "Maintainers" , groupEntity = Just (str, Just $ "/distro/" ++ display dname) , groupPrologue = "Maintainers for a distribution can map packages to it." 
  }
  where str = display dname

-- TODO: This calls parseCSV rather than importCSV -- not sure if that
-- matters (in particular, importCSV chops off the last, extraneous,
-- null entry that parseCSV adds)
lookCSVFile :: (CSVFile -> ServerPartE Response) -> ServerPartE Response
lookCSVFile func = do
    fileContents <- expectCSV
    case parseCSV "PUT input" (unpackUTF8 fileContents) of
        Left err -> badRequest $ toResponse $ "Could not parse CSV File: " ++ show err
        Right csv -> func (CSVFile csv)

packageListToCSV :: [(PackageName, DistroPackageInfo)] -> CSVFile
packageListToCSV entries = CSVFile $
    map (\(pn, DistroPackageInfo version url) -> [display pn, display version, url]) entries

csvToPackageList :: CSVFile -> Either String [(PackageName, DistroPackageInfo)]
csvToPackageList (CSVFile records) = mapM fromRecord records
  where
    fromRecord [packageStr, versionStr, uri]
        | Just package <- simpleParse packageStr
        , Just version <- simpleParse versionStr
        = return (package, DistroPackageInfo version uri)
    fromRecord rec = Left $ "Invalid distro package entry: " ++ show rec
edsko/hackage-server
Distribution/Server/Features/Distro.hs
bsd-3-clause
11,955
0
23
3,025
2,735
1,417
1,318
202
11
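The lookCSVFile / packageListToCSV / csvToPackageList trio in Distro.hs above fixes a three-column row format: package name, version, URL. A self-contained sketch of the same round-trip, with plain String stand-ins for PackageName and DistroPackageInfo (an assumption made purely for illustration), looks like this:

module CsvRowsSketch where

-- One row per package: [name, version, url], mirroring packageListToCSV above.
type Row = [String]

toRows :: [(String, (String, String))] -> [Row]
toRows = map (\(name, (version, url)) -> [name, version, url])

fromRows :: [Row] -> Either String [(String, (String, String))]
fromRows = mapM fromRow
  where
    fromRow [name, version, url] = Right (name, (version, url))
    fromRow row                  = Left ("Invalid distro package entry: " ++ show row)

-- fromRows (toRows xs) == Right xs for any well-formed xs.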
{- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 \section[PrelInfo]{The @PrelInfo@ interface to the compiler's prelude knowledge} -} {-# LANGUAGE CPP #-} module PrelInfo ( wiredInIds, ghcPrimIds, primOpRules, builtinRules, ghcPrimExports, knownKeyNames, primOpId, -- Random other things maybeCharLikeCon, maybeIntLikeCon, -- Class categories isNumericClass, isStandardClass ) where #include "HsVersions.h" import Constants ( mAX_TUPLE_SIZE ) import BasicTypes ( Boxity(..) ) import ConLike ( ConLike(..) ) import PrelNames import PrelRules import Avail import PrimOp import DataCon import Id import Name import MkId import TysPrim import TysWiredIn import HscTypes import Class import TyCon import Util import {-# SOURCE #-} TcTypeNats ( typeNatTyCons ) import Data.Array {- ************************************************************************ * * \subsection[builtinNameInfo]{Lookup built-in names} * * ************************************************************************ Notes about wired in things ~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Wired-in things are Ids\/TyCons that are completely known to the compiler. They are global values in GHC, (e.g. listTyCon :: TyCon). * A wired in Name contains the thing itself inside the Name: see Name.wiredInNameTyThing_maybe (E.g. listTyConName contains listTyCon. * The name cache is initialised with (the names of) all wired-in things * The type environment itself contains no wired in things. The type checker sees if the Name is wired in before looking up the name in the type environment. * MkIface prunes out wired-in things before putting them in an interface file. So interface files never contain wired-in things. -} knownKeyNames :: [Name] -- This list is used to ensure that when you say "Prelude.map" -- in your source code, or in an interface file, -- you get a Name with the correct known key -- (See Note [Known-key names] in PrelNames) knownKeyNames = concat [ tycon_kk_names funTyCon , concatMap tycon_kk_names primTyCons , concatMap tycon_kk_names wiredInTyCons -- Does not include tuples , concatMap tycon_kk_names typeNatTyCons , concatMap (rep_names . tupleTyCon Boxed) [2..mAX_TUPLE_SIZE] -- Yuk , cTupleTyConNames -- Constraint tuples are known-key but not wired-in -- They can't show up in source code, but can appear -- in intreface files , map idName wiredInIds , map (idName . primOpId) allThePrimOps , basicKnownKeyNames ] where -- "kk" short for "known-key" tycon_kk_names :: TyCon -> [Name] tycon_kk_names tc = tyConName tc : (rep_names tc ++ concatMap thing_kk_names (implicitTyConThings tc)) datacon_kk_names dc | Promoted tc <- promoteDataCon_maybe dc = dataConName dc : rep_names tc | otherwise = [dataConName dc] thing_kk_names :: TyThing -> [Name] thing_kk_names (ATyCon tc) = tycon_kk_names tc thing_kk_names (AConLike (RealDataCon dc)) = datacon_kk_names dc thing_kk_names thing = [getName thing] -- The TyConRepName for a known-key TyCon has a known key, -- but isn't itself an implicit thing. Yurgh. -- NB: if any of the wired-in TyCons had record fields, the record -- field names would be in a similar situation. Ditto class ops. -- But it happens that there aren't any rep_names tc = case tyConRepName_maybe tc of Just n -> [n] Nothing -> [] {- We let a lot of "non-standard" values be visible, so that we can make sense of them in interface pragmas. It's cool, though they all have "non-standard" names, so they won't get past the parser in user code. 
************************************************************************ * * PrimOpIds * * ************************************************************************ -} primOpIds :: Array Int Id -- A cache of the PrimOp Ids, indexed by PrimOp tag primOpIds = array (1,maxPrimOpTag) [ (primOpTag op, mkPrimOpId op) | op <- allThePrimOps ] primOpId :: PrimOp -> Id primOpId op = primOpIds ! primOpTag op {- ************************************************************************ * * \subsection{Export lists for pseudo-modules (GHC.Prim)} * * ************************************************************************ GHC.Prim "exports" all the primops and primitive types, some wired-in Ids. -} ghcPrimExports :: [IfaceExport] ghcPrimExports = map (avail . idName) ghcPrimIds ++ map (avail . idName . primOpId) allThePrimOps ++ [ AvailTC n [n] [] | tc <- funTyCon : primTyCons, let n = tyConName tc ] {- ************************************************************************ * * \subsection{Built-in keys} * * ************************************************************************ ToDo: make it do the ``like'' part properly (as in 0.26 and before). -} maybeCharLikeCon, maybeIntLikeCon :: DataCon -> Bool maybeCharLikeCon con = con `hasKey` charDataConKey maybeIntLikeCon con = con `hasKey` intDataConKey {- ************************************************************************ * * \subsection{Class predicates} * * ************************************************************************ -} isNumericClass, isStandardClass :: Class -> Bool isNumericClass clas = classKey clas `is_elem` numericClassKeys isStandardClass clas = classKey clas `is_elem` standardClassKeys is_elem :: Eq a => a -> [a] -> Bool is_elem = isIn "is_X_Class"
elieux/ghc
compiler/prelude/PrelInfo.hs
bsd-3-clause
6,445
0
12
1,954
728
406
322
70
4
{- | Module : $Header$ Description : automatic proofs in the development graph calculus Copyright : (c) Jorina F. Gerken, Mossakowski, Luettich, Uni Bremen 2002-2006 License : GPLv2 or higher, see LICENSE.txt Maintainer : [email protected] Stability : provisional Portability : non-portable(Logic) automatic proofs in development graphs. Follows Sect. IV:4.4 of the CASL Reference Manual. -} {- References: T. Mossakowski, S. Autexier and D. Hutter: Extending Development Graphs With Hiding. H. Hussmann (ed.): Fundamental Approaches to Software Engineering 2001, Lecture Notes in Computer Science 2029, p. 269-283, Springer-Verlag 2001. -} module Proofs.Automatic (automatic, automaticFromList) where import Proofs.Global import Proofs.Local import Proofs.HideTheoremShift import Proofs.TheoremHideShift import Static.DevGraph import Static.DgUtils import Static.History import Common.LibName import qualified Common.Lib.SizedList as SizedList import qualified Data.Map as Map import Data.Maybe (fromMaybe) import Data.Graph.Inductive.Graph import Common.Result automaticFromList :: LibName -> [LEdge DGLinkLab] -> LibEnv -> LibEnv automaticFromList ln ls libEnv = let x = automaticRecursiveFromList ln libEnv ls y = localInferenceFromList ln ls x in y noChange :: LibEnv -> LibEnv -> Bool noChange oldLib newLib = and $ Map.elems $ Map.intersectionWith (\ a b -> SizedList.null . snd $ splitHistory a b) oldLib newLib automaticRecursiveFromList :: LibName -> LibEnv -> [LEdge DGLinkLab] -> LibEnv automaticRecursiveFromList ln proofstatus ls = let auxProofstatus = automaticApplyRulesToGoals ln ls proofstatus rulesWithGoals in if noChange proofstatus auxProofstatus then auxProofstatus else automaticRecursiveFromList ln auxProofstatus ls {- | automatically applies all rules to the library denoted by the library name of the given proofstatus -} automatic :: LibName -> LibEnv -> LibEnv automatic ln le = let nLib = localInference ln $ automaticRecursive 49 ln le in Map.intersectionWith (\ odg ndg -> groupHistory odg (DGRule "automatic") ndg) le nLib -- | applies the rules recursively until no further changes can be made automaticRecursive :: Int -> LibName -> LibEnv -> LibEnv automaticRecursive count ln proofstatus = let auxProofstatus = automaticApplyRules ln proofstatus in if noChange proofstatus auxProofstatus || count < 1 then auxProofstatus else automaticRecursive (count - 1) ln auxProofstatus wrapTheoremHideShift :: LibName -> LibEnv -> LibEnv wrapTheoremHideShift ln libEnv = fromMaybe libEnv (maybeResult $ theoremHideShift ln libEnv) -- | list of rules to use rules :: [LibName -> LibEnv -> LibEnv] rules = [ automaticHideTheoremShift , globDecomp , wrapTheoremHideShift ] rulesWithGoals :: [LibName -> [LEdge DGLinkLab] -> LibEnv -> LibEnv] rulesWithGoals = [automaticHideTheoremShiftFromList , locDecompFromList , globDecompFromList , globSubsumeFromList ] automaticApplyRulesToGoals :: LibName -> [LEdge DGLinkLab] -> LibEnv -> [LibName -> [LEdge DGLinkLab] -> LibEnv -> LibEnv] -> LibEnv automaticApplyRulesToGoals ln ls libEnv ll = case ll of [] -> libEnv f : l -> let nwLibEnv = f ln ls libEnv dgraph = lookupDGraph ln nwLibEnv updateList = filter (\ (_, _, lp) -> case thmLinkStatus $ dgl_type lp of Just LeftOpen -> True _ -> False) $ labEdgesDG dgraph in automaticApplyRulesToGoals ln updateList nwLibEnv l {- | sequentially applies all rules to the given proofstatus, ie to the library denoted by the library name of the proofstatus -} automaticApplyRules :: LibName -> LibEnv -> LibEnv automaticApplyRules ln = foldl 
(.) id $ map (\ f -> f ln) rules
mariefarrell/Hets
Proofs/Automatic.hs
gpl-2.0
4,107
0
19
1,022
806
426
380
68
3
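automaticRecursive and noChange in Proofs/Automatic.hs above follow a common shape: keep applying a step until the result stops changing or a step budget runs out. A dependency-free sketch of that shape (hypothetical names, no Hets types):

-- Apply `step` until a fixpoint is reached or the budget is exhausted,
-- mirroring automaticRecursive's count/noChange check above.
iterateUntilStable :: Eq a => Int -> (a -> a) -> a -> a
iterateUntilStable budget step x
    | budget < 1 || x' == x = x'
    | otherwise             = iterateUntilStable (budget - 1) step x'
  where
    x' = step x

-- iterateUntilStable 49 (dropWhile (== ' ')) "   hello" == "hello"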
{-# LANGUAGE ScopedTypeVariables #-} -- | Support for the LB (LambdaBot) monad module LBState ( -- ** Functions to access the module's state readMS, withMS, modifyMS, writeMS, accessorMS, -- ** Utility functions for modules that need state for each target. GlobalPrivate(global), mkGlobalPrivate, withPS, readPS, withGS, readGS, writePS, writeGS, -- * more LB support forkLB, liftLB ) where import Lambdabot import Lambdabot.Util (withMWriter, timeout) import Control.Concurrent (forkIO, readMVar, modifyMVar_, newMVar, MVar, ThreadId) import Control.Monad.Reader import Control.Monad.Trans (liftIO) import Message (Nick) -- withMWriter :: MVar a -> (a -> (a -> IO ()) -> IO b) -> IO b -- | Update the module's private state. -- This is the preferred way of changing the state. The state will be locked -- until the body returns. The function is exception-safe, i.e. even if -- an error occurs or the thread is killed (e.g. because it deadlocked and -- therefore exceeded its time limit), the state from the last write operation -- will be restored. If the writer escapes, calling it will have no observable -- effect. -- @withMS@ is not composable, in the sense that a readMS from within the body -- will cause a dead-lock. However, all other possibilies to access the state -- that came to my mind had even more serious deficiencies such as being prone -- to race conditions or semantic obscurities. withMS :: (s -> (s -> LB ()) -> LB a) -> ModuleT s LB a withMS f = do ref <- getRef lift . lbIO $ \conv -> withMWriter ref $ \x writer -> conv $ f x (liftIO . writer) -- | Read the module's private state. readMS :: ModuleT s LB s readMS = getRef >>= liftIO . readMVar -- | Produces a with-function. Needs a better name. accessorMS :: (s -> (t, t -> s)) -> (t -> (t -> LB ()) -> LB a) -> ModuleT s LB a accessorMS decompose f = withMS $ \s writer -> let (t,k) = decompose s in f t (writer . k) -- | Modify the module's private state. modifyMS :: (s -> s) -> ModuleT s LB () modifyMS f = getRef >>= liftIO . flip modifyMVar_ (return . f) -- | Write the module's private state. Try to use withMS instead. writeMS :: s -> ModuleT s LB () writeMS (x :: s) = modifyMS . const $ x -- need to help out 6.5 -- | This datatype allows modules to conviently maintain both global -- (i.e. for all clients they're interacting with) and private state. -- It is implemented on top of readMS\/withMS. -- -- This simple implementation is linear in the number of private states used. data GlobalPrivate g p = GP { global :: !g, private :: ![(Nick,MVar (Maybe p))], maxSize :: Int } -- | Creates a @GlobalPrivate@ given the value of the global state. No private -- state for clients will be created. mkGlobalPrivate :: Int -> g -> GlobalPrivate g p mkGlobalPrivate ms g = GP { global = g, private = [], maxSize = ms } -- Needs a better interface. The with-functions are hardly useful. -- | Writes private state. For now, it locks everything. withPS :: Nick -- ^ The target -> (Maybe p -> (Maybe p -> LB ()) -> LB a) -- ^ @Just x@ writes x in the user's private state, @Nothing@ removes it. -> ModuleT (GlobalPrivate g p) LB a withPS who f = do mvar <- accessPS return id who lift . lbIO $ \conv -> withMWriter mvar $ \x writer -> conv $ f x (liftIO . writer) -- | Reads private state. readPS :: Nick -> ModuleT (GlobalPrivate g p) LB (Maybe p) readPS = accessPS (liftIO . readMVar) (\_ -> return Nothing) -- | Reads private state, executes one of the actions success and failure -- which take an MVar and an action producing a @Nothing@ MVar, respectively. 
accessPS :: (MVar (Maybe p) -> LB a) -> (LB (MVar (Maybe p)) -> LB a) -> Nick -> ModuleT (GlobalPrivate g p) LB a accessPS success failure who = withMS $ \state writer -> case lookup who $ private state of Just mvar -> do let newPrivate = (who,mvar): filter ((/=who) . fst) (private state) length newPrivate `seq` writer (state { private = newPrivate }) success mvar Nothing -> failure $ do mvar <- liftIO $ newMVar Nothing let newPrivate = take (maxSize state) $ (who,mvar): private state length newPrivate `seq` writer (state { private = newPrivate }) return mvar -- | Writes global state. Locks everything withGS :: (g -> (g -> LB ()) -> LB ()) -> ModuleT (GlobalPrivate g p) LB () withGS f = withMS $ \state writer -> f (global state) $ \g -> writer $ state { global = g } -- | Reads global state. readGS :: ModuleT (GlobalPrivate g p) LB g readGS = global `fmap` readMS -- The old interface, as we don't wanna be too fancy right now. writePS :: Nick -> Maybe p -> ModuleT (GlobalPrivate g p) LB () writePS who x = withPS who (\_ writer -> writer x) writeGS :: g -> ModuleT (GlobalPrivate g p) LB () writeGS g = withGS (\_ writer -> writer g) -- | run an IO action in another thread, with a timeout, lifted into LB forkLB :: LB a -> LB ThreadId forkLB f = (`liftLB` f) $ \g -> do forkIO $ do timeout (15 * 1000 * 1000) g return () -- | lift an io transformer into LB liftLB :: (IO a -> IO b) -> LB a -> LB b liftLB f = LB . mapReaderT f . runLB -- lbIO (\conv -> f (conv lb))
zeekay/lambdabot
LBState.hs
mit
5,291
0
20
1,228
1,443
769
674
83
2
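The withMS / withMWriter discipline documented in LBState.hs above amounts to: take the MVar, hand the body the current value plus a writer, and afterwards put back whatever was written last (or the original value) even if the body throws. A stripped-down sketch over plain IO, without the LB/ModuleT wrappers (so the names here are not lambdabot's), could look like:

import Control.Concurrent.MVar (MVar, takeMVar, putMVar)
import Control.Exception (finally)
import Data.IORef (newIORef, readIORef, writeIORef)

-- Exception-safe read-modify-write in the style of withMWriter/withMS above.
withState :: MVar s -> (s -> (s -> IO ()) -> IO a) -> IO a
withState var body = do
    s0   <- takeMVar var
    cell <- newIORef s0
    body s0 (writeIORef cell) `finally` (readIORef cell >>= putMVar var)

-- Usage sketch:
--   v <- newMVar (0 :: Int)
--   _ <- withState v $ \n write -> write (n + 1)
--   readMVar v >>= print   -- prints 1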
{-# LANGUAGE BangPatterns #-} {-| External data loader. This module holds the external data loading, and thus is the only one depending (via the specialized Text\/Rapi\/Luxi modules) on the actual libraries implementing the low-level protocols. -} {- Copyright (C) 2009, 2010, 2011, 2012 Google Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -} module Ganeti.HTools.ExtLoader ( loadExternalData , commonSuffix , maybeSaveData , queryAllMonDDCs , pMonDData ) where import Control.Monad import Control.Exception import Data.Maybe (isJust, fromJust, catMaybes) import Network.Curl import System.FilePath import System.IO import System.Time (getClockTime) import Text.Printf (hPrintf) import qualified Text.JSON as J import qualified Data.Map as Map import qualified Data.List as L import qualified Ganeti.Constants as C import qualified Ganeti.DataCollectors.CPUload as CPUload import qualified Ganeti.HTools.Container as Container import qualified Ganeti.HTools.Backend.Luxi as Luxi import qualified Ganeti.HTools.Backend.Rapi as Rapi import qualified Ganeti.HTools.Backend.Simu as Simu import qualified Ganeti.HTools.Backend.Text as Text import qualified Ganeti.HTools.Backend.IAlloc as IAlloc import qualified Ganeti.HTools.Node as Node import qualified Ganeti.HTools.Instance as Instance import Ganeti.HTools.Loader (mergeData, checkData, ClusterData(..) , commonSuffix, clearDynU) import Ganeti.BasicTypes import Ganeti.Cpu.Types import Ganeti.DataCollectors.Types import Ganeti.HTools.Types import Ganeti.HTools.CLI import Ganeti.JSON import Ganeti.Logging (logWarning) import Ganeti.Utils (sepSplit, tryRead, exitIfBad, exitWhen) -- | Error beautifier. wrapIO :: IO (Result a) -> IO (Result a) wrapIO = handle (\e -> return . Bad . show $ (e::IOException)) -- | Parses a user-supplied utilisation string. parseUtilisation :: String -> Result (String, DynUtil) parseUtilisation line = case sepSplit ' ' line of [name, cpu, mem, dsk, net] -> do rcpu <- tryRead name cpu rmem <- tryRead name mem rdsk <- tryRead name dsk rnet <- tryRead name net let du = DynUtil { cpuWeight = rcpu, memWeight = rmem , dskWeight = rdsk, netWeight = rnet } return (name, du) _ -> Bad $ "Cannot parse line " ++ line -- | External tool data loader from a variety of sources. loadExternalData :: Options -> IO ClusterData loadExternalData opts = do let mhost = optMaster opts lsock = optLuxi opts tfile = optDataFile opts simdata = optNodeSim opts iallocsrc = optIAllocSrc opts setRapi = mhost /= "" setLuxi = isJust lsock setSim = (not . null) simdata setFile = isJust tfile setIAllocSrc = isJust iallocsrc allSet = filter id [setRapi, setLuxi, setFile] exTags = case optExTags opts of Nothing -> [] Just etl -> map (++ ":") etl selInsts = optSelInst opts exInsts = optExInst opts exitWhen (length allSet > 1) "Only one of the rapi, luxi, and data\ \ files options should be given." 
util_contents <- maybe (return "") readFile (optDynuFile opts) util_data <- exitIfBad "can't parse utilisation data" . mapM parseUtilisation $ lines util_contents input_data <- case () of _ | setRapi -> wrapIO $ Rapi.loadData mhost | setLuxi -> wrapIO . Luxi.loadData $ fromJust lsock | setSim -> Simu.loadData simdata | setFile -> wrapIO . Text.loadData $ fromJust tfile | setIAllocSrc -> wrapIO . IAlloc.loadData $ fromJust iallocsrc | otherwise -> return $ Bad "No backend selected! Exiting." now <- getClockTime let ignoreDynU = optIgnoreDynu opts eff_u = if ignoreDynU then [] else util_data ldresult = input_data >>= (if ignoreDynU then clearDynU else return) >>= mergeData eff_u exTags selInsts exInsts now cdata <- exitIfBad "failed to load data, aborting" ldresult cdata' <- if optMonD opts then queryAllMonDDCs cdata opts else return cdata let (fix_msgs, nl) = checkData (cdNodes cdata') (cdInstances cdata') unless (optVerbose opts == 0) $ maybeShowWarnings fix_msgs return cdata' {cdNodes = nl} -- | Function to save the cluster data to a file. maybeSaveData :: Maybe FilePath -- ^ The file prefix to save to -> String -- ^ The suffix (extension) to add -> String -- ^ Informational message -> ClusterData -- ^ The cluster data -> IO () maybeSaveData Nothing _ _ _ = return () maybeSaveData (Just path) ext msg cdata = do let adata = Text.serializeCluster cdata out_path = path <.> ext writeFile out_path adata hPrintf stderr "The cluster state %s has been written to file '%s'\n" msg out_path -- | Type describing a data collector basic information. data DataCollector = DataCollector { dName :: String -- ^ Name of the data collector , dCategory :: Maybe DCCategory -- ^ The name of the category } -- | The actual data types for MonD's Data Collectors. data Report = CPUavgloadReport CPUavgload -- | The list of Data Collectors used by hail and hbal. collectors :: Options -> [DataCollector] collectors opts = if optIgnoreDynu opts then [] else [ DataCollector CPUload.dcName CPUload.dcCategory ] -- | MonDs Data parsed by a mock file. Representing (node name, list of reports -- produced by MonDs Data Collectors). type MonDData = (String, [DCReport]) -- | A map storing MonDs data. type MapMonDData = Map.Map String [DCReport] -- | Parse MonD data file contents. pMonDData :: String -> Result [MonDData] pMonDData input = loadJSArray "Parsing MonD's answer" input >>= mapM (pMonDN . J.fromJSObject) -- | Parse a node's JSON record. pMonDN :: JSRecord -> Result MonDData pMonDN a = do node <- tryFromObj "Parsing node's name" a "node" reports <- tryFromObj "Parsing node's reports" a "reports" return (node, reports) -- | Query all MonDs for all Data Collector. queryAllMonDDCs :: ClusterData -> Options -> IO ClusterData queryAllMonDDCs cdata opts = do map_mDD <- case optMonDFile opts of Nothing -> return Nothing Just fp -> do monDData_contents <- readFile fp monDData <- exitIfBad "can't parse MonD data" . pMonDData $ monDData_contents return . Just $ Map.fromList monDData let (ClusterData _ nl il _ _) = cdata (nl', il') <- foldM (queryAllMonDs map_mDD) (nl, il) (collectors opts) return $ cdata {cdNodes = nl', cdInstances = il'} -- | Query all MonDs for a single Data Collector. 
queryAllMonDs :: Maybe MapMonDData -> (Node.List, Instance.List) -> DataCollector -> IO (Node.List, Instance.List) queryAllMonDs m (nl, il) dc = do elems <- mapM (queryAMonD m dc) (Container.elems nl) let elems' = catMaybes elems if length elems == length elems' then let il' = foldl updateUtilData il elems' nl' = zip (Container.keys nl) elems' in return (Container.fromList nl', il') else do logWarning $ "Didn't receive an answer by all MonDs, " ++ dName dc ++ "'s data will be ignored." return (nl,il) -- | Query a specified MonD for a Data Collector. fromCurl :: DataCollector -> Node.Node -> IO (Maybe DCReport) fromCurl dc node = do (code, !body) <- curlGetString (prepareUrl dc node) [] case code of CurlOK -> case J.decodeStrict body :: J.Result DCReport of J.Ok r -> return $ Just r J.Error _ -> return Nothing _ -> do logWarning $ "Failed to contact node's " ++ Node.name node ++ " MonD for DC " ++ dName dc return Nothing -- | Return the data from correct combination of a Data Collector -- and a DCReport. mkReport :: DataCollector -> Maybe DCReport -> Maybe Report mkReport dc dcr = case dcr of Nothing -> Nothing Just dcr' -> case () of _ | CPUload.dcName == dName dc -> case fromJVal (dcReportData dcr') :: Result CPUavgload of Ok cav -> Just $ CPUavgloadReport cav Bad _ -> Nothing | otherwise -> Nothing -- | Get data report for the specified Data Collector and Node from the map. fromFile :: DataCollector -> Node.Node -> MapMonDData -> Maybe DCReport fromFile dc node m = let matchDCName dcr = dName dc == dcReportName dcr in maybe Nothing (L.find matchDCName) $ Map.lookup (Node.name node) m -- | Query a MonD for a single Data Collector. queryAMonD :: Maybe MapMonDData -> DataCollector -> Node.Node -> IO (Maybe Node.Node) queryAMonD m dc node = do dcReport <- case m of Nothing -> fromCurl dc node Just m' -> return $ fromFile dc node m' case mkReport dc dcReport of Nothing -> return Nothing Just report -> case report of CPUavgloadReport cav -> let ct = cavCpuTotal cav du = Node.utilLoad node du' = du {cpuWeight = ct} in return $ Just node {Node.utilLoad = du'} -- | Update utilization data. updateUtilData :: Instance.List -> Node.Node -> Instance.List updateUtilData il node = let ct = cpuWeight (Node.utilLoad node) n_uCpu = Node.uCpu node upd inst = if Node.idx node == Instance.pNode inst then let i_vcpus = Instance.vcpus inst i_util = ct / fromIntegral n_uCpu * fromIntegral i_vcpus i_du = Instance.util inst i_du' = i_du {cpuWeight = i_util} in inst {Instance.util = i_du'} else inst in Container.map upd il -- | Prepare url to query a single collector. prepareUrl :: DataCollector -> Node.Node -> URLString prepareUrl dc node = Node.name node ++ ":" ++ show C.defaultMondPort ++ "/" ++ show C.mondLatestApiVersion ++ "/report/" ++ getDCCName (dCategory dc) ++ "/" ++ dName dc -- | Get Category Name. getDCCName :: Maybe DCCategory -> String getDCCName dcc = case dcc of Nothing -> "default" Just c -> getCategoryName c
vladimir-ipatov/ganeti
src/Ganeti/HTools/ExtLoader.hs
gpl-2.0
10,807
0
18
2,688
2,679
1,379
1,300
218
5
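parseUtilisation in ExtLoader.hs above turns a line of the form "name cpu mem dsk net" into a dynamic-utilisation record. The same idea without the Ganeti types (Util is a hypothetical stand-in for DynUtil, and readMaybe replaces tryRead):

import Text.Read (readMaybe)

-- Hypothetical stand-in for DynUtil.
data Util = Util { cpuW, memW, dskW, netW :: Double }
    deriving Show

-- Parse "node1 1.5 0.8 0.2 0.1" into (name, Util), mirroring parseUtilisation.
parseUtilLine :: String -> Either String (String, Util)
parseUtilLine line =
    case words line of
        [name, cpu, mem, dsk, net] ->
            case mapM readMaybe [cpu, mem, dsk, net] of
                Just [c, m, d, n] -> Right (name, Util c m d n)
                _                 -> Left ("Cannot parse line " ++ line)
        _ -> Left ("Cannot parse line " ++ line)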
{-# LANGUAGE TypeFamilies #-} -- Module : Data.UUID.Builder -- Copyright : (c) 2009 Mark Lentczner -- -- License : BSD-style -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable -- -- This module provides a system that can call a function that takes -- a sequence of some number of Word8 arguments. -- -- The twist is that the Word8 arguments can be supplied directly -- from Word8s, or from other sources that may provide more than -- one Word8 apiece. Examples are Word16 and Word32 that supply -- two and four Word8s respectively. Other ByteSource instances -- can be defined. -- -- This module is admittedly overkill. There are only three places -- in the uuid package that need to call buildFromBytes with 16 -- Word8 values, but each place uses Words of different lengths: -- version 1 uuids: 32-16-16-16-8-8-8-8-8-8 -- version 4 uuids: 24-24-32-24-24 -- version 5 uuids: 32-32-32-32 -- Originally, these three constructions were hand coded but the -- code was ungainly. Using this module makes the code very -- concise, and turns out to optimize to just as fast, or faster! module Data.UUID.Builder (ByteSource(..) ,ByteSink ,Takes1Byte ,Takes2Bytes ,Takes3Bytes ,Takes4Bytes ) where import Data.Bits import Data.Word type Takes1Byte g = Word8 -> g type Takes2Bytes g = Word8 -> Word8 -> g type Takes3Bytes g = Word8 -> Word8 -> Word8 -> g type Takes4Bytes g = Word8 -> Word8 -> Word8 -> Word8 -> g -- | Type of function that a given ByteSource needs. -- This function must take as many Word8 arguments as the ByteSource provides type family ByteSink w g type instance ByteSink Word8 g = Takes1Byte g type instance ByteSink Word16 g = Takes2Bytes g type instance ByteSink Word32 g = Takes4Bytes g type instance ByteSink Int g = Takes4Bytes g -- | Class of types that can add Word8s to a Builder. -- Instances for Word8, Word16, Word32 and Int provide 1, 2, 4 and 4 bytes, -- respectively, into a ByteSink class ByteSource w where -- | Apply the source's bytes to the sink (/-/) :: ByteSink w g -> w -> g infixl 6 /-/ instance ByteSource Word8 where f /-/ w = f w instance ByteSource Word16 where f /-/ w = f b1 b2 where b1 = fromIntegral (w `shiftR` 8) b2 = fromIntegral w instance ByteSource Word32 where f /-/ w = f b1 b2 b3 b4 where b1 = fromIntegral (w `shiftR` 24) b2 = fromIntegral (w `shiftR` 16) b3 = fromIntegral (w `shiftR` 8) b4 = fromIntegral w instance ByteSource Int where f /-/ w = f b1 b2 b3 b4 where b1 = fromIntegral (w `shiftR` 24) b2 = fromIntegral (w `shiftR` 16) b3 = fromIntegral (w `shiftR` 8) b4 = fromIntegral w
necrobious/uuid
Data/UUID/Builder.hs
bsd-3-clause
2,808
0
10
689
495
288
207
40
0
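In Data.UUID.Builder above, a "sink" is simply a function expecting the right number of Word8 arguments, and /-/ feeds it from wider words. Assuming the module exactly as listed, a small usage sketch (the helper names here are made up):

import Data.Word (Word16, Word32, Word8)
import Data.UUID.Builder (ByteSource (..))

-- Split one Word32 into its four big-endian bytes via the Word32 instance.
word32Bytes :: Word32 -> [Word8]
word32Bytes w = (\a b c d -> [a, b, c, d]) /-/ w

-- Mix sources, in the spirit of the 32-16-16-... layouts mentioned above.
mixedBytes :: Word32 -> Word16 -> Word16 -> [Word8]
mixedBytes a b c =
    (\x1 x2 x3 x4 x5 x6 x7 x8 -> [x1, x2, x3, x4, x5, x6, x7, x8]) /-/ a /-/ b /-/ c

-- word32Bytes 0x01020304 == [1, 2, 3, 4]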
module PackageTests.BuildDeps.TargetSpecificDeps3.Check where import Test.Tasty.HUnit import PackageTests.PackageTester import System.FilePath import Data.List import qualified Control.Exception as E import Text.Regex.Posix suite :: FilePath -> Assertion suite ghcPath = do let spec = PackageSpec { directory = "PackageTests" </> "BuildDeps" </> "TargetSpecificDeps3" , configOpts = [] , distPref = Nothing } result <- cabal_build spec ghcPath do assertEqual "cabal build should fail - see test-log.txt" False (successful result) assertBool "error should be in lemon.hs" $ "lemon.hs:" `isInfixOf` outputText result assertBool "error should be \"Could not find module `System.Time\"" $ (intercalate " " $ lines $ outputText result) =~ "Could not find module.*System.Time" `E.catch` \exc -> do putStrLn $ "Cabal result was "++show result E.throwIO (exc :: E.SomeException)
corngood/cabal
Cabal/tests/PackageTests/BuildDeps/TargetSpecificDeps3/Check.hs
bsd-3-clause
1,016
0
15
259
223
118
105
24
1
module Cow where import Bee fish = honeyEater
Numberartificial/workflow
snipets/src/Craft/Chapter15/Cow.hs
mit
48
0
4
10
12
8
4
3
1
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="ur-PK"> <title>Call Graph</title> <maps> <homeID>callgraph</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
thc202/zap-extensions
addOns/callgraph/src/main/javahelp/help_ur_PK/helpset_ur_PK.hs
apache-2.0
961
77
66
156
407
206
201
-1
-1
{-# LANGUAGE TemplateHaskell #-} module T4364 where data Z type N0 = $( [t| Z |] ) type N1 = $( [t| Z |] )
urbanslug/ghc
testsuite/tests/th/T4364.hs
bsd-3-clause
109
0
6
27
36
26
10
-1
-1
import Control.Concurrent import GHC.Conc import Data.List import Data.Maybe main = do t1 <- forkIO (threadDelay 100000000) m <- newEmptyMVar t2 <- forkIO (takeMVar m) t3 <- forkIO (let loop = do r <- tryTakeMVar m; _ <- newEmptyMVar -- do some allocation :( if isNothing r then loop else return () in loop) t4 <- forkIO (return ()) yield threadDelay 10000 print =<< mapM threadStatus [t1,t2,t3,t4] putMVar m ()
ghc-android/ghc
testsuite/tests/concurrent/should_run/conc070.hs
bsd-3-clause
508
0
18
165
188
89
99
17
2
{- # LANGUAGE MonoLocalBinds # -} module Main where import T4809_IdentityT (IdentityT(..), XML, runIdentityT) import T4809_XMLGenerator (XMLGenT(..), XMLGen(genElement), Child, EmbedAsChild(..), unXMLGenT) import System.IO (BufferMode(..), hSetBuffering, stdout) page :: XMLGenT (IdentityT IO) XML page = genElement (Nothing, "ul") [] [ asChild (asChild "foo")] -- where -- item :: XMLGenT (IdentityT IO) [Child (IdentityT IO)] -- item = (asChild $ asChild (return "bar" :: XMLGenT (IdentityT IO) String)) main :: IO () main = do hSetBuffering stdout LineBuffering r <- runIdentityT (unXMLGenT page) print r
urbanslug/ghc
testsuite/tests/typecheck/should_run/T4809.hs
bsd-3-clause
645
0
10
120
171
99
72
11
1
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE OverloadedStrings #-} module Shed.IndexServer.Postgresql where import Control.Monad import Data.Aeson.Types (Value (..)) import qualified Data.Map as M import Data.Maybe (listToMaybe) import Data.Text (Text) import Database.PostgreSQL.Simple import Database.PostgreSQL.Simple.FromField (FromField (..), fromJSONField) import Database.PostgreSQL.Simple.FromRow import Database.PostgreSQL.Simple.ToField import Shed.IndexServer import Shed.Types newtype PostgresIndexer = PG { unPostgresIndexer :: Connection } instance FromField SHA1 where fromField f b = SHA1 <$> fromField f b instance ToField SHA1 where toField (SHA1 a) = toField a instance FromField (M.Map Text Text) where fromField = fromJSONField instance FromRow Permanode where fromRow = Permanode <$> field <*> field <*> field <*> field instance IndexServer PostgresIndexer where wipe (PG conn) = void $ execute_ conn "delete from permanodes" makePermanode (PG conn) sha = void $ execute conn "INSERT INTO permanodes (sha1) VALUES (?) ON CONFLICT DO NOTHING" (Only sha) setPermanodeAttribute (PG conn) sha k v = void $ execute conn "UPDATE permanodes SET attributes = jsonb_set(attributes, ARRAY[?], ?) WHERE sha1 = ?" (k,String v,sha) permanodeHasContent (PG conn) (SHA1 sha) = do Just (Only n) <- listToMaybe <$> query conn "SELECT COUNT(*) FROM permanodes WHERE attributes->'camliContent' = ?" (Only (String sha)) return (n > (0 :: Int)) setPermanodeShowInUI (PG conn) (SHA1 sha) = void $ execute conn "UPDATE permanodes SET show_in_ui = true WHERE attributes->'camliContent' = ?" (Only (String sha)) setSearchHigh (PG conn) (SHA1 sha) text = void $ execute conn "UPDATE permanodes SET search_high = ? WHERE attributes->'camliContent' = ?" (text, String sha) setSearchLow (PG conn) (SHA1 sha) text = void $ execute conn "UPDATE permanodes SET search_low = ? WHERE attributes->'camliContent' = ?" (text, String sha) setPermanodeThumbnail (PG conn) (SHA1 sha) jpg = void $ execute conn "UPDATE permanodes SET thumbnail = ? WHERE attributes->'camliContent' = ?" (Binary jpg, String sha) setPermanodePreview (PG conn) (SHA1 sha) prev = void $ execute conn "UPDATE permanodes SET preview = ? WHERE attributes->'camliContent' = ?" (prev, String sha) getPermanode (PG conn) (SHA1 sha) = listToMaybe <$> query conn "SELECT sha1, attributes, thumbnail, preview FROM permanodes WHERE sha1 = ?" (Only sha) getPermanodes (PG conn) page = query conn "SELECT sha1, attributes, thumbnail, preview FROM permanodes WHERE show_in_ui = true ORDER BY sha1 DESC LIMIT 100 OFFSET ?" (Only (100 * page)) search (PG conn) t = query conn "SELECT sha1, attributes, thumbnail, preview FROM permanodes WHERE (setweight(to_tsvector(permanodes.search_high),'A') || setweight(to_tsvector(permanodes.search_low), 'B')) @@ to_tsquery('english', ?) ORDER BY ts_rank((setweight(to_tsvector(permanodes.search_high),'A') || setweight(to_tsvector(permanodes.search_low), 'B')), to_tsquery('english', ?)) DESC" (t,t) getThumbnail (PG conn) (SHA1 sha) = do res <- listToMaybe <$> query conn "SELECT thumbnail FROM permanodes WHERE sha1 = ?" (Only sha) case res of Nothing -> return Nothing Just (Only (Binary jpg)) -> return (Just jpg)
dbp/shed
src/Shed/IndexServer/Postgresql.hs
isc
3,670
0
15
898
830
430
400
54
0
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} module Shed.Indexer where import Control.Applicative ((<|>)) import Control.Logging (log') import Control.Monad (msum, void, when) import Data.Aeson (decode) import qualified Data.ByteString.Lazy as BL import Data.Monoid ((<>)) import qualified Shed.Blob.Email as Email import qualified Shed.Blob.File as File import qualified Shed.Blob.Permanode as Permanode import Shed.BlobServer import Shed.Images import Shed.IndexServer import Shed.Signing import Shed.Types decoders :: SomeBlobServer -> SomeIndexServer -> SHA1 -> BL.ByteString -> [Maybe (IO ())] decoders st se sha d = [Permanode.indexBlob st se sha <$> decode d ,File.indexBlob st se sha <$> decode d ,Email.indexBlob st se sha <$> decode d ] index :: SomeBlobServer -> SomeIndexServer -> IO () index a s = do enumerateBlobs a $ \sha dat -> do putStr $ "\r" <> show sha case msum $ decoders a s sha dat of Just a -> a Nothing -> return () putStrLn "\rDONE "
dbp/shed
src/Shed/Indexer.hs
isc
1,288
0
15
413
343
189
154
35
2
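decoders and index in Shed/Indexer.hs above try a fixed list of decoders against each blob and run the first one that understands it; msum picks the first Just. The same dispatch pattern in miniature, without aeson or the blob-server types (the handlers here are invented for illustration):

import Control.Monad (msum)
import Data.Maybe (fromMaybe)
import Text.Read (readMaybe)

-- Each handler inspects the input and, if it understands it, returns an action.
handlers :: String -> [Maybe (IO ())]
handlers s =
    [ (\n -> putStrLn ("int blob: "  ++ show n)) <$> (readMaybe s :: Maybe Int)
    , (\b -> putStrLn ("bool blob: " ++ show b)) <$> (readMaybe s :: Maybe Bool)
    ]

-- Run the first matching handler, or fall through, as index does above.
dispatch :: String -> IO ()
dispatch s = fromMaybe (putStrLn ("unrecognised blob: " ++ s)) (msum (handlers s))

-- dispatch "42"   -- prints "int blob: 42"
-- dispatch "True" -- prints "bool blob: True"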
{-# htermination insert :: Int -> [Int] -> [Int] #-} import List
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/List_insert_5.hs
mit
65
0
3
12
5
3
2
1
0
module Data.ReedSolomon.Backend ( Backend(..) ) where import Data.Word (Word8) import Control.Monad.ST (ST) import Data.Vector.Storable as SV (Vector, MVector) data Backend s = Backend { backendName :: String , backendGalMulSlice :: Word8 -> SV.Vector Word8 -> SV.MVector s Word8 -> ST s () , backendGalMulSliceXor :: Word8 -> SV.Vector Word8 -> SV.MVector s Word8 -> ST s () }
NicolasT/reedsolomon
src/Data/ReedSolomon/Backend.hs
mit
752
0
13
432
143
81
62
14
0
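A Backend in Data.ReedSolomon.Backend above is just a record naming the two Galois-field slice operations; an implementation fills in those fields. Below is a sketch of a naive scalar backend. The galMul here uses the common 0x11D reduction polynomial — whether that matches this library's field tables is an assumption, and the backend name is invented:

import Control.Monad (forM_)
import Data.Bits (shiftL, shiftR, xor, (.&.))
import Data.Word (Word8)
import qualified Data.Vector.Storable as SV
import qualified Data.Vector.Storable.Mutable as SMV

import Data.ReedSolomon.Backend (Backend (..))

-- Carry-less multiply in GF(2^8), reduced by x^8+x^4+x^3+x^2+1 (0x11D);
-- an assumption here, since the library's own field parameters are not shown.
galMul :: Word8 -> Word8 -> Word8
galMul = go 0
  where
    go acc _ 0 = acc
    go acc a b =
        let acc' = if b .&. 1    /= 0 then acc `xor` a               else acc
            a'   = if a .&. 0x80 /= 0 then (a `shiftL` 1) `xor` 0x1D else a `shiftL` 1
        in go acc' a' (b `shiftR` 1)

-- A simple element-by-element backend; real ones would use tables or SIMD.
referenceBackend :: Backend s
referenceBackend = Backend
    { backendName = "reference-sketch"
    , backendGalMulSlice = \c inp out ->
        forM_ [0 .. SV.length inp - 1] $ \i ->
            SMV.write out i (galMul c (inp SV.! i))
    , backendGalMulSliceXor = \c inp out ->
        forM_ [0 .. SV.length inp - 1] $ \i -> do
            old <- SMV.read out i
            SMV.write out i (old `xor` galMul c (inp SV.! i))
    }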
module DrvDiffMake where import Derivation import DrvDiff import Data.List makeDrvDiff :: Derivation -> Derivation -> DrvDiff makeDrvDiff l r = uncurry DrvDiff $ (DrvPart, DrvPart) `tuply` diffWith listDiff drvOutputs `tuply` diffWith listDiff drvInputs `tuply` diffWith listDiff drvSources `tuply` diffWith itemDiff drvSystem `tuply` diffWith itemDiff drvBuilder `tuply` diffWith itemDiff drvArgs `tuply` diffWith listDiff drvEnv where diffWith :: (p -> p -> d) -> (Derivation -> p) -> d diffWith d f = d (f l) (f r) tuply :: (t3 -> t2, t1 -> t) -> (t3, t1) -> (t2, t) tuply (part1, part2) (prop1, prop2) = (part1 prop1, part2 prop2) listDiff :: Eq t => [t] -> [t] -> ([t], [t]) listDiff l r = (l \\ r, r \\ l) itemDiff :: Eq t => t -> t -> (Maybe t, Maybe t) itemDiff l r | l == r = (Nothing, Nothing) itemDiff l r = (Just l, Just r)
DavidEGrayson/drvdiff
src/DrvDiffMake.hs
mit
881
0
13
197
403
219
184
23
1
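The tuply helper in DrvDiffMake.hs above is pairwise application: the first function is applied to the first component and the second to the second, i.e. uncurry bimap for pairs. A quick, self-contained illustration (applyPair is an invented name):

import Data.Bifunctor (bimap)

-- Pairwise application, the same shape as tuply above.
applyPair :: (a -> b, c -> d) -> (a, c) -> (b, d)
applyPair (f, g) (x, y) = (f x, g y)

-- Equivalent formulation via Data.Bifunctor:
applyPair' :: (a -> b, c -> d) -> (a, c) -> (b, d)
applyPair' = uncurry bimap

-- applyPair ((+ 1), show) (2 :: Int, True) == (3, "True")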