Dataset schema (column, dtype, observed range):

    code                 string   length 5 .. 1.03M
    repo_name            string   length 5 .. 90
    path                 string   length 4 .. 158
    license              string   15 classes
    size                 int64    5 .. 1.03M
    n_ast_errors         int64    0 .. 53.9k
    ast_max_depth        int64    2 .. 4.17k
    n_whitespaces        int64    0 .. 365k
    n_ast_nodes          int64    3 .. 317k
    n_ast_terminals      int64    1 .. 171k
    n_ast_nonterminals   int64    1 .. 146k
    loc                  int64    -1 .. 37.3k
    cycloplexity         int64    -1 .. 1.31k
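To make the schema concrete, here is a minimal Haskell sketch of what one row carries. The module, type, and field names are illustrative only and are not part of the dataset; the node-count identity and the reading of -1 as "not computed" are inferred from the rows listed below, not documented anywhere in this dump.

module SampleRow where

-- One row of the dataset, with fields named after the columns above
-- (names are hypothetical; the dataset itself only defines the columns).
data Sample = Sample
  { code             :: String
  , repoName         :: String
  , path             :: String
  , license          :: String
  , size             :: Int
  , nAstErrors       :: Int
  , astMaxDepth      :: Int
  , nWhitespaces     :: Int
  , nAstNodes        :: Int
  , nAstTerminals    :: Int
  , nAstNonterminals :: Int
  , loc              :: Int  -- -1 appears to mean "not computed"
  , cycloplexity     :: Int  -- -1 appears to mean "not computed"
  } deriving Show

-- Every row shown below satisfies this identity:
-- n_ast_nodes == n_ast_terminals + n_ast_nonterminals.
nodeCountsConsistent :: Sample -> Bool
nodeCountsConsistent s = nAstNodes s == nAstTerminals s + nAstNonterminals s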
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFunctor #-}
module Distribution.Solver.Types.Progress
    ( Progress(..)
    , foldProgress
    ) where

#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
         ( Applicative(..) )
#endif
import Control.Applicative
         ( Alternative(..) )

#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
         ( Monoid(..) )
#endif
import Prelude hiding (fail)

-- | A type to represent the unfolding of an expensive long running
-- calculation that may fail. We may get intermediate steps before the final
-- result which may be used to indicate progress and\/or logging messages.
--
data Progress step fail done = Step step (Progress step fail done)
                             | Fail fail
                             | Done done
  deriving (Functor)

-- | Consume a 'Progress' calculation. Much like 'foldr' for lists but with two
-- base cases, one for a final result and one for failure.
--
-- Eg to convert into a simple 'Either' result use:
--
-- > foldProgress (flip const) Left Right
--
foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a)
             -> Progress step fail done -> a
foldProgress step fail done = fold
  where fold (Step s p) = step s (fold p)
        fold (Fail f)   = fail f
        fold (Done r)   = done r

instance Monad (Progress step fail) where
  return  = pure
  p >>= f = foldProgress Step Fail f p

instance Applicative (Progress step fail) where
  pure a  = Done a
  p <*> x = foldProgress Step Fail (flip fmap x) p

instance Monoid fail => Alternative (Progress step fail) where
  empty   = Fail mempty
  p <|> q = foldProgress Step (const q) Done p
repo_name: headprogrammingczar/cabal
path: cabal-install/Distribution/Solver/Types/Progress.hs
license: bsd-3-clause
size: 1,646 | n_ast_errors: 0 | ast_max_depth: 9 | n_whitespaces: 419 | n_ast_nodes: 410 | n_ast_terminals: 225 | n_ast_nonterminals: 185 | loc: 31 | cycloplexity: 3
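Aside: the doc comment in the sample above already gives the idiom for collapsing a Progress into a plain Either. Spelled out as a standalone sketch (the name toEither is illustrative; it assumes Progress and foldProgress from the sample above are in scope):

-- Drop intermediate Step messages and keep only the final outcome.
toEither :: Progress step fail done -> Either fail done
toEither = foldProgress (flip const) Left Right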
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
module T4175 where

import GHC.Exts

type family A a b
type instance A Int Int = ()
type instance A (Maybe a) a = a
type instance A (B a) b = ()

data family B a
data instance B () = MkB

class C a where
    type D a b

instance C Int where
    type D Int b = String

instance C () where
    type D () a = Bool

type family E a where
    E () = Bool
    E Int = String

class Z a

class F (a :: Constraint)
instance F (Z a)

class G (a :: * -> *)
instance G B
repo_name: tjakway/ghcjvm
path: testsuite/tests/ghci/scripts/T4175.hs
license: bsd-3-clause
size: 524 | n_ast_errors: 0 | ast_max_depth: 8 | n_whitespaces: 145 | n_ast_nodes: 227 | n_ast_terminals: 125 | n_ast_nonterminals: 102 | loc: -1 | cycloplexity: -1
module FileIO where

import System.IO
import Foreign
import Foreign.C

foreign import ccall safe "fileio.h c_file_getresult"
    c_file_getresult :: CInt -> IO CInt
repo_name: holzensp/ghc
path: testsuite/tests/concurrent/prog002/FileIO.hs
license: bsd-3-clause
size: 165 | n_ast_errors: 0 | ast_max_depth: 7 | n_whitespaces: 27 | n_ast_nodes: 38 | n_ast_terminals: 22 | n_ast_nonterminals: 16 | loc: 6 | cycloplexity: 0
import qualified Data.List as List
import Data.Set (Set)
import qualified Data.Set as Set

main = do
  presents <- read <$> getContents
  let solution = head $ dropWhile ((< presents) . (* 11) . sum . factors) [1 ..]
  print solution

factors :: Int -> Set Int
factors n =
  Set.unions
    $ map (\r -> let r' = n `div` r
                 in if r' <= 50 then Set.fromList [r, r'] else Set.singleton r')
    $ filter (\r -> n `mod` r == 0)
    $ takeWhile (\r -> r * r <= n)
    $ [1 .. 50]
repo_name: SamirTalwar/advent-of-code
path: 2015/AOC_20_2.hs
license: mit
size: 485 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 132 | n_ast_nodes: 234 | n_ast_terminals: 128 | n_ast_nonterminals: 106 | loc: 14 | cycloplexity: 2
-- Problem 4
-- (*) Find the number of elements of a list.

myLength :: [a] -> Int
myLength [] = 0
myLength (_:xs) = 1 + myLength xs

myLength2 :: [a] -> Int
myLength2 x = myLength2' x 0
  where myLength2' [] acc = acc
        myLength2' (_:xs) acc = myLength2' xs (acc+1)

myLength3 :: [a] -> Int
myLength3 = sum . map (\_ -> 1)
repo_name: usami-k/H-99-Ninety-Nine-Haskell-Problems
path: 01-10/04.hs
license: mit
size: 326 | n_ast_errors: 0 | ast_max_depth: 9 | n_whitespaces: 75 | n_ast_nodes: 147 | n_ast_terminals: 78 | n_ast_nonterminals: 69 | loc: 9 | cycloplexity: 2
chain :: (Integral a) => a -> [a]
chain 1 = [1]
chain n
    | even n = n:chain(n `div` 2)
    | odd n  = n:chain(n*3 + 1)

numLongChains :: Int
numLongChains = length (filter isLong (map chain [1..100]))
    where isLong xs = length xs > 15
repo_name: ariedov/AwesomeHaskellApp
path: collatz.hs
license: mit
size: 232 | n_ast_errors: 2 | ast_max_depth: 10 | n_whitespaces: 52 | n_ast_nodes: 149 | n_ast_terminals: 75 | n_ast_nonterminals: 74 | loc: 8 | cycloplexity: 1
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE OverloadedStrings #-} module Main where import Data.Attoparsec.Text import Data.Bifunctor (bimap) import Data.Char (ord) import Data.Foldable (for_, traverse_) import Data.Map.Strict (Map) import qualified Data.Map.Strict as M import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.IO as T import Data.Vector.Unboxed (Vector) import qualified Data.Vector.Unboxed as V import qualified Data.Vector.Unboxed.Mutable as M type Polymer = [Char] -- aka String type Rules = Map (Char, Char) Char type Counter = Vector Word readPolymerization :: Text -> (Polymer, Rules) readPolymerization = either (error "Bad parse") (bimap T.unpack M.fromList) . parseOnly ((,) <$> template <*> (rules `sepBy1` "\n")) where template = takeTill isEndOfLine <* "\n\n" rules = (,) <$> ((,) <$> letter <*> letter) <*> (" -> " *> letter) step :: Rules -> Polymer -> Polymer step !rules !polymer = (p :) . concatMap lookupAndSplice $ zip polymer ps where !p = head polymer !ps = tail polymer lookupAndSplice (!a, !b) = case rules M.!? (a, b) of Nothing -> [] Just !x -> [x, b] toCounter :: Polymer -> Counter toCounter polymer = V.create $ do !v <- M.replicate 26 0 for_ polymer $ \p -> do let !i = ord p - 65 -- ord 'A' M.unsafeModify v succ i pure v iterateN :: Word -> (a -> a) -> a -> a iterateN 0 _ !x = x iterateN !n !f !x = let !x' = f x !n' = n - 1 in iterateN n' f x' range :: Counter -> Word range = (-) <$> V.maximum <*> (V.minimum . V.filter (> 0)) solve :: Word -> (Rules, Polymer) -> Word solve !n (!rules, !polymer) = range . toCounter $ iterateN n (step rules) polymer part1 :: (Rules, Polymer) -> Word part1 = solve 10 part2 :: (Rules, Polymer) -> Word part2 = solve 40 main :: IO () main = do (!polymer, !rules) <- readPolymerization <$> T.readFile "test.txt" traverse_ (print . ($ (rules, polymer))) [part1, part2]
repo_name: genos/online_problems
path: advent_of_code_2021/day14/Main.hs
license: mit
size: 2,201 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 648 | n_ast_nodes: 800 | n_ast_terminals: 431 | n_ast_nonterminals: 369 | loc: 54 | cycloplexity: 2
module JSONSchema.Validator.Draft4.Array where import Import import qualified Data.List.NonEmpty as NE import qualified Data.Vector as V import qualified JSONPointer as JP import JSONSchema.Validator.Utils (allUniqueValues) -------------------------------------------------- -- * maxItems -------------------------------------------------- newtype MaxItems = MaxItems { _unMaxItems :: Int } deriving (Eq, Show) instance FromJSON MaxItems where parseJSON = withObject "MaxItems" $ \o -> MaxItems <$> o .: "maxItems" data MaxItemsInvalid = MaxItemsInvalid MaxItems (Vector Value) deriving (Eq, Show) -- | The spec requires @"maxItems"@ to be non-negative. maxItemsVal :: MaxItems -> Vector Value -> Maybe MaxItemsInvalid maxItemsVal a@(MaxItems n) xs | n < 0 = Nothing | V.length xs > n = Just (MaxItemsInvalid a xs) | otherwise = Nothing -------------------------------------------------- -- * minItems -------------------------------------------------- newtype MinItems = MinItems { _unMinItems :: Int } deriving (Eq, Show) instance FromJSON MinItems where parseJSON = withObject "MinItems" $ \o -> MinItems <$> o .: "minItems" data MinItemsInvalid = MinItemsInvalid MinItems (Vector Value) deriving (Eq, Show) -- | The spec requires @"minItems"@ to be non-negative. minItemsVal :: MinItems -> Vector Value -> Maybe MinItemsInvalid minItemsVal a@(MinItems n) xs | n < 0 = Nothing | V.length xs < n = Just (MinItemsInvalid a xs) | otherwise = Nothing -------------------------------------------------- -- * uniqueItems -------------------------------------------------- newtype UniqueItems = UniqueItems { _unUniqueItems :: Bool } deriving (Eq, Show) instance FromJSON UniqueItems where parseJSON = withObject "UniqueItems" $ \o -> UniqueItems <$> o .: "uniqueItems" newtype UniqueItemsInvalid = UniqueItemsInvalid (Vector Value) deriving (Eq, Show) uniqueItemsVal :: UniqueItems -> Vector Value -> Maybe UniqueItemsInvalid uniqueItemsVal (UniqueItems True) xs | allUniqueValues xs = Nothing | otherwise = Just (UniqueItemsInvalid xs) uniqueItemsVal (UniqueItems False) _ = Nothing -------------------------------------------------- -- * items -------------------------------------------------- data ItemsRelated schema = ItemsRelated { _irItems :: Maybe (Items schema) , _irAdditional :: Maybe (AdditionalItems schema) } deriving (Eq, Show) instance FromJSON schema => FromJSON (ItemsRelated schema) where parseJSON = withObject "ItemsRelated" $ \o -> ItemsRelated <$> o .:! "items" <*> o .:! "additionalItems" emptyItems :: ItemsRelated schema emptyItems = ItemsRelated { _irItems = Nothing , _irAdditional = Nothing } data Items schema = ItemsObject schema | ItemsArray [schema] deriving (Eq, Show) instance FromJSON schema => FromJSON (Items schema) where parseJSON v = fmap ItemsObject (parseJSON v) <|> fmap ItemsArray (parseJSON v) instance ToJSON schema => ToJSON (Items schema) where toJSON (ItemsObject hm) = toJSON hm toJSON (ItemsArray schemas) = toJSON schemas instance Arbitrary schema => Arbitrary (Items schema) where arbitrary = oneof [ ItemsObject <$> arbitrary , ItemsArray <$> arbitrary ] data ItemsRelatedInvalid err = IRInvalidItems (ItemsInvalid err) | IRInvalidAdditional (AdditionalItemsInvalid err) deriving (Eq, Show) data ItemsInvalid err = ItemsObjectInvalid (NonEmpty (JP.Index, NonEmpty err)) | ItemsArrayInvalid (NonEmpty (JP.Index, NonEmpty err)) deriving (Eq, Show) -- | @"additionalItems"@ only matters if @"items"@ exists -- and is a JSON Array. itemsRelatedVal :: forall err schema. 
(schema -> Value -> [err]) -> ItemsRelated schema -> Vector Value -> [ItemsRelatedInvalid err] -- NOTE: 'Data.These' would help here. itemsRelatedVal f a xs = let (itemsFailure, remaining) = case _irItems a of Nothing -> (Nothing, mempty) Just b -> itemsVal f b xs additionalFailure = (\b -> additionalItemsVal f b remaining) =<< _irAdditional a in catMaybes [ IRInvalidItems <$> itemsFailure , IRInvalidAdditional <$> additionalFailure ] -- | Internal. -- -- This is because 'itemsRelated' handles @"items"@ validation. itemsVal :: forall err schema. (schema -> Value -> [err]) -> Items schema -> Vector Value -> (Maybe (ItemsInvalid err), [(JP.Index, Value)]) -- ^ The second item in the tuple is the elements of the original -- JSON Array still remaining to be checked by @"additionalItems"@. itemsVal f a xs = case a of ItemsObject subSchema -> case NE.nonEmpty (mapMaybe (validateElem subSchema) indexed) of Nothing -> (Nothing, mempty) Just errs -> (Just (ItemsObjectInvalid errs), mempty) ItemsArray subSchemas -> let remaining = drop (length subSchemas) indexed res = catMaybes (zipWith validateElem subSchemas indexed) in case NE.nonEmpty res of Nothing -> (Nothing, remaining) Just errs -> (Just (ItemsArrayInvalid errs), remaining) where indexed :: [(JP.Index, Value)] indexed = zip (JP.Index <$> [0..]) (V.toList xs) validateElem :: schema -> (JP.Index, Value) -> Maybe (JP.Index, NonEmpty err) validateElem schema (index,x) = (\v -> (index, v)) <$> NE.nonEmpty (f schema x) -------------------------------------------------- -- * additionalItems -------------------------------------------------- data AdditionalItems schema = AdditionalBool Bool | AdditionalObject schema deriving (Eq, Show) instance FromJSON schema => FromJSON (AdditionalItems schema) where parseJSON v = fmap AdditionalBool (parseJSON v) <|> fmap AdditionalObject (parseJSON v) instance ToJSON schema => ToJSON (AdditionalItems schema) where toJSON (AdditionalBool b) = toJSON b toJSON (AdditionalObject hm) = toJSON hm instance Arbitrary schema => Arbitrary (AdditionalItems schema) where arbitrary = oneof [ AdditionalBool <$> arbitrary , AdditionalObject <$> arbitrary ] data AdditionalItemsInvalid err = AdditionalItemsBoolInvalid (NonEmpty (JP.Index, Value)) | AdditionalItemsObjectInvalid (NonEmpty (JP.Index, NonEmpty err)) deriving (Eq, Show) -- | Internal. -- -- This is because 'itemsRelated' handles @"additionalItems"@ validation. additionalItemsVal :: forall err schema. (schema -> Value -> [err]) -> AdditionalItems schema -> [(JP.Index, Value)] -- ^ The elements remaining to validate after the ones covered by -- @"items"@ have been removed. -> Maybe (AdditionalItemsInvalid err) additionalItemsVal _ (AdditionalBool True) _ = Nothing additionalItemsVal _ (AdditionalBool False) xs = AdditionalItemsBoolInvalid <$> NE.nonEmpty xs additionalItemsVal f (AdditionalObject subSchema) xs = let res = mapMaybe (\(index,x) -> (\v -> (index, v)) <$> NE.nonEmpty (f subSchema x)) xs in AdditionalItemsObjectInvalid <$> NE.nonEmpty res
repo_name: seagreen/hjsonschema
path: src/JSONSchema/Validator/Draft4/Array.hs
license: mit
size: 7,556 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 1,875 | n_ast_nodes: 1,953 | n_ast_terminals: 1,036 | n_ast_nonterminals: 917 | loc: -1 | cycloplexity: -1
{-# LANGUAGE OverloadedStrings, GeneralizedNewtypeDeriving, TypeFamilies, FlexibleContexts #-} module Haste.JSArray.Typed where import Control.DeepSeq (NFData, force) import Control.Monad (liftM2) import Haste.Foreign import qualified Haste.JSArray as JA import Haste.Prim import System.IO.Unsafe (unsafePerformIO) newtype ArrayBuffer = ArrayBuffer JSAny deriving (Pack, Unpack) newArrayBuffer::Int->IO ArrayBuffer newArrayBuffer = ffi "(function(byteLength) {return ArrayBuffer(byteLength);})" arrayBufferLength::ArrayBuffer->Int arrayBufferLength = unsafePerformIO . ffi "(function(buf) {return buf.byteLength;})" class (Pack a, Unpack a)=>TypedArray a where type EltType a newSizedArray::Int->IO a copyTypedArray::TypedArray b=>b->a fromList::Unpack (EltType a)=>[EltType a]->a viewArrayBufferSized::Int->Int->ArrayBuffer->a viewArrayBuffer::ArrayBuffer->a fromJSArray::JA.JSArray (EltType a)->a byteLength::a->Int byteLength = unsafePerformIO . ffi "(function(arr) {return arr.byteLength;})" bytesPerElement::a->Int bytesPerElement = unsafePerformIO . ffi "(function(arr) {return arr.BYTES_PER_ELEMENT;})" arrayLength::a->Int arrayLength arr = quot (byteLength arr) (bytesPerElement arr) getIndex::Pack (EltType a)=>a->Int->IO (EltType a) getIndex = ffi "(function(arr, idx) {return arr[idx];})" setIndex::Unpack (EltType a)=>a->Int->EltType a->IO () setIndex = ffi "(function(arr, idx, val) {arr[idx] = val;})" toList::(Pack (EltType a), NFData (EltType a))=>a->[EltType a] toList arr = force $ fmap (unsafePerformIO . getIndex arr) [0..arrayLength arr - 1] toJSArray::a->JA.JSArray (EltType a) toJSArray = unsafePerformIO . ffi "(function(arr) {return Array.prototype.slice.call(arr);})" newtype Int8Array = Int8Array JSAny deriving (Pack, Unpack) instance TypedArray Int8Array where type (EltType Int8Array) = Int newSizedArray = ffi "(function(size) {return new Int8Array(size);})" copyTypedArray = unsafePerformIO . ffi "(function(other) {return new Int8Array(other);})" fromList = unsafePerformIO . ffi "(function(other) {return new Int8Array(other);})" viewArrayBufferSized start len buf = unsafePerformIO $ viewSized start len buf where viewSized::Int->Int->ArrayBuffer->IO Int8Array viewSized = ffi "(function(byteOffset, length, buffer) {return new Int8Array(buffer, byteOffset, length);})" viewArrayBuffer = unsafePerformIO . ffi "(function(buffer) {return new Int8Array(buffer);})" fromJSArray = unsafePerformIO . ffi "(function(buffer) {return new Int8Array(buffer);})" newtype Int16Array = Int16Array JSAny deriving (Pack, Unpack) instance TypedArray Int16Array where type (EltType Int16Array) = Int newSizedArray = ffi "(function(size) {return new Int16Array(size);})" copyTypedArray = unsafePerformIO . ffi "(function(other) {return new Int16Array(other);})" fromList = unsafePerformIO . ffi "(function(other) {return new Int16Array(other);})" viewArrayBufferSized start len buf = unsafePerformIO $ viewSized start len buf where viewSized::Int->Int->ArrayBuffer->IO Int16Array viewSized = ffi "(function(byteOffset, length, buffer) {return new Int16Array(buffer, byteOffset, length);})" viewArrayBuffer = unsafePerformIO . ffi "(function(buffer) {return new Int16Array(buffer);})" fromJSArray = unsafePerformIO . ffi "(function(buffer) {return new Int16Array(buffer);})" newtype Int32Array = Int32Array JSAny deriving (Pack, Unpack) instance TypedArray Int32Array where type (EltType Int32Array) = Int newSizedArray = ffi "(function(size) {return new Int32Array(size);})" copyTypedArray = unsafePerformIO . 
ffi "(function(other) {return new Int32Array(other);})" fromList = unsafePerformIO . ffi "(function(other) {return new Int32Array(other);})" viewArrayBufferSized start len buf = unsafePerformIO $ viewSized start len buf where viewSized::Int->Int->ArrayBuffer->IO Int32Array viewSized = ffi "(function(byteOffset, length, buffer) {return new Int32Array(buffer, byteOffset, length);})" viewArrayBuffer = unsafePerformIO . ffi "(function(buffer) {return new Int32Array(buffer);})" fromJSArray = unsafePerformIO . ffi "(function(buffer) {return new Int32Array(buffer);})" newtype Int64Array = Int64Array JSAny deriving (Pack, Unpack) instance TypedArray Int64Array where type (EltType Int64Array) = Int newSizedArray = ffi "(function(size) {return new Int64Array(size);})" copyTypedArray = unsafePerformIO . ffi "(function(other) {return new Int64Array(other);})" fromList = unsafePerformIO . ffi "(function(other) {return new Int64Array(other);})" viewArrayBufferSized start len buf = unsafePerformIO $ viewSized start len buf where viewSized::Int->Int->ArrayBuffer->IO Int64Array viewSized = ffi "(function(byteOffset, length, buffer) {return new Int64Array(buffer, byteOffset, length);})" viewArrayBuffer = unsafePerformIO . ffi "(function(buffer) {return new Int64Array(buffer);})" fromJSArray = unsafePerformIO . ffi "(function(buffer) {return new Int64Array(buffer);})" newtype Uint8Array = Uint8Array JSAny deriving (Pack, Unpack) instance TypedArray Uint8Array where type (EltType Uint8Array) = Int newSizedArray = ffi "(function(size) {return new Uint8Array(size);})" copyTypedArray = unsafePerformIO . ffi "(function(other) {return new Uint8Array(other);})" fromList = unsafePerformIO . ffi "(function(other) {return new Uint8Array(other);})" viewArrayBufferSized start len buf = unsafePerformIO $ viewSized start len buf where viewSized::Int->Int->ArrayBuffer->IO Uint8Array viewSized = ffi "(function(byteOffset, length, buffer) {return new Uint8Array(buffer, byteOffset, length);})" viewArrayBuffer = unsafePerformIO . ffi "(function(buffer) {return new Uint8Array(buffer);})" fromJSArray = unsafePerformIO . ffi "(function(buffer) {return new Uint8Array(buffer);})" newtype Uint16Array = Uint16Array JSAny deriving (Pack, Unpack) instance TypedArray Uint16Array where type (EltType Uint16Array) = Int newSizedArray = ffi "(function(size) {return new Uint16Array(size);})" copyTypedArray = unsafePerformIO . ffi "(function(other) {return new Uint16Array(other);})" fromList = unsafePerformIO . ffi "(function(other) {return new Uint16Array(other);})" viewArrayBufferSized start len buf = unsafePerformIO $ viewSized start len buf where viewSized::Int->Int->ArrayBuffer->IO Uint16Array viewSized = ffi "(function(byteOffset, length, buffer) {return new Uint16Array(buffer, byteOffset, length);})" viewArrayBuffer = unsafePerformIO . ffi "(function(buffer) {return new Uint16Array(buffer);})" fromJSArray = unsafePerformIO . ffi "(function(buffer) {return new Uint16Array(buffer);})" newtype Uint32Array = Uint32Array JSAny deriving (Pack, Unpack) instance TypedArray Uint32Array where type (EltType Uint32Array) = Int newSizedArray = ffi "(function(size) {return new Uint32Array(size);})" copyTypedArray = unsafePerformIO . ffi "(function(other) {return new Uint32Array(other);})" fromList = unsafePerformIO . 
ffi "(function(other) {return new Uint32Array(other);})" viewArrayBufferSized start len buf = unsafePerformIO $ viewSized start len buf where viewSized::Int->Int->ArrayBuffer->IO Uint32Array viewSized = ffi "(function(byteOffset, length, buffer) {return new Uint32Array(buffer, byteOffset, length);})" viewArrayBuffer = unsafePerformIO . ffi "(function(buffer) {return new Uint32Array(buffer);})" fromJSArray = unsafePerformIO . ffi "(function(buffer) {return new Uint32Array(buffer);})" newtype Float32Array = Float32Array JSAny deriving (Pack, Unpack) instance TypedArray Float32Array where type (EltType Float32Array) = Double newSizedArray = ffi "(function(size) {return new Float32Array(size);})" copyTypedArray = unsafePerformIO . ffi "(function(other) {return new Float32Array(other);})" fromList = unsafePerformIO . ffi "(function(other) {return new Float32Array(other);})" viewArrayBufferSized start len buf = unsafePerformIO $ viewSized start len buf where viewSized::Int->Int->ArrayBuffer->IO Float32Array viewSized = ffi "(function(byteOffset, length, buffer) {return new Float32Array(buffer, byteOffset, length);})" viewArrayBuffer = unsafePerformIO . ffi "(function(buffer) {return new Float32Array(buffer);})" fromJSArray = unsafePerformIO . ffi "(function(buffer) {return new Float32Array(buffer);})"
repo_name: klarh/haste-jsarray
path: src/Haste/JSArray/Typed.hs
license: mit
size: 8,483 | n_ast_errors: 0 | ast_max_depth: 12 | n_whitespaces: 1,163 | n_ast_nodes: 1,720 | n_ast_terminals: 885 | n_ast_nonterminals: 835 | loc: 124 | cycloplexity: 1
module GHCJS.DOM.HTMLOutputElement ( ) where
repo_name: manyoo/ghcjs-dom
path: ghcjs-dom-webkit/src/GHCJS/DOM/HTMLOutputElement.hs
license: mit
size: 47 | n_ast_errors: 0 | ast_max_depth: 3 | n_whitespaces: 7 | n_ast_nodes: 10 | n_ast_terminals: 7 | n_ast_nonterminals: 3 | loc: 1 | cycloplexity: 0
module PostgREST.ApiRequest where import qualified Data.Aeson as JSON import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as BL import qualified Data.Csv as CSV import Data.List (find, sortBy) import qualified Data.HashMap.Strict as M import qualified Data.Set as S import Data.Maybe (fromMaybe, isJust, isNothing, listToMaybe, fromJust) import Control.Arrow ((***)) import Control.Monad (join) import Data.Monoid ((<>)) import Data.Ord (comparing) import Data.String.Conversions (cs) import qualified Data.Text as T import qualified Data.Vector as V import Network.HTTP.Base (urlEncodeVars) import Network.HTTP.Types.Header (hAuthorization) import Network.HTTP.Types.URI (parseSimpleQuery) import Network.Wai (Request (..)) import Network.Wai.Parse (parseHttpAccept) import PostgREST.RangeQuery (NonnegRange, rangeRequested) import PostgREST.Types (QualifiedIdentifier (..), Schema, Payload(..), UniformObjects(..)) import Data.Ranged.Ranges (singletonRange) type RequestBody = BL.ByteString -- | Types of things a user wants to do to tables/views/procs data Action = ActionCreate | ActionRead | ActionUpdate | ActionDelete | ActionInfo | ActionInvoke | ActionInappropriate deriving Eq -- | The target db object of a user action data Target = TargetIdent QualifiedIdentifier | TargetProc QualifiedIdentifier | TargetRoot | TargetUnknown [T.Text] -- | How to return the inserted data data PreferRepresentation = Full | HeadersOnly | None deriving Eq -- | Enumeration of currently supported content types for -- route responses and upload payloads data ContentType = ApplicationJSON | TextCSV deriving Eq instance Show ContentType where show ApplicationJSON = "application/json; charset=utf-8" show TextCSV = "text/csv; charset=utf-8" {-| Describes what the user wants to do. This data type is a translation of the raw elements of an HTTP request into domain specific language. There is no guarantee that the intent is sensible, it is up to a later stage of processing to determine if it is an action we are able to perform. -} data ApiRequest = ApiRequest { -- | Similar but not identical to HTTP verb, e.g. Create/Invoke both POST iAction :: Action -- | Requested range of rows within response , iRange :: NonnegRange -- | The target, be it calling a proc or accessing a table , iTarget :: Target -- | The content type the client most desires (or JSON if undecided) , iAccepts :: Either BS.ByteString ContentType -- | Data sent by client and used for mutation actions , iPayload :: Maybe Payload -- | If client wants created items echoed back , iPreferRepresentation :: PreferRepresentation -- | If client wants first row as raw object , iPreferSingular :: Bool -- | Whether the client wants a result count (slower) , iPreferCount :: Bool -- | Filters on the result ("id", "eq.10") , iFilters :: [(String, String)] -- | &select parameter used to shape the response , iSelect :: String -- | &order parameters for each level , iOrder :: [(String,String)] -- | Alphabetized (canonical) request query string for response URLs , iCanonicalQS :: String -- | JSON Web Token , iJWT :: T.Text } -- | Examines HTTP request and translates it into user intent. 
userApiRequest :: Schema -> Request -> RequestBody -> ApiRequest userApiRequest schema req reqBody = let action = if isTargetingProc then if method == "POST" then ActionInvoke else ActionInappropriate else case method of "GET" -> ActionRead "POST" -> ActionCreate "PATCH" -> ActionUpdate "DELETE" -> ActionDelete "OPTIONS" -> ActionInfo _ -> ActionInappropriate target = case path of [] -> TargetRoot [table] -> TargetIdent $ QualifiedIdentifier schema table ["rpc", proc] -> TargetProc $ QualifiedIdentifier schema proc other -> TargetUnknown other payload = case pickContentType (lookupHeader "content-type") of Right ApplicationJSON -> either (PayloadParseError . cs) (\val -> case ensureUniform (pluralize val) of Nothing -> PayloadParseError "All object keys must match" Just json -> PayloadJSON json) (JSON.eitherDecode reqBody) Right TextCSV -> either (PayloadParseError . cs) (\val -> case ensureUniform (csvToJson val) of Nothing -> PayloadParseError "All lines must have same number of fields" Just json -> PayloadJSON json) (CSV.decodeByName reqBody) -- This is a Left value because form-urlencoded is not a content -- type which we ever use for responses, only something we handle -- just this once for requests Left "application/x-www-form-urlencoded" -> PayloadJSON . UniformObjects . V.singleton . M.fromList . map (cs *** JSON.String . cs) . parseSimpleQuery $ cs reqBody Left accept -> PayloadParseError $ "Content-type not acceptable: " <> accept relevantPayload = case action of ActionCreate -> Just payload ActionUpdate -> Just payload ActionInvoke -> Just payload _ -> Nothing in ApiRequest { iAction = action , iRange = if singular then singletonRange 0 else rangeRequested hdrs , iTarget = target , iAccepts = pickContentType $ lookupHeader "accept" , iPayload = relevantPayload , iPreferRepresentation = representation , iPreferSingular = singular , iPreferCount = not $ singular || hasPrefer "count=none" , iFilters = [ (cs k, fromJust v) | (k,v) <- qParams, isJust v, k /= "select", not (endingIn "order" k) ] , iSelect = if method == "DELETE" then "*" else fromMaybe "*" $ fromMaybe (Just "*") $ lookup "select" qParams , iOrder = [(cs k, fromJust v) | (k,v) <- qParams, isJust v, endingIn "order" k ] , iCanonicalQS = urlEncodeVars . sortBy (comparing fst) . map (join (***) cs) . parseSimpleQuery $ rawQueryString req , iJWT = tokenStr } where path = pathInfo req method = requestMethod req isTargetingProc = fromMaybe False $ (== "rpc") <$> listToMaybe path hdrs = requestHeaders req qParams = [(cs k, cs <$> v)|(k,v) <- queryString req] lookupHeader = flip lookup hdrs hasPrefer :: T.Text -> Bool hasPrefer val = any (\(h,v) -> h == "Prefer" && val `elem` split v) hdrs where split :: BS.ByteString -> [T.Text] split = map T.strip . T.split (==';') . cs singular = hasPrefer "plurality=singular" representation | hasPrefer "return=representation" = Full | hasPrefer "return=minimal" = None | otherwise = HeadersOnly auth = fromMaybe "" $ lookupHeader hAuthorization tokenStr = case T.split (== ' ') (cs auth) of ("Bearer" : t : _) -> t _ -> "" endingIn:: T.Text -> T.Text -> Bool endingIn word key = word == lastWord where lastWord = last $ T.split (=='.') key -- PRIVATE --------------------------------------------------------------- {-| Picks a preferred content type from an Accept header (or from Content-Type as a degenerate case). 
For example text/csv -> TextCSV */* -> ApplicationJSON text/csv, application/json -> TextCSV application/json, text/csv -> ApplicationJSON -} pickContentType :: Maybe BS.ByteString -> Either BS.ByteString ContentType pickContentType accept | isNothing accept || has ctAll || has ctJson = Right ApplicationJSON | has ctCsv = Right TextCSV | otherwise = Left accept' where ctAll = "*/*" ctCsv = "text/csv" ctJson = "application/json" Just accept' = accept findInAccept = flip find $ parseHttpAccept accept' has = isJust . findInAccept . BS.isPrefixOf type CsvData = V.Vector (M.HashMap T.Text BL.ByteString) {-| Converts CSV like a,b 1,hi 2,bye into a JSON array like [ {"a": "1", "b": "hi"}, {"a": 2, "b": "bye"} ] The reason for its odd signature is so that it can compose directly with CSV.decodeByName -} csvToJson :: (CSV.Header, CsvData) -> JSON.Array csvToJson (_, vals) = V.map rowToJsonObj vals where rowToJsonObj = JSON.Object . M.map (\str -> if str == "NULL" then JSON.Null else JSON.String $ cs str ) -- | Convert {foo} to [{foo}], leave arrays unchanged -- and truncate everything else to an empty array. pluralize :: JSON.Value -> JSON.Array pluralize obj@(JSON.Object _) = V.singleton obj pluralize (JSON.Array arr) = arr pluralize _ = V.empty -- | Test that Array contains only Objects having the same keys -- and if so mark it as UniformObjects ensureUniform :: JSON.Array -> Maybe UniformObjects ensureUniform arr = let objs :: V.Vector JSON.Object objs = foldr -- filter non-objects, map to raw objects (\val result -> case val of JSON.Object o -> V.cons o result _ -> result) V.empty arr keysPerObj = V.map (S.fromList . M.keys) objs canonicalKeys = fromMaybe S.empty $ keysPerObj V.!? 0 areKeysUniform = all (==canonicalKeys) keysPerObj in if (V.length objs == V.length arr) && areKeysUniform then Just (UniformObjects objs) else Nothing
repo_name: league/postgrest
path: src/PostgREST/ApiRequest.hs
license: mit
size: 10,234 | n_ast_errors: 110 | ast_max_depth: 17 | n_whitespaces: 3,107 | n_ast_nodes: 2,015 | n_ast_terminals: 1,132 | n_ast_nonterminals: 883 | loc: -1 | cycloplexity: -1
{-# LANGUAGE TupleSections, Rank2Types #-} module TestInference where import Data.AEq import Control.Monad.Trans.Identity import Control.Monad.Bayes.Class import qualified Control.Monad.Bayes.Enumerator as Dist import Control.Monad.Bayes.Sampler import Control.Monad.Bayes.Weighted import Control.Monad.Bayes.Population import Control.Monad.Bayes.Trace import Control.Monad.Bayes.Inference import Sprinkler import qualified StrictlySmallerSupport sprinkler :: MonadBayes m => m Bool sprinkler = Sprinkler.soft enumerate :: Ord a => Dist.Dist Double a -> [(a,Double)] enumerate = Dist.enumerate check_terminate_smc = sampleIOfixed (smc' 2 5 sprinkler) check_preserve_smc = (enumerate . collapse . smc 2 2) sprinkler ~== enumerate sprinkler check_preserve_ismh = (enumerate . collapse . ismh 1 2) sprinkler ~== enumerate sprinkler check_preserve_smh = (enumerate . collapse . smh 2 2) sprinkler ~== enumerate sprinkler check_preserve_smcrm = (enumerate . collapse . smcrm 1 2 1) sprinkler ~== enumerate sprinkler sprinkler_posterior :: MonadBayes m => Weighted m Bool sprinkler_posterior = duplicateWeight sprinkler -- mhPriorrans :: MonadDist m => Weighted m Bool -> m Bool -- mhPriorrans d = fmap (!! 1) $ mh 2 d (MHKernel $ const $ fmap (,1) sprinkler) -- check_prior_trans = enumerate (fmap (!! 2) (mhPrior 2 sprinkler_posterior)) ~== -- enumerate sprinkler -- pimhTrans :: MonadDist m => Weighted m Bool -> m Bool -- pimhTrans d = fmap (!! 1) $ mh 2 d kernel where -- kernel = MHKernel $ const $ fmap (,1) $ collapse $ smc 2 2 sprinkler -- check_pimh_trans = enumerate (fmap (!! 2) (pimh 2 2 2 sprinkler_posterior)) ~== -- enumerate sprinkler check_trace_mh m m' = enumerate (dropTrace (mhStep (mhStep m))) ~== enumerate m' trace_mh_length n = fmap length (sampleIOfixed (traceMH n sprinkler)) check_trace_trans = check_trace_mh sprinkler sprinkler check_trace_support = check_trace_mh StrictlySmallerSupport.model StrictlySmallerSupport.model -- | Count the number of particles produced by SMC check_particles :: Int -> Int -> IO Int check_particles observations particles = sampleIOfixed (fmap length (runPopulation $ smc observations particles Sprinkler.soft))
repo_name: ocramz/monad-bayes
path: test/TestInference.hs
license: mit
size: 2,347 | n_ast_errors: 0 | ast_max_depth: 12 | n_whitespaces: 472 | n_ast_nodes: 458 | n_ast_terminals: 248 | n_ast_nonterminals: 210 | loc: 38 | cycloplexity: 1
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Estimate where

newtype Task = Task String deriving Show
newtype Time = Time Float deriving (Eq, Show, Num, Fractional, Ord)

data Estimate = Estimate {description :: Task, best :: Time, normal :: Time, worst :: Time}
  deriving Show

data Eta = Eta {task :: Task, amount :: Time, delta :: Time}

instance Show Eta where
  show (Eta t e d) = "ETA of " ++ show t ++ ": " ++ show e ++ " +/- " ++ show d

estimate :: Estimate -> Eta
estimate estimation = (Eta (description estimation) e d)
  where e = ((best estimation) + 4 * (normal estimation) + (worst estimation)) / 6
        d = ((worst estimation) - (best estimation)) / 6
repo_name: manuelp/estimate
path: Estimate.hs
license: mit
size: 832 | n_ast_errors: 0 | ast_max_depth: 13 | n_whitespaces: 273 | n_ast_nodes: 271 | n_ast_terminals: 150 | n_ast_nonterminals: 121 | loc: 19 | cycloplexity: 1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} module WUnderground.Client ( -- * Weather conditions API withWU , coordinateConditions ) where ------------------------------------------------------------------------------- import Control.Applicative import Control.Exception as E import Control.Lens import Control.Monad.Catch import Control.Monad.Reader import Data.Aeson import Data.ByteString (ByteString) import qualified Data.ByteString.Char8 as BS import Data.Default.Class import Data.Monoid import Data.Text.Strict.Lens (packed, utf8) import Network.HTTP.Client import Network.HTTP.Types (Method) import URI.ByteString -- This silences redundant import warnings in 7.10 import Prelude ------------------------------------------------------------------------------- import WUnderground.Types ------------------------------------------------------------------------------- -- | Convenience function for when you don't have an externally -- configured manager and want to create one on the spot. withWU :: ManagerSettings -> APIKey -> WU IO a -> IO a withWU ms k f = withManager ms $ \mgr -> do let conf = defaultWUConfig mgr k runWU conf f ------------------------------------------------------------------------------- coordinateConditions :: (MonadWU m) => Lat -> Lng -> m ObservationResponse coordinateConditions lt lg = makeRequest "GET" path where path = "/geolookup/conditions/q/" <> showBS (lt ^. lat) <> "," <> showBS (lg ^. lng) <> ".json" ------------------------------------------------------------------------------- makeRequest :: ( FromJSON a , MonadWU m) => Method -> ByteString -> m a makeRequest meth p = do cfg <- getWUConfig --TODO: consolidate? let k = cfg ^. wuAPIKey . apiKeyText . re utf8 let pathAppend = k <> p let bu = cfg ^. wuBaseURI let finalURI = bu & uriPathL <>~ ("/" <> pathAppend) req <- setURI meth def finalURI httpRequest <- _wuHttpRequest <$> getWUConfig res <- liftIO $ E.try $ httpRequest req (cfg ^. wuManager) body <- either handleHttpException return res let parsed = eitherDecode body either (throwM . WUParseError . view packed) return parsed where handleHttpException = throwM . WUHttpException ------------------------------------------------------------------------------- setURI :: MonadThrow m => Method -> Request -> URI -> m Request setURI meth req URI{..} = do Authority {..} <- maybe missingUA return uriAuthority let req' = req { secure = isSecure , host = hostBS authorityHost , port = thePort , path = uriPath , method = meth } thePort = maybe defPort portNumber authorityPort addAuth = maybe id addAuth' authorityUserInfo return $ setQueryString theQueryString $ addAuth req' where missingUA = throwM $ InvalidUrlException "N/A" "Missing URI host/port" addAuth' UserInfo {..} = applyBasicProxyAuth uiUsername uiPassword defPort | isSecure = 443 | otherwise = 80 isSecure = case uriScheme of Scheme "https" -> True _ -> False theQueryString = [(k , Just v) | (k, v) <- queryPairs uriQuery] ------------------------------------------------------------------------------- showBS :: Show a => a -> ByteString showBS = BS.pack . show
repo_name: Soostone/wunderground
path: src/WUnderground/Client.hs
license: mit
size: 3,581 | n_ast_errors: 15 | ast_max_depth: 17 | n_whitespaces: 875 | n_ast_nodes: 777 | n_ast_terminals: 418 | n_ast_nonterminals: 359 | loc: -1 | cycloplexity: -1
{-# LANGUAGE TemplateHaskell #-}

module Bank.Models.Account.Commands
  ( accountCommands
  , OpenAccount (..)
  , CreditAccount (..)
  , DebitAccount (..)
  , TransferToAccount (..)
  , AcceptTransfer (..)
  ) where

import Language.Haskell.TH (Name)

import Eventful.UUID

import Bank.Json

accountCommands :: [Name]
accountCommands =
  [ ''OpenAccount
  , ''CreditAccount
  , ''DebitAccount
  , ''TransferToAccount
  , ''AcceptTransfer
  ]

data OpenAccount = OpenAccount
  { openAccountOwner :: UUID
  , openAccountInitialFunding :: Double
  } deriving (Show, Eq)

data CreditAccount = CreditAccount
  { creditAccountAmount :: Double
  , creditAccountReason :: String
  } deriving (Show, Eq)

data DebitAccount = DebitAccount
  { debitAccountAmount :: Double
  , debitAccountReason :: String
  } deriving (Show, Eq)

data TransferToAccount = TransferToAccount
  { transferToAccountTransferId :: UUID
  , transferToAccountAmount :: Double
  , transferToAccountTargetAccount :: UUID
  } deriving (Show, Eq)

data AcceptTransfer = AcceptTransfer
  { acceptTransferTransferId :: UUID
  , acceptTransferSourceAccount :: UUID
  , acceptTransferAmount :: Double
  } deriving (Show, Eq)

deriveJSONUnPrefixLower ''OpenAccount
deriveJSONUnPrefixLower ''CreditAccount
deriveJSONUnPrefixLower ''DebitAccount
deriveJSONUnPrefixLower ''TransferToAccount
deriveJSONUnPrefixLower ''AcceptTransfer
repo_name: jdreaver/eventful
path: examples/bank/src/Bank/Models/Account/Commands.hs
license: mit
size: 1,396 | n_ast_errors: 0 | ast_max_depth: 8 | n_whitespaces: 227 | n_ast_nodes: 320 | n_ast_terminals: 189 | n_ast_nonterminals: 131 | loc: 50 | cycloplexity: 1
{-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedLists #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TemplateHaskell #-} module Numeric.SSystem where import Control.Lens import Data.Foldable import Data.List (zipWith4) import Data.Sequence (Seq) import qualified Data.Sequence as Seq import Data.Utils import Text.Parse.ODEBench data SODE a = SODE { _alpha :: a , _posExp :: Seq a , _beta :: a , _negExp :: Seq a } deriving (Functor, Foldable, Traversable, Eq, Ord) data SSystem a = SSystem { _odes :: Seq (SODE a) , _size :: Int } deriving (Functor, Foldable, Traversable, Eq, Ord) instance FoldableWithIndex Int SSystem instance TraversableWithIndex Int SSystem instance FunctorWithIndex Int SSystem type instance Index (SSystem a) = Int type instance IxValue (SSystem a) = a instance Show a => Show (SSystem a) where showsPrec _ SSystem {..} = showTable 4 2 (2 + _size * 2) (["a"] ++ [ 'g' : show i | i <- [1.._size]] ++ ["b"] ++ [ 'h' : show i | i <- [1.._size]]) (flip imap _odes $ \i SODE {..} -> (show (succ i), _alpha : foldr (:) (_beta : foldr (:) [] _negExp) _posExp)) toEqn' :: SODE Double -> [Double] -> Double toEqn' SODE {..} v = foldl' (*) _alpha (zipWith (**) v (foldr (:) [] _posExp)) - foldl' (*) _beta (zipWith (**) v (foldr (:) [] _negExp)) toEqns' :: SSystem Double -> [Double] -> [Double] toEqns' SSystem {..} = traverse toEqn' (foldr (:) [] _odes) fromParams :: ParamInfo a -> SSystem a fromParams space = SSystem (toOde space) len where len = length (alphas space) toOde ParamInfo {..} = Seq.fromList $ zipWith4 SODE alphas (map Seq.fromList posExps) betas (map Seq.fromList negExps) makeLenses ''SODE makeLenses ''SSystem instance Ixed (SSystem a) where ix i f s | x == 0 = (odes. ix y . alpha) f s | x <= _size s = (odes . ix y . posExp . ix (x-1)) f s | x == _size s + 1 = (odes . ix y . beta) f s | otherwise = (odes . ix y . negExp . ix (x - _size s - 2)) f s where (y,x) = i `quotRem` _size s -- | >>> exampleSystem -- ┌──┬────┬────┬────┬────┬────┬────┬────┬────┬────┬────┬────┬────┐ -- │ │ a │ g1 │ g2 │ g3 │ g4 │ g5 │ b │ h1 │ h2 │ h3 │ h4 │ h5 │ -- ├──┼────┼────┼────┼────┼────┼────┼────┼────┼────┼────┼────┼────┤ -- │ 1│ 5.0│ 0.0│ 0.0│ 1.0│ 0.0│-1.0│10.0│ 2.0│ 0.0│ 0.0│ 0.0│ 0.0│ -- │ 2│10.0│ 2.0│ 0.0│ 0.0│ 0.0│ 0.0│10.0│ 0.0│ 2.0│ 0.0│ 0.0│ 0.0│ -- │ 3│10.0│ 0.0│-1.0│ 0.0│ 0.0│ 0.0│10.0│ 0.0│-1.0│ 2.0│ 0.0│ 0.0│ -- │ 4│ 8.0│ 0.0│ 0.0│ 2.0│ 0.0│-1.0│10.0│ 0.0│ 0.0│ 0.0│ 2.0│ 0.0│ -- │ 5│10.0│ 0.0│ 0.0│ 0.0│ 2.0│ 0.0│10.0│ 0.0│ 0.0│ 0.0│ 0.0│ 2.0│ -- └──┴────┴────┴────┴────┴────┴────┴────┴────┴────┴────┴────┴────┘ exampleSystem :: SSystem Double exampleSystem = SSystem [ SODE 5 [ 0, 0, 1, 0,-1] 10 [ 2, 0, 0, 0, 0] , SODE 10 [ 2, 0, 0, 0, 0] 10 [ 0, 2, 0, 0, 0] , SODE 10 [ 0,-1, 0, 0, 0] 10 [ 0,-1, 2, 0, 0] , SODE 8 [ 0, 0, 2, 0,-1] 10 [ 0, 0, 0, 2, 0] , SODE 10 [ 0, 0, 0, 2, 0] 10 [ 0, 0, 0, 0, 2] ] 5 alphal, betal :: Int -> (forall a. Traversal' (SSystem a) a) alphal i = odes . ix i . alpha betal i = odes . ix i . beta posExpl, negExpl :: Int -> Int -> (forall a. Traversal' (SSystem a) a) posExpl i j = odes . ix i . posExp . ix j negExpl i j = odes . ix i . negExp . ix j
repo_name: oisdk/SSystemOpt
path: src/Numeric/SSystem.hs
license: mit
size: 4,111 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 865 | n_ast_nodes: 1,338 | n_ast_terminals: 724 | n_ast_nonterminals: 614 | loc: 76 | cycloplexity: 1
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE JavaScriptFFI #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE FlexibleContexts #-} module Program.MapTiles ( downloadMapTiles ) where import Commons import Numeric.DataFrame import Control.Lens import JavaScript.WebGL import qualified Data.JSString as JSString import Control.Concurrent (forkIO) import Reflex import Unsafe.Coerce (unsafeCoerce) import SmallGL import Model.Scenario import Model.GeoJSON.Coordinates.Wgs84 import Program.Scenario data GroundMapView = GroundMapView { gmvZoomLevel :: !Int , gmvMapUrl :: !JSString , tileToMetric :: !((Int, Int) -> Vec4f) , gmvCallback :: !((DataFrame Float '[4,4], TexImageSource) -> IO ()) } downloadMapTiles :: ( Reflex t, MonadIO m, TriggerEvent t m , PerformEvent t m, MonadIO (Performable m) , MonadHold t m) => Behavior t Scenario -> QuaViewT Writing t m () downloadMapTiles scenarioB = do mapUpdatesE' <- (mapUpdate <$> scenarioB <@>) <$> askEvent (ScenarioUpdate ScenarioStateUpdatedOut) mapUpdatesE <- updated . fromUniqDynamic . uniqDynamic <$> holdDyn Nothing mapUpdatesE' -- set new opacity levels if needed registerEvent (SmallGLInput SetMapTileOpacity) $ fmapMaybe getOpacity mapUpdatesE (addMapTileE, addMapTileCbk) <- newTriggerEvent performEvent_ $ liftIO . downloadTiles addMapTileCbk <$> mapUpdatesE registerEvent (SmallGLInput ResetMapTiles) $ () <$ mapUpdatesE registerEvent (SmallGLInput AddMapTileToRendering) addMapTileE where mapUpdate scenario viewS = if scenario^.useMapLayer then (,,,,) (scenario^.mapZoomLevel) (scenario^.mapOpacity) (scenario^.mapUrl) (viewS^.clippingDist) <$> scenario^.geoLoc else Nothing getOpacity (Just (_,o,_,_,_)) = Just (realToFrac o) getOpacity Nothing = Nothing downloadTiles :: ((DataFrame Float '[4,4], TexImageSource) -> IO ()) -> Maybe (Int, Double, JSString, Float, (Double,Double,Double)) -> IO () downloadTiles _ Nothing = return () downloadTiles cbk (Just (zoomLvl, _, mUrl, viewDist, (lon,lat,_)) ) = mapM_ (createMapTilesAsync gmv) [[ (xtile0+i,ytile0+j) | i' <- [0 .. nTiles -1], i <- [i', -i'-1]] | j' <- [0 .. nTiles -1], j <- [j', -j'-1]] where gmv = GroundMapView { gmvZoomLevel = zoomLvl , gmvMapUrl = mUrl , tileToMetric = xytile2metric , gmvCallback = cbk } -- set up the center point to real center of the tile (xtile0,ytile0) = zoomLonLat2xy zoomLvl (realToFrac lon, realToFrac lat) (lon0, lat0) = zoomXY2LonLat zoomLvl (xtile0,ytile0) (lon1, lat1) = zoomXY2LonLat zoomLvl (xtile0+1,ytile0+1) -- a transform from WGS'84 to our local coordinates wgs2metric = wgs84ToMetric (vec2 (realToFrac lon) (realToFrac lat)) xytile2metric (x,y) = (<:> vec2 0 1) . wgs2metric . uncurry vec2 $ zoomXY2LonLat zoomLvl (x, y) -- get center positions in local metric system pos0 = wgs2metric (vec2 lon0 lat0) pos1 = wgs2metric (vec2 lon1 lat1) tileWidth = unScalar $ normL2 (pos1 - pos0) / sqrt 2 nTiles = min 25 . max 3 . ceiling $ viewDist / tileWidth * 0.8 createMapTilesAsync :: GroundMapView -> [(Int, Int)] -- ^ tile x and y -> IO () createMapTilesAsync gmv = void . forkIO . mapM_ (createMapTile gmv) createMapTile :: GroundMapView -> (Int, Int) -- ^ tile x and y -> IO () createMapTile GroundMapView {..} tilexy@(x,y) = createTex gmvMapUrl gmvZoomLevel tilexy >>= mapM_ (gmvCallback . 
(,) df) where df = tileToMetric (x, y+1) <::> tileToMetric (x+1, y+1) <+:> tileToMetric (x, y) <+:> tileToMetric (x+1, y) foreign import javascript interruptible "var tryDownload = function(attempt) {\ \ if(attempt > 0){ \ \ var osmImg = new Image(); \ \ osmImg.addEventListener('load', function(){$c(osmImg);});\ \ osmImg.addEventListener('error', function(){tryDownload(attempt-1);});\ \ osmImg['crossOrigin'] = 'Anonymous'; \ \ osmImg['src'] = $1; \ \ } else { $c(null); }\ \}; tryDownload(10); " js_createTex :: JSString -> IO (Nullable JSVal) createTex :: JSString -> Int -> (Int,Int) -> IO (Maybe TexImageSource) createTex urlPat z (xtile,ytile) = fmap unsafeCoerce . nullableToMaybe <$> js_createTex url where url = urlPat & JSString.replace "${z}" (toJSString $ show z) & JSString.replace "${x}" (toJSString $ show xtile) & JSString.replace "${y}" (toJSString $ show ytile) zoomLonLat2xy :: Int -> (Float, Float) -> (Int, Int) zoomLonLat2xy z (lon, lat) = (xtile, ytile) where n = 2 ^ z xtile = round $ n * ((lon + 180) / 360) ytile = round $ n * (1 - (log(tan(lat * pi / 180) + 1/cos(lat * pi / 180)) / pi)) / 2 zoomXY2LonLat :: Int -> (Int,Int) -> (Float, Float) zoomXY2LonLat z (xtile, ytile) = (lon, lat) where n = 2 ^ z lon = fromIntegral xtile / n * 360.0 - 180.0 lat = atan(sinh(pi * (1 - 2 * fromIntegral ytile / n))) * 180 / pi
repo_name: achirkin/qua-view
path: src/Program/MapTiles.hs
license: mit
size: 5,406 | n_ast_errors: 7 | ast_max_depth: 20 | n_whitespaces: 1,435 | n_ast_nodes: 1,681 | n_ast_terminals: 907 | n_ast_nonterminals: 774 | loc: -1 | cycloplexity: -1
-----------------------------------------------------------------------------
--
-- Module      : InfList
-- Copyright   :
-- License     : AllRightsReserved
--
-- Maintainer  :
-- Stability   :
-- Portability :
--
-- | Playing with infinite lists
--
-----------------------------------------------------------------------------

module InfList (
    generatePairs, splitPairs, splitPairsAt
) where

generatePairs :: (t1 -> (t, t1)) -> t1 -> [(t, t1)]
generatePairs f g = h:t
    where h@(_, g1) = f g
          t = generatePairs f g1

splitPairs :: [(t, b)] -> (t, [(t, b)])
splitPairs (h:t) = (fst h, t)

splitPairsAt :: Int -> [(b, b1)] -> ([b], [(b, b1)])
splitPairsAt n l = (map fst h, t)
    where (h, t) = splitAt n l
repo_name: equational/JL2012
path: HaskellExamples/src/InfList.hs
license: mit
size: 735 | n_ast_errors: 0 | ast_max_depth: 9 | n_whitespaces: 156 | n_ast_nodes: 244 | n_ast_terminals: 147 | n_ast_nonterminals: 97 | loc: 11 | cycloplexity: 1
{-# LANGUAGE CPP #-} {-# LANGUAGE LambdaCase, OverloadedStrings, FlexibleContexts, GeneralizedNewtypeDeriving #-} #include "ghc-compat.h" module HsToCoq.ConvertHaskell.Type (convertType, convertLType, convertLHsTyVarBndrs, convertLHsSigType, convertLHsSigTypeWithExcls, convertLHsSigWcType, convertHsSigType_) where import Control.Applicative (liftA2) import Control.Lens import Data.Functor (($>)) import Data.Traversable import Data.List.NonEmpty (nonEmpty) import Data.List ((\\)) import Data.Maybe (maybe) import GHC hiding (Name) import qualified GHC import HsToCoq.Util.GHC.FastString import HsToCoq.Util.GHC import HsToCoq.Util.GHC.HsTypes import HsToCoq.Coq.Gallina as Coq import HsToCoq.Coq.Gallina.Util import HsToCoq.Coq.FreeVars import HsToCoq.ConvertHaskell.Parameters.Edits import HsToCoq.ConvertHaskell.Monad import HsToCoq.ConvertHaskell.Variables import HsToCoq.ConvertHaskell.Literals -------------------------------------------------------------------------------- convertLHsTyVarBndrs :: LocalConvMonad r m => Explicitness -> [LHsTyVarBndr GhcRn] -> m [Binder] convertLHsTyVarBndrs ex tvs = for (map unLoc tvs) $ \case UserTyVar NOEXTP tv -> mkBinder ex . Ident <$> var TypeNS (unLoc tv) KindedTyVar NOEXTP tv k -> mkBinders ex <$> (pure . Ident <$> var TypeNS (unLoc tv)) <*> convertLType k #if __GLASGOW_HASKELL__ >= 806 XTyVarBndr v -> noExtCon v #endif -------------------------------------------------------------------------------- convertType :: LocalConvMonad r m => HsType GhcRn -> m Term #if __GLASGOW_HASKELL__ >= 810 convertType (HsForAllTy NOEXTP _ tvs ty) = do #else convertType (HsForAllTy NOEXTP tvs ty) = do #endif explicitTVs <- convertLHsTyVarBndrs Coq.Implicit tvs tyBody <- convertLType ty pure . maybe tyBody (Forall ?? tyBody) $ nonEmpty explicitTVs convertType (HsQualTy NOEXTP lctx ty) = convertLType ty >>= convertContext lctx convertType (HsTyVar NOEXTP _ (L _ tv)) = Qualid <$> var TypeNS tv convertType (HsAppTy NOEXTP ty1 ty2) = App1 <$> convertLType ty1 <*> convertLType ty2 #if __GLASGOW_HASKELL__ >= 808 convertType HsAppKindTy{} = convUnsupported "type level type application" #endif #if __GLASGOW_HASKELL__ >= 806 convertType HsStarTy{} = pure (Sort Type) convertType XHsType{} = convUnsupported "NewHsTypeX" #else -- TODO: This constructor handles '*' and deparses it later. I'm just gonna -- bank on never seeing any infix type things. convertType (HsAppsTy tys) = let assertPrefix (L _ (HsAppPrefix lty)) = convertLType lty assertPrefix (L _ (HsAppInfix _)) = convUnsupported' "infix types in type application lists" in traverse assertPrefix tys >>= \case tyFun:tyArgs -> pure $ appList tyFun $ map PosArg tyArgs [] -> convUnsupported' "empty lists of type applications" convertType (HsPArrTy _ty) = convUnsupported' "parallel arrays (`[:a:]')" convertType (HsEqTy _ty1 _ty2) = convUnsupported' "type equality" -- FIXME convertType (HsCoreTy _) = convUnsupported' "[internal] embedded core types" #endif convertType (HsFunTy NOEXTP ty1 ty2) = Arrow <$> convertLType ty1 <*> convertLType ty2 convertType (HsListTy NOEXTP ty) = App1 (Var "list") <$> convertLType ty convertType (HsTupleTy NOEXTP tupTy tys) = do case tupTy of HsUnboxedTuple -> pure () -- TODO: Mark converted unboxed tuples specially? HsBoxedTuple -> pure () HsConstraintTuple -> convUnsupported' "constraint tuples" HsBoxedOrConstraintTuple -> pure () -- Sure, it's boxed, why not case tys of [] -> pure $ Var "unit" [ty] -> convertLType ty _ -> (`InScope` "type") <$> foldl1 (mkInfix ?? 
"*") <$> traverse convertLType tys convertType (HsOpTy NOEXTP ty1 op ty2) = App2 <$> (Qualid <$> var TypeNS (unLoc op)) <*> convertLType ty1 <*> convertLType ty2 -- ??? convertType (HsParTy NOEXTP ty) = Parens <$> convertLType ty convertType (HsIParamTy NOEXTP (L _ (HsIPName ip)) lty) = do isTyCallStack <- maybe (pure False) (fmap (== "CallStack") . ghcPpr) $ viewLHsTyVar lty if isTyCallStack && ip == fsLit "callStack" then pure $ "GHC.Stack.CallStack" else convUnsupported' "implicit parameter constraints" convertType (HsKindSig NOEXTP ty k) = HasType <$> convertLType ty <*> convertLType k convertType (HsSpliceTy _ _) = convUnsupported' "Template Haskell type splices" convertType (HsDocTy NOEXTP ty _doc) = convertLType ty convertType (HsBangTy NOEXTP _bang ty) = convertLType ty -- Strictness annotations are ignored convertType (HsRecTy NOEXTP _fields) = convUnsupported' "record types" -- FIXME convertType (HsExplicitListTy _ _ tys) = foldr (App2 $ Var "cons") (Var "nil") <$> traverse convertLType tys convertType (HsExplicitTupleTy _PlaceHolders tys) = case tys of [] -> pure $ Var "tt" [ty] -> convertLType ty _ -> foldl1 (App2 $ Var "pair") <$> traverse convertLType tys convertType (HsTyLit NOEXTP lit) = case lit of HsNumTy _src int -> either convUnsupported' (pure . Num) $ convertInteger "type-level integers" int HsStrTy _src str -> pure $ convertFastString str convertType (HsWildCardTy _) = convUnsupported' "wildcards" convertType (HsSumTy NOEXTP _) = convUnsupported' "sum types" convertContext :: LocalConvMonad r m => LHsContext GhcRn -> Term -> m Term convertContext lctx tyBody = do classes <- traverse (fmap (Generalized Coq.Implicit) . convertLType) (unLoc lctx) pure . maybe tyBody (Forall ?? tyBody) $ nonEmpty classes -------------------------------------------------------------------------------- convertLType :: LocalConvMonad r m => LHsType GhcRn -> m Term convertLType = convertType . unLoc -------------------------------------------------------------------------------- convertLHsSigTypeWithExcls :: LocalConvMonad r m => UnusedTyVarMode -> LHsSigType GhcRn -> [Qualid] -> m Term #if __GLASGOW_HASKELL__ >= 808 convertLHsSigTypeWithExcls utvm (HsIB hs_itvs hs_lty) excls = do #elif __GLASGOW_HASKELL__ == 806 convertLHsSigTypeWithExcls utvm (HsIB (HsIBRn {hsib_vars=hs_itvs}) hs_lty) excls = do #else convertLHsSigTypeWithExcls utvm (HsIB hs_itvs hs_lty _) excls = do #endif coq_itvs <- traverse (var TypeNS) hs_itvs coq_ty <- convertLType hs_lty finishConvertHsSigTypeWithExcls utvm coq_itvs coq_ty excls #if __GLASGOW_HASKELL__ >= 806 convertLHsSigTypeWithExcls _ (XHsImplicitBndrs v) _ = noExtCon v #endif finishConvertHsSigTypeWithExcls :: LocalConvMonad r m => UnusedTyVarMode -> [Qualid] -> Term -> [Qualid] -> m Term finishConvertHsSigTypeWithExcls utvm coq_itvs coq_ty excls = let coq_tyVars = case utvm of PreserveUnusedTyVars -> coq_itvs DeleteUnusedTyVars -> let fvs = getFreeVars coq_ty in filter (`elem` fvs) coq_itvs coq_binders = mkBinder Coq.Implicit . 
Ident <$> coq_tyVars \\ excls in pure $ maybeForall coq_binders coq_ty convertLHsSigType :: LocalConvMonad r m => UnusedTyVarMode -> LHsSigType GhcRn -> m Term convertLHsSigType utvm sigTy = convertLHsSigTypeWithExcls utvm sigTy [] convertLHsSigWcType :: LocalConvMonad r m => UnusedTyVarMode -> LHsSigWcType GhcRn -> m Term convertLHsSigWcType utvm (HsWC wcs hsib) | null wcs = convertLHsSigType utvm hsib | otherwise = convUnsupported' "type wildcards" #if __GLASGOW_HASKELL__ >= 806 convertLHsSigWcType _ (XHsWildCardBndrs v) = noExtCon v #endif -------------------------------------------------------------------------------- convertHsSigType_ :: LocalConvMonad r m => UnusedTyVarMode -> LHsQTyVars GhcRn -> Maybe (LHsContext GhcRn) -> HsConDeclDetails GhcRn -> LHsType GhcRn -> [Qualid] -> m Term convertHsSigType_ utvm (HsQTvs { hsq_explicit = qvars }) mcxt args resTy excls = do coq_itvs <- traverse (var TypeNS . binderName . unLoc) qvars coq_ty <- convertLType resTy >>= convertArgs args >>= maybe pure convertContext mcxt finishConvertHsSigTypeWithExcls utvm coq_itvs coq_ty excls #if __GLASGOW_HASKELL__ >= 806 convertHsSigType_ _ (XLHsQTyVars v) _ _ _ _ = noExtCon v #endif convertArgs :: LocalConvMonad r m => HsConDeclDetails GhcRn -> Term -> m Term convertArgs (PrefixCon args) ty = do coq_args <- traverse convertLType args pure (foldr Arrow ty coq_args) convertArgs (RecCon rec) ty = do tyss <- for (unLoc rec) $ \lfield -> case unLoc lfield of -- We must be careful to copy the type when multiple fields @fds@ are under -- the same signature @t@ ConDeclField { cd_fld_names = fds, cd_fld_type = t } -> do ty <- convertLType t pure (fds $> ty) #if __GLASGOW_HASKELL__ >= 806 XConDeclField v -> noExtCon v #endif pure (foldr Arrow ty (concat tyss)) convertArgs (InfixCon t1 t2) ty = liftA2 Arrow (convertLType t1) (liftA2 Arrow (convertLType t2) (pure ty)) binderName :: HsTyVarBndr GhcRn -> GHC.Name binderName (UserTyVar NOEXTP lname) = unLoc lname binderName (KindedTyVar NOEXTP lname _) = unLoc lname #if __GLASGOW_HASKELL__ >= 806 binderName (XTyVarBndr v) = noExtCon v #endif
repo_name: antalsz/hs-to-coq
path: src/lib/HsToCoq/ConvertHaskell/Type.hs
license: mit
size: 9,074 | n_ast_errors: 0 | ast_max_depth: 17 | n_whitespaces: 1,604 | n_ast_nodes: 2,357 | n_ast_terminals: 1,172 | n_ast_nonterminals: 1,185 | loc: 161 | cycloplexity: 12
module RedditGrabber where

import Data.Aeson.Parser
import Network.HTTP
import RedditHeader
import Data.Text

grab :: IO [Post]

parse json bytestr -> Result Value

convert :: Value -> [Post]

create :: Value -> Post
create (Object o) = Post { sub = lookupDefault (String pack "") key o,
                           score = Nothing,
                           username = ,
                           title = ,
                           body = ,
                           score = ,
                           length = ,
                           timestamp = ,
                           id = }
create x = mzero

toString :: Value -> String
toString String s = unpack s

--already existing raw data
getRaw :: IO [Post]

removeIntersect :: [Post] -> [Post] -> [Post]
removeIntersect new existing = new \\ existing
-- still need to update existing upvote counts... maybe keep new posts but dismiss existing ones that have been reloaded?
existing \\ new
repo_name: vektordev/mlreader
path: src/RedditGrabber.hs
license: gpl-2.0
size: 745 | n_ast_errors: 45 | ast_max_depth: 7 | n_whitespaces: 146 | n_ast_nodes: 241 | n_ast_terminals: 136 | n_ast_nonterminals: 105 | loc: -1 | cycloplexity: -1
{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE RecordWildCards #-} -- | -- Module: $HEADER$ -- Description: Low level FFI. -- Copyright: -- License: GPL-2 -- -- Maintainer: Jan Sipr <[email protected]> -- Stability: experimental -- Portability: GHC specific language extensions. module Phone.Run ( deinitPhone , initPhone , setNullSndDev , withPhone ) where import Prelude (fromIntegral) import Control.Applicative ((<$>), (<*>), pure) import Control.Exception (bracket_) import Control.Monad ((>=>), (>>=), (=<<), void) import Data.Function (($), (.)) import Data.Maybe (Maybe(Just, Nothing), maybe) import Foreign.Marshal.Utils (fromBool) import Foreign.Ptr (nullPtr) import System.IO (IO) import Control.Monad.IO.Class (liftIO) import Control.Monad.Trans.Cont (ContT(ContT), evalContT) import Control.Monad.Trans.Class (lift) import Phone.Config ( Config ( Config , handlers , logging ) , Handlers ( Handlers , onCallStateChange , onCallTransactionStateChange , onIncomingCall , onMediaStateChange , onRegistrationStarted , onRegistrationStateChange ) , Logging ( Logging , logLevel , logConsoleLevel , logMsgLogging , logFilename ) ) import Phone.Exception ( PhoneException ( CreateLib , Initialization , Start , Transport ) ) import Phone.MonadPJ (MonadPJ(liftPJ)) import Phone.Internal.Event (toEvent) import qualified Phone.Internal.FFI as FFI ( createPjSua , destroyPjSua , pjsuaStart , setNullSndDev , codecSetPriority ) import qualified Phone.Internal.FFI.CallManipulation as FFI (hangupAll) import qualified Phone.Internal.FFI.Configuration as FFI ( initializePjSua , setOnCallStateCallback , setOnCallTransactionStateCallback , setOnIncomingCallCallback , setOnMediaStateCallback , setOnRegistrationStartedCallback , setOnRegistrationStateCallback , toOnCallState , toOnCallTransactionState , toOnIncomingCall , toOnMediaState , toOnRegistrationStarted , toOnRegistrationState , withPjConfig ) import qualified Phone.Internal.FFI.Logging as FFI ( setConsoleLevel , setLevel , setLogFilename , setMsgLogging , withLoggingConfig ) import qualified Phone.Internal.FFI.Media as FFI ( withMediaConfig , setMediaConfigClockRate ) import qualified Phone.Internal.FFI.PjString as FFI ( withPjString , withPjStringPtr ) import qualified Phone.Internal.FFI.Transport as FFI ( createTransport , udpTransport , withTransportConfig ) import qualified Phone.Internal.Utils as FFI (check) withPhone :: Config -> IO () -> IO () withPhone cfg = bracket_ (initPhone cfg) deinitPhone initPhone :: Config -> IO () initPhone Config{..} = liftPJ . evalContT $ do lift . FFI.check CreateLib $ FFI.createPjSua lift . FFI.check Initialization =<< FFI.initializePjSua <$> withPj <*> withLog <*> withMedia lift . FFI.check Transport =<< FFI.createTransport FFI.udpTransport <$> withTransport <*> pure nullPtr lift . FFI.check Start $ FFI.pjsuaStart lift $ setCodecs where Handlers{..} = handlers Logging{..} = logging withTransport = ContT FFI.withTransportConfig withPj = do pjCfg <- ContT FFI.withPjConfig lift $ do whenJust onCallStateChange $ liftIO . FFI.toOnCallState . onCallState >=> FFI.setOnCallStateCallback pjCfg whenJust onCallTransactionStateChange $ liftIO . FFI.toOnCallTransactionState . onCallTransactionState >=> FFI.setOnCallTransactionStateCallback pjCfg whenJust onIncomingCall $ liftIO . FFI.toOnIncomingCall . onIncCall >=> FFI.setOnIncomingCallCallback pjCfg whenJust onRegistrationStateChange $ liftIO . FFI.toOnRegistrationState >=> FFI.setOnRegistrationStateCallback pjCfg whenJust onRegistrationStarted $ liftIO . 
FFI.toOnRegistrationStarted . onRegStarted >=> FFI.setOnRegistrationStartedCallback pjCfg whenJust onMediaStateChange $ liftIO . FFI.toOnMediaState >=> FFI.setOnMediaStateCallback pjCfg pure pjCfg withLog = do logFile <- ContT $ withMaybePjString logFilename logCfg <- ContT $ FFI.withLoggingConfig lift $ do whenJust logMsgLogging $ FFI.setMsgLogging logCfg . fromBool whenJust logLevel $ FFI.setLevel logCfg . fromIntegral whenJust logConsoleLevel $ FFI.setConsoleLevel logCfg . fromIntegral whenJust logFile $ FFI.setLogFilename logCfg pure logCfg withMedia = do mediaCfg <- ContT FFI.withMediaConfig -- When pjproject is built without resampling (as it happens to be -- in Debian), we want to fix the rate and codecs so that we don't -- crash in resampler creation. lift $ FFI.setMediaConfigClockRate mediaCfg 8000 pure mediaCfg setCodecs = do FFI.withPjStringPtr "PCMU" (`FFI.codecSetPriority` 255) FFI.withPjStringPtr "PCMA" (`FFI.codecSetPriority` 255) onCallState f callId event = toEvent event >>= f callId onCallTransactionState f callId _ event = toEvent event >>= f callId onIncCall f acc callId _ = f acc callId onRegStarted f acc p = f acc $ fromIntegral p whenJust m op = maybe (pure ()) op m withMaybePjString = maybe ($ Nothing) ((. (. Just)) . FFI.withPjString) deinitPhone :: IO () deinitPhone = liftPJ $ do FFI.hangupAll void FFI.destroyPjSua setNullSndDev :: MonadPJ m => m () setNullSndDev = liftPJ FFI.setNullSndDev
IxpertaSolutions/hsua
src/Phone/Run.hs
gpl-2.0
5,950
0
16
1,584
1,311
732
579
-1
-1
---------------------------------------------------- -- -- -- Statistics.hs: -- -- Functions that collect and print out -- -- statistics -- -- -- ---------------------------------------------------- {- Copyright (C) GenI 2002-2005 (originally from HyLoRes) Carlos Areces - [email protected] - http://www.loria.fr/~areces Daniel Gorin - [email protected] Juan Heguiabehere - [email protected] - http://www.inf.unibz.it/~juanh/ Eric Kow - [email protected] - http://www.loria.fr/~kow This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -} {-# LANGUAGE FlexibleContexts, RankNTypes #-} module NLP.GenI.Statistics(Statistics, StatisticsState, emptyStats, showFinalStats, initialStatisticsStateFor, addMetric, Metric(IntMetric), queryMetrics, updateMetrics, incrIntMetric, queryIntMetric, ) where import Control.Applicative ( (<$>) ) import Control.Monad.State import Data.Maybe (mapMaybe) import Text.JSON import Control.DeepSeq ------------------------------------------- -- Statistics are collections of Metrics -- which can be printed out (at regular intervals) ------------------------------------------- newtype Statistics = Stat{ metrics::[Metric] } type StatisticsState a = forall m. (MonadState Statistics m) => m a updateMetrics :: (Metric -> Metric) -> Statistics -> Statistics updateMetrics f stat = stat{metrics = map f (metrics stat) } queryMetrics :: (Metric -> Maybe a) -> Statistics -> [a] queryMetrics f = mapMaybe f . metrics emptyStats :: Statistics emptyStats = Stat [] --------------------------- Monadic Statistics functions follow ------------------------------ initialStatisticsStateFor :: (MonadState Statistics m) => (m a -> Statistics -> b) -> m a -> b initialStatisticsStateFor f = flip f emptyStats -- | Adds a metric at the beginning of the list -- (note we reverse the order whene we want to print the metrics) addMetric :: Metric -> StatisticsState () addMetric newMetric = modify (\stat -> stat{metrics = newMetric : metrics stat } ) showFinalStats :: Statistics -> String showFinalStats = unlines . map show . reverse . metrics -------------------------------------------- -- Metrics -------------------------------------------- data Metric = IntMetric String Int instance Show Metric where show (IntMetric s x) = s ++ " : " ++ show x incrIntMetric :: String -> Int -> Metric -> Metric incrIntMetric key i (IntMetric s c) | s == key = IntMetric s (c+i) incrIntMetric _ _ m = m queryIntMetric :: String -> Metric -> Maybe Int queryIntMetric key (IntMetric s c) | s == key = Just c queryIntMetric _ _ = Nothing --------------------------- JSON Output ------------------------------ instance JSON Statistics where readJSON (JSObject j) = do Stat <$> mapM jsonToMetric (fromJSObject j) readJSON j = fail $ "Expected a JSON object, but got " ++ show j ++ " instead" showJSON = JSObject . toJSObject . map metricToJSON . 
metrics -- not quite showJSON here metricToJSON :: Metric -> (String, JSValue) metricToJSON (IntMetric s i) = (s, showJSON i) jsonToMetric :: (String, JSValue) -> Result Metric jsonToMetric (s, i) = IntMetric s <$> readJSON i --------------------------- DeepSeq ------------------------------ {-! deriving instance NFData Statistics deriving instance NFData Metric !-} -- GENERATED START instance NFData Statistics where rnf (Stat x1) = rnf x1 `seq` () instance NFData Metric where rnf (IntMetric x1 x2) = rnf x1 `seq` rnf x2 `seq` () -- GENERATED STOP
kowey/GenI
src/NLP/GenI/Statistics.hs
gpl-2.0
4,343
0
11
905
804
438
366
53
1
import Data.List main :: IO () main = print solve solve :: Int solve = sum $ notPossible -- Upper limit for the numbers that can be written as the sum of two abundant numbers p = 28123 -- The proper divisors of a given integer. divisors :: Integral a => a -> [a] divisors x = (nub.init) $ firstHalf ++ secondHalf where firstHalf = 1: (filter ((==0) . rem x) [2 .. sq x]) secondHalf = reverse [ x `div` i | i <- firstHalf] sq = floor . sqrt . fromIntegral -- Whether a number is abundant. isAbundant x = (sum . divisors) x > x -- The list of all abundants smaller than p. abundants = filter isAbundant [1..p] -- Whether a number is the sum of two abundant numbers isSumOfTwoAbundants n = any (\x -> isAbundant (n - x)) $ takeWhile (<n) abundants -- The list of numbers for which it's not possible to be written as a sum of two abundant numbers. notPossible = filter (not . isSumOfTwoAbundants) [1..p]
NorfairKing/project-euler
023/haskell/solution.hs
gpl-2.0
962
0
12
236
270
147
123
15
1
module Main where import CardDB (cards, delete, first, get, insert, maxId, nextId, putDBInfo, randomCards, setDBUp, update) import Cards (ask, create, demote, edit, export, isDue, promote, put) import Control.Monad (filterM, forM_, liftM, when) import Data.Maybe (catMaybes, fromJust) import System.Console.GetOpt (ArgOrder(..), OptDescr(..), ArgDescr(..), getOpt, usageInfo) import System.Environment (getArgs, getProgName) data Options = Options { optAll :: Bool } deriving Show defaults :: Options defaults = Options { optAll = False } allOptions :: [OptDescr (Options -> Options)] allOptions = [ Option "a" ["all"] (NoArg (\ opts -> opts { optAll = True })) "all due cards" ] parseOpt :: [OptDescr (Options -> Options)] -> [String] -> IO (Options, [String]) parseOpt options args = do progName <- getProgName case getOpt Permute options args of (o, n, []) -> return (foldl (flip id) defaults o, n) (_, _, errs) -> ioError (userError (concat errs ++ usageInfo (header progName) options)) where header :: String -> String header name = "Usage: " ++ name ++ " " ++ head args ++ " [OPTIONS...]" main = do args <- getArgs setDBUp case args of ["add"] -> insert =<< create =<< nextId "edit" : ids -> do max <- liftM fromJust maxId let ids' = if null ids then [max] else map read ids mCards <- mapM get ids' forM_ (catMaybes mCards) $ \ card -> update card =<< edit card ["export"] -> mapM_ export =<< cards ["info"] -> putDBInfo ["list"] -> mapM_ put =<< cards "pick": _ -> do (opts, ids) <- parseOpt allOptions $ tail args if optAll opts then do dueCards <- filterM isDue =<< randomCards forM_ dueCards $ \ card -> do isOk <- ask card (=<<) (update card) (if isOk then promote card else demote card) else do mFirst <- first =<< randomCards let ids' = map read ids mCards <- mapM get ids' let mCards' = if null mCards then [mFirst] else mCards forM_ (catMaybes mCards') $ \ card -> do isOk <- ask card (=<<) (update card) (if isOk then promote card else demote card) "remove" : ids -> if null ids then delete =<< liftM fromJust maxId else mapM_ (delete . read) ids _ -> putStrLn ("Argument has to be either add, " ++ "edit [ids...], export, info, list, " ++ "pick [ids...|-a|--all] or remove [ids...].")
bbshortcut/Palace
src/cards.hs
gpl-3.0
3,357
2
23
1,531
922
485
437
77
14
module Config where import Control.Arrow (left) import Control.Exception (throw) import Data.Map (fromList) import System.Directory (getHomeDirectory) import System.FilePath ((</>)) import Text.Parsec (ParseError) import Text.Read (readEither) import Error import Project data Policy = MergeAll | KeepFirst | KeepLast | DropAll deriving (Bounded, Enum, Eq, Ord, Read, Show) data Position = First | Last deriving (Bounded, Enum, Eq, Ord, Read, Show) -- Consider using a Map too. data Config = Config -- The length at which to accept commands, like wa for watch. {completion :: Maybe Int, -- User's favorite editor. editor :: Maybe String, -- User can answer questions. interactive :: Bool, -- Leave this many empty lines between keys. skip :: Int, -- If a value is too long, cut it before the exceeding word and -- move the rest of the value to the next line. wrap :: Maybe Int, -- If a key is too long, move the value to the next line and -- do not take the key into account when calculating indents. maxKeyLength :: Maybe Int, -- The same thing, but taking wrapping into account, so -- applies only if (isJust lineWrap). minValueLength :: Maybe Int, -- If one key is too long, move all keys to the next line and -- indent everything based on this. fallbackIndent :: Maybe Int, -- Order directories somehow. positionDirectories :: Maybe Position, -- Mark directories with a trailing slash. markDirectories :: Bool, -- Work with dot-prefixed files too. ignoreHidden :: Bool, -- Mix existing files not in the contents with the rest when printing. showMissing :: Bool, -- Hide files in the contents that do not exist. hideSpurious :: Bool, -- What to show for entries that do not have values. placeholder :: String, -- Obvious. useSwap :: Bool, -- Then rules for special cases! -- What to do with duplicate entries. duplicatePolicy :: Policy, -- Stop when things happen. interactiveActions :: Bool, interactiveWarnings :: Bool, interactiveErrors :: Bool, warnConfig :: Bool, -- Warn about existing inconsistencies. warnMarker :: Bool, warnMissing :: Bool, warnSpurious :: Bool, -- These are pure. warnOrder :: Bool, warnDuplicate :: Bool, warnLineSkip :: Bool, warnIndentation :: Bool} deriving (Eq, Ord, Read, Show) defaultConfig :: Config defaultConfig = Config {editor = Nothing, placeholder = "??", wrap = Just 80} formatConfig :: Config -> String formatConfig = show -- This is dumb. parseConfig :: String -> Either ContentsError Config parseConfig = left (const $ SyntaxError 0 0) . readEither readConfig :: IO Config readConfig = do fp <- getHomeDirectory c <- readFile $ fp </> projectConfig defaultProject case parseConfig c of Right q -> return q Left x -> throw x
Tuplanolla/contents
Config.hs
gpl-3.0
2,991
0
10
754
556
330
226
61
2
module Main where import System.Environment(getArgs) import Numeric (readFloat) import Data.List (maximumBy) import Data.Function (on) type Thing = (Int, Rational, Int) type Package = ([Int], Rational, Int) splitBy :: Char -> String -> [String] splitBy c s = case dropWhile (== c) s of "" -> [] s' -> w : splitBy c s'' where (w, s'') = break (== c) s' readRational :: String -> Rational readRational = fst . head . readFloat addThing :: Thing -> Package -> Package addThing (index, weight, cost) (indices, totalWeight, price) = (index:indices, weight + totalWeight, cost + price) packThings :: Rational -> [Thing] -> [Package] packThings _ [] = [([], 0, 0)] packThings remWeight (x@(_,weight,_):xs) = (if weight <= remWeight then let packages = packThings (remWeight - weight) xs in map (addThing x) packages else []) ++ packThings remWeight xs bestPackage :: [Package] -> Package bestPackage = maximumBy (compare `on` (\(_, w, p) -> (p, -w))) makeThing :: String -> Thing makeThing s = let [s1,s2,s3] = splitBy ',' $ tail $ init s in (read s1, readRational s2, read $ tail s3) processLine :: String -> String processLine line = let (maxWeightS:_:thingsS) = words line maxWeight = readRational maxWeightS things = map makeThing thingsS packages = packThings maxWeight things in case bestPackage packages of ([], _, _) -> "-" (indices, _, _) -> tail $ init $ show indices main :: IO () main = do [inputFile] <- getArgs input <- readFile inputFile mapM_ putStrLn $ map processLine $ lines input
cryptica/CodeEval
Challenges/114_PackageProblem/main.hs
gpl-3.0
1,642
0
14
398
682
371
311
45
2
-- Simulation of non-flood syncing of content, across a network of nodes. module Main where import System.Random import Control.Monad.Random import Control.Monad import Control.Applicative import Data.Ratio import Data.Ord import Data.List import Data.Maybe import qualified Data.Set as S import qualified Data.Map.Strict as M {- - Tunable values -} totalFiles :: Int totalFiles = 100 -- How likely is a given file to be wanted by any particular node? probabilityFilesWanted :: Probability probabilityFilesWanted = 0.10 -- How many different locations can each transfer node move between? -- (Min, Max) transferDestinationsRange :: (Int, Int) transferDestinationsRange = (2, 3) -- Controls how likely transfer nodes are to move around in a given step -- of the simulation. -- (They actually move slightly less because they may start to move and -- pick the same location they are at.) -- (Min, Max) transferMoveFrequencyRange :: (Probability, Probability) transferMoveFrequencyRange = (0.10, 1.00) -- counts both immobile and transfer nodes as hops, so double Vince's -- theoretical TTL of 3. -- (30% loss on mocambos network w/o ttl of 4!) maxTTL :: TTL maxTTL = TTL (4 * 2) numImmobileNodes :: Int numImmobileNodes = 10 numTransferNodes :: Int numTransferNodes = 20 numSteps :: Int numSteps = 100 -- IO code main :: IO () main = do -- initialnetwork <- evalRandIO (seedFiles totalFiles =<< genNetwork) initialnetwork <- evalRandIO (seedFiles totalFiles =<< mocambosNetwork) networks <- evalRandIO (simulate numSteps initialnetwork) let finalnetwork = last networks putStrLn $ summarize initialnetwork finalnetwork putStrLn "location history of file 1:" print $ trace (traceHaveFile (File 1)) networks putStrLn "request history of file 1:" print $ trace (traceWantFile (File 1)) networks -- Only pure code below :) data Network = Network (M.Map NodeName ImmobileNode) [TransferNode] deriving (Show, Eq) data ImmobileNode = ImmobileNode NodeRepo deriving (Show, Eq) type NodeName = String type Route = [NodeName] data TransferNode = TransferNode { currentlocation :: NodeName , possiblelocations :: [NodeName] , movefrequency :: Probability , transferrepo :: NodeRepo } deriving (Show, Eq) data NodeRepo = NodeRepo { wantFiles :: [Request] , haveFiles :: S.Set File , satisfiedRequests :: S.Set Request } deriving (Show, Eq) data File = File Int deriving (Show, Eq, Ord) randomFile :: (RandomGen g) => Rand g File randomFile = File <$> getRandomR (0, totalFiles) data Request = Request File TTL deriving (Show, Ord) -- compare ignoring TTL instance Eq Request where (Request f1 _) == (Request f2 _) = f1 == f2 requestedFile :: Request -> File requestedFile (Request f _) = f requestTTL :: Request -> TTL requestTTL (Request _ ttl) = ttl data TTL = TTL Int deriving (Show, Eq, Ord) incTTL :: TTL -> TTL incTTL (TTL t) = TTL (t + 1) decTTL :: TTL -> TTL decTTL (TTL t) = TTL (t - 1) staleTTL :: TTL -> Bool staleTTL (TTL t) = t < 1 -- Origin of a request starts one higher than max, since the TTL -- will decrement the first time the Request is transferred to another node. originTTL :: TTL originTTL = incTTL maxTTL randomRequest :: (RandomGen g) => Rand g Request randomRequest = Request <$> randomFile <*> pure originTTL type Probability = Float randomProbability :: (RandomGen g) => Rand g Probability randomProbability = getRandomR (0, 1) -- Returns the state of the network at each step of the simulation. 
simulate :: (RandomGen g) => Int -> Network -> Rand g [Network] simulate n net = go n [net] where go 0 nets = return (reverse nets) go c (prev:nets) = do new <- step prev go (c - 1) (new:prev:nets) -- Each step of the simulation, check if each TransferNode wants to move, -- and if so: -- 1. It and its current location exchange their Requests. -- 2. And they exchange any requested files. -- 3. Move it to a new random location. -- -- Note: This implementation does not exchange requests between two -- TransferNodes that both arrive at the same location at the same step, -- and then move away in the next step. step :: (RandomGen g) => Network -> Rand g Network step (Network immobiles transfers) = go immobiles [] transfers where go is c [] = return (Network is c) go is c (t:ts) = do r <- randomProbability if movefrequency t <= r then case M.lookup (currentlocation t) is of Nothing -> go is (c ++ [t]) ts Just currentloc -> do let (currentloc', t') = merge currentloc t t'' <- move t' go (M.insert (currentlocation t) currentloc' is) (c ++ [t'']) ts else go is (c ++ [t]) ts merge :: ImmobileNode -> TransferNode -> (ImmobileNode, TransferNode) merge (ImmobileNode ir) t@(TransferNode { transferrepo = tr }) = ( ImmobileNode (go ir tr) , t { transferrepo = go tr ir } ) where go r1 r2 = r1 { wantFiles = wantFiles' , haveFiles = haveFiles' , satisfiedRequests = satisfiedRequests' `S.union` checkSatisfied wantFiles' haveFiles' } where wantFiles' = foldr addRequest (wantFiles r1) (wantFiles r2) haveFiles' = S.foldr (addFile wantFiles' satisfiedRequests') (haveFiles r1) (haveFiles r2) satisfiedRequests' = satisfiedRequests r1 `S.union` satisfiedRequests r2 -- Adds a file to the set, when there's a request for it, and the request -- has not already been satisfied. addFile :: [Request] -> S.Set Request -> File -> S.Set File -> S.Set File addFile rs srs f fs | any (\sr -> f == requestedFile sr) (S.toList srs) = fs | any (\r -> f == requestedFile r) rs = S.insert f fs | otherwise = fs -- Checks if any requests have been satisfied, and returns them, -- to be added to satisfidRequests checkSatisfied :: [Request] -> S.Set File -> S.Set Request checkSatisfied want have = S.fromList (filter satisfied want) where satisfied r = requestTTL r == originTTL && S.member (requestedFile r) have -- Decrements TTL, and avoids adding request with a stale TTL, or a -- request for an already added file with the same or a lower TTL. addRequest :: Request -> [Request] -> [Request] addRequest (Request f ttl) rs | staleTTL ttl' = rs | any (\r -> requestTTL r >= ttl) similar = rs | otherwise = r' : other where ttl' = decTTL ttl r' = Request f ttl' (other, similar) = partition (/= r') rs move :: (RandomGen g) => TransferNode -> Rand g TransferNode move t = do newloc <- randomfrom (possiblelocations t) return $ t { currentlocation = newloc } genNetwork :: (RandomGen g) => Rand g Network genNetwork = do let immobiles = M.fromList (zip (map show [1..]) (replicate numImmobileNodes emptyImmobile)) transfers <- sequence (replicate numTransferNodes (mkTransfer $ M.keys immobiles)) return $ Network immobiles transfers emptyImmobile :: ImmobileNode emptyImmobile = ImmobileNode (NodeRepo [] S.empty S.empty) mkTransfer :: (RandomGen g) => [NodeName] -> Rand g TransferNode mkTransfer immobiles = do -- Transfer nodes are given random routes. May be simplistic. -- Also, some immobile nodes will not be serviced by any transfer nodes. 
numpossiblelocs <- getRandomR transferDestinationsRange possiblelocs <- sequence (replicate numpossiblelocs (randomfrom immobiles)) mkTransferBetween possiblelocs mkTransferBetween :: (RandomGen g) => [NodeName] -> Rand g TransferNode mkTransferBetween possiblelocs = do currentloc <- randomfrom possiblelocs movefreq <- getRandomR transferMoveFrequencyRange -- transfer nodes start out with no files or requests in their repo let repo = (NodeRepo [] S.empty S.empty) return $ TransferNode currentloc possiblelocs movefreq repo randomfrom :: (RandomGen g) => [a] -> Rand g a randomfrom l = do i <- getRandomR (1, length l) return $ l !! (i - 1) -- Seeds the network with the given number of files. Each file is added to -- one of the immobile nodes of the network at random. And, one other node, -- at random, is selected which wants to get the file. seedFiles :: (RandomGen g) => Int -> Network -> Rand g Network seedFiles 0 network = return network seedFiles n network@(Network m t) = do (origink, ImmobileNode originr) <- randnode (destinationk, ImmobileNode destinationr) <- randnode let file = File n let origin = ImmobileNode $ originr { haveFiles = S.insert file (haveFiles originr) } let destination = ImmobileNode $ destinationr { wantFiles = Request file originTTL : wantFiles destinationr } let m' = M.insert origink origin $ M.insert destinationk destination m seedFiles (n - 1) (Network m' t) where randnode = do k <- randomfrom (M.keys m) return (k, fromJust $ M.lookup k m) summarize :: Network -> Network -> String summarize _initial@(Network origis _) _final@(Network is _ts) = format [ ("Total wanted files", show (sum (overis (length . findoriginreqs . wantFiles . repo)))) , ("Wanted files that were not transferred to requesting node", show (sum (overis (S.size . findunsatisfied . repo)))) , ("Nodes that failed to get files", show (map withinitiallocs $ filter (not . S.null . snd) (M.toList $ M.map (findunsatisfied . repo) is))) , ("Total number of files on immobile nodes at end", show (overis (S.size . haveFiles . repo))) --, ("Immobile nodes at end", show is) ] where findoriginreqs = filter (\r -> requestTTL r == originTTL) findunsatisfied r = let wantedfs = S.fromList $ map requestedFile (findoriginreqs (wantFiles r)) in S.difference wantedfs (haveFiles r) repo (ImmobileNode r) = r overis f = map f $ M.elems is format = unlines . map (\(d, s) -> d ++ ": " ++ s) withinitiallocs (name, missingfiles) = (name, S.map addinitialloc missingfiles) addinitialloc f = (f, M.lookup f initiallocs) initiallocs = M.fromList $ concatMap (\(k, v) -> map (\f -> (f, k)) (S.toList $ haveFiles $ repo v)) $ M.toList origis trace :: (Network -> S.Set NodeName) -> [Network] -> String trace tracer networks = show $ go [] S.empty $ map tracer networks where go c old [] = reverse c go c old (new:l) = go ((S.toList $ new `S.difference` old):c) new l traceHaveFile :: File -> Network -> S.Set NodeName traceHaveFile f (Network m _) = S.fromList $ M.keys $ M.filter (\(ImmobileNode r) -> f `S.member` haveFiles r) m traceWantFile :: File -> Network -> S.Set NodeName traceWantFile f (Network m _) = S.fromList $ M.keys $ M.filter (\(ImmobileNode r) -> any wantf (wantFiles r)) m where wantf (Request rf _ttl) = rf == f mocambosNetwork :: (RandomGen g) => Rand g Network mocambosNetwork = do let major = map (immobilenamed . 
fst) communities let minor = map immobilenamed (concatMap snd communities) majortransfer <- mapM mkTransferBetween majorroutes minortransfer <- mapM mkTransferBetween (concatMap minorroutes (concat (replicate 5 communities))) return $ Network (M.fromList (major++minor)) (majortransfer ++ minortransfer) where immobilenamed name = (name, emptyImmobile) -- As a simplification, this only makes 2 hop routes, between minor -- and major communities; no 3-legged routes. minorroutes :: (NodeName, [NodeName]) -> [Route] minorroutes (major, minors) = map (\n -> [major, n]) minors communities :: [(NodeName, [NodeName])] communities = [ ("Taina/SP", [ "brotas" , "vauedo ribera" , "cofundo" , "jao" , "fazenda" ] ) , ("Odomode/RS", [ "moradadapaz" , "pelotas" ] ) , ("MercadoSul/DF", [ "mesquito" , "kalungos" ] ) , ("Coco/PE", [ "xamba" , "alafin" , "terreiros" ] ) , ("Linharinho/ES", [ "monte alegne" ] ) , ("Boneco/BA", [ "barroso" , "lagoa santa" , "terravista" ] ) , ("Zumbidospalmanes/NA", [ "allantana" ] ) , ("Casa Pneta/PA", [ "marajo" ] ) , ("Purarue/PA", [ "oriamina" ] ) , ("Madiba/NET", []) ] majorroutes :: [Route] majorroutes = -- person's routes [ ["Taina/SP", "Odomode/RS"] , ["Taina/SP", "MercadoSul/DF"] , ["MercadoSul/DF", "Boneco/BA"] , ["MercadoSul/DF", "Zumbidospalmanes/NA"] , ["Zumbidospalmanes/NA", "Casa Pneta/PA"] , ["Casa Preta/PA", "Puraque/PA"] , ["Casa Preta/PA", "Linharinho/ES"] , ["Boneco/BA", "Coco/PE"] -- internet connections , ["Taina/SP", "MercadoSul/DF", "Coco/PE", "Puraque/PA", "Odomode/RS", "Madiba/NET"] , ["Taina/SP", "MercadoSul/DF", "Coco/PE", "Puraque/PA", "Odomode/RS", "Madiba/NET"] , ["Taina/SP", "MercadoSul/DF", "Coco/PE", "Puraque/PA", "Odomode/RS", "Madiba/NET"] , ["Taina/SP", "MercadoSul/DF", "Coco/PE", "Puraque/PA", "Odomode/RS", "Madiba/NET"] , ["Taina/SP", "MercadoSul/DF", "Coco/PE", "Puraque/PA", "Odomode/RS", "Madiba/NET"] ]
RedeMocambos/baobaxia
doc/Relatorio/Apen/simroutes.hs
gpl-3.0
13,396
193
22
3,245
3,987
2,145
1,842
-1
-1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Gmail.Users.Labels.Patch -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Updates the specified label. This method supports patch semantics. -- -- /See:/ <https://developers.google.com/gmail/api/ Gmail API Reference> for @gmail.users.labels.patch@. module Network.Google.Resource.Gmail.Users.Labels.Patch ( -- * REST Resource UsersLabelsPatchResource -- * Creating a Request , usersLabelsPatch , UsersLabelsPatch -- * Request Lenses , ulpPayload , ulpUserId , ulpId ) where import Network.Google.Gmail.Types import Network.Google.Prelude -- | A resource alias for @gmail.users.labels.patch@ method which the -- 'UsersLabelsPatch' request conforms to. type UsersLabelsPatchResource = "gmail" :> "v1" :> "users" :> Capture "userId" Text :> "labels" :> Capture "id" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] Label :> Patch '[JSON] Label -- | Updates the specified label. This method supports patch semantics. -- -- /See:/ 'usersLabelsPatch' smart constructor. data UsersLabelsPatch = UsersLabelsPatch' { _ulpPayload :: !Label , _ulpUserId :: !Text , _ulpId :: !Text } deriving (Eq,Show,Data,Typeable,Generic) -- | Creates a value of 'UsersLabelsPatch' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'ulpPayload' -- -- * 'ulpUserId' -- -- * 'ulpId' usersLabelsPatch :: Label -- ^ 'ulpPayload' -> Text -- ^ 'ulpId' -> UsersLabelsPatch usersLabelsPatch pUlpPayload_ pUlpId_ = UsersLabelsPatch' { _ulpPayload = pUlpPayload_ , _ulpUserId = "me" , _ulpId = pUlpId_ } -- | Multipart request metadata. ulpPayload :: Lens' UsersLabelsPatch Label ulpPayload = lens _ulpPayload (\ s a -> s{_ulpPayload = a}) -- | The user\'s email address. The special value me can be used to indicate -- the authenticated user. ulpUserId :: Lens' UsersLabelsPatch Text ulpUserId = lens _ulpUserId (\ s a -> s{_ulpUserId = a}) -- | The ID of the label to update. ulpId :: Lens' UsersLabelsPatch Text ulpId = lens _ulpId (\ s a -> s{_ulpId = a}) instance GoogleRequest UsersLabelsPatch where type Rs UsersLabelsPatch = Label type Scopes UsersLabelsPatch = '["https://mail.google.com/", "https://www.googleapis.com/auth/gmail.labels", "https://www.googleapis.com/auth/gmail.modify"] requestClient UsersLabelsPatch'{..} = go _ulpUserId _ulpId (Just AltJSON) _ulpPayload gmailService where go = buildClient (Proxy :: Proxy UsersLabelsPatchResource) mempty
rueshyna/gogol
gogol-gmail/gen/Network/Google/Resource/Gmail/Users/Labels/Patch.hs
mpl-2.0
3,509
0
15
856
462
277
185
72
1
module Support where import Data.Maybe import Data.List import Libaddutil.ListUtils import Libaddutil.Entity import Libaddutil.Vector import Libaddutil.Primitives import Freekick.Libsoccer.Lineup import Freekick.Libsoccer.Formation import Freekick.Libsoccer.Player import Freekick.Libmatch.PlayerInfo import Freekick.Libmatch.MatchStatus import Freekick.Libmatch.SoccerPhysics import Helpers import Parameters spotRunDistanceValue :: PlayerInfo -> Vector3 -> Float spotRunDistanceValue p v = let l = location (plentity p) in max 0 ((maxRunDistance - vectorLength (diffVector3 v l)) / maxRunDistance) spotInFormationArea :: PlayerInfo -> Lineup -> Formation -> Vector3 -> Bool spotInFormationArea p l f v = if isNothing ar then False else pointInArea2D (fromJust ar) v where ar = getPlayerArea (staticplayer p) l f -- | Returns a value between 1 (near) and 0 (away) indicating if the player -- should run to the ball or not. runToBall :: PlayerInfo -> [PlayerInfo] -> Ball -> Lineup -> Formation -> Float runToBall p ts b l f = let bl = (location (ballentity b)) in if spotInFormationArea p l f bl then 1 else if isNearestToBall p ts b then 1 else 0 -- | Returns a value between 1 (near) and 0 (away) indicating if the player -- should run to the ball or not depending on distance. nearToBall :: PlayerInfo -> [PlayerInfo] -> Ball -> Float nearToBall p ts b = if isNearestToBall p ts b then 1 else spotRunDistanceValue p (location (ballentity b)) isNearestToBall :: PlayerInfo -> [PlayerInfo] -> Ball -> Bool isNearestToBall p ts b = let tes = map plentity ts in null $ filter (< vectorLength (vectorFromTo (plentity p) (ballentity b))) (map (vectorLength . (vectorFromTo (ballentity b))) tes) nearestToBall :: MatchStatus -> PlayerInfo nearestToBall m = nearestPlayerToBall (allPlayingPlayers m) (ball m) nearestPlayerToBall :: [PlayerInfo] -> Ball -> PlayerInfo nearestPlayerToBall p b = fst $ mapPreserveMin (\x -> vectorLength (diffVector3 (location $ plentity x) (location $ ballentity b))) p -- | interposeDistance is the distance in range [0,1] between the player -- holding the ball (0) and the other one (1). interposeDistance :: Float interposeDistance = 0.2 -- TODO: the second player should not simply be the second closest one. interpose :: PlayerInfo -> [PlayerInfo] -> [PlayerInfo] -> Ball -> Float -> (Vector3, Float) interpose _ _ _ _ 0.0 = ((0,0,0), 0) interpose p ts os b maxv = (seekVelocity (plentity p) point maxv, nearToBall p ts b) where point = addVector3 ap (scaleVector3 (diffVector3 ap bp) interposeDistance) ap = (scaleVector3 (velocity ae) t) `addVector3` (location ae) bp = (scaleVector3 (velocity be) t) `addVector3` (location be) t = vectorLength (diffVector3 (location (plentity p)) mp) / maxv mp = scaleVector3 (location ae `addVector3` location be) 0.5 ae = plentity apl be = plentity bpl apl = nearestPlayerToBall os b bpl = nearestPlayerToBall (filter (\x -> idnum (staticplayer apl) /= idnum (staticplayer x)) os) b
anttisalonen/freekick
haskell/freekick_server/ai/src/Support.hs
agpl-3.0
3,189
0
16
683
956
506
450
48
3
module QuantLib.Currencies.Europe ( module QuantLib.Currencies.Europe ) where import QuantLib.Currency -- | Swiss france chf :: Currency chf = Currency { cName = "Swiss franc", cCode = "CHF", cIsoCode = 756, cFracsPerUnit = 100 } -- | Czech koruna czk :: Currency czk = Currency { cName = "Czech koruna", cCode = "CZK", cIsoCode = 203, cFracsPerUnit = 100 } -- | Danish krone dkk :: Currency dkk = Currency { cName = "Danish krone", cCode = "DKK", cIsoCode = 208, cFracsPerUnit = 100 } -- | European Euro eur :: Currency eur = Currency { cName = "European Euro", cCode = "EUR", cIsoCode = 978, cFracsPerUnit = 100 } -- | British pound sterling gbp :: Currency gbp = Currency { cName = "British pound sterling", cCode = "GBP", cIsoCode = 826, cFracsPerUnit = 100 }
paulrzcz/hquantlib
src/QuantLib/Currencies/Europe.hs
lgpl-3.0
1,132
0
6
504
206
134
72
33
1
-- Short Exercise -- 1. data Sum a b = First a | Second b deriving (Eq, Show) instance Functor (Sum a) where fmap _ (First a) = First a fmap f (Second a) = Second (f a) {- 2. Why is a Functor instance that applies the function on to First, Either's Left, impossible? Because the Functor instance has to be of kind * -> * which means the right most types need to be the types which are applied to the Functor function. -}
dmp1ce/Haskell-Programming-Exercises
Chapter 16/Short Exercise.hs
unlicense
430
0
8
95
89
47
42
4
0
module ChangeMood where data Mood = Blah | Woot deriving Show changeMood :: Mood -> Mood changeMood Blah = Woot changeMood _ = Blah
thewoolleyman/haskellbook
04/03/chad/ChangeMood.hs
unlicense
134
0
5
26
42
24
18
5
1
#!/usr/bin/env stack -- stack --resolver lts-8.12 script {-# OPTIONS_GHC -fno-warn-missing-signatures #-} {-# OPTIONS_GHC -fno-warn-type-defaults #-} {-# LANGUAGE OverloadedStrings #-} module S5Logging where import Control.Exception.Safe (onException) import Control.Monad.IO.Class (liftIO) import Control.Monad.Logger.CallStack (logDebug, logError, logInfo, runStdoutLoggingT) import qualified Data.Text as T (pack) s5 = main main :: IO () main = runStdoutLoggingT $ do let fp = "/tmp/JUNK/S5Logging" logInfo $ T.pack $ "Writing to file: " ++ fp liftIO (writeFile fp "Hey there!") `onException` logError "Writing to file failed" logInfo $ T.pack $ "Reading from file: " ++ fp content <- liftIO (readFile fp) `onException` logError "Reading from file failed" logDebug $ T.pack $ "Content read: " ++ content
haroldcarr/learn-haskell-coq-ml-etc
haskell/course/2017-05-snoyman-applied-haskell-at-lambdaconf/S5Logging.hs
unlicense
953
0
12
253
198
110
88
20
1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} module Citeproc.Element ( pLocale , pDate , Attributes(..) , lookupAttribute , ElementParser , runElementParser , parseFailure , getChildren , allChildren , getAttributes , getNameAttributes , getFormatting , getTextContent ) where import Citeproc.Types import Data.Maybe (fromMaybe) import Control.Monad (foldM) import qualified Data.Map as M import qualified Text.XML as X import Data.Text (Text) import qualified Data.Text as T import Control.Monad.Trans.Reader import Control.Monad.Trans.Except import Control.Monad.Trans.Class (lift) newtype Attributes = Attributes (M.Map Text Text) deriving (Show, Semigroup, Monoid, Eq) lookupAttribute :: Text -> Attributes -> Maybe Text lookupAttribute key (Attributes kvs) = M.lookup key kvs type ElementParser = ReaderT (M.Map X.Name Text) (Except CiteprocError) runElementParser :: ElementParser a -> Either CiteprocError a runElementParser p = runExcept (runReaderT p mempty) parseFailure :: String -> ElementParser a parseFailure s = lift $ throwE (CiteprocParseError $ T.pack s) getChildren :: Text -> X.Element -> [X.Element] getChildren name el = [e | X.NodeElement e <- X.elementNodes el , X.nameLocalName (X.elementName e) == name] allChildren :: X.Element -> [X.Element] allChildren el = [e | X.NodeElement e <- X.elementNodes el] getAttributes :: X.Element -> Attributes getAttributes = Attributes . M.mapKeys X.nameLocalName . X.elementAttributes -- Like getAttributes but incorporates inheritable attributes. getNameAttributes :: X.Element -> ElementParser Attributes getNameAttributes node = do nameattr <- ask let xattr = X.elementAttributes node <> nameattr return $ Attributes $ M.mapKeys X.nameLocalName xattr getFormatting :: Attributes -> Formatting getFormatting attr = Formatting { formatLang = Nothing , formatFontStyle = case lookupAttribute "font-style" attr of Just "italic" -> Just ItalicFont Just "oblique" -> Just ObliqueFont Just "normal" -> Just NormalFont _ -> Nothing , formatFontVariant = case lookupAttribute "font-variant" attr of Just "small-caps" -> Just SmallCapsVariant Just "normal" -> Just NormalVariant _ -> Nothing , formatFontWeight = case lookupAttribute "font-weight" attr of Just "bold" -> Just BoldWeight Just "light" -> Just LightWeight Just "normal" -> Just NormalWeight _ -> Nothing , formatTextDecoration = case lookupAttribute "text-decoration" attr of Just "underline" -> Just UnderlineDecoration Just "none" -> Just NoDecoration _ -> Nothing , formatVerticalAlign = case lookupAttribute "vertical-align" attr of Just "sup" -> Just SupAlign Just "sub" -> Just SubAlign Just "baseline" -> Just BaselineAlign _ -> Nothing , formatPrefix = lookupAttribute "prefix" attr , formatSuffix = lookupAttribute "suffix" attr , formatDisplay = case lookupAttribute "display" attr of Just "block" -> Just DisplayBlock Just "left-margin" -> Just DisplayLeftMargin Just "right-inline" -> Just DisplayRightInline Just "indent" -> Just DisplayIndent _ -> Nothing , formatTextCase = case lookupAttribute "text-case" attr of Just "lowercase" -> Just Lowercase Just "uppercase" -> Just Uppercase Just "capitalize-first" -> Just CapitalizeFirst Just "capitalize-all" -> Just CapitalizeAll Just "sentence" -> Just SentenceCase Just "title" -> Just TitleCase _ -> Nothing , formatDelimiter = lookupAttribute "delimiter" attr , formatStripPeriods = lookupAttribute "strip-periods" attr == Just "true" , formatQuotes = lookupAttribute "quotes" attr == Just "true" , formatAffixesInside = False -- 
should be true for layout only } getTextContent :: X.Element -> Text getTextContent e = mconcat [t | X.NodeContent t <- X.elementNodes e] pLocale :: X.Element -> ElementParser Locale pLocale node = do let attr = getAttributes node lang <- case lookupAttribute "lang" attr of Nothing -> return Nothing Just l -> either parseFailure (return . Just) $ parseLang l let styleOpts = mconcat . map getAttributes $ getChildren "style-options" node let addDateElt e m = case e of Element (EDate _ dateType _ _) _ -> M.insert dateType e m _ -> error "pDate returned an element other than EDate" dateElts <- foldr addDateElt mempty <$> mapM pDate (getChildren "date" node) let termNodes = concatMap (getChildren "term") (getChildren "terms" node) terms <- foldM parseTerm mempty termNodes return $ Locale { localeLanguage = lang , localePunctuationInQuote = (== "true") <$> lookupAttribute "punctuation-in-quote" styleOpts , localeLimitDayOrdinalsToDay1 = (== "true") <$> lookupAttribute "limit-day-ordinals-to-day-1" styleOpts , localeDate = dateElts , localeTerms = terms } parseTerm :: M.Map Text [(Term, Text)] -> X.Element -> ElementParser (M.Map Text [(Term, Text)]) parseTerm m node = do let attr = getAttributes node name <- case lookupAttribute "name" attr of Just n -> return n Nothing -> parseFailure "Text node has no name attribute" let single = mconcat $ map getTextContent $ getChildren "single" node let multiple = mconcat $ map getTextContent $ getChildren "multiple" node let txt = getTextContent node let form = case lookupAttribute "form" attr of Just "short" -> Short Just "verb" -> Verb Just "verb-short" -> VerbShort Just "symbol" -> Symbol _ -> Long let gender = case lookupAttribute "gender" attr of Just "masculine" -> Just Masculine Just "feminine" -> Just Feminine _ -> Nothing let genderForm = case lookupAttribute "gender-form" attr of Just "masculine" -> Just Masculine Just "feminine" -> Just Feminine _ -> Nothing let match = case lookupAttribute "match" attr of Just "last-digit" -> Just LastDigit Just "last-two-digits" -> Just LastTwoDigits Just "whole-number" -> Just WholeNumber _ -> Nothing let term = Term { termName = name , termForm = form , termNumber = Nothing , termGender = gender , termGenderForm = genderForm , termMatch = match } let addToList x Nothing = Just [x] addToList x (Just xs) = Just (x:xs) if T.null single then return $ M.alter (addToList (term, txt)) (termName term) m else do let term_single = term{ termNumber = Just Singular } let term_plural = term{ termNumber = Just Plural } return $ M.alter (addToList (term_single, single) . 
addToList (term_plural, multiple)) (termName term) m pDate :: X.Element -> ElementParser (Element a) pDate node = do let attr = getAttributes node let formatting = getFormatting attr let form = lookupAttribute "form" attr let var = toVariable $ fromMaybe mempty $ lookupAttribute "variable" attr let showDateParts = case lookupAttribute "date-parts" attr of Just "year-month-day" -> Just YearMonthDay Just "year-month" -> Just YearMonth Just "year" -> Just Year _ -> Nothing dps <- mapM parseDatePartElement (getChildren "date-part" node) let dateType = case form of Just "numeric" -> LocalizedNumeric Just "text" -> LocalizedText _ -> NonLocalized return $ Element (EDate var dateType showDateParts dps) formatting parseDatePartElement :: X.Element -> ElementParser DP parseDatePartElement node = do let attr = getAttributes node let formatting = getFormatting attr let name = case lookupAttribute "name" attr of Just "day" -> DPDay Just "month" -> DPMonth _ -> DPYear let form = case lookupAttribute "form" attr of Just "numeric" -> DPNumeric Just "numeric-leading-zeros" -> DPNumericLeadingZeros Just "ordinal" -> DPOrdinal Just "long" -> DPLong Just "short" -> DPShort _ | name == DPDay -> DPNumeric | otherwise -> DPLong let rangeDelim = fromMaybe "–" $ lookupAttribute "range-delimiter" attr return $ DP name form rangeDelim formatting
jgm/citeproc
src/Citeproc/Element.hs
bsd-2-clause
9,597
0
16
3,198
2,472
1,202
1,270
212
24
{-# LANGUAGE RankNTypes #-} {-# LANGUAGE Rank2Types #-} module Data.Conduit.Cereal.Internal ( ConduitErrorHandler , SinkErrorHandler , SinkTerminationHandler , mkConduitGet , mkSinkGet ) where import Control.Monad (forever, when) import qualified Data.ByteString as BS import qualified Data.Conduit as C import Data.Serialize hiding (get, put) -- | What should we do if the Get fails? type ConduitErrorHandler m o = String -> C.Conduit BS.ByteString m o type SinkErrorHandler m r = String -> C.Consumer BS.ByteString m r -- | What should we do if the stream is done before the Get is done? type SinkTerminationHandler m r = (BS.ByteString -> Result r) -> C.Consumer BS.ByteString m r -- | Construct a conduitGet with the specified 'ErrorHandler' mkConduitGet :: Monad m => ConduitErrorHandler m o -> Get o -> C.Conduit BS.ByteString m o mkConduitGet errorHandler get = consume True (runGetPartial get) [] BS.empty where pull f b s | BS.null s = C.await >>= maybe (when (not $ null b) (C.leftover $ BS.concat $ reverse b)) (pull f b) | otherwise = consume False f b s consume initial f b s = case f s of Fail msg _ -> do when (not $ null b) (C.leftover $ BS.concat $ reverse consumed) errorHandler msg Partial p -> pull p consumed BS.empty Done a s' -> case initial of -- this only works because the Get will either _always_ consume no input, or _never_ consume no input. True -> forever $ C.yield a False -> C.yield a >> pull (runGetPartial get) [] s' -- False -> C.yield a >> C.leftover s' >> mkConduitGet errorHandler get where consumed = s : b -- | Construct a sinkGet with the specified 'ErrorHandler' and 'TerminationHandler' mkSinkGet :: Monad m => SinkErrorHandler m r -> SinkTerminationHandler m r -> Get r -> C.Consumer BS.ByteString m r mkSinkGet errorHandler terminationHandler get = consume (runGetPartial get) [] BS.empty where pull f b s | BS.null s = C.await >>= \ x -> case x of Nothing -> when (not $ null b) (C.leftover $ BS.concat $ reverse b) >> terminationHandler f Just a -> pull f b a | otherwise = consume f b s consume f b s = case f s of Fail msg _ -> do when (not $ null b) (C.leftover $ BS.concat $ reverse consumed) errorHandler msg Partial p -> pull p consumed BS.empty Done r s' -> when (not $ BS.null s') (C.leftover s') >> return r where consumed = s : b
litherum/cereal-conduit
Data/Conduit/Cereal/Internal.hs
bsd-2-clause
2,746
0
18
866
825
411
414
50
4
-- | Provides functionality of rendering the application model. module Renderer ( Descriptor , initialize , terminate , render ) where import Foreign.Marshal.Array import Foreign.Ptr import Foreign.Storable import Graphics.Rendering.OpenGL import Linear import System.IO import qualified ApplicationModel as AM import qualified LoadShaders as LS import qualified UniformLinear as UL -- | Checks OpenGL errors, and Writes to stderr when errors occur. checkError :: String -- ^ a function name that called this -> IO () checkError functionName = get errors >>= mapM_ reportError where reportError (Error category message) = do hPutStrLn stderr $ (show category) ++ " in " ++ functionName ++ ": " ++ message -- | Converts an offset value to the Ptr value. bufferOffset :: Integral a => a -- ^ an offset value -> Ptr b -- ^ the Ptr value bufferOffset = plusPtr nullPtr . fromIntegral -- | The byte size of a memory area that is converted from a list. arrayByteSize :: (Storable a) => [a] -- ^ a list -> Int arrayByteSize ls = (sizeOf (head ls)) * (length ls) -- | Represents a set of OpenGL objects for rendering information. data Descriptor = Descriptor BufferObject VertexArrayObject BufferObject Program ArrayIndex NumArrayIndices -- | Initializes a buffer object. initializeBuffer :: (Storable a) => BufferTarget -> [a] -> IO BufferObject initializeBuffer t array = do buffer <- genObjectName bindBuffer t $= Just buffer withArray array $ \ptr -> do bufferData t $= (fromIntegral $ arrayByteSize array, ptr, StaticDraw) bindBuffer ElementArrayBuffer $= Nothing return buffer -- | Initializes OpenGL objects. initialize :: IO Descriptor initialize = do -- meshes let vertices = -- vertex attribute format : x, y, z, r, g, b, a [ (-0.90), (-0.90), 0.0, 1.0, 0.0, 0.0, 1.0 , 0.90, (-0.90), 0.0, 0.0, 1.0, 0.0, 1.0 , 0.90, 0.90, 0.0, 1.0, 1.0, 1.0, 1.0 , (-0.90), 0.90, 0.0, 0.0, 0.0, 1.0, 1.0 ] :: [GLfloat] numPositionElements = 3 numColorElements = 4 offsetPosition = 0 offsetColor = offsetPosition + numPositionElements sizeElement = sizeOf (head vertices) sizeVertex = fromIntegral (sizeElement * (numPositionElements + numColorElements)) let indices = [ 0, 1, 2 , 2, 3, 0 ] :: [GLushort] vertexBuffer <- initializeBuffer ArrayBuffer vertices attributes <- genObjectName bindVertexArrayObject $= Just attributes bindBuffer ArrayBuffer $= Just vertexBuffer let vPosition = AttribLocation 0 vColor = AttribLocation 1 vertexAttribPointer vPosition $= (ToFloat, VertexArrayDescriptor (fromIntegral numPositionElements) Float sizeVertex (bufferOffset (offsetPosition * sizeElement))) vertexAttribPointer vColor $= (ToFloat, VertexArrayDescriptor (fromIntegral numColorElements) Float sizeVertex (bufferOffset (offsetColor * sizeElement))) vertexAttribArray vPosition $= Enabled vertexAttribArray vColor $= Enabled bindBuffer ArrayBuffer $= Nothing bindVertexArrayObject $= Nothing indexBuffer <- initializeBuffer ElementArrayBuffer indices program <- LS.loadShaders [ LS.ShaderInfo VertexShader (LS.FileSource "rectangle.vert") , LS.ShaderInfo FragmentShader (LS.FileSource "rectangle.frag") ] currentProgram $= Just program checkError "initialize" return $ Descriptor vertexBuffer attributes indexBuffer program 0 (fromIntegral $ length indices) -- | Terminates OpenGL objects. 
terminate :: Descriptor -> IO () terminate (Descriptor vertexBuffer attributes indexBuffer program _ _) = do currentProgram $= Nothing shaders <- get $ attachedShaders program mapM_ releaseShader shaders deleteObjectName program deleteObjectName indexBuffer deleteObjectName attributes deleteObjectName vertexBuffer checkError "terminate" where releaseShader shader = do detachShader program shader deleteObjectName shader -- | A world matrix for the rectangle. worldMatrix :: AM.RectangleData -- data of the rectangle -> M44 GLfloat -- ^ a world matrix worldMatrix (AM.RectangleData x y) = V4 (V4 1 0 0 (fx / 50)) (V4 0 1 0 (fy / 50)) (V4 0 0 1 0) (V4 0 0 0 1) where fx = realToFrac x fy = realToFrac y -- | Renders the application model with a descriptor. render :: Descriptor -- ^ a descriptor -> AM.RectangleData -- ^ data of the rectangle -> IO () render (Descriptor _ attributes indexBuffer program rectangleOffset rectangleNumIndices) td = do worldLocation <- get $ uniformLocation program "world" UL.uniformMatrix4fv worldLocation $= [worldMatrix td] clear [ ColorBuffer ] bindVertexArrayObject $= Just attributes bindBuffer ElementArrayBuffer $= Just indexBuffer drawElements Triangles rectangleNumIndices UnsignedShort (bufferOffset rectangleOffset) bindBuffer ElementArrayBuffer $= Nothing bindVertexArrayObject $= Nothing flush checkError "render"
fujiyan/toriaezuzakki
haskell/glfw/keyboard/Renderer.hs
bsd-2-clause
5,260
0
15
1,277
1,300
661
639
120
1
{-# OPTIONS -fglasgow-exts #-} ----------------------------------------------------------------------------- {-| Module : QStyleOptionMenuItem.hs Copyright : (c) David Harley 2010 Project : qtHaskell Version : 1.1.4 Modified : 2010-09-02 17:02:36 Warning : this file is machine generated - do not modify. --} ----------------------------------------------------------------------------- module Qtc.Enums.Gui.QStyleOptionMenuItem ( QStyleOptionMenuItemStyleOptionType , QStyleOptionMenuItemStyleOptionVersion , MenuItemType, eDefaultItem, eSeparator, eSubMenu, eScroller, eTearOff, eMargin, eEmptyArea , CheckType, eNotCheckable, eExclusive, eNonExclusive ) where import Qtc.Classes.Base import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr) import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int) import Qtc.Enums.Base import Qtc.Enums.Classes.Core data CQStyleOptionMenuItemStyleOptionType a = CQStyleOptionMenuItemStyleOptionType a type QStyleOptionMenuItemStyleOptionType = QEnum(CQStyleOptionMenuItemStyleOptionType Int) ieQStyleOptionMenuItemStyleOptionType :: Int -> QStyleOptionMenuItemStyleOptionType ieQStyleOptionMenuItemStyleOptionType x = QEnum (CQStyleOptionMenuItemStyleOptionType x) instance QEnumC (CQStyleOptionMenuItemStyleOptionType Int) where qEnum_toInt (QEnum (CQStyleOptionMenuItemStyleOptionType x)) = x qEnum_fromInt x = QEnum (CQStyleOptionMenuItemStyleOptionType x) withQEnumResult x = do ti <- x return $ qEnum_fromInt $ fromIntegral ti withQEnumListResult x = do til <- x return $ map qEnum_fromInt til instance Qcs (QObject c -> QStyleOptionMenuItemStyleOptionType -> IO ()) where connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler = do funptr <- wrapSlotHandler_int slotHandlerWrapper_int stptr <- newStablePtr (Wrap _handler) withObjectPtr _qsig_obj $ \cobj_sig -> withCWString _qsig_nam $ \cstr_sig -> withObjectPtr _qslt_obj $ \cobj_slt -> withCWString _qslt_nam $ \cstr_slt -> qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr) return () where slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO () slotHandlerWrapper_int funptr stptr qobjptr cint = do qobj <- qObjectFromPtr qobjptr let hint = fromCInt cint if (objectIsNull qobj) then do when (stptr/=ptrNull) (freeStablePtr (castPtrToStablePtr stptr)) when (funptr/=ptrNull) (freeHaskellFunPtr (castPtrToFunPtr funptr)) else _handler qobj (qEnum_fromInt hint) return () instance QeType QStyleOptionMenuItemStyleOptionType where eType = ieQStyleOptionMenuItemStyleOptionType $ 4 data CQStyleOptionMenuItemStyleOptionVersion a = CQStyleOptionMenuItemStyleOptionVersion a type QStyleOptionMenuItemStyleOptionVersion = QEnum(CQStyleOptionMenuItemStyleOptionVersion Int) ieQStyleOptionMenuItemStyleOptionVersion :: Int -> QStyleOptionMenuItemStyleOptionVersion ieQStyleOptionMenuItemStyleOptionVersion x = QEnum (CQStyleOptionMenuItemStyleOptionVersion x) instance QEnumC (CQStyleOptionMenuItemStyleOptionVersion Int) where qEnum_toInt (QEnum (CQStyleOptionMenuItemStyleOptionVersion x)) = x qEnum_fromInt x = QEnum (CQStyleOptionMenuItemStyleOptionVersion x) withQEnumResult x = do ti <- x return $ qEnum_fromInt $ fromIntegral ti withQEnumListResult x = do til <- x return $ map qEnum_fromInt til instance Qcs (QObject c -> QStyleOptionMenuItemStyleOptionVersion -> IO ()) where connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler = do funptr <- wrapSlotHandler_int slotHandlerWrapper_int stptr <- newStablePtr (Wrap _handler) 
withObjectPtr _qsig_obj $ \cobj_sig -> withCWString _qsig_nam $ \cstr_sig -> withObjectPtr _qslt_obj $ \cobj_slt -> withCWString _qslt_nam $ \cstr_slt -> qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr) return () where slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO () slotHandlerWrapper_int funptr stptr qobjptr cint = do qobj <- qObjectFromPtr qobjptr let hint = fromCInt cint if (objectIsNull qobj) then do when (stptr/=ptrNull) (freeStablePtr (castPtrToStablePtr stptr)) when (funptr/=ptrNull) (freeHaskellFunPtr (castPtrToFunPtr funptr)) else _handler qobj (qEnum_fromInt hint) return () instance QeVersion QStyleOptionMenuItemStyleOptionVersion where eVersion = ieQStyleOptionMenuItemStyleOptionVersion $ 1 data CMenuItemType a = CMenuItemType a type MenuItemType = QEnum(CMenuItemType Int) ieMenuItemType :: Int -> MenuItemType ieMenuItemType x = QEnum (CMenuItemType x) instance QEnumC (CMenuItemType Int) where qEnum_toInt (QEnum (CMenuItemType x)) = x qEnum_fromInt x = QEnum (CMenuItemType x) withQEnumResult x = do ti <- x return $ qEnum_fromInt $ fromIntegral ti withQEnumListResult x = do til <- x return $ map qEnum_fromInt til instance Qcs (QObject c -> MenuItemType -> IO ()) where connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler = do funptr <- wrapSlotHandler_int slotHandlerWrapper_int stptr <- newStablePtr (Wrap _handler) withObjectPtr _qsig_obj $ \cobj_sig -> withCWString _qsig_nam $ \cstr_sig -> withObjectPtr _qslt_obj $ \cobj_slt -> withCWString _qslt_nam $ \cstr_slt -> qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr) return () where slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO () slotHandlerWrapper_int funptr stptr qobjptr cint = do qobj <- qObjectFromPtr qobjptr let hint = fromCInt cint if (objectIsNull qobj) then do when (stptr/=ptrNull) (freeStablePtr (castPtrToStablePtr stptr)) when (funptr/=ptrNull) (freeHaskellFunPtr (castPtrToFunPtr funptr)) else _handler qobj (qEnum_fromInt hint) return () instance QeNormal MenuItemType where eNormal = ieMenuItemType $ 0 eDefaultItem :: MenuItemType eDefaultItem = ieMenuItemType $ 1 eSeparator :: MenuItemType eSeparator = ieMenuItemType $ 2 eSubMenu :: MenuItemType eSubMenu = ieMenuItemType $ 3 eScroller :: MenuItemType eScroller = ieMenuItemType $ 4 eTearOff :: MenuItemType eTearOff = ieMenuItemType $ 5 eMargin :: MenuItemType eMargin = ieMenuItemType $ 6 eEmptyArea :: MenuItemType eEmptyArea = ieMenuItemType $ 7 data CCheckType a = CCheckType a type CheckType = QEnum(CCheckType Int) ieCheckType :: Int -> CheckType ieCheckType x = QEnum (CCheckType x) instance QEnumC (CCheckType Int) where qEnum_toInt (QEnum (CCheckType x)) = x qEnum_fromInt x = QEnum (CCheckType x) withQEnumResult x = do ti <- x return $ qEnum_fromInt $ fromIntegral ti withQEnumListResult x = do til <- x return $ map qEnum_fromInt til instance Qcs (QObject c -> CheckType -> IO ()) where connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler = do funptr <- wrapSlotHandler_int slotHandlerWrapper_int stptr <- newStablePtr (Wrap _handler) withObjectPtr _qsig_obj $ \cobj_sig -> withCWString _qsig_nam $ \cstr_sig -> withObjectPtr _qslt_obj $ \cobj_slt -> withCWString _qslt_nam $ \cstr_slt -> qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr) return () where slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO () 
slotHandlerWrapper_int funptr stptr qobjptr cint = do qobj <- qObjectFromPtr qobjptr let hint = fromCInt cint if (objectIsNull qobj) then do when (stptr/=ptrNull) (freeStablePtr (castPtrToStablePtr stptr)) when (funptr/=ptrNull) (freeHaskellFunPtr (castPtrToFunPtr funptr)) else _handler qobj (qEnum_fromInt hint) return () eNotCheckable :: CheckType eNotCheckable = ieCheckType $ 0 eExclusive :: CheckType eExclusive = ieCheckType $ 1 eNonExclusive :: CheckType eNonExclusive = ieCheckType $ 2
uduki/hsQt
Qtc/Enums/Gui/QStyleOptionMenuItem.hs
bsd-2-clause
8,527
0
18
1,837
2,231
1,099
1,132
198
1
module Import ( module Prelude , module Yesod , module Foundation , module Settings.StaticFiles , module Data.Monoid , module Control.Applicative , Text #if __GLASGOW_HASKELL__ < 740 , (<>) #endif ) where import Prelude hiding (writeFile, readFile, head, tail, init, last) import Yesod hiding (Route(..)) import Foundation import Data.Monoid (Monoid (mappend, mempty, mconcat)) import Control.Applicative ((<$>), (<*>), pure) import Data.Text (Text) import Settings.StaticFiles #if __GLASGOW_HASKELL__ < 740 infixr 5 <> (<>) :: Monoid m => m -> m -> m (<>) = mappend #endif
LambdaLuminaries/illum
Import.hs
bsd-2-clause
614
0
7
123
175
118
57
19
1
{-# LANGUAGE Safe          #-}
{-# LANGUAGE UnicodeSyntax #-}

module Shake.It.FileSystem.Dir
  ( removeDirIfExists
  , copyDir
  ) where

import Control.Exception
import Control.Monad    (forM_)
import Prelude          hiding (catch)
import System.Directory
import System.IO.Error  hiding (catch)
import System.FilePath  ((</>))

import Control.Eternal.Syntax

removeDirIfExists ∷ FilePath → IO ()
removeDirIfExists δ =
  removeDirectoryRecursive δ `catch` handleExists
 where handleExists ε
         | isDoesNotExistError ε = return ()
         | otherwise             = throwIO ε

copyDir ∷ FilePath -- source
        → FilePath -- destination
        → IO ()
copyDir src dst = do
  createDirectory dst
  content ← getDirectoryContents src
  let xs = filter (∉ [".", ".."]) content
  forM_ xs $ \name →
    let srcPath = src </> name
        dstPath = dst </> name
    in doesDirectoryExist srcPath >>= \dirExist →
         if dirExist
           then copyDir srcPath dstPath
           else copyFile srcPath dstPath
Heather/Shake.it.off
src/Shake/It/FileSystem/Dir.hs
bsd-3-clause
1,145
0
14
366
283
151
132
29
2
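A minimal usage sketch for the two helpers exported by the module above, assuming the Shake.it.off package providing Shake.It.FileSystem.Dir is on the package path; the directory names are hypothetical.

module Main where

import Shake.It.FileSystem.Dir (copyDir, removeDirIfExists)

main :: IO ()
main = do
  -- wipe any stale copy first (a no-op when the directory is missing),
  -- then mirror the source tree into the destination
  removeDirIfExists "dist/site"
  copyDir "static" "dist/site"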
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.

{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}

module Duckling.Volume.CA.Rules
  ( rules
  ) where

import Data.String
import Data.Text (Text)
import Prelude

import Duckling.Dimensions.Types
import Duckling.Types
import Duckling.Volume.Helpers
import qualified Duckling.Volume.Types as TVolume

volumes :: [(Text, String, TVolume.Unit)]
volumes = [ ("<latent vol> ml"    , "m(l|ililitres?)", TVolume.Millilitre)
          , ("<vol> hectoliters"  , "(hectolitres?)" , TVolume.Hectolitre)
          , ("<vol> liters"       , "l(itres?)?"     , TVolume.Litre)
          , ("<latent vol> gallon", "gal(ons|ó)"     , TVolume.Gallon)
          ]

rulesVolumes :: [Rule]
rulesVolumes = map go volumes
  where
    go :: (Text, String, TVolume.Unit) -> Rule
    go (name, regexPattern, u) = Rule
      { name = name
      , pattern = [ regex regexPattern ]
      , prod = \_ -> Just $ Token Volume $ unitOnly u
      }

fractions :: [(Text, String, Double)]
fractions = [ ("half", "mig", 1/2)
            ]

rulesFractionalVolume :: [Rule]
rulesFractionalVolume = map go fractions
  where
    go :: (Text, String, Double) -> Rule
    go (name, regexPattern, f) = Rule
      { name = name
      , pattern = [ regex regexPattern
                  , Predicate isUnitOnly
                  ]
      , prod = \case
          (_:
           Token Volume TVolume.VolumeData{TVolume.unit = Just u}:
           _) -> Just $ Token Volume $ volume u f
          _ -> Nothing
      }

rules :: [Rule]
rules = rulesVolumes
  ++ rulesFractionalVolume
facebookincubator/duckling
Duckling/Volume/CA/Rules.hs
bsd-3-clause
1,784
0
19
474
451
270
181
45
2
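Both rule lists in the row above are table-driven, so extending the dimension is a matter of adding tuples rather than writing new Rule values by hand. The sketch below shows the shape of such an extension for the fractions table; it is written as if it lived inside the same module (so Text and fractions are in scope), and the Catalan regex is a guess made for illustration, not taken from Duckling.

-- Hypothetical extra row for the fractions table above; "quarts?" as the
-- Catalan reading of one quarter is an assumption made for illustration.
-- Appending it to `fractions` is all that would be needed, because
-- rulesFractionalVolume is generated by mapping over that table.
fractionsWithQuarter :: [(Text, String, Double)]
fractionsWithQuarter = fractions ++ [ ("quarter", "quarts?", 1/4) ]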
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module      :  Graphics.GL.ARB.TextureBorderClamp
-- Copyright   :  (c) Sven Panne 2019
-- License     :  BSD3
--
-- Maintainer  :  Sven Panne <[email protected]>
-- Stability   :  stable
-- Portability :  portable
--
--------------------------------------------------------------------------------

module Graphics.GL.ARB.TextureBorderClamp (
  -- * Extension Support
  glGetARBTextureBorderClamp,
  gl_ARB_texture_border_clamp,
  -- * Enums
  pattern GL_CLAMP_TO_BORDER_ARB
) where

import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
haskell-opengl/OpenGLRaw
src/Graphics/GL/ARB/TextureBorderClamp.hs
bsd-3-clause
674
0
5
91
47
36
11
7
0
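The module above only re-exports an extension predicate and one token, so typical client code gates on the predicate before relying on the enum. The sketch below assumes the OpenGLRaw convention that glGetARBTextureBorderClamp returns a Bool in any MonadIO and that a GL context is already current; both are assumptions to verify against the installed version of the package.

module Main where

import Control.Monad (when)
import Graphics.GL.ARB.TextureBorderClamp

main :: IO ()
main = do
  -- Assumption: glGetARBTextureBorderClamp :: MonadIO m => m Bool, answered
  -- from the current context's extension list.
  supported <- glGetARBTextureBorderClamp
  when supported $
    putStrLn ("GL_CLAMP_TO_BORDER_ARB = " ++ show GL_CLAMP_TO_BORDER_ARB)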
{-# LANGUAGE GeneralizedNewtypeDeriving #-} import Data.Char (digitToInt) import Data.List import Prelude hiding (drop) newtype K = K Integer deriving (Show, Enum, Num, Eq) newtype ValueConsumed = ValueConsumed Integer deriving (Show, Enum, Num, Eq, Ord) data InfStr = InfStr String K ValueConsumed deriving (Show) _infStr (InfStr a _ _) = a _infK (InfStr _ b _) = b _infVC (InfStr _ _ c) = c s0 = 14025256 f n = n*n `mod` 20300713 str :: Integer -> String str n = show n ++ str (f n) infStr :: String infStr = str s0 -- an infinite list of infinite strings allInfStrings :: [String] allInfStrings = allInfStrings' 0 allInfStrings' :: Integer -> [String] allInfStrings' n = genericDrop n infStr : allInfStrings' (n+1) allNumberedInfStrings :: [InfStr] allNumberedInfStrings = map (\(x,y,z) -> InfStr x y z) $ zip3 allInfStrings [1..] $ repeat 0 -------------------------------------------------------------------------------- main = print $ allPKs 9 1 allNumberedInfStrings bla x = allPKs x 1 allNumberedInfStrings allPKs :: Integer -- max k -> Integer -- current k -> [InfStr] -> [K] allPKs maxK curK _ | maxK < curK = [] allPKs maxK curK strings = [k] ++ (allPKs maxK (curK + 1) newStrings) where (k, newStrings) = findK (curK) strings -- findK 10 allNumberedInfStrings works fine; doesn't with allPKs findK :: Integer -- k -> [InfStr] -> (K, [InfStr]) findK k strings = findK' k strings [] findK' :: Integer -> [InfStr] -> [InfStr] -> (K, [InfStr]) findK' k (h:ts) old | isContainedInString = (_infK newString, old ++ [newString] ++ ts) | otherwise = findK' k ts (old ++ [newString]) where (isContainedInString, newString) = testForK k h (_infVC h) testForK :: Integer -- k -> InfStr -- infinite string representation -> ValueConsumed -- accumulator for speed (hopefully) -> (Bool, InfStr) -- actual result and new, shorter -- string representation for speed testForK k string acc | maybeK == fromIntegral k = {-# SCC "==case" #-} (True, newInfStr) | maybeK > fromIntegral k = {-# SCC ">case" #-} (False, string) | otherwise = {-# SCC "otherwiseCase" #-} testForK k constrStr maybeK where infS = _infStr string tinfS = tail infS infK = _infK string constrStr = InfStr tinfS infK acc headCount = ValueConsumed ( fromIntegral $ digitToInt $ head $ infS) maybeK = {-# SCC "maybeK" #-} acc + headCount newInfStr = InfStr (tinfS) (infK) (_infVC string + headCount) t01 k = testForK k string acc where string = InfStr "140252567410149584700380" (K 1) (ValueConsumed 0) acc = ValueConsumed 0 -- should be 2, not 1 t02 = findK 4 strings where strings = [ InfStr "40252567410149584700380" (K 1) (ValueConsumed 1), InfStr "40252567410149584700380" (K 2) (ValueConsumed 0), InfStr "52567410149584700380" (K 3) (ValueConsumed 2) ] -- should be false t03 = testForK 4 (InfStr "40252567410149584700380" (K 1) (ValueConsumed 1)) 0 -- 1 second for max k == 500 -- 7 seconds for max k == 1000 -- 54 seconds for max k == 2000 -- max k == 2*10^15 ? -- new: 9 seconds for 10000 -- new: 39 seconds for 20000 -- 7 minutes for 100000 (10^5) -- after refactoring: 2.2 seconds for 10000 O_o -- after refactoring: 8.5 seconds for 20000 -- after refactoring: 3:40 minutes for 100000 -- more refactoring: 5.8 seconds for 10000 :( -- more refactoring: 25.3 seconds for 20000 :( -- further refactoring: 18.7 seconds for 10000 -- Untested allPKs maxK strings = map fst [ findK 1 strings , findK 2 (snd (findK 1 strings)) , findK 3 (snd (findK 2 (snd (findK 1 strings)))) ,findK 4 (snd (findK 3 (snd (findK 2 (snd (findK 1 strings)))))) ] allPKs maxK strings = (map (fst . fst) . 
take 4 . tail) (iterate (\((_,strings),n) -> (findK n strings,n + 1)) ((undefined,strings),1)) allPKs maxK strings = (map (fst . fst) . tail) (unfoldr (\((_,strings),n) -> guard (n <= 4) >> return (findK n strings,n + 1)) ((undefined,strings),1)) allPKs maxK strings = (map fst . tail) (scanl (\(_,strings) n -> findK n strings) (undefined,strings) [1 .. 4]) allPKs maxK strings = (map fst . tail) (scanl (flip findK . snd) (undefined,strings) [1 .. 4])
stulli/projectEuler
eu238_old.hs
bsd-3-clause
4,624
0
19
1,296
1,484
794
690
88
1
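The search in eu238_old.hs above walks an infinite digit stream built from the recurrence n -> n*n mod 20300713 seeded with 14025256. The standalone snippet below only reproduces that stream and prints its first 40 digits, which is a quick cross-check for the str/infStr definitions in the row; it is an illustrative aid, not part of the repository.

module Main where

-- Rebuild the digit stream used above and show a short prefix of it.
main :: IO ()
main = putStrLn (take 40 digitStream)
  where
    step :: Integer -> Integer
    step n = n * n `mod` 20300713
    digitStream = concatMap show (iterate step 14025256)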
module Main where

import Test.HUnit
import System.Exit
import System.Directory
import qualified System.FilePath as FilePath

import BrownPLT.JavaScript.Parser
import BrownPLT.JavaScript.PrettyPrint
import BrownPLT.JavaScript.Syntax

testDir = "tests/parse-pretty"

parsePrettyTest filename = TestLabel filename $ TestCase $ do
  js <- parseJavaScriptFromFile filename
  let str = renderStatements js
  case parseScriptFromString "" str of
    Left err -> assertFailure (show err)
    Right (Script _ js') -> do
      let str' = renderStatements js'
      assertBool "pretty-printed code should re-parse" (str == str')

main = do
  allFiles <- getDirectoryContents testDir
  let files = map (testDir `FilePath.combine`) $
                filter (\x -> FilePath.takeExtension x == ".js") allFiles
  let parsePretty = TestLabel "parser - printer composition"
                      (TestList (map parsePrettyTest files))
  results <- runTestTT parsePretty
  if errors results > 0 || failures results > 0
    then exitFailure
    else putStrLn "All tests passed."
brownplt/webbits
src/UnitTest.hs
bsd-3-clause
1,041
0
16
194
296
147
149
27
2
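The same parse/pretty-print round trip as UnitTest.hs above, but run on an inline snippet instead of the tests/parse-pretty directory, which makes it easy to try the property on a single program from GHCi. The JavaScript string is an arbitrary example and the sketch uses only the imports and functions already used in the row.

module Main where

import BrownPLT.JavaScript.Parser
import BrownPLT.JavaScript.PrettyPrint
import BrownPLT.JavaScript.Syntax

main :: IO ()
main =
  -- parse an inline program, pretty-print it, and echo the result
  case parseScriptFromString "" "var x = 1; function f(y) { return x + y; }" of
    Left err            -> print err
    Right (Script _ js) -> putStrLn (renderStatements js)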
import Test.Tasty
import Test.Tasty.QuickCheck -- for property testing
import Test.Tasty.HUnit      -- for case testing

import ANN.Simple

--------------------------------------------------------------------------------

main :: IO ()
main = defaultMain tests

tests :: TestTree
tests = testGroup "Tests" [propertyTests, unitTests]

propertyTests :: TestTree
propertyTests = testGroup "Property Tests" [ annPropertyTests]

unitTests :: TestTree
unitTests = testGroup "Unit Tests" [ annUnitTests]

--------------------------------------------------------------------------------
-- ARTIFICIAL NEURAL NETWORKS Testing Suite
--------------------------------------------------------------------------------

annPropertyTests :: TestTree
annPropertyTests = testGroup "Artificial Neural Network Property Tests"
  [
  ]

annUnitTests :: TestTree
annUnitTests = testGroup "Artificial Neural Network Unit Tests"
  [
  ]

--------------------------------------------------------------------------------
prince-aly/MLcyberSquad
test/Tests.hs
bsd-3-clause
997
0
6
115
139
80
59
18
1
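Both test groups in the row above are still empty. The placeholder cases below show the shape they could take using only the tasty combinators the file already imports; real properties about ANN.Simple would replace them, since that module's API is not shown in this row.

-- Placeholder tests, written as if added to Tests.hs (which already imports
-- Test.Tasty, Test.Tasty.QuickCheck and Test.Tasty.HUnit). They exercise
-- only standard list functions, not ANN.Simple.
annPropertyTestsExample :: TestTree
annPropertyTestsExample = testGroup "Artificial Neural Network Property Tests"
  [ testProperty "reverse is an involution" $
      \xs -> reverse (reverse xs) == (xs :: [Int])
  ]

annUnitTestsExample :: TestTree
annUnitTestsExample = testGroup "Artificial Neural Network Unit Tests"
  [ testCase "sum of an empty list is zero" $
      sum ([] :: [Int]) @?= 0
  ]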
{-# LANGUAGE CPP #-} {-# LANGUAGE Rank2Types #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE Trustworthy #-} #include "lens-common.h" {-# OPTIONS_GHC -fno-warn-orphans #-} ---------------------------------------------------------------------------- -- | -- Module : Control.Lens.Fold -- Copyright : (C) 2012-16 Edward Kmett -- License : BSD-style (see the file LICENSE) -- Maintainer : Edward Kmett <[email protected]> -- Stability : provisional -- Portability : Rank2Types -- -- A @'Fold' s a@ is a generalization of something 'Foldable'. It allows -- you to extract multiple results from a container. A 'Foldable' container -- can be characterized by the behavior of -- @'Data.Foldable.foldMap' :: ('Foldable' t, 'Monoid' m) => (a -> m) -> t a -> m@. -- Since we want to be able to work with monomorphic containers, we could -- generalize this signature to @forall m. 'Monoid' m => (a -> m) -> s -> m@, -- and then decorate it with 'Const' to obtain -- -- @type 'Fold' s a = forall m. 'Monoid' m => 'Getting' m s a@ -- -- Every 'Getter' is a valid 'Fold' that simply doesn't use the 'Monoid' -- it is passed. -- -- In practice the type we use is slightly more complicated to allow for -- better error messages and for it to be transformed by certain -- 'Applicative' transformers. -- -- Everything you can do with a 'Foldable' container, you can with with a 'Fold' and there are -- combinators that generalize the usual 'Foldable' operations here. ---------------------------------------------------------------------------- module Control.Lens.Fold ( -- * Folds Fold , IndexedFold -- * Getting Started , (^..) , (^?) , (^?!) , pre, ipre , preview, previews, ipreview, ipreviews , preuse, preuses, ipreuse, ipreuses , has, hasn't -- ** Building Folds , folding, ifolding , foldring, ifoldring , folded , folded64 , unfolded , iterated , filtered , filteredBy , backwards , repeated , replicated , cycled , takingWhile , droppingWhile , worded, lined -- ** Folding , foldMapOf, foldOf , foldrOf, foldlOf , toListOf, toNonEmptyOf , anyOf, allOf, noneOf , andOf, orOf , productOf, sumOf , traverseOf_, forOf_, sequenceAOf_ , traverse1Of_, for1Of_, sequence1Of_ , mapMOf_, forMOf_, sequenceOf_ , asumOf, msumOf , concatMapOf, concatOf , elemOf, notElemOf , lengthOf , nullOf, notNullOf , firstOf, first1Of, lastOf, last1Of , maximumOf, maximum1Of, minimumOf, minimum1Of , maximumByOf, minimumByOf , findOf , findMOf , foldrOf', foldlOf' , foldr1Of, foldl1Of , foldr1Of', foldl1Of' , foldrMOf, foldlMOf , lookupOf -- * Indexed Folds , (^@..) , (^@?) , (^@?!) 
-- ** Indexed Folding , ifoldMapOf , ifoldrOf , ifoldlOf , ianyOf , iallOf , inoneOf , itraverseOf_ , iforOf_ , imapMOf_ , iforMOf_ , iconcatMapOf , ifindOf , ifindMOf , ifoldrOf' , ifoldlOf' , ifoldrMOf , ifoldlMOf , itoListOf , elemIndexOf , elemIndicesOf , findIndexOf , findIndicesOf -- ** Building Indexed Folds , ifiltered , itakingWhile , idroppingWhile -- * Internal types , Leftmost , Rightmost , Traversed , Sequenced -- * Fold with Reified Monoid , foldBy , foldByOf , foldMapBy , foldMapByOf ) where import Prelude () import Control.Applicative.Backwards import Control.Comonad import Control.Lens.Getter import Control.Lens.Internal.Fold import Control.Lens.Internal.Getter import Control.Lens.Internal.Indexed import Control.Lens.Internal.Magma import Control.Lens.Internal.Prelude import Control.Lens.Type import Control.Monad as Monad import Control.Monad.Reader import Control.Monad.State import Data.CallStack import Data.Functor.Apply hiding ((<.)) import Data.Int (Int64) import Data.List (intercalate) import Data.Maybe (fromMaybe) import Data.Monoid (First (..), All (..), Any (..)) #if MIN_VERSION_reflection(2,1,0) import Data.Reflection #endif import qualified Data.Semigroup as Semi -- $setup -- >>> :set -XNoOverloadedStrings -- >>> import Control.Lens -- >>> import Control.Lens.Extras (is) -- >>> import Data.Function -- >>> import Data.List.Lens -- >>> import Debug.SimpleReflect.Expr -- >>> import Debug.SimpleReflect.Vars as Vars hiding (f,g) -- >>> import Control.DeepSeq (NFData (..), force) -- >>> import Control.Exception (evaluate) -- >>> import Data.Maybe (fromMaybe) -- >>> import Data.Monoid (Sum (..)) -- >>> import System.Timeout (timeout) -- >>> import qualified Data.Map as Map -- >>> let f :: Expr -> Expr; f = Debug.SimpleReflect.Vars.f -- >>> let g :: Expr -> Expr; g = Debug.SimpleReflect.Vars.g -- >>> let timingOut :: NFData a => a -> IO a; timingOut = fmap (fromMaybe (error "timeout")) . timeout (5*10^6) . evaluate . force #ifdef HLINT {-# ANN module "HLint: ignore Eta reduce" #-} {-# ANN module "HLint: ignore Use camelCase" #-} {-# ANN module "HLint: ignore Use curry" #-} {-# ANN module "HLint: ignore Use fmap" #-} #endif infixl 8 ^.., ^?, ^?!, ^@.., ^@?, ^@?! -------------------------- -- Folds -------------------------- -- | Obtain a 'Fold' by lifting an operation that returns a 'Foldable' result. -- -- This can be useful to lift operations from @Data.List@ and elsewhere into a 'Fold'. -- -- >>> [1,2,3,4]^..folding tail -- [2,3,4] folding :: Foldable f => (s -> f a) -> Fold s a folding sfa agb = phantom . traverse_ agb . sfa {-# INLINE folding #-} ifolding :: (Foldable f, Indexable i p, Contravariant g, Applicative g) => (s -> f (i, a)) -> Over p g s t a b ifolding sfa f = phantom . traverse_ (phantom . uncurry (indexed f)) . sfa {-# INLINE ifolding #-} -- | Obtain a 'Fold' by lifting 'foldr' like function. -- -- >>> [1,2,3,4]^..foldring foldr -- [1,2,3,4] foldring :: (Contravariant f, Applicative f) => ((a -> f a -> f a) -> f a -> s -> f a) -> LensLike f s t a b foldring fr f = phantom . fr (\a fa -> f a *> fa) noEffect {-# INLINE foldring #-} -- | Obtain 'FoldWithIndex' by lifting 'ifoldr' like function. ifoldring :: (Indexable i p, Contravariant f, Applicative f) => ((i -> a -> f a -> f a) -> f a -> s -> f a) -> Over p f s t a b ifoldring ifr f = phantom . ifr (\i a fa -> indexed f i a *> fa) noEffect {-# INLINE ifoldring #-} -- | Obtain a 'Fold' from any 'Foldable' indexed by ordinal position. 
-- -- >>> Just 3^..folded -- [3] -- -- >>> Nothing^..folded -- [] -- -- >>> [(1,2),(3,4)]^..folded.both -- [1,2,3,4] folded :: Foldable f => IndexedFold Int (f a) a folded = conjoined (foldring foldr) (ifoldring ifoldr) {-# INLINE folded #-} ifoldr :: Foldable f => (Int -> a -> b -> b) -> b -> f a -> b ifoldr f z xs = foldr (\ x g i -> i `seq` f i x (g (i+1))) (const z) xs 0 {-# INLINE ifoldr #-} -- | Obtain a 'Fold' from any 'Foldable' indexed by ordinal position. folded64 :: Foldable f => IndexedFold Int64 (f a) a folded64 = conjoined (foldring foldr) (ifoldring ifoldr64) {-# INLINE folded64 #-} ifoldr64 :: Foldable f => (Int64 -> a -> b -> b) -> b -> f a -> b ifoldr64 f z xs = foldr (\ x g i -> i `seq` f i x (g (i+1))) (const z) xs 0 {-# INLINE ifoldr64 #-} -- | Form a 'Fold1' by repeating the input forever. -- -- @ -- 'repeat' ≡ 'toListOf' 'repeated' -- @ -- -- >>> timingOut $ 5^..taking 20 repeated -- [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -- -- @ -- 'repeated' :: 'Fold1' a a -- @ repeated :: Apply f => LensLike' f a a repeated f a = as where as = f a .> as {-# INLINE repeated #-} -- | A 'Fold' that replicates its input @n@ times. -- -- @ -- 'replicate' n ≡ 'toListOf' ('replicated' n) -- @ -- -- >>> 5^..replicated 20 -- [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] replicated :: Int -> Fold a a replicated n0 f a = go n0 where m = f a go 0 = noEffect go n = m *> go (n - 1) {-# INLINE replicated #-} -- | Transform a non-empty 'Fold' into a 'Fold1' that loops over its elements over and over. -- -- >>> timingOut $ [1,2,3]^..taking 7 (cycled traverse) -- [1,2,3,1,2,3,1] -- -- @ -- 'cycled' :: 'Fold1' s a -> 'Fold1' s a -- @ cycled :: Apply f => LensLike f s t a b -> LensLike f s t a b cycled l f a = as where as = l f a .> as {-# INLINE cycled #-} -- | Build a 'Fold' that unfolds its values from a seed. -- -- @ -- 'Prelude.unfoldr' ≡ 'toListOf' '.' 'unfolded' -- @ -- -- >>> 10^..unfolded (\b -> if b == 0 then Nothing else Just (b, b-1)) -- [10,9,8,7,6,5,4,3,2,1] unfolded :: (b -> Maybe (a, b)) -> Fold b a unfolded f g b0 = go b0 where go b = case f b of Just (a, b') -> g a *> go b' Nothing -> noEffect {-# INLINE unfolded #-} -- | @x '^.' 'iterated' f@ returns an infinite 'Fold1' of repeated applications of @f@ to @x@. -- -- @ -- 'toListOf' ('iterated' f) a ≡ 'iterate' f a -- @ -- -- @ -- 'iterated' :: (a -> a) -> 'Fold1' a a -- @ iterated :: Apply f => (a -> a) -> LensLike' f a a iterated f g a0 = go a0 where go a = g a .> go (f a) {-# INLINE iterated #-} -- | Obtain a 'Fold' that can be composed with to filter another 'Lens', 'Iso', 'Getter', 'Fold' (or 'Traversal'). -- -- Note: This is /not/ a legal 'Traversal', unless you are very careful not to invalidate the predicate on the target. -- -- Note: This is also /not/ a legal 'Prism', unless you are very careful not to inject a value that fails the predicate. -- -- As a counter example, consider that given @evens = 'filtered' 'even'@ the second 'Traversal' law is violated: -- -- @ -- 'Control.Lens.Setter.over' evens 'succ' '.' 'Control.Lens.Setter.over' evens 'succ' '/=' 'Control.Lens.Setter.over' evens ('succ' '.' 'succ') -- @ -- -- So, in order for this to qualify as a legal 'Traversal' you can only use it for actions that preserve the result of the predicate! -- -- >>> [1..10]^..folded.filtered even -- [2,4,6,8,10] -- -- This will preserve an index if it is present. filtered :: (Choice p, Applicative f) => (a -> Bool) -> Optic' p f a a filtered p = dimap (\x -> if p x then Right x else Left x) (either pure id) . 
right' {-# INLINE filtered #-} -- | Obtain a potentially empty 'IndexedTraversal' by taking the first element from another, -- potentially empty `Fold` and using it as an index. -- -- The resulting optic can be composed with to filter another 'Lens', 'Iso', 'Getter', 'Fold' (or 'Traversal'). -- -- >>> [(Just 2, 3), (Nothing, 4)] & mapped . filteredBy (_1 . _Just) <. _2 %@~ (*) :: [(Maybe Int, Int)] -- [(Just 2,6),(Nothing,4)] -- -- @ -- 'filteredBy' :: 'Fold' a i -> 'IndexedTraversal'' i a a -- @ -- -- Note: As with 'filtered', this is /not/ a legal 'IndexedTraversal', unless you are very careful not to invalidate the predicate on the target! filteredBy :: (Indexable i p, Applicative f) => Getting (First i) a i -> p a (f a) -> a -> f a filteredBy p f val = case val ^? p of Nothing -> pure val Just witness -> indexed f witness val -- | Obtain a 'Fold' by taking elements from another 'Fold', 'Lens', 'Iso', 'Getter' or 'Traversal' while a predicate holds. -- -- @ -- 'takeWhile' p ≡ 'toListOf' ('takingWhile' p 'folded') -- @ -- -- >>> timingOut $ toListOf (takingWhile (<=3) folded) [1..] -- [1,2,3] -- -- @ -- 'takingWhile' :: (a -> 'Bool') -> 'Fold' s a -> 'Fold' s a -- 'takingWhile' :: (a -> 'Bool') -> 'Getter' s a -> 'Fold' s a -- 'takingWhile' :: (a -> 'Bool') -> 'Traversal'' s a -> 'Fold' s a -- * See note below -- 'takingWhile' :: (a -> 'Bool') -> 'Lens'' s a -> 'Fold' s a -- * See note below -- 'takingWhile' :: (a -> 'Bool') -> 'Prism'' s a -> 'Fold' s a -- * See note below -- 'takingWhile' :: (a -> 'Bool') -> 'Iso'' s a -> 'Fold' s a -- * See note below -- 'takingWhile' :: (a -> 'Bool') -> 'IndexedTraversal'' i s a -> 'IndexedFold' i s a -- * See note below -- 'takingWhile' :: (a -> 'Bool') -> 'IndexedLens'' i s a -> 'IndexedFold' i s a -- * See note below -- 'takingWhile' :: (a -> 'Bool') -> 'IndexedFold' i s a -> 'IndexedFold' i s a -- 'takingWhile' :: (a -> 'Bool') -> 'IndexedGetter' i s a -> 'IndexedFold' i s a -- @ -- -- /Note:/ When applied to a 'Traversal', 'takingWhile' yields something that can be used as if it were a 'Traversal', but -- which is not a 'Traversal' per the laws, unless you are careful to ensure that you do not invalidate the predicate when -- writing back through it. takingWhile :: (Conjoined p, Applicative f) => (a -> Bool) -> Over p (TakingWhile p f a a) s t a a -> Over p f s t a a takingWhile p l pafb = fmap runMagma . traverse (cosieve pafb) . runTakingWhile . l flag where flag = cotabulate $ \wa -> let a = extract wa; r = p a in TakingWhile r a $ \pr -> if pr && r then Magma () wa else MagmaPure a {-# INLINE takingWhile #-} -- | Obtain a 'Fold' by dropping elements from another 'Fold', 'Lens', 'Iso', 'Getter' or 'Traversal' while a predicate holds. 
-- -- @ -- 'dropWhile' p ≡ 'toListOf' ('droppingWhile' p 'folded') -- @ -- -- >>> toListOf (droppingWhile (<=3) folded) [1..6] -- [4,5,6] -- -- >>> toListOf (droppingWhile (<=3) folded) [1,6,1] -- [6,1] -- -- @ -- 'droppingWhile' :: (a -> 'Bool') -> 'Fold' s a -> 'Fold' s a -- 'droppingWhile' :: (a -> 'Bool') -> 'Getter' s a -> 'Fold' s a -- 'droppingWhile' :: (a -> 'Bool') -> 'Traversal'' s a -> 'Fold' s a -- see notes -- 'droppingWhile' :: (a -> 'Bool') -> 'Lens'' s a -> 'Fold' s a -- see notes -- 'droppingWhile' :: (a -> 'Bool') -> 'Prism'' s a -> 'Fold' s a -- see notes -- 'droppingWhile' :: (a -> 'Bool') -> 'Iso'' s a -> 'Fold' s a -- see notes -- @ -- -- @ -- 'droppingWhile' :: (a -> 'Bool') -> 'IndexPreservingTraversal'' s a -> 'IndexPreservingFold' s a -- see notes -- 'droppingWhile' :: (a -> 'Bool') -> 'IndexPreservingLens'' s a -> 'IndexPreservingFold' s a -- see notes -- 'droppingWhile' :: (a -> 'Bool') -> 'IndexPreservingGetter' s a -> 'IndexPreservingFold' s a -- 'droppingWhile' :: (a -> 'Bool') -> 'IndexPreservingFold' s a -> 'IndexPreservingFold' s a -- @ -- -- @ -- 'droppingWhile' :: (a -> 'Bool') -> 'IndexedTraversal'' i s a -> 'IndexedFold' i s a -- see notes -- 'droppingWhile' :: (a -> 'Bool') -> 'IndexedLens'' i s a -> 'IndexedFold' i s a -- see notes -- 'droppingWhile' :: (a -> 'Bool') -> 'IndexedGetter' i s a -> 'IndexedFold' i s a -- 'droppingWhile' :: (a -> 'Bool') -> 'IndexedFold' i s a -> 'IndexedFold' i s a -- @ -- -- Note: Many uses of this combinator will yield something that meets the types, but not the laws of a valid -- 'Traversal' or 'IndexedTraversal'. The 'Traversal' and 'IndexedTraversal' laws are only satisfied if the -- new values you assign to the first target also does not pass the predicate! Otherwise subsequent traversals -- will visit fewer elements and 'Traversal' fusion is not sound. -- -- So for any traversal @t@ and predicate @p@, @`droppingWhile` p t@ may not be lawful, but -- @(`Control.Lens.Traversal.dropping` 1 . `droppingWhile` p) t@ is. For example: -- -- >>> let l :: Traversal' [Int] Int; l = droppingWhile (<= 1) traverse -- >>> let l' :: Traversal' [Int] Int; l' = dropping 1 l -- -- @l@ is not a lawful setter because @`Control.Lens.Setter.over` l f . -- `Control.Lens.Setter.over` l g ≢ `Control.Lens.Setter.over` l (f . g)@: -- -- >>> [1,2,3] & l .~ 0 & l .~ 4 -- [1,0,0] -- >>> [1,2,3] & l .~ 4 -- [1,4,4] -- -- @l'@ on the other hand behaves lawfully: -- -- >>> [1,2,3] & l' .~ 0 & l' .~ 4 -- [1,2,4] -- >>> [1,2,3] & l' .~ 4 -- [1,2,4] droppingWhile :: (Conjoined p, Profunctor q, Applicative f) => (a -> Bool) -> Optical p q (Compose (State Bool) f) s t a a -> Optical p q f s t a a droppingWhile p l f = (flip evalState True .# getCompose) `rmap` l g where g = cotabulate $ \wa -> Compose $ state $ \b -> let a = extract wa b' = b && p a in (if b' then pure a else cosieve f wa, b') {-# INLINE droppingWhile #-} -- | A 'Fold' over the individual 'words' of a 'String'. -- -- @ -- 'worded' :: 'Fold' 'String' 'String' -- 'worded' :: 'Traversal'' 'String' 'String' -- @ -- -- @ -- 'worded' :: 'IndexedFold' 'Int' 'String' 'String' -- 'worded' :: 'IndexedTraversal'' 'Int' 'String' 'String' -- @ -- -- Note: This function type-checks as a 'Traversal' but it doesn't satisfy the laws. It's only valid to use it -- when you don't insert any whitespace characters while traversing, and if your original 'String' contains only -- isolated space characters (and no other characters that count as space, such as non-breaking spaces). 
worded :: Applicative f => IndexedLensLike' Int f String String worded f = fmap unwords . conjoined traverse (indexing traverse) f . words {-# INLINE worded #-} -- | A 'Fold' over the individual 'lines' of a 'String'. -- -- @ -- 'lined' :: 'Fold' 'String' 'String' -- 'lined' :: 'Traversal'' 'String' 'String' -- @ -- -- @ -- 'lined' :: 'IndexedFold' 'Int' 'String' 'String' -- 'lined' :: 'IndexedTraversal'' 'Int' 'String' 'String' -- @ -- -- Note: This function type-checks as a 'Traversal' but it doesn't satisfy the laws. It's only valid to use it -- when you don't insert any newline characters while traversing, and if your original 'String' contains only -- isolated newline characters. lined :: Applicative f => IndexedLensLike' Int f String String lined f = fmap (intercalate "\n") . conjoined traverse (indexing traverse) f . lines {-# INLINE lined #-} -------------------------- -- Fold/Getter combinators -------------------------- -- | Map each part of a structure viewed through a 'Lens', 'Getter', -- 'Fold' or 'Traversal' to a monoid and combine the results. -- -- >>> foldMapOf (folded . both . _Just) Sum [(Just 21, Just 21)] -- Sum {getSum = 42} -- -- @ -- 'Data.Foldable.foldMap' = 'foldMapOf' 'folded' -- @ -- -- @ -- 'foldMapOf' ≡ 'views' -- 'ifoldMapOf' l = 'foldMapOf' l '.' 'Indexed' -- @ -- -- @ -- 'foldMapOf' :: 'Getter' s a -> (a -> r) -> s -> r -- 'foldMapOf' :: 'Monoid' r => 'Fold' s a -> (a -> r) -> s -> r -- 'foldMapOf' :: 'Semigroup' r => 'Fold1' s a -> (a -> r) -> s -> r -- 'foldMapOf' :: 'Lens'' s a -> (a -> r) -> s -> r -- 'foldMapOf' :: 'Iso'' s a -> (a -> r) -> s -> r -- 'foldMapOf' :: 'Monoid' r => 'Traversal'' s a -> (a -> r) -> s -> r -- 'foldMapOf' :: 'Semigroup' r => 'Traversal1'' s a -> (a -> r) -> s -> r -- 'foldMapOf' :: 'Monoid' r => 'Prism'' s a -> (a -> r) -> s -> r -- @ -- -- @ -- 'foldMapOf' :: 'Getting' r s a -> (a -> r) -> s -> r -- @ foldMapOf :: Getting r s a -> (a -> r) -> s -> r foldMapOf l f = getConst #. l (Const #. f) {-# INLINE foldMapOf #-} -- | Combine the elements of a structure viewed through a 'Lens', 'Getter', -- 'Fold' or 'Traversal' using a monoid. -- -- >>> foldOf (folded.folded) [[Sum 1,Sum 4],[Sum 8, Sum 8],[Sum 21]] -- Sum {getSum = 42} -- -- @ -- 'Data.Foldable.fold' = 'foldOf' 'folded' -- @ -- -- @ -- 'foldOf' ≡ 'view' -- @ -- -- @ -- 'foldOf' :: 'Getter' s m -> s -> m -- 'foldOf' :: 'Monoid' m => 'Fold' s m -> s -> m -- 'foldOf' :: 'Lens'' s m -> s -> m -- 'foldOf' :: 'Iso'' s m -> s -> m -- 'foldOf' :: 'Monoid' m => 'Traversal'' s m -> s -> m -- 'foldOf' :: 'Monoid' m => 'Prism'' s m -> s -> m -- @ foldOf :: Getting a s a -> s -> a foldOf l = getConst #. l Const {-# INLINE foldOf #-} -- | Right-associative fold of parts of a structure that are viewed through a 'Lens', 'Getter', 'Fold' or 'Traversal'. -- -- @ -- 'Data.Foldable.foldr' ≡ 'foldrOf' 'folded' -- @ -- -- @ -- 'foldrOf' :: 'Getter' s a -> (a -> r -> r) -> r -> s -> r -- 'foldrOf' :: 'Fold' s a -> (a -> r -> r) -> r -> s -> r -- 'foldrOf' :: 'Lens'' s a -> (a -> r -> r) -> r -> s -> r -- 'foldrOf' :: 'Iso'' s a -> (a -> r -> r) -> r -> s -> r -- 'foldrOf' :: 'Traversal'' s a -> (a -> r -> r) -> r -> s -> r -- 'foldrOf' :: 'Prism'' s a -> (a -> r -> r) -> r -> s -> r -- @ -- -- @ -- 'ifoldrOf' l ≡ 'foldrOf' l '.' 'Indexed' -- @ -- -- @ -- 'foldrOf' :: 'Getting' ('Endo' r) s a -> (a -> r -> r) -> r -> s -> r -- @ foldrOf :: Getting (Endo r) s a -> (a -> r -> r) -> r -> s -> r foldrOf l f z = flip appEndo z . foldMapOf l (Endo #. 
f) {-# INLINE foldrOf #-} -- | Left-associative fold of the parts of a structure that are viewed through a 'Lens', 'Getter', 'Fold' or 'Traversal'. -- -- @ -- 'Data.Foldable.foldl' ≡ 'foldlOf' 'folded' -- @ -- -- @ -- 'foldlOf' :: 'Getter' s a -> (r -> a -> r) -> r -> s -> r -- 'foldlOf' :: 'Fold' s a -> (r -> a -> r) -> r -> s -> r -- 'foldlOf' :: 'Lens'' s a -> (r -> a -> r) -> r -> s -> r -- 'foldlOf' :: 'Iso'' s a -> (r -> a -> r) -> r -> s -> r -- 'foldlOf' :: 'Traversal'' s a -> (r -> a -> r) -> r -> s -> r -- 'foldlOf' :: 'Prism'' s a -> (r -> a -> r) -> r -> s -> r -- @ foldlOf :: Getting (Dual (Endo r)) s a -> (r -> a -> r) -> r -> s -> r foldlOf l f z = (flip appEndo z .# getDual) `rmap` foldMapOf l (Dual #. Endo #. flip f) {-# INLINE foldlOf #-} -- | Extract a list of the targets of a 'Fold'. See also ('^..'). -- -- @ -- 'Data.Foldable.toList' ≡ 'toListOf' 'folded' -- ('^..') ≡ 'flip' 'toListOf' -- @ -- >>> toListOf both ("hello","world") -- ["hello","world"] -- -- @ -- 'toListOf' :: 'Getter' s a -> s -> [a] -- 'toListOf' :: 'Fold' s a -> s -> [a] -- 'toListOf' :: 'Lens'' s a -> s -> [a] -- 'toListOf' :: 'Iso'' s a -> s -> [a] -- 'toListOf' :: 'Traversal'' s a -> s -> [a] -- 'toListOf' :: 'Prism'' s a -> s -> [a] -- @ toListOf :: Getting (Endo [a]) s a -> s -> [a] toListOf l = foldrOf l (:) [] {-# INLINE toListOf #-} -- | Extract a 'NonEmpty' of the targets of 'Fold1'. -- -- >>> toNonEmptyOf both1 ("hello", "world") -- "hello" :| ["world"] -- -- @ -- 'toNonEmptyOf' :: 'Getter' s a -> s -> NonEmpty a -- 'toNonEmptyOf' :: 'Fold1' s a -> s -> NonEmpty a -- 'toNonEmptyOf' :: 'Lens'' s a -> s -> NonEmpty a -- 'toNonEmptyOf' :: 'Iso'' s a -> s -> NonEmpty a -- 'toNonEmptyOf' :: 'Traversal1'' s a -> s -> NonEmpty a -- 'toNonEmptyOf' :: 'Prism'' s a -> s -> NonEmpty a -- @ toNonEmptyOf :: Getting (NonEmptyDList a) s a -> s -> NonEmpty a toNonEmptyOf l = flip getNonEmptyDList [] . foldMapOf l (NonEmptyDList #. (:|)) -- | A convenient infix (flipped) version of 'toListOf'. -- -- >>> [[1,2],[3]]^..id -- [[[1,2],[3]]] -- >>> [[1,2],[3]]^..traverse -- [[1,2],[3]] -- >>> [[1,2],[3]]^..traverse.traverse -- [1,2,3] -- -- >>> (1,2)^..both -- [1,2] -- -- @ -- 'Data.Foldable.toList' xs ≡ xs '^..' 'folded' -- ('^..') ≡ 'flip' 'toListOf' -- @ -- -- @ -- ('^..') :: s -> 'Getter' s a -> [a] -- ('^..') :: s -> 'Fold' s a -> [a] -- ('^..') :: s -> 'Lens'' s a -> [a] -- ('^..') :: s -> 'Iso'' s a -> [a] -- ('^..') :: s -> 'Traversal'' s a -> [a] -- ('^..') :: s -> 'Prism'' s a -> [a] -- @ (^..) :: s -> Getting (Endo [a]) s a -> [a] s ^.. l = toListOf l s {-# INLINE (^..) #-} -- | Returns 'True' if every target of a 'Fold' is 'True'. -- -- >>> andOf both (True,False) -- False -- >>> andOf both (True,True) -- True -- -- @ -- 'Data.Foldable.and' ≡ 'andOf' 'folded' -- @ -- -- @ -- 'andOf' :: 'Getter' s 'Bool' -> s -> 'Bool' -- 'andOf' :: 'Fold' s 'Bool' -> s -> 'Bool' -- 'andOf' :: 'Lens'' s 'Bool' -> s -> 'Bool' -- 'andOf' :: 'Iso'' s 'Bool' -> s -> 'Bool' -- 'andOf' :: 'Traversal'' s 'Bool' -> s -> 'Bool' -- 'andOf' :: 'Prism'' s 'Bool' -> s -> 'Bool' -- @ andOf :: Getting All s Bool -> s -> Bool andOf l = getAll #. foldMapOf l All {-# INLINE andOf #-} -- | Returns 'True' if any target of a 'Fold' is 'True'. 
-- -- >>> orOf both (True,False) -- True -- >>> orOf both (False,False) -- False -- -- @ -- 'Data.Foldable.or' ≡ 'orOf' 'folded' -- @ -- -- @ -- 'orOf' :: 'Getter' s 'Bool' -> s -> 'Bool' -- 'orOf' :: 'Fold' s 'Bool' -> s -> 'Bool' -- 'orOf' :: 'Lens'' s 'Bool' -> s -> 'Bool' -- 'orOf' :: 'Iso'' s 'Bool' -> s -> 'Bool' -- 'orOf' :: 'Traversal'' s 'Bool' -> s -> 'Bool' -- 'orOf' :: 'Prism'' s 'Bool' -> s -> 'Bool' -- @ orOf :: Getting Any s Bool -> s -> Bool orOf l = getAny #. foldMapOf l Any {-# INLINE orOf #-} -- | Returns 'True' if any target of a 'Fold' satisfies a predicate. -- -- >>> anyOf both (=='x') ('x','y') -- True -- >>> import Data.Data.Lens -- >>> anyOf biplate (== "world") (((),2::Int),"hello",("world",11::Int)) -- True -- -- @ -- 'Data.Foldable.any' ≡ 'anyOf' 'folded' -- @ -- -- @ -- 'ianyOf' l ≡ 'anyOf' l '.' 'Indexed' -- @ -- -- @ -- 'anyOf' :: 'Getter' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'anyOf' :: 'Fold' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'anyOf' :: 'Lens'' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'anyOf' :: 'Iso'' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'anyOf' :: 'Traversal'' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'anyOf' :: 'Prism'' s a -> (a -> 'Bool') -> s -> 'Bool' -- @ anyOf :: Getting Any s a -> (a -> Bool) -> s -> Bool anyOf l f = getAny #. foldMapOf l (Any #. f) {-# INLINE anyOf #-} -- | Returns 'True' if every target of a 'Fold' satisfies a predicate. -- -- >>> allOf both (>=3) (4,5) -- True -- >>> allOf folded (>=2) [1..10] -- False -- -- @ -- 'Data.Foldable.all' ≡ 'allOf' 'folded' -- @ -- -- @ -- 'iallOf' l = 'allOf' l '.' 'Indexed' -- @ -- -- @ -- 'allOf' :: 'Getter' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'allOf' :: 'Fold' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'allOf' :: 'Lens'' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'allOf' :: 'Iso'' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'allOf' :: 'Traversal'' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'allOf' :: 'Prism'' s a -> (a -> 'Bool') -> s -> 'Bool' -- @ allOf :: Getting All s a -> (a -> Bool) -> s -> Bool allOf l f = getAll #. foldMapOf l (All #. f) {-# INLINE allOf #-} -- | Returns 'True' only if no targets of a 'Fold' satisfy a predicate. -- -- >>> noneOf each (is _Nothing) (Just 3, Just 4, Just 5) -- True -- >>> noneOf (folded.folded) (<10) [[13,99,20],[3,71,42]] -- False -- -- @ -- 'inoneOf' l = 'noneOf' l '.' 'Indexed' -- @ -- -- @ -- 'noneOf' :: 'Getter' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'noneOf' :: 'Fold' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'noneOf' :: 'Lens'' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'noneOf' :: 'Iso'' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'noneOf' :: 'Traversal'' s a -> (a -> 'Bool') -> s -> 'Bool' -- 'noneOf' :: 'Prism'' s a -> (a -> 'Bool') -> s -> 'Bool' -- @ noneOf :: Getting Any s a -> (a -> Bool) -> s -> Bool noneOf l f = not . anyOf l f {-# INLINE noneOf #-} -- | Calculate the 'Product' of every number targeted by a 'Fold'. -- -- >>> productOf both (4,5) -- 20 -- >>> productOf folded [1,2,3,4,5] -- 120 -- -- @ -- 'Data.Foldable.product' ≡ 'productOf' 'folded' -- @ -- -- This operation may be more strict than you would expect. If you -- want a lazier version use @'ala' 'Product' '.' 
'foldMapOf'@ -- -- @ -- 'productOf' :: 'Num' a => 'Getter' s a -> s -> a -- 'productOf' :: 'Num' a => 'Fold' s a -> s -> a -- 'productOf' :: 'Num' a => 'Lens'' s a -> s -> a -- 'productOf' :: 'Num' a => 'Iso'' s a -> s -> a -- 'productOf' :: 'Num' a => 'Traversal'' s a -> s -> a -- 'productOf' :: 'Num' a => 'Prism'' s a -> s -> a -- @ productOf :: Num a => Getting (Endo (Endo a)) s a -> s -> a productOf l = foldlOf' l (*) 1 {-# INLINE productOf #-} -- | Calculate the 'Sum' of every number targeted by a 'Fold'. -- -- >>> sumOf both (5,6) -- 11 -- >>> sumOf folded [1,2,3,4] -- 10 -- >>> sumOf (folded.both) [(1,2),(3,4)] -- 10 -- >>> import Data.Data.Lens -- >>> sumOf biplate [(1::Int,[]),(2,[(3::Int,4::Int)])] :: Int -- 10 -- -- @ -- 'Data.Foldable.sum' ≡ 'sumOf' 'folded' -- @ -- -- This operation may be more strict than you would expect. If you -- want a lazier version use @'ala' 'Sum' '.' 'foldMapOf'@ -- -- @ -- 'sumOf' '_1' :: 'Num' a => (a, b) -> a -- 'sumOf' ('folded' '.' 'Control.Lens.Tuple._1') :: ('Foldable' f, 'Num' a) => f (a, b) -> a -- @ -- -- @ -- 'sumOf' :: 'Num' a => 'Getter' s a -> s -> a -- 'sumOf' :: 'Num' a => 'Fold' s a -> s -> a -- 'sumOf' :: 'Num' a => 'Lens'' s a -> s -> a -- 'sumOf' :: 'Num' a => 'Iso'' s a -> s -> a -- 'sumOf' :: 'Num' a => 'Traversal'' s a -> s -> a -- 'sumOf' :: 'Num' a => 'Prism'' s a -> s -> a -- @ sumOf :: Num a => Getting (Endo (Endo a)) s a -> s -> a sumOf l = foldlOf' l (+) 0 {-# INLINE sumOf #-} -- | Traverse over all of the targets of a 'Fold' (or 'Getter'), computing an 'Applicative' (or 'Functor')-based answer, -- but unlike 'Control.Lens.Traversal.traverseOf' do not construct a new structure. 'traverseOf_' generalizes -- 'Data.Foldable.traverse_' to work over any 'Fold'. -- -- When passed a 'Getter', 'traverseOf_' can work over any 'Functor', but when passed a 'Fold', 'traverseOf_' requires -- an 'Applicative'. -- -- >>> traverseOf_ both putStrLn ("hello","world") -- hello -- world -- -- @ -- 'Data.Foldable.traverse_' ≡ 'traverseOf_' 'folded' -- @ -- -- @ -- 'traverseOf_' '_2' :: 'Functor' f => (c -> f r) -> (d, c) -> f () -- 'traverseOf_' 'Control.Lens.Prism._Left' :: 'Applicative' f => (a -> f b) -> 'Either' a c -> f () -- @ -- -- @ -- 'itraverseOf_' l ≡ 'traverseOf_' l '.' 'Indexed' -- @ -- -- The rather specific signature of 'traverseOf_' allows it to be used as if the signature was any of: -- -- @ -- 'traverseOf_' :: 'Functor' f => 'Getter' s a -> (a -> f r) -> s -> f () -- 'traverseOf_' :: 'Applicative' f => 'Fold' s a -> (a -> f r) -> s -> f () -- 'traverseOf_' :: 'Functor' f => 'Lens'' s a -> (a -> f r) -> s -> f () -- 'traverseOf_' :: 'Functor' f => 'Iso'' s a -> (a -> f r) -> s -> f () -- 'traverseOf_' :: 'Applicative' f => 'Traversal'' s a -> (a -> f r) -> s -> f () -- 'traverseOf_' :: 'Applicative' f => 'Prism'' s a -> (a -> f r) -> s -> f () -- @ traverseOf_ :: Functor f => Getting (Traversed r f) s a -> (a -> f r) -> s -> f () traverseOf_ l f = void . getTraversed #. foldMapOf l (Traversed #. f) {-# INLINE traverseOf_ #-} -- | Traverse over all of the targets of a 'Fold' (or 'Getter'), computing an 'Applicative' (or 'Functor')-based answer, -- but unlike 'Control.Lens.Traversal.forOf' do not construct a new structure. 'forOf_' generalizes -- 'Data.Foldable.for_' to work over any 'Fold'. -- -- When passed a 'Getter', 'forOf_' can work over any 'Functor', but when passed a 'Fold', 'forOf_' requires -- an 'Applicative'. 
-- -- @ -- 'for_' ≡ 'forOf_' 'folded' -- @ -- -- >>> forOf_ both ("hello","world") putStrLn -- hello -- world -- -- The rather specific signature of 'forOf_' allows it to be used as if the signature was any of: -- -- @ -- 'iforOf_' l s ≡ 'forOf_' l s '.' 'Indexed' -- @ -- -- @ -- 'forOf_' :: 'Functor' f => 'Getter' s a -> s -> (a -> f r) -> f () -- 'forOf_' :: 'Applicative' f => 'Fold' s a -> s -> (a -> f r) -> f () -- 'forOf_' :: 'Functor' f => 'Lens'' s a -> s -> (a -> f r) -> f () -- 'forOf_' :: 'Functor' f => 'Iso'' s a -> s -> (a -> f r) -> f () -- 'forOf_' :: 'Applicative' f => 'Traversal'' s a -> s -> (a -> f r) -> f () -- 'forOf_' :: 'Applicative' f => 'Prism'' s a -> s -> (a -> f r) -> f () -- @ forOf_ :: Functor f => Getting (Traversed r f) s a -> s -> (a -> f r) -> f () forOf_ = flip . traverseOf_ {-# INLINE forOf_ #-} -- | Evaluate each action in observed by a 'Fold' on a structure from left to right, ignoring the results. -- -- @ -- 'sequenceA_' ≡ 'sequenceAOf_' 'folded' -- @ -- -- >>> sequenceAOf_ both (putStrLn "hello",putStrLn "world") -- hello -- world -- -- @ -- 'sequenceAOf_' :: 'Functor' f => 'Getter' s (f a) -> s -> f () -- 'sequenceAOf_' :: 'Applicative' f => 'Fold' s (f a) -> s -> f () -- 'sequenceAOf_' :: 'Functor' f => 'Lens'' s (f a) -> s -> f () -- 'sequenceAOf_' :: 'Functor' f => 'Iso'' s (f a) -> s -> f () -- 'sequenceAOf_' :: 'Applicative' f => 'Traversal'' s (f a) -> s -> f () -- 'sequenceAOf_' :: 'Applicative' f => 'Prism'' s (f a) -> s -> f () -- @ sequenceAOf_ :: Functor f => Getting (Traversed a f) s (f a) -> s -> f () sequenceAOf_ l = void . getTraversed #. foldMapOf l Traversed {-# INLINE sequenceAOf_ #-} -- | Traverse over all of the targets of a 'Fold1', computing an 'Apply' based answer. -- -- As long as you have 'Applicative' or 'Functor' effect you are better using 'traverseOf_'. -- The 'traverse1Of_' is useful only when you have genuine 'Apply' effect. -- -- >>> traverse1Of_ both1 (\ks -> Map.fromList [ (k, ()) | k <- ks ]) ("abc", "bcd") -- fromList [('b',()),('c',())] -- -- @ -- 'traverse1Of_' :: 'Apply' f => 'Fold1' s a -> (a -> f r) -> s -> f () -- @ -- -- @since 4.16 traverse1Of_ :: Functor f => Getting (TraversedF r f) s a -> (a -> f r) -> s -> f () traverse1Of_ l f = void . getTraversedF #. foldMapOf l (TraversedF #. f) {-# INLINE traverse1Of_ #-} -- | See 'forOf_' and 'traverse1Of_'. -- -- >>> for1Of_ both1 ("abc", "bcd") (\ks -> Map.fromList [ (k, ()) | k <- ks ]) -- fromList [('b',()),('c',())] -- -- @ -- 'for1Of_' :: 'Apply' f => 'Fold1' s a -> s -> (a -> f r) -> f () -- @ -- -- @since 4.16 for1Of_ :: Functor f => Getting (TraversedF r f) s a -> s -> (a -> f r) -> f () for1Of_ = flip . traverse1Of_ {-# INLINE for1Of_ #-} -- | See 'sequenceAOf_' and 'traverse1Of_'. -- -- @ -- 'sequence1Of_' :: 'Apply' f => 'Fold1' s (f a) -> s -> f () -- @ -- -- @since 4.16 sequence1Of_ :: Functor f => Getting (TraversedF a f) s (f a) -> s -> f () sequence1Of_ l = void . getTraversedF #. foldMapOf l TraversedF {-# INLINE sequence1Of_ #-} -- | Map each target of a 'Fold' on a structure to a monadic action, evaluate these actions from left to right, and ignore the results. 
-- -- >>> mapMOf_ both putStrLn ("hello","world") -- hello -- world -- -- @ -- 'Data.Foldable.mapM_' ≡ 'mapMOf_' 'folded' -- @ -- -- @ -- 'mapMOf_' :: 'Monad' m => 'Getter' s a -> (a -> m r) -> s -> m () -- 'mapMOf_' :: 'Monad' m => 'Fold' s a -> (a -> m r) -> s -> m () -- 'mapMOf_' :: 'Monad' m => 'Lens'' s a -> (a -> m r) -> s -> m () -- 'mapMOf_' :: 'Monad' m => 'Iso'' s a -> (a -> m r) -> s -> m () -- 'mapMOf_' :: 'Monad' m => 'Traversal'' s a -> (a -> m r) -> s -> m () -- 'mapMOf_' :: 'Monad' m => 'Prism'' s a -> (a -> m r) -> s -> m () -- @ mapMOf_ :: Monad m => Getting (Sequenced r m) s a -> (a -> m r) -> s -> m () mapMOf_ l f = liftM skip . getSequenced #. foldMapOf l (Sequenced #. f) {-# INLINE mapMOf_ #-} -- | 'forMOf_' is 'mapMOf_' with two of its arguments flipped. -- -- >>> forMOf_ both ("hello","world") putStrLn -- hello -- world -- -- @ -- 'Data.Foldable.forM_' ≡ 'forMOf_' 'folded' -- @ -- -- @ -- 'forMOf_' :: 'Monad' m => 'Getter' s a -> s -> (a -> m r) -> m () -- 'forMOf_' :: 'Monad' m => 'Fold' s a -> s -> (a -> m r) -> m () -- 'forMOf_' :: 'Monad' m => 'Lens'' s a -> s -> (a -> m r) -> m () -- 'forMOf_' :: 'Monad' m => 'Iso'' s a -> s -> (a -> m r) -> m () -- 'forMOf_' :: 'Monad' m => 'Traversal'' s a -> s -> (a -> m r) -> m () -- 'forMOf_' :: 'Monad' m => 'Prism'' s a -> s -> (a -> m r) -> m () -- @ forMOf_ :: Monad m => Getting (Sequenced r m) s a -> s -> (a -> m r) -> m () forMOf_ = flip . mapMOf_ {-# INLINE forMOf_ #-} -- | Evaluate each monadic action referenced by a 'Fold' on the structure from left to right, and ignore the results. -- -- >>> sequenceOf_ both (putStrLn "hello",putStrLn "world") -- hello -- world -- -- @ -- 'Data.Foldable.sequence_' ≡ 'sequenceOf_' 'folded' -- @ -- -- @ -- 'sequenceOf_' :: 'Monad' m => 'Getter' s (m a) -> s -> m () -- 'sequenceOf_' :: 'Monad' m => 'Fold' s (m a) -> s -> m () -- 'sequenceOf_' :: 'Monad' m => 'Lens'' s (m a) -> s -> m () -- 'sequenceOf_' :: 'Monad' m => 'Iso'' s (m a) -> s -> m () -- 'sequenceOf_' :: 'Monad' m => 'Traversal'' s (m a) -> s -> m () -- 'sequenceOf_' :: 'Monad' m => 'Prism'' s (m a) -> s -> m () -- @ sequenceOf_ :: Monad m => Getting (Sequenced a m) s (m a) -> s -> m () sequenceOf_ l = liftM skip . getSequenced #. foldMapOf l Sequenced {-# INLINE sequenceOf_ #-} -- | The sum of a collection of actions, generalizing 'concatOf'. -- -- >>> asumOf both ("hello","world") -- "helloworld" -- -- >>> asumOf each (Nothing, Just "hello", Nothing) -- Just "hello" -- -- @ -- 'asum' ≡ 'asumOf' 'folded' -- @ -- -- @ -- 'asumOf' :: 'Alternative' f => 'Getter' s (f a) -> s -> f a -- 'asumOf' :: 'Alternative' f => 'Fold' s (f a) -> s -> f a -- 'asumOf' :: 'Alternative' f => 'Lens'' s (f a) -> s -> f a -- 'asumOf' :: 'Alternative' f => 'Iso'' s (f a) -> s -> f a -- 'asumOf' :: 'Alternative' f => 'Traversal'' s (f a) -> s -> f a -- 'asumOf' :: 'Alternative' f => 'Prism'' s (f a) -> s -> f a -- @ asumOf :: Alternative f => Getting (Endo (f a)) s (f a) -> s -> f a asumOf l = foldrOf l (<|>) empty {-# INLINE asumOf #-} -- | The sum of a collection of actions, generalizing 'concatOf'. 
-- -- >>> msumOf both ("hello","world") -- "helloworld" -- -- >>> msumOf each (Nothing, Just "hello", Nothing) -- Just "hello" -- -- @ -- 'msum' ≡ 'msumOf' 'folded' -- @ -- -- @ -- 'msumOf' :: 'MonadPlus' m => 'Getter' s (m a) -> s -> m a -- 'msumOf' :: 'MonadPlus' m => 'Fold' s (m a) -> s -> m a -- 'msumOf' :: 'MonadPlus' m => 'Lens'' s (m a) -> s -> m a -- 'msumOf' :: 'MonadPlus' m => 'Iso'' s (m a) -> s -> m a -- 'msumOf' :: 'MonadPlus' m => 'Traversal'' s (m a) -> s -> m a -- 'msumOf' :: 'MonadPlus' m => 'Prism'' s (m a) -> s -> m a -- @ msumOf :: MonadPlus m => Getting (Endo (m a)) s (m a) -> s -> m a msumOf l = foldrOf l mplus mzero {-# INLINE msumOf #-} -- | Does the element occur anywhere within a given 'Fold' of the structure? -- -- >>> elemOf both "hello" ("hello","world") -- True -- -- @ -- 'elem' ≡ 'elemOf' 'folded' -- @ -- -- @ -- 'elemOf' :: 'Eq' a => 'Getter' s a -> a -> s -> 'Bool' -- 'elemOf' :: 'Eq' a => 'Fold' s a -> a -> s -> 'Bool' -- 'elemOf' :: 'Eq' a => 'Lens'' s a -> a -> s -> 'Bool' -- 'elemOf' :: 'Eq' a => 'Iso'' s a -> a -> s -> 'Bool' -- 'elemOf' :: 'Eq' a => 'Traversal'' s a -> a -> s -> 'Bool' -- 'elemOf' :: 'Eq' a => 'Prism'' s a -> a -> s -> 'Bool' -- @ elemOf :: Eq a => Getting Any s a -> a -> s -> Bool elemOf l = anyOf l . (==) {-# INLINE elemOf #-} -- | Does the element not occur anywhere within a given 'Fold' of the structure? -- -- >>> notElemOf each 'd' ('a','b','c') -- True -- -- >>> notElemOf each 'a' ('a','b','c') -- False -- -- @ -- 'notElem' ≡ 'notElemOf' 'folded' -- @ -- -- @ -- 'notElemOf' :: 'Eq' a => 'Getter' s a -> a -> s -> 'Bool' -- 'notElemOf' :: 'Eq' a => 'Fold' s a -> a -> s -> 'Bool' -- 'notElemOf' :: 'Eq' a => 'Iso'' s a -> a -> s -> 'Bool' -- 'notElemOf' :: 'Eq' a => 'Lens'' s a -> a -> s -> 'Bool' -- 'notElemOf' :: 'Eq' a => 'Traversal'' s a -> a -> s -> 'Bool' -- 'notElemOf' :: 'Eq' a => 'Prism'' s a -> a -> s -> 'Bool' -- @ notElemOf :: Eq a => Getting All s a -> a -> s -> Bool notElemOf l = allOf l . (/=) {-# INLINE notElemOf #-} -- | Map a function over all the targets of a 'Fold' of a container and concatenate the resulting lists. -- -- >>> concatMapOf both (\x -> [x, x + 1]) (1,3) -- [1,2,3,4] -- -- @ -- 'concatMap' ≡ 'concatMapOf' 'folded' -- @ -- -- @ -- 'concatMapOf' :: 'Getter' s a -> (a -> [r]) -> s -> [r] -- 'concatMapOf' :: 'Fold' s a -> (a -> [r]) -> s -> [r] -- 'concatMapOf' :: 'Lens'' s a -> (a -> [r]) -> s -> [r] -- 'concatMapOf' :: 'Iso'' s a -> (a -> [r]) -> s -> [r] -- 'concatMapOf' :: 'Traversal'' s a -> (a -> [r]) -> s -> [r] -- @ concatMapOf :: Getting [r] s a -> (a -> [r]) -> s -> [r] concatMapOf l ces = getConst #. l (Const #. ces) {-# INLINE concatMapOf #-} -- | Concatenate all of the lists targeted by a 'Fold' into a longer list. -- -- >>> concatOf both ("pan","ama") -- "panama" -- -- @ -- 'concat' ≡ 'concatOf' 'folded' -- 'concatOf' ≡ 'view' -- @ -- -- @ -- 'concatOf' :: 'Getter' s [r] -> s -> [r] -- 'concatOf' :: 'Fold' s [r] -> s -> [r] -- 'concatOf' :: 'Iso'' s [r] -> s -> [r] -- 'concatOf' :: 'Lens'' s [r] -> s -> [r] -- 'concatOf' :: 'Traversal'' s [r] -> s -> [r] -- @ concatOf :: Getting [r] s [r] -> s -> [r] concatOf l = getConst #. l Const {-# INLINE concatOf #-} -- | Calculate the number of targets there are for a 'Fold' in a given container. -- -- /Note:/ This can be rather inefficient for large containers and just like 'length', -- this will not terminate for infinite folds. 
-- -- @ -- 'length' ≡ 'lengthOf' 'folded' -- @ -- -- >>> lengthOf _1 ("hello",()) -- 1 -- -- >>> lengthOf traverse [1..10] -- 10 -- -- >>> lengthOf (traverse.traverse) [[1,2],[3,4],[5,6]] -- 6 -- -- @ -- 'lengthOf' ('folded' '.' 'folded') :: ('Foldable' f, 'Foldable' g) => f (g a) -> 'Int' -- @ -- -- @ -- 'lengthOf' :: 'Getter' s a -> s -> 'Int' -- 'lengthOf' :: 'Fold' s a -> s -> 'Int' -- 'lengthOf' :: 'Lens'' s a -> s -> 'Int' -- 'lengthOf' :: 'Iso'' s a -> s -> 'Int' -- 'lengthOf' :: 'Traversal'' s a -> s -> 'Int' -- @ lengthOf :: Getting (Endo (Endo Int)) s a -> s -> Int lengthOf l = foldlOf' l (\a _ -> a + 1) 0 {-# INLINE lengthOf #-} -- | Perform a safe 'head' of a 'Fold' or 'Traversal' or retrieve 'Just' the result -- from a 'Getter' or 'Lens'. -- -- When using a 'Traversal' as a partial 'Lens', or a 'Fold' as a partial 'Getter' this can be a convenient -- way to extract the optional value. -- -- Note: if you get stack overflows due to this, you may want to use 'firstOf' instead, which can deal -- more gracefully with heavily left-biased trees. This is because '^?' works by using the -- 'Data.Monoid.First' monoid, which can occasionally cause space leaks. -- -- >>> Left 4 ^?_Left -- Just 4 -- -- >>> Right 4 ^?_Left -- Nothing -- -- >>> "world" ^? ix 3 -- Just 'l' -- -- >>> "world" ^? ix 20 -- Nothing -- -- This operator works as an infix version of 'preview'. -- -- @ -- ('^?') ≡ 'flip' 'preview' -- @ -- -- It may be helpful to think of '^?' as having one of the following -- more specialized types: -- -- @ -- ('^?') :: s -> 'Getter' s a -> 'Maybe' a -- ('^?') :: s -> 'Fold' s a -> 'Maybe' a -- ('^?') :: s -> 'Lens'' s a -> 'Maybe' a -- ('^?') :: s -> 'Iso'' s a -> 'Maybe' a -- ('^?') :: s -> 'Traversal'' s a -> 'Maybe' a -- @ (^?) :: s -> Getting (First a) s a -> Maybe a s ^? l = getFirst (foldMapOf l (First #. Just) s) {-# INLINE (^?) #-} -- | Perform an *UNSAFE* 'head' of a 'Fold' or 'Traversal' assuming that it is there. -- -- >>> Left 4 ^?! _Left -- 4 -- -- >>> "world" ^?! ix 3 -- 'l' -- -- @ -- ('^?!') :: s -> 'Getter' s a -> a -- ('^?!') :: s -> 'Fold' s a -> a -- ('^?!') :: s -> 'Lens'' s a -> a -- ('^?!') :: s -> 'Iso'' s a -> a -- ('^?!') :: s -> 'Traversal'' s a -> a -- @ (^?!) :: HasCallStack => s -> Getting (Endo a) s a -> a s ^?! l = foldrOf l const (error "(^?!): empty Fold") s {-# INLINE (^?!) #-} -- | Retrieve the 'First' entry of a 'Fold' or 'Traversal' or retrieve 'Just' the result -- from a 'Getter' or 'Lens'. -- -- The answer is computed in a manner that leaks space less than @'preview'@ or @^?'@ -- and gives you back access to the outermost 'Just' constructor more quickly, but does so -- in a way that builds an intermediate structure, and thus may have worse -- constant factors. This also means that it can not be used in any 'Control.Monad.Reader.MonadReader', -- but must instead have 's' passed as its last argument, unlike 'preview'. -- -- Note: this could been named `headOf`. -- -- >>> firstOf traverse [1..10] -- Just 1 -- -- >>> firstOf both (1,2) -- Just 1 -- -- >>> firstOf ignored () -- Nothing -- -- @ -- 'firstOf' :: 'Getter' s a -> s -> 'Maybe' a -- 'firstOf' :: 'Fold' s a -> s -> 'Maybe' a -- 'firstOf' :: 'Lens'' s a -> s -> 'Maybe' a -- 'firstOf' :: 'Iso'' s a -> s -> 'Maybe' a -- 'firstOf' :: 'Traversal'' s a -> s -> 'Maybe' a -- @ firstOf :: Getting (Leftmost a) s a -> s -> Maybe a firstOf l = getLeftmost . 
foldMapOf l LLeaf {-# INLINE firstOf #-} -- | Retrieve the 'Data.Semigroup.First' entry of a 'Fold1' or 'Traversal1' or the result from a 'Getter' or 'Lens'. -- -- >>> first1Of traverse1 (1 :| [2..10]) -- 1 -- -- >>> first1Of both1 (1,2) -- 1 -- -- /Note:/ this is different from '^.'. -- -- >>> first1Of traverse1 ([1,2] :| [[3,4],[5,6]]) -- [1,2] -- -- >>> ([1,2] :| [[3,4],[5,6]]) ^. traverse1 -- [1,2,3,4,5,6] -- -- @ -- 'first1Of' :: 'Getter' s a -> s -> a -- 'first1Of' :: 'Fold1' s a -> s -> a -- 'first1Of' :: 'Lens'' s a -> s -> a -- 'first1Of' :: 'Iso'' s a -> s -> a -- 'first1Of' :: 'Traversal1'' s a -> s -> a -- @ first1Of :: Getting (Semi.First a) s a -> s -> a first1Of l = Semi.getFirst . foldMapOf l Semi.First -- | Retrieve the 'Last' entry of a 'Fold' or 'Traversal' or retrieve 'Just' the result -- from a 'Getter' or 'Lens'. -- -- The answer is computed in a manner that leaks space less than @'ala' 'Last' '.' 'foldMapOf'@ -- and gives you back access to the outermost 'Just' constructor more quickly, but may have worse -- constant factors. -- -- >>> lastOf traverse [1..10] -- Just 10 -- -- >>> lastOf both (1,2) -- Just 2 -- -- >>> lastOf ignored () -- Nothing -- -- @ -- 'lastOf' :: 'Getter' s a -> s -> 'Maybe' a -- 'lastOf' :: 'Fold' s a -> s -> 'Maybe' a -- 'lastOf' :: 'Lens'' s a -> s -> 'Maybe' a -- 'lastOf' :: 'Iso'' s a -> s -> 'Maybe' a -- 'lastOf' :: 'Traversal'' s a -> s -> 'Maybe' a -- @ lastOf :: Getting (Rightmost a) s a -> s -> Maybe a lastOf l = getRightmost . foldMapOf l RLeaf {-# INLINE lastOf #-} -- | Retrieve the 'Data.Semigroup.Last' entry of a 'Fold1' or 'Traversal1' or retrieve the result -- from a 'Getter' or 'Lens'.o -- -- >>> last1Of traverse1 (1 :| [2..10]) -- 10 -- -- >>> last1Of both1 (1,2) -- 2 -- -- @ -- 'last1Of' :: 'Getter' s a -> s -> 'Maybe' a -- 'last1Of' :: 'Fold1' s a -> s -> 'Maybe' a -- 'last1Of' :: 'Lens'' s a -> s -> 'Maybe' a -- 'last1Of' :: 'Iso'' s a -> s -> 'Maybe' a -- 'last1Of' :: 'Traversal1'' s a -> s -> 'Maybe' a -- @ last1Of :: Getting (Semi.Last a) s a -> s -> a last1Of l = Semi.getLast . foldMapOf l Semi.Last -- | Returns 'True' if this 'Fold' or 'Traversal' has no targets in the given container. -- -- Note: 'nullOf' on a valid 'Iso', 'Lens' or 'Getter' should always return 'False'. -- -- @ -- 'null' ≡ 'nullOf' 'folded' -- @ -- -- This may be rather inefficient compared to the 'null' check of many containers. -- -- >>> nullOf _1 (1,2) -- False -- -- >>> nullOf ignored () -- True -- -- >>> nullOf traverse [] -- True -- -- >>> nullOf (element 20) [1..10] -- True -- -- @ -- 'nullOf' ('folded' '.' '_1' '.' 'folded') :: ('Foldable' f, 'Foldable' g) => f (g a, b) -> 'Bool' -- @ -- -- @ -- 'nullOf' :: 'Getter' s a -> s -> 'Bool' -- 'nullOf' :: 'Fold' s a -> s -> 'Bool' -- 'nullOf' :: 'Iso'' s a -> s -> 'Bool' -- 'nullOf' :: 'Lens'' s a -> s -> 'Bool' -- 'nullOf' :: 'Traversal'' s a -> s -> 'Bool' -- @ nullOf :: Getting All s a -> s -> Bool nullOf = hasn't {-# INLINE nullOf #-} -- | Returns 'True' if this 'Fold' or 'Traversal' has any targets in the given container. -- -- A more \"conversational\" alias for this combinator is 'has'. -- -- Note: 'notNullOf' on a valid 'Iso', 'Lens' or 'Getter' should always return 'True'. -- -- @ -- 'not' '.' 'null' ≡ 'notNullOf' 'folded' -- @ -- -- This may be rather inefficient compared to the @'not' '.' 'null'@ check of many containers. 
-- -- >>> notNullOf _1 (1,2) -- True -- -- >>> notNullOf traverse [1..10] -- True -- -- >>> notNullOf folded [] -- False -- -- >>> notNullOf (element 20) [1..10] -- False -- -- @ -- 'notNullOf' ('folded' '.' '_1' '.' 'folded') :: ('Foldable' f, 'Foldable' g) => f (g a, b) -> 'Bool' -- @ -- -- @ -- 'notNullOf' :: 'Getter' s a -> s -> 'Bool' -- 'notNullOf' :: 'Fold' s a -> s -> 'Bool' -- 'notNullOf' :: 'Iso'' s a -> s -> 'Bool' -- 'notNullOf' :: 'Lens'' s a -> s -> 'Bool' -- 'notNullOf' :: 'Traversal'' s a -> s -> 'Bool' -- @ notNullOf :: Getting Any s a -> s -> Bool notNullOf = has {-# INLINE notNullOf #-} -- | Obtain the maximum element (if any) targeted by a 'Fold' or 'Traversal' safely. -- -- Note: 'maximumOf' on a valid 'Iso', 'Lens' or 'Getter' will always return 'Just' a value. -- -- >>> maximumOf traverse [1..10] -- Just 10 -- -- >>> maximumOf traverse [] -- Nothing -- -- >>> maximumOf (folded.filtered even) [1,4,3,6,7,9,2] -- Just 6 -- -- @ -- 'maximum' ≡ 'fromMaybe' ('error' \"empty\") '.' 'maximumOf' 'folded' -- @ -- -- In the interest of efficiency, This operation has semantics more strict than strictly necessary. -- @'rmap' 'getMax' ('foldMapOf' l 'Max')@ has lazier semantics but could leak memory. -- -- @ -- 'maximumOf' :: 'Ord' a => 'Getter' s a -> s -> 'Maybe' a -- 'maximumOf' :: 'Ord' a => 'Fold' s a -> s -> 'Maybe' a -- 'maximumOf' :: 'Ord' a => 'Iso'' s a -> s -> 'Maybe' a -- 'maximumOf' :: 'Ord' a => 'Lens'' s a -> s -> 'Maybe' a -- 'maximumOf' :: 'Ord' a => 'Traversal'' s a -> s -> 'Maybe' a -- @ maximumOf :: Ord a => Getting (Endo (Endo (Maybe a))) s a -> s -> Maybe a maximumOf l = foldlOf' l mf Nothing where mf Nothing y = Just $! y mf (Just x) y = Just $! max x y {-# INLINE maximumOf #-} -- | Obtain the maximum element targeted by a 'Fold1' or 'Traversal1'. -- -- >>> maximum1Of traverse1 (1 :| [2..10]) -- 10 -- -- @ -- 'maximum1Of' :: 'Ord' a => 'Getter' s a -> s -> a -- 'maximum1Of' :: 'Ord' a => 'Fold1' s a -> s -> a -- 'maximum1Of' :: 'Ord' a => 'Iso'' s a -> s -> a -- 'maximum1Of' :: 'Ord' a => 'Lens'' s a -> s -> a -- 'maximum1Of' :: 'Ord' a => 'Traversal1'' s a -> s -> a -- @ maximum1Of :: Ord a => Getting (Semi.Max a) s a -> s -> a maximum1Of l = Semi.getMax . foldMapOf l Semi.Max {-# INLINE maximum1Of #-} -- | Obtain the minimum element (if any) targeted by a 'Fold' or 'Traversal' safely. -- -- Note: 'minimumOf' on a valid 'Iso', 'Lens' or 'Getter' will always return 'Just' a value. -- -- >>> minimumOf traverse [1..10] -- Just 1 -- -- >>> minimumOf traverse [] -- Nothing -- -- >>> minimumOf (folded.filtered even) [1,4,3,6,7,9,2] -- Just 2 -- -- @ -- 'minimum' ≡ 'Data.Maybe.fromMaybe' ('error' \"empty\") '.' 'minimumOf' 'folded' -- @ -- -- In the interest of efficiency, This operation has semantics more strict than strictly necessary. -- @'rmap' 'getMin' ('foldMapOf' l 'Min')@ has lazier semantics but could leak memory. -- -- -- @ -- 'minimumOf' :: 'Ord' a => 'Getter' s a -> s -> 'Maybe' a -- 'minimumOf' :: 'Ord' a => 'Fold' s a -> s -> 'Maybe' a -- 'minimumOf' :: 'Ord' a => 'Iso'' s a -> s -> 'Maybe' a -- 'minimumOf' :: 'Ord' a => 'Lens'' s a -> s -> 'Maybe' a -- 'minimumOf' :: 'Ord' a => 'Traversal'' s a -> s -> 'Maybe' a -- @ minimumOf :: Ord a => Getting (Endo (Endo (Maybe a))) s a -> s -> Maybe a minimumOf l = foldlOf' l mf Nothing where mf Nothing y = Just $! y mf (Just x) y = Just $! min x y {-# INLINE minimumOf #-} -- | Obtain the minimum element targeted by a 'Fold1' or 'Traversal1'. 
-- -- >>> minimum1Of traverse1 (1 :| [2..10]) -- 1 -- -- @ -- 'minimum1Of' :: 'Ord' a => 'Getter' s a -> s -> a -- 'minimum1Of' :: 'Ord' a => 'Fold1' s a -> s -> a -- 'minimum1Of' :: 'Ord' a => 'Iso'' s a -> s -> a -- 'minimum1Of' :: 'Ord' a => 'Lens'' s a -> s -> a -- 'minimum1Of' :: 'Ord' a => 'Traversal1'' s a -> s -> a -- @ minimum1Of :: Ord a => Getting (Semi.Min a) s a -> s -> a minimum1Of l = Semi.getMin . foldMapOf l Semi.Min {-# INLINE minimum1Of #-} -- | Obtain the maximum element (if any) targeted by a 'Fold', 'Traversal', 'Lens', 'Iso', -- or 'Getter' according to a user supplied 'Ordering'. -- -- >>> maximumByOf traverse (compare `on` length) ["mustard","relish","ham"] -- Just "mustard" -- -- In the interest of efficiency, This operation has semantics more strict than strictly necessary. -- -- @ -- 'Data.Foldable.maximumBy' cmp ≡ 'Data.Maybe.fromMaybe' ('error' \"empty\") '.' 'maximumByOf' 'folded' cmp -- @ -- -- @ -- 'maximumByOf' :: 'Getter' s a -> (a -> a -> 'Ordering') -> s -> 'Maybe' a -- 'maximumByOf' :: 'Fold' s a -> (a -> a -> 'Ordering') -> s -> 'Maybe' a -- 'maximumByOf' :: 'Iso'' s a -> (a -> a -> 'Ordering') -> s -> 'Maybe' a -- 'maximumByOf' :: 'Lens'' s a -> (a -> a -> 'Ordering') -> s -> 'Maybe' a -- 'maximumByOf' :: 'Traversal'' s a -> (a -> a -> 'Ordering') -> s -> 'Maybe' a -- @ maximumByOf :: Getting (Endo (Endo (Maybe a))) s a -> (a -> a -> Ordering) -> s -> Maybe a maximumByOf l cmp = foldlOf' l mf Nothing where mf Nothing y = Just $! y mf (Just x) y = Just $! if cmp x y == GT then x else y {-# INLINE maximumByOf #-} -- | Obtain the minimum element (if any) targeted by a 'Fold', 'Traversal', 'Lens', 'Iso' -- or 'Getter' according to a user supplied 'Ordering'. -- -- In the interest of efficiency, This operation has semantics more strict than strictly necessary. -- -- >>> minimumByOf traverse (compare `on` length) ["mustard","relish","ham"] -- Just "ham" -- -- @ -- 'minimumBy' cmp ≡ 'Data.Maybe.fromMaybe' ('error' \"empty\") '.' 'minimumByOf' 'folded' cmp -- @ -- -- @ -- 'minimumByOf' :: 'Getter' s a -> (a -> a -> 'Ordering') -> s -> 'Maybe' a -- 'minimumByOf' :: 'Fold' s a -> (a -> a -> 'Ordering') -> s -> 'Maybe' a -- 'minimumByOf' :: 'Iso'' s a -> (a -> a -> 'Ordering') -> s -> 'Maybe' a -- 'minimumByOf' :: 'Lens'' s a -> (a -> a -> 'Ordering') -> s -> 'Maybe' a -- 'minimumByOf' :: 'Traversal'' s a -> (a -> a -> 'Ordering') -> s -> 'Maybe' a -- @ minimumByOf :: Getting (Endo (Endo (Maybe a))) s a -> (a -> a -> Ordering) -> s -> Maybe a minimumByOf l cmp = foldlOf' l mf Nothing where mf Nothing y = Just $! y mf (Just x) y = Just $! if cmp x y == GT then y else x {-# INLINE minimumByOf #-} -- | The 'findOf' function takes a 'Lens' (or 'Getter', 'Iso', 'Fold', or 'Traversal'), -- a predicate and a structure and returns the leftmost element of the structure -- matching the predicate, or 'Nothing' if there is no such element. -- -- >>> findOf each even (1,3,4,6) -- Just 4 -- -- >>> findOf folded even [1,3,5,7] -- Nothing -- -- @ -- 'findOf' :: 'Getter' s a -> (a -> 'Bool') -> s -> 'Maybe' a -- 'findOf' :: 'Fold' s a -> (a -> 'Bool') -> s -> 'Maybe' a -- 'findOf' :: 'Iso'' s a -> (a -> 'Bool') -> s -> 'Maybe' a -- 'findOf' :: 'Lens'' s a -> (a -> 'Bool') -> s -> 'Maybe' a -- 'findOf' :: 'Traversal'' s a -> (a -> 'Bool') -> s -> 'Maybe' a -- @ -- -- @ -- 'Data.Foldable.find' ≡ 'findOf' 'folded' -- 'ifindOf' l ≡ 'findOf' l '.' 
'Indexed' -- @ -- -- A simpler version that didn't permit indexing, would be: -- -- @ -- 'findOf' :: 'Getting' ('Endo' ('Maybe' a)) s a -> (a -> 'Bool') -> s -> 'Maybe' a -- 'findOf' l p = 'foldrOf' l (\a y -> if p a then 'Just' a else y) 'Nothing' -- @ findOf :: Getting (Endo (Maybe a)) s a -> (a -> Bool) -> s -> Maybe a findOf l f = foldrOf l (\a y -> if f a then Just a else y) Nothing {-# INLINE findOf #-} -- | The 'findMOf' function takes a 'Lens' (or 'Getter', 'Iso', 'Fold', or 'Traversal'), -- a monadic predicate and a structure and returns in the monad the leftmost element of the structure -- matching the predicate, or 'Nothing' if there is no such element. -- -- >>> findMOf each ( \x -> print ("Checking " ++ show x) >> return (even x)) (1,3,4,6) -- "Checking 1" -- "Checking 3" -- "Checking 4" -- Just 4 -- -- >>> findMOf each ( \x -> print ("Checking " ++ show x) >> return (even x)) (1,3,5,7) -- "Checking 1" -- "Checking 3" -- "Checking 5" -- "Checking 7" -- Nothing -- -- @ -- 'findMOf' :: ('Monad' m, 'Getter' s a) -> (a -> m 'Bool') -> s -> m ('Maybe' a) -- 'findMOf' :: ('Monad' m, 'Fold' s a) -> (a -> m 'Bool') -> s -> m ('Maybe' a) -- 'findMOf' :: ('Monad' m, 'Iso'' s a) -> (a -> m 'Bool') -> s -> m ('Maybe' a) -- 'findMOf' :: ('Monad' m, 'Lens'' s a) -> (a -> m 'Bool') -> s -> m ('Maybe' a) -- 'findMOf' :: ('Monad' m, 'Traversal'' s a) -> (a -> m 'Bool') -> s -> m ('Maybe' a) -- @ -- -- @ -- 'findMOf' 'folded' :: (Monad m, Foldable f) => (a -> m Bool) -> f a -> m (Maybe a) -- 'ifindMOf' l ≡ 'findMOf' l '.' 'Indexed' -- @ -- -- A simpler version that didn't permit indexing, would be: -- -- @ -- 'findMOf' :: Monad m => 'Getting' ('Endo' (m ('Maybe' a))) s a -> (a -> m 'Bool') -> s -> m ('Maybe' a) -- 'findMOf' l p = 'foldrOf' l (\a y -> p a >>= \x -> if x then return ('Just' a) else y) $ return 'Nothing' -- @ findMOf :: Monad m => Getting (Endo (m (Maybe a))) s a -> (a -> m Bool) -> s -> m (Maybe a) findMOf l f = foldrOf l (\a y -> f a >>= \r -> if r then return (Just a) else y) $ return Nothing {-# INLINE findMOf #-} -- | The 'lookupOf' function takes a 'Fold' (or 'Getter', 'Traversal', -- 'Lens', 'Iso', etc.), a key, and a structure containing key/value pairs. -- It returns the first value corresponding to the given key. This function -- generalizes 'lookup' to work on an arbitrary 'Fold' instead of lists. -- -- >>> lookupOf folded 4 [(2, 'a'), (4, 'b'), (4, 'c')] -- Just 'b' -- -- >>> lookupOf each 2 [(2, 'a'), (4, 'b'), (4, 'c')] -- Just 'a' -- -- @ -- 'lookupOf' :: 'Eq' k => 'Fold' s (k,v) -> k -> s -> 'Maybe' v -- @ lookupOf :: Eq k => Getting (Endo (Maybe v)) s (k,v) -> k -> s -> Maybe v lookupOf l k = foldrOf l (\(k',v) next -> if k == k' then Just v else next) Nothing {-# INLINE lookupOf #-} -- | A variant of 'foldrOf' that has no base case and thus may only be applied -- to lenses and structures such that the 'Lens' views at least one element of -- the structure. -- -- >>> foldr1Of each (+) (1,2,3,4) -- 10 -- -- @ -- 'foldr1Of' l f ≡ 'Prelude.foldr1' f '.' 
'toListOf' l -- 'Data.Foldable.foldr1' ≡ 'foldr1Of' 'folded' -- @ -- -- @ -- 'foldr1Of' :: 'Getter' s a -> (a -> a -> a) -> s -> a -- 'foldr1Of' :: 'Fold' s a -> (a -> a -> a) -> s -> a -- 'foldr1Of' :: 'Iso'' s a -> (a -> a -> a) -> s -> a -- 'foldr1Of' :: 'Lens'' s a -> (a -> a -> a) -> s -> a -- 'foldr1Of' :: 'Traversal'' s a -> (a -> a -> a) -> s -> a -- @ foldr1Of :: HasCallStack => Getting (Endo (Maybe a)) s a -> (a -> a -> a) -> s -> a foldr1Of l f xs = fromMaybe (error "foldr1Of: empty structure") (foldrOf l mf Nothing xs) where mf x my = Just $ case my of Nothing -> x Just y -> f x y {-# INLINE foldr1Of #-} -- | A variant of 'foldlOf' that has no base case and thus may only be applied to lenses and structures such -- that the 'Lens' views at least one element of the structure. -- -- >>> foldl1Of each (+) (1,2,3,4) -- 10 -- -- @ -- 'foldl1Of' l f ≡ 'Prelude.foldl1' f '.' 'toListOf' l -- 'Data.Foldable.foldl1' ≡ 'foldl1Of' 'folded' -- @ -- -- @ -- 'foldl1Of' :: 'Getter' s a -> (a -> a -> a) -> s -> a -- 'foldl1Of' :: 'Fold' s a -> (a -> a -> a) -> s -> a -- 'foldl1Of' :: 'Iso'' s a -> (a -> a -> a) -> s -> a -- 'foldl1Of' :: 'Lens'' s a -> (a -> a -> a) -> s -> a -- 'foldl1Of' :: 'Traversal'' s a -> (a -> a -> a) -> s -> a -- @ foldl1Of :: HasCallStack => Getting (Dual (Endo (Maybe a))) s a -> (a -> a -> a) -> s -> a foldl1Of l f xs = fromMaybe (error "foldl1Of: empty structure") (foldlOf l mf Nothing xs) where mf mx y = Just $ case mx of Nothing -> y Just x -> f x y {-# INLINE foldl1Of #-} -- | Strictly fold right over the elements of a structure. -- -- @ -- 'Data.Foldable.foldr'' ≡ 'foldrOf'' 'folded' -- @ -- -- @ -- 'foldrOf'' :: 'Getter' s a -> (a -> r -> r) -> r -> s -> r -- 'foldrOf'' :: 'Fold' s a -> (a -> r -> r) -> r -> s -> r -- 'foldrOf'' :: 'Iso'' s a -> (a -> r -> r) -> r -> s -> r -- 'foldrOf'' :: 'Lens'' s a -> (a -> r -> r) -> r -> s -> r -- 'foldrOf'' :: 'Traversal'' s a -> (a -> r -> r) -> r -> s -> r -- @ foldrOf' :: Getting (Dual (Endo (Endo r))) s a -> (a -> r -> r) -> r -> s -> r foldrOf' l f z0 xs = foldlOf l f' (Endo id) xs `appEndo` z0 where f' (Endo k) x = Endo $ \ z -> k $! f x z {-# INLINE foldrOf' #-} -- | Fold over the elements of a structure, associating to the left, but strictly. -- -- @ -- 'Data.Foldable.foldl'' ≡ 'foldlOf'' 'folded' -- @ -- -- @ -- 'foldlOf'' :: 'Getter' s a -> (r -> a -> r) -> r -> s -> r -- 'foldlOf'' :: 'Fold' s a -> (r -> a -> r) -> r -> s -> r -- 'foldlOf'' :: 'Iso'' s a -> (r -> a -> r) -> r -> s -> r -- 'foldlOf'' :: 'Lens'' s a -> (r -> a -> r) -> r -> s -> r -- 'foldlOf'' :: 'Traversal'' s a -> (r -> a -> r) -> r -> s -> r -- @ foldlOf' :: Getting (Endo (Endo r)) s a -> (r -> a -> r) -> r -> s -> r foldlOf' l f z0 xs = foldrOf l f' (Endo id) xs `appEndo` z0 where f' x (Endo k) = Endo $ \z -> k $! f z x {-# INLINE foldlOf' #-} -- | A variant of 'foldrOf'' that has no base case and thus may only be applied -- to folds and structures such that the fold views at least one element of the -- structure. -- -- @ -- 'foldr1Of' l f ≡ 'Prelude.foldr1' f '.' 
'toListOf' l -- @ -- -- @ -- 'foldr1Of'' :: 'Getter' s a -> (a -> a -> a) -> s -> a -- 'foldr1Of'' :: 'Fold' s a -> (a -> a -> a) -> s -> a -- 'foldr1Of'' :: 'Iso'' s a -> (a -> a -> a) -> s -> a -- 'foldr1Of'' :: 'Lens'' s a -> (a -> a -> a) -> s -> a -- 'foldr1Of'' :: 'Traversal'' s a -> (a -> a -> a) -> s -> a -- @ foldr1Of' :: HasCallStack => Getting (Dual (Endo (Endo (Maybe a)))) s a -> (a -> a -> a) -> s -> a foldr1Of' l f xs = fromMaybe (error "foldr1Of': empty structure") (foldrOf' l mf Nothing xs) where mf x Nothing = Just $! x mf x (Just y) = Just $! f x y {-# INLINE foldr1Of' #-} -- | A variant of 'foldlOf'' that has no base case and thus may only be applied -- to folds and structures such that the fold views at least one element of -- the structure. -- -- @ -- 'foldl1Of'' l f ≡ 'Data.List.foldl1'' f '.' 'toListOf' l -- @ -- -- @ -- 'foldl1Of'' :: 'Getter' s a -> (a -> a -> a) -> s -> a -- 'foldl1Of'' :: 'Fold' s a -> (a -> a -> a) -> s -> a -- 'foldl1Of'' :: 'Iso'' s a -> (a -> a -> a) -> s -> a -- 'foldl1Of'' :: 'Lens'' s a -> (a -> a -> a) -> s -> a -- 'foldl1Of'' :: 'Traversal'' s a -> (a -> a -> a) -> s -> a -- @ foldl1Of' :: HasCallStack => Getting (Endo (Endo (Maybe a))) s a -> (a -> a -> a) -> s -> a foldl1Of' l f xs = fromMaybe (error "foldl1Of': empty structure") (foldlOf' l mf Nothing xs) where mf Nothing y = Just $! y mf (Just x) y = Just $! f x y {-# INLINE foldl1Of' #-} -- | Monadic fold over the elements of a structure, associating to the right, -- i.e. from right to left. -- -- @ -- 'Data.Foldable.foldrM' ≡ 'foldrMOf' 'folded' -- @ -- -- @ -- 'foldrMOf' :: 'Monad' m => 'Getter' s a -> (a -> r -> m r) -> r -> s -> m r -- 'foldrMOf' :: 'Monad' m => 'Fold' s a -> (a -> r -> m r) -> r -> s -> m r -- 'foldrMOf' :: 'Monad' m => 'Iso'' s a -> (a -> r -> m r) -> r -> s -> m r -- 'foldrMOf' :: 'Monad' m => 'Lens'' s a -> (a -> r -> m r) -> r -> s -> m r -- 'foldrMOf' :: 'Monad' m => 'Traversal'' s a -> (a -> r -> m r) -> r -> s -> m r -- @ foldrMOf :: Monad m => Getting (Dual (Endo (r -> m r))) s a -> (a -> r -> m r) -> r -> s -> m r foldrMOf l f z0 xs = foldlOf l f' return xs z0 where f' k x z = f x z >>= k {-# INLINE foldrMOf #-} -- | Monadic fold over the elements of a structure, associating to the left, -- i.e. from left to right. -- -- @ -- 'Data.Foldable.foldlM' ≡ 'foldlMOf' 'folded' -- @ -- -- @ -- 'foldlMOf' :: 'Monad' m => 'Getter' s a -> (r -> a -> m r) -> r -> s -> m r -- 'foldlMOf' :: 'Monad' m => 'Fold' s a -> (r -> a -> m r) -> r -> s -> m r -- 'foldlMOf' :: 'Monad' m => 'Iso'' s a -> (r -> a -> m r) -> r -> s -> m r -- 'foldlMOf' :: 'Monad' m => 'Lens'' s a -> (r -> a -> m r) -> r -> s -> m r -- 'foldlMOf' :: 'Monad' m => 'Traversal'' s a -> (r -> a -> m r) -> r -> s -> m r -- @ foldlMOf :: Monad m => Getting (Endo (r -> m r)) s a -> (r -> a -> m r) -> r -> s -> m r foldlMOf l f z0 xs = foldrOf l f' return xs z0 where f' x k z = f z x >>= k {-# INLINE foldlMOf #-} -- | Check to see if this 'Fold' or 'Traversal' matches 1 or more entries. -- -- >>> has (element 0) [] -- False -- -- >>> has _Left (Left 12) -- True -- -- >>> has _Right (Left 12) -- False -- -- This will always return 'True' for a 'Lens' or 'Getter'. -- -- >>> has _1 ("hello","world") -- True -- -- @ -- 'has' :: 'Getter' s a -> s -> 'Bool' -- 'has' :: 'Fold' s a -> s -> 'Bool' -- 'has' :: 'Iso'' s a -> s -> 'Bool' -- 'has' :: 'Lens'' s a -> s -> 'Bool' -- 'has' :: 'Traversal'' s a -> s -> 'Bool' -- @ has :: Getting Any s a -> s -> Bool has l = getAny #. 
foldMapOf l (\_ -> Any True) {-# INLINE has #-} -- | Check to see if this 'Fold' or 'Traversal' has no matches. -- -- >>> hasn't _Left (Right 12) -- True -- -- >>> hasn't _Left (Left 12) -- False hasn't :: Getting All s a -> s -> Bool hasn't l = getAll #. foldMapOf l (\_ -> All False) {-# INLINE hasn't #-} ------------------------------------------------------------------------------ -- Pre ------------------------------------------------------------------------------ -- | This converts a 'Fold' to a 'IndexPreservingGetter' that returns the first element, if it -- exists, as a 'Maybe'. -- -- @ -- 'pre' :: 'Getter' s a -> 'IndexPreservingGetter' s ('Maybe' a) -- 'pre' :: 'Fold' s a -> 'IndexPreservingGetter' s ('Maybe' a) -- 'pre' :: 'Traversal'' s a -> 'IndexPreservingGetter' s ('Maybe' a) -- 'pre' :: 'Lens'' s a -> 'IndexPreservingGetter' s ('Maybe' a) -- 'pre' :: 'Iso'' s a -> 'IndexPreservingGetter' s ('Maybe' a) -- 'pre' :: 'Prism'' s a -> 'IndexPreservingGetter' s ('Maybe' a) -- @ pre :: Getting (First a) s a -> IndexPreservingGetter s (Maybe a) pre l = dimap (getFirst . getConst #. l (Const #. First #. Just)) phantom {-# INLINE pre #-} -- | This converts an 'IndexedFold' to an 'IndexPreservingGetter' that returns the first index -- and element, if they exist, as a 'Maybe'. -- -- @ -- 'ipre' :: 'IndexedGetter' i s a -> 'IndexPreservingGetter' s ('Maybe' (i, a)) -- 'ipre' :: 'IndexedFold' i s a -> 'IndexPreservingGetter' s ('Maybe' (i, a)) -- 'ipre' :: 'IndexedTraversal'' i s a -> 'IndexPreservingGetter' s ('Maybe' (i, a)) -- 'ipre' :: 'IndexedLens'' i s a -> 'IndexPreservingGetter' s ('Maybe' (i, a)) -- @ ipre :: IndexedGetting i (First (i, a)) s a -> IndexPreservingGetter s (Maybe (i, a)) ipre l = dimap (getFirst . getConst #. l (Indexed $ \i a -> Const (First (Just (i, a))))) phantom {-# INLINE ipre #-} ------------------------------------------------------------------------------ -- Preview ------------------------------------------------------------------------------ -- | Retrieve the first value targeted by a 'Fold' or 'Traversal' (or 'Just' the result -- from a 'Getter' or 'Lens'). See also 'firstOf' and '^?', which are similar with -- some subtle differences (explained below). -- -- @ -- 'Data.Maybe.listToMaybe' '.' 'toList' ≡ 'preview' 'folded' -- @ -- -- @ -- 'preview' = 'view' '.' 'pre' -- @ -- -- -- Unlike '^?', this function uses a -- 'Control.Monad.Reader.MonadReader' to read the value to be focused in on. -- This allows one to pass the value as the last argument by using the -- 'Control.Monad.Reader.MonadReader' instance for @(->) s@ -- However, it may also be used as part of some deeply nested transformer stack. -- -- 'preview' uses a monoidal value to obtain the result. -- This means that it generally has good performance, but can occasionally cause space leaks -- or even stack overflows on some data types. -- There is another function, 'firstOf', which avoids these issues at the cost of -- a slight constant performance cost and a little less flexibility. 
-- -- It may be helpful to think of 'preview' as having one of the following -- more specialized types: -- -- @ -- 'preview' :: 'Getter' s a -> s -> 'Maybe' a -- 'preview' :: 'Fold' s a -> s -> 'Maybe' a -- 'preview' :: 'Lens'' s a -> s -> 'Maybe' a -- 'preview' :: 'Iso'' s a -> s -> 'Maybe' a -- 'preview' :: 'Traversal'' s a -> s -> 'Maybe' a -- @ -- -- -- @ -- 'preview' :: 'MonadReader' s m => 'Getter' s a -> m ('Maybe' a) -- 'preview' :: 'MonadReader' s m => 'Fold' s a -> m ('Maybe' a) -- 'preview' :: 'MonadReader' s m => 'Lens'' s a -> m ('Maybe' a) -- 'preview' :: 'MonadReader' s m => 'Iso'' s a -> m ('Maybe' a) -- 'preview' :: 'MonadReader' s m => 'Traversal'' s a -> m ('Maybe' a) -- -- @ preview :: MonadReader s m => Getting (First a) s a -> m (Maybe a) preview l = asks (getFirst #. foldMapOf l (First #. Just)) {-# INLINE preview #-} -- | Retrieve the first index and value targeted by a 'Fold' or 'Traversal' (or 'Just' the result -- from a 'Getter' or 'Lens'). See also ('^@?'). -- -- @ -- 'ipreview' = 'view' '.' 'ipre' -- @ -- -- This is usually applied in the 'Control.Monad.Reader.Reader' -- 'Control.Monad.Monad' @(->) s@. -- -- @ -- 'ipreview' :: 'IndexedGetter' i s a -> s -> 'Maybe' (i, a) -- 'ipreview' :: 'IndexedFold' i s a -> s -> 'Maybe' (i, a) -- 'ipreview' :: 'IndexedLens'' i s a -> s -> 'Maybe' (i, a) -- 'ipreview' :: 'IndexedTraversal'' i s a -> s -> 'Maybe' (i, a) -- @ -- -- However, it may be useful to think of its full generality when working with -- a 'Control.Monad.Monad' transformer stack: -- -- @ -- 'ipreview' :: 'MonadReader' s m => 'IndexedGetter' s a -> m ('Maybe' (i, a)) -- 'ipreview' :: 'MonadReader' s m => 'IndexedFold' s a -> m ('Maybe' (i, a)) -- 'ipreview' :: 'MonadReader' s m => 'IndexedLens'' s a -> m ('Maybe' (i, a)) -- 'ipreview' :: 'MonadReader' s m => 'IndexedTraversal'' s a -> m ('Maybe' (i, a)) -- @ ipreview :: MonadReader s m => IndexedGetting i (First (i, a)) s a -> m (Maybe (i, a)) ipreview l = asks (getFirst #. ifoldMapOf l (\i a -> First (Just (i, a)))) {-# INLINE ipreview #-} -- | Retrieve a function of the first value targeted by a 'Fold' or -- 'Traversal' (or 'Just' the result from a 'Getter' or 'Lens'). -- -- This is usually applied in the 'Control.Monad.Reader.Reader' -- 'Control.Monad.Monad' @(->) s@. -- @ -- 'previews' = 'views' '.' 'pre' -- @ -- -- @ -- 'previews' :: 'Getter' s a -> (a -> r) -> s -> 'Maybe' r -- 'previews' :: 'Fold' s a -> (a -> r) -> s -> 'Maybe' r -- 'previews' :: 'Lens'' s a -> (a -> r) -> s -> 'Maybe' r -- 'previews' :: 'Iso'' s a -> (a -> r) -> s -> 'Maybe' r -- 'previews' :: 'Traversal'' s a -> (a -> r) -> s -> 'Maybe' r -- @ -- -- However, it may be useful to think of its full generality when working with -- a 'Monad' transformer stack: -- -- @ -- 'previews' :: 'MonadReader' s m => 'Getter' s a -> (a -> r) -> m ('Maybe' r) -- 'previews' :: 'MonadReader' s m => 'Fold' s a -> (a -> r) -> m ('Maybe' r) -- 'previews' :: 'MonadReader' s m => 'Lens'' s a -> (a -> r) -> m ('Maybe' r) -- 'previews' :: 'MonadReader' s m => 'Iso'' s a -> (a -> r) -> m ('Maybe' r) -- 'previews' :: 'MonadReader' s m => 'Traversal'' s a -> (a -> r) -> m ('Maybe' r) -- @ previews :: MonadReader s m => Getting (First r) s a -> (a -> r) -> m (Maybe r) previews l f = asks (getFirst . foldMapOf l (First #. Just . f)) {-# INLINE previews #-} -- | Retrieve a function of the first index and value targeted by an 'IndexedFold' or -- 'IndexedTraversal' (or 'Just' the result from an 'IndexedGetter' or 'IndexedLens'). -- See also ('^@?'). 
-- -- @ -- 'ipreviews' = 'views' '.' 'ipre' -- @ -- -- This is usually applied in the 'Control.Monad.Reader.Reader' -- 'Control.Monad.Monad' @(->) s@. -- -- @ -- 'ipreviews' :: 'IndexedGetter' i s a -> (i -> a -> r) -> s -> 'Maybe' r -- 'ipreviews' :: 'IndexedFold' i s a -> (i -> a -> r) -> s -> 'Maybe' r -- 'ipreviews' :: 'IndexedLens'' i s a -> (i -> a -> r) -> s -> 'Maybe' r -- 'ipreviews' :: 'IndexedTraversal'' i s a -> (i -> a -> r) -> s -> 'Maybe' r -- @ -- -- However, it may be useful to think of its full generality when working with -- a 'Control.Monad.Monad' transformer stack: -- -- @ -- 'ipreviews' :: 'MonadReader' s m => 'IndexedGetter' i s a -> (i -> a -> r) -> m ('Maybe' r) -- 'ipreviews' :: 'MonadReader' s m => 'IndexedFold' i s a -> (i -> a -> r) -> m ('Maybe' r) -- 'ipreviews' :: 'MonadReader' s m => 'IndexedLens'' i s a -> (i -> a -> r) -> m ('Maybe' r) -- 'ipreviews' :: 'MonadReader' s m => 'IndexedTraversal'' i s a -> (i -> a -> r) -> m ('Maybe' r) -- @ ipreviews :: MonadReader s m => IndexedGetting i (First r) s a -> (i -> a -> r) -> m (Maybe r) ipreviews l f = asks (getFirst . ifoldMapOf l (\i -> First #. Just . f i)) {-# INLINE ipreviews #-} ------------------------------------------------------------------------------ -- Preuse ------------------------------------------------------------------------------ -- | Retrieve the first value targeted by a 'Fold' or 'Traversal' (or 'Just' the result -- from a 'Getter' or 'Lens') into the current state. -- -- @ -- 'preuse' = 'use' '.' 'pre' -- @ -- -- @ -- 'preuse' :: 'MonadState' s m => 'Getter' s a -> m ('Maybe' a) -- 'preuse' :: 'MonadState' s m => 'Fold' s a -> m ('Maybe' a) -- 'preuse' :: 'MonadState' s m => 'Lens'' s a -> m ('Maybe' a) -- 'preuse' :: 'MonadState' s m => 'Iso'' s a -> m ('Maybe' a) -- 'preuse' :: 'MonadState' s m => 'Traversal'' s a -> m ('Maybe' a) -- @ preuse :: MonadState s m => Getting (First a) s a -> m (Maybe a) preuse l = gets (preview l) {-# INLINE preuse #-} -- | Retrieve the first index and value targeted by an 'IndexedFold' or 'IndexedTraversal' (or 'Just' the index -- and result from an 'IndexedGetter' or 'IndexedLens') into the current state. -- -- @ -- 'ipreuse' = 'use' '.' 'ipre' -- @ -- -- @ -- 'ipreuse' :: 'MonadState' s m => 'IndexedGetter' i s a -> m ('Maybe' (i, a)) -- 'ipreuse' :: 'MonadState' s m => 'IndexedFold' i s a -> m ('Maybe' (i, a)) -- 'ipreuse' :: 'MonadState' s m => 'IndexedLens'' i s a -> m ('Maybe' (i, a)) -- 'ipreuse' :: 'MonadState' s m => 'IndexedTraversal'' i s a -> m ('Maybe' (i, a)) -- @ ipreuse :: MonadState s m => IndexedGetting i (First (i, a)) s a -> m (Maybe (i, a)) ipreuse l = gets (ipreview l) {-# INLINE ipreuse #-} -- | Retrieve a function of the first value targeted by a 'Fold' or -- 'Traversal' (or 'Just' the result from a 'Getter' or 'Lens') into the current state. -- -- @ -- 'preuses' = 'uses' '.' 
'pre' -- @ -- -- @ -- 'preuses' :: 'MonadState' s m => 'Getter' s a -> (a -> r) -> m ('Maybe' r) -- 'preuses' :: 'MonadState' s m => 'Fold' s a -> (a -> r) -> m ('Maybe' r) -- 'preuses' :: 'MonadState' s m => 'Lens'' s a -> (a -> r) -> m ('Maybe' r) -- 'preuses' :: 'MonadState' s m => 'Iso'' s a -> (a -> r) -> m ('Maybe' r) -- 'preuses' :: 'MonadState' s m => 'Traversal'' s a -> (a -> r) -> m ('Maybe' r) -- @ preuses :: MonadState s m => Getting (First r) s a -> (a -> r) -> m (Maybe r) preuses l f = gets (previews l f) {-# INLINE preuses #-} -- | Retrieve a function of the first index and value targeted by an 'IndexedFold' or -- 'IndexedTraversal' (or a function of 'Just' the index and result from an 'IndexedGetter' -- or 'IndexedLens') into the current state. -- -- @ -- 'ipreuses' = 'uses' '.' 'ipre' -- @ -- -- @ -- 'ipreuses' :: 'MonadState' s m => 'IndexedGetter' i s a -> (i -> a -> r) -> m ('Maybe' r) -- 'ipreuses' :: 'MonadState' s m => 'IndexedFold' i s a -> (i -> a -> r) -> m ('Maybe' r) -- 'ipreuses' :: 'MonadState' s m => 'IndexedLens'' i s a -> (i -> a -> r) -> m ('Maybe' r) -- 'ipreuses' :: 'MonadState' s m => 'IndexedTraversal'' i s a -> (i -> a -> r) -> m ('Maybe' r) -- @ ipreuses :: MonadState s m => IndexedGetting i (First r) s a -> (i -> a -> r) -> m (Maybe r) ipreuses l f = gets (ipreviews l f) {-# INLINE ipreuses #-} ------------------------------------------------------------------------------ -- Profunctors ------------------------------------------------------------------------------ -- | This allows you to 'Control.Traversable.traverse' the elements of a pretty much any 'LensLike' construction in the opposite order. -- -- This will preserve indexes on 'Indexed' types and will give you the elements of a (finite) 'Fold' or 'Traversal' in the opposite order. -- -- This has no practical impact on a 'Getter', 'Setter', 'Lens' or 'Iso'. -- -- /NB:/ To write back through an 'Iso', you want to use 'Control.Lens.Isomorphic.from'. -- Similarly, to write back through an 'Prism', you want to use 'Control.Lens.Review.re'. backwards :: (Profunctor p, Profunctor q) => Optical p q (Backwards f) s t a b -> Optical p q f s t a b backwards l f = forwards #. l (Backwards #. f) {-# INLINE backwards #-} ------------------------------------------------------------------------------ -- Indexed Folds ------------------------------------------------------------------------------ -- | Fold an 'IndexedFold' or 'IndexedTraversal' by mapping indices and values to an arbitrary 'Monoid' with access -- to the @i@. -- -- When you don't need access to the index then 'foldMapOf' is more flexible in what it accepts. -- -- @ -- 'foldMapOf' l ≡ 'ifoldMapOf' l '.' 'const' -- @ -- -- @ -- 'ifoldMapOf' :: 'IndexedGetter' i s a -> (i -> a -> m) -> s -> m -- 'ifoldMapOf' :: 'Monoid' m => 'IndexedFold' i s a -> (i -> a -> m) -> s -> m -- 'ifoldMapOf' :: 'IndexedLens'' i s a -> (i -> a -> m) -> s -> m -- 'ifoldMapOf' :: 'Monoid' m => 'IndexedTraversal'' i s a -> (i -> a -> m) -> s -> m -- @ -- ifoldMapOf :: IndexedGetting i m s a -> (i -> a -> m) -> s -> m ifoldMapOf l f = getConst #. l (Const #. Indexed f) {-# INLINE ifoldMapOf #-} -- | Right-associative fold of parts of a structure that are viewed through an 'IndexedFold' or 'IndexedTraversal' with -- access to the @i@. -- -- When you don't need access to the index then 'foldrOf' is more flexible in what it accepts. -- -- @ -- 'foldrOf' l ≡ 'ifoldrOf' l '.' 
'const' -- @ -- -- @ -- 'ifoldrOf' :: 'IndexedGetter' i s a -> (i -> a -> r -> r) -> r -> s -> r -- 'ifoldrOf' :: 'IndexedFold' i s a -> (i -> a -> r -> r) -> r -> s -> r -- 'ifoldrOf' :: 'IndexedLens'' i s a -> (i -> a -> r -> r) -> r -> s -> r -- 'ifoldrOf' :: 'IndexedTraversal'' i s a -> (i -> a -> r -> r) -> r -> s -> r -- @ ifoldrOf :: IndexedGetting i (Endo r) s a -> (i -> a -> r -> r) -> r -> s -> r ifoldrOf l f z = flip appEndo z . getConst #. l (Const #. Endo #. Indexed f) {-# INLINE ifoldrOf #-} -- | Left-associative fold of the parts of a structure that are viewed through an 'IndexedFold' or 'IndexedTraversal' with -- access to the @i@. -- -- When you don't need access to the index then 'foldlOf' is more flexible in what it accepts. -- -- @ -- 'foldlOf' l ≡ 'ifoldlOf' l '.' 'const' -- @ -- -- @ -- 'ifoldlOf' :: 'IndexedGetter' i s a -> (i -> r -> a -> r) -> r -> s -> r -- 'ifoldlOf' :: 'IndexedFold' i s a -> (i -> r -> a -> r) -> r -> s -> r -- 'ifoldlOf' :: 'IndexedLens'' i s a -> (i -> r -> a -> r) -> r -> s -> r -- 'ifoldlOf' :: 'IndexedTraversal'' i s a -> (i -> r -> a -> r) -> r -> s -> r -- @ ifoldlOf :: IndexedGetting i (Dual (Endo r)) s a -> (i -> r -> a -> r) -> r -> s -> r ifoldlOf l f z = (flip appEndo z .# getDual) `rmap` ifoldMapOf l (\i -> Dual #. Endo #. flip (f i)) {-# INLINE ifoldlOf #-} -- | Return whether or not any element viewed through an 'IndexedFold' or 'IndexedTraversal' -- satisfy a predicate, with access to the @i@. -- -- When you don't need access to the index then 'anyOf' is more flexible in what it accepts. -- -- @ -- 'anyOf' l ≡ 'ianyOf' l '.' 'const' -- @ -- -- @ -- 'ianyOf' :: 'IndexedGetter' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- 'ianyOf' :: 'IndexedFold' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- 'ianyOf' :: 'IndexedLens'' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- 'ianyOf' :: 'IndexedTraversal'' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- @ ianyOf :: IndexedGetting i Any s a -> (i -> a -> Bool) -> s -> Bool ianyOf l f = getAny #. getConst #. l (Const #. Any #. Indexed f) {-# INLINE ianyOf #-} -- | Return whether or not all elements viewed through an 'IndexedFold' or 'IndexedTraversal' -- satisfy a predicate, with access to the @i@. -- -- When you don't need access to the index then 'allOf' is more flexible in what it accepts. -- -- @ -- 'allOf' l ≡ 'iallOf' l '.' 'const' -- @ -- -- @ -- 'iallOf' :: 'IndexedGetter' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- 'iallOf' :: 'IndexedFold' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- 'iallOf' :: 'IndexedLens'' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- 'iallOf' :: 'IndexedTraversal'' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- @ iallOf :: IndexedGetting i All s a -> (i -> a -> Bool) -> s -> Bool iallOf l f = getAll #. getConst #. l (Const #. All #. Indexed f) {-# INLINE iallOf #-} -- | Return whether or not none of the elements viewed through an 'IndexedFold' or 'IndexedTraversal' -- satisfy a predicate, with access to the @i@. -- -- When you don't need access to the index then 'noneOf' is more flexible in what it accepts. -- -- @ -- 'noneOf' l ≡ 'inoneOf' l '.' 'const' -- @ -- -- @ -- 'inoneOf' :: 'IndexedGetter' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- 'inoneOf' :: 'IndexedFold' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- 'inoneOf' :: 'IndexedLens'' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- 'inoneOf' :: 'IndexedTraversal'' i s a -> (i -> a -> 'Bool') -> s -> 'Bool' -- @ inoneOf :: IndexedGetting i Any s a -> (i -> a -> Bool) -> s -> Bool inoneOf l f = not . 
ianyOf l f {-# INLINE inoneOf #-} -- | Traverse the targets of an 'IndexedFold' or 'IndexedTraversal' with access to the @i@, discarding the results. -- -- When you don't need access to the index then 'traverseOf_' is more flexible in what it accepts. -- -- @ -- 'traverseOf_' l ≡ 'Control.Lens.Traversal.itraverseOf' l '.' 'const' -- @ -- -- @ -- 'itraverseOf_' :: 'Functor' f => 'IndexedGetter' i s a -> (i -> a -> f r) -> s -> f () -- 'itraverseOf_' :: 'Applicative' f => 'IndexedFold' i s a -> (i -> a -> f r) -> s -> f () -- 'itraverseOf_' :: 'Functor' f => 'IndexedLens'' i s a -> (i -> a -> f r) -> s -> f () -- 'itraverseOf_' :: 'Applicative' f => 'IndexedTraversal'' i s a -> (i -> a -> f r) -> s -> f () -- @ itraverseOf_ :: Functor f => IndexedGetting i (Traversed r f) s a -> (i -> a -> f r) -> s -> f () itraverseOf_ l f = void . getTraversed #. getConst #. l (Const #. Traversed #. Indexed f) {-# INLINE itraverseOf_ #-} -- | Traverse the targets of an 'IndexedFold' or 'IndexedTraversal' with access to the index, discarding the results -- (with the arguments flipped). -- -- @ -- 'iforOf_' ≡ 'flip' '.' 'itraverseOf_' -- @ -- -- When you don't need access to the index then 'forOf_' is more flexible in what it accepts. -- -- @ -- 'forOf_' l a ≡ 'iforOf_' l a '.' 'const' -- @ -- -- @ -- 'iforOf_' :: 'Functor' f => 'IndexedGetter' i s a -> s -> (i -> a -> f r) -> f () -- 'iforOf_' :: 'Applicative' f => 'IndexedFold' i s a -> s -> (i -> a -> f r) -> f () -- 'iforOf_' :: 'Functor' f => 'IndexedLens'' i s a -> s -> (i -> a -> f r) -> f () -- 'iforOf_' :: 'Applicative' f => 'IndexedTraversal'' i s a -> s -> (i -> a -> f r) -> f () -- @ iforOf_ :: Functor f => IndexedGetting i (Traversed r f) s a -> s -> (i -> a -> f r) -> f () iforOf_ = flip . itraverseOf_ {-# INLINE iforOf_ #-} -- | Run monadic actions for each target of an 'IndexedFold' or 'IndexedTraversal' with access to the index, -- discarding the results. -- -- When you don't need access to the index then 'mapMOf_' is more flexible in what it accepts. -- -- @ -- 'mapMOf_' l ≡ 'Control.Lens.Setter.imapMOf' l '.' 'const' -- @ -- -- @ -- 'imapMOf_' :: 'Monad' m => 'IndexedGetter' i s a -> (i -> a -> m r) -> s -> m () -- 'imapMOf_' :: 'Monad' m => 'IndexedFold' i s a -> (i -> a -> m r) -> s -> m () -- 'imapMOf_' :: 'Monad' m => 'IndexedLens'' i s a -> (i -> a -> m r) -> s -> m () -- 'imapMOf_' :: 'Monad' m => 'IndexedTraversal'' i s a -> (i -> a -> m r) -> s -> m () -- @ imapMOf_ :: Monad m => IndexedGetting i (Sequenced r m) s a -> (i -> a -> m r) -> s -> m () imapMOf_ l f = liftM skip . getSequenced #. getConst #. l (Const #. Sequenced #. Indexed f) {-# INLINE imapMOf_ #-} -- | Run monadic actions for each target of an 'IndexedFold' or 'IndexedTraversal' with access to the index, -- discarding the results (with the arguments flipped). -- -- @ -- 'iforMOf_' ≡ 'flip' '.' 'imapMOf_' -- @ -- -- When you don't need access to the index then 'forMOf_' is more flexible in what it accepts. -- -- @ -- 'forMOf_' l a ≡ 'Control.Lens.Traversal.iforMOf' l a '.' 'const' -- @ -- -- @ -- 'iforMOf_' :: 'Monad' m => 'IndexedGetter' i s a -> s -> (i -> a -> m r) -> m () -- 'iforMOf_' :: 'Monad' m => 'IndexedFold' i s a -> s -> (i -> a -> m r) -> m () -- 'iforMOf_' :: 'Monad' m => 'IndexedLens'' i s a -> s -> (i -> a -> m r) -> m () -- 'iforMOf_' :: 'Monad' m => 'IndexedTraversal'' i s a -> s -> (i -> a -> m r) -> m () -- @ iforMOf_ :: Monad m => IndexedGetting i (Sequenced r m) s a -> s -> (i -> a -> m r) -> m () iforMOf_ = flip . 
imapMOf_ {-# INLINE iforMOf_ #-} -- | Concatenate the results of a function of the elements of an 'IndexedFold' or 'IndexedTraversal' -- with access to the index. -- -- When you don't need access to the index then 'concatMapOf' is more flexible in what it accepts. -- -- @ -- 'concatMapOf' l ≡ 'iconcatMapOf' l '.' 'const' -- 'iconcatMapOf' ≡ 'ifoldMapOf' -- @ -- -- @ -- 'iconcatMapOf' :: 'IndexedGetter' i s a -> (i -> a -> [r]) -> s -> [r] -- 'iconcatMapOf' :: 'IndexedFold' i s a -> (i -> a -> [r]) -> s -> [r] -- 'iconcatMapOf' :: 'IndexedLens'' i s a -> (i -> a -> [r]) -> s -> [r] -- 'iconcatMapOf' :: 'IndexedTraversal'' i s a -> (i -> a -> [r]) -> s -> [r] -- @ iconcatMapOf :: IndexedGetting i [r] s a -> (i -> a -> [r]) -> s -> [r] iconcatMapOf = ifoldMapOf {-# INLINE iconcatMapOf #-} -- | The 'ifindOf' function takes an 'IndexedFold' or 'IndexedTraversal', a predicate that is also -- supplied the index, a structure and returns the left-most element of the structure -- matching the predicate, or 'Nothing' if there is no such element. -- -- When you don't need access to the index then 'findOf' is more flexible in what it accepts. -- -- @ -- 'findOf' l ≡ 'ifindOf' l '.' 'const' -- @ -- -- @ -- 'ifindOf' :: 'IndexedGetter' i s a -> (i -> a -> 'Bool') -> s -> 'Maybe' a -- 'ifindOf' :: 'IndexedFold' i s a -> (i -> a -> 'Bool') -> s -> 'Maybe' a -- 'ifindOf' :: 'IndexedLens'' i s a -> (i -> a -> 'Bool') -> s -> 'Maybe' a -- 'ifindOf' :: 'IndexedTraversal'' i s a -> (i -> a -> 'Bool') -> s -> 'Maybe' a -- @ ifindOf :: IndexedGetting i (Endo (Maybe a)) s a -> (i -> a -> Bool) -> s -> Maybe a ifindOf l f = ifoldrOf l (\i a y -> if f i a then Just a else y) Nothing {-# INLINE ifindOf #-} -- | The 'ifindMOf' function takes an 'IndexedFold' or 'IndexedTraversal', a monadic predicate that is also -- supplied the index, a structure and returns in the monad the left-most element of the structure -- matching the predicate, or 'Nothing' if there is no such element. -- -- When you don't need access to the index then 'findMOf' is more flexible in what it accepts. -- -- @ -- 'findMOf' l ≡ 'ifindMOf' l '.' 'const' -- @ -- -- @ -- 'ifindMOf' :: 'Monad' m => 'IndexedGetter' i s a -> (i -> a -> m 'Bool') -> s -> m ('Maybe' a) -- 'ifindMOf' :: 'Monad' m => 'IndexedFold' i s a -> (i -> a -> m 'Bool') -> s -> m ('Maybe' a) -- 'ifindMOf' :: 'Monad' m => 'IndexedLens'' i s a -> (i -> a -> m 'Bool') -> s -> m ('Maybe' a) -- 'ifindMOf' :: 'Monad' m => 'IndexedTraversal'' i s a -> (i -> a -> m 'Bool') -> s -> m ('Maybe' a) -- @ ifindMOf :: Monad m => IndexedGetting i (Endo (m (Maybe a))) s a -> (i -> a -> m Bool) -> s -> m (Maybe a) ifindMOf l f = ifoldrOf l (\i a y -> f i a >>= \r -> if r then return (Just a) else y) $ return Nothing {-# INLINE ifindMOf #-} -- | /Strictly/ fold right over the elements of a structure with an index. -- -- When you don't need access to the index then 'foldrOf'' is more flexible in what it accepts. -- -- @ -- 'foldrOf'' l ≡ 'ifoldrOf'' l '.' 'const' -- @ -- -- @ -- 'ifoldrOf'' :: 'IndexedGetter' i s a -> (i -> a -> r -> r) -> r -> s -> r -- 'ifoldrOf'' :: 'IndexedFold' i s a -> (i -> a -> r -> r) -> r -> s -> r -- 'ifoldrOf'' :: 'IndexedLens'' i s a -> (i -> a -> r -> r) -> r -> s -> r -- 'ifoldrOf'' :: 'IndexedTraversal'' i s a -> (i -> a -> r -> r) -> r -> s -> r -- @ ifoldrOf' :: IndexedGetting i (Dual (Endo (r -> r))) s a -> (i -> a -> r -> r) -> r -> s -> r ifoldrOf' l f z0 xs = ifoldlOf l f' id xs z0 where f' i k x z = k $! 
f i x z {-# INLINE ifoldrOf' #-} -- | Fold over the elements of a structure with an index, associating to the left, but /strictly/. -- -- When you don't need access to the index then 'foldlOf'' is more flexible in what it accepts. -- -- @ -- 'foldlOf'' l ≡ 'ifoldlOf'' l '.' 'const' -- @ -- -- @ -- 'ifoldlOf'' :: 'IndexedGetter' i s a -> (i -> r -> a -> r) -> r -> s -> r -- 'ifoldlOf'' :: 'IndexedFold' i s a -> (i -> r -> a -> r) -> r -> s -> r -- 'ifoldlOf'' :: 'IndexedLens'' i s a -> (i -> r -> a -> r) -> r -> s -> r -- 'ifoldlOf'' :: 'IndexedTraversal'' i s a -> (i -> r -> a -> r) -> r -> s -> r -- @ ifoldlOf' :: IndexedGetting i (Endo (r -> r)) s a -> (i -> r -> a -> r) -> r -> s -> r ifoldlOf' l f z0 xs = ifoldrOf l f' id xs z0 where f' i x k z = k $! f i z x {-# INLINE ifoldlOf' #-} -- | Monadic fold right over the elements of a structure with an index. -- -- When you don't need access to the index then 'foldrMOf' is more flexible in what it accepts. -- -- @ -- 'foldrMOf' l ≡ 'ifoldrMOf' l '.' 'const' -- @ -- -- @ -- 'ifoldrMOf' :: 'Monad' m => 'IndexedGetter' i s a -> (i -> a -> r -> m r) -> r -> s -> m r -- 'ifoldrMOf' :: 'Monad' m => 'IndexedFold' i s a -> (i -> a -> r -> m r) -> r -> s -> m r -- 'ifoldrMOf' :: 'Monad' m => 'IndexedLens'' i s a -> (i -> a -> r -> m r) -> r -> s -> m r -- 'ifoldrMOf' :: 'Monad' m => 'IndexedTraversal'' i s a -> (i -> a -> r -> m r) -> r -> s -> m r -- @ ifoldrMOf :: Monad m => IndexedGetting i (Dual (Endo (r -> m r))) s a -> (i -> a -> r -> m r) -> r -> s -> m r ifoldrMOf l f z0 xs = ifoldlOf l f' return xs z0 where f' i k x z = f i x z >>= k {-# INLINE ifoldrMOf #-} -- | Monadic fold over the elements of a structure with an index, associating to the left. -- -- When you don't need access to the index then 'foldlMOf' is more flexible in what it accepts. -- -- @ -- 'foldlMOf' l ≡ 'ifoldlMOf' l '.' 'const' -- @ -- -- @ -- 'ifoldlMOf' :: 'Monad' m => 'IndexedGetter' i s a -> (i -> r -> a -> m r) -> r -> s -> m r -- 'ifoldlMOf' :: 'Monad' m => 'IndexedFold' i s a -> (i -> r -> a -> m r) -> r -> s -> m r -- 'ifoldlMOf' :: 'Monad' m => 'IndexedLens'' i s a -> (i -> r -> a -> m r) -> r -> s -> m r -- 'ifoldlMOf' :: 'Monad' m => 'IndexedTraversal'' i s a -> (i -> r -> a -> m r) -> r -> s -> m r -- @ ifoldlMOf :: Monad m => IndexedGetting i (Endo (r -> m r)) s a -> (i -> r -> a -> m r) -> r -> s -> m r ifoldlMOf l f z0 xs = ifoldrOf l f' return xs z0 where f' i x k z = f i z x >>= k {-# INLINE ifoldlMOf #-} -- | Extract the key-value pairs from a structure. -- -- When you don't need access to the indices in the result, then 'toListOf' is more flexible in what it accepts. -- -- @ -- 'toListOf' l ≡ 'map' 'snd' '.' 'itoListOf' l -- @ -- -- @ -- 'itoListOf' :: 'IndexedGetter' i s a -> s -> [(i,a)] -- 'itoListOf' :: 'IndexedFold' i s a -> s -> [(i,a)] -- 'itoListOf' :: 'IndexedLens'' i s a -> s -> [(i,a)] -- 'itoListOf' :: 'IndexedTraversal'' i s a -> s -> [(i,a)] -- @ itoListOf :: IndexedGetting i (Endo [(i,a)]) s a -> s -> [(i,a)] itoListOf l = ifoldrOf l (\i a -> ((i,a):)) [] {-# INLINE itoListOf #-} -- | An infix version of 'itoListOf'. -- @ -- ('^@..') :: s -> 'IndexedGetter' i s a -> [(i,a)] -- ('^@..') :: s -> 'IndexedFold' i s a -> [(i,a)] -- ('^@..') :: s -> 'IndexedLens'' i s a -> [(i,a)] -- ('^@..') :: s -> 'IndexedTraversal'' i s a -> [(i,a)] -- @ (^@..) :: s -> IndexedGetting i (Endo [(i,a)]) s a -> [(i,a)] s ^@.. l = ifoldrOf l (\i a -> ((i,a):)) [] s {-# INLINE (^@..) 
#-} -- | Perform a safe 'head' (with index) of an 'IndexedFold' or 'IndexedTraversal' or retrieve 'Just' the index and result -- from an 'IndexedGetter' or 'IndexedLens'. -- -- When using a 'IndexedTraversal' as a partial 'IndexedLens', or an 'IndexedFold' as a partial 'IndexedGetter' this can be a convenient -- way to extract the optional value. -- -- @ -- ('^@?') :: s -> 'IndexedGetter' i s a -> 'Maybe' (i, a) -- ('^@?') :: s -> 'IndexedFold' i s a -> 'Maybe' (i, a) -- ('^@?') :: s -> 'IndexedLens'' i s a -> 'Maybe' (i, a) -- ('^@?') :: s -> 'IndexedTraversal'' i s a -> 'Maybe' (i, a) -- @ (^@?) :: s -> IndexedGetting i (Endo (Maybe (i, a))) s a -> Maybe (i, a) s ^@? l = ifoldrOf l (\i x _ -> Just (i,x)) Nothing s {-# INLINE (^@?) #-} -- | Perform an *UNSAFE* 'head' (with index) of an 'IndexedFold' or 'IndexedTraversal' assuming that it is there. -- -- @ -- ('^@?!') :: s -> 'IndexedGetter' i s a -> (i, a) -- ('^@?!') :: s -> 'IndexedFold' i s a -> (i, a) -- ('^@?!') :: s -> 'IndexedLens'' i s a -> (i, a) -- ('^@?!') :: s -> 'IndexedTraversal'' i s a -> (i, a) -- @ (^@?!) :: HasCallStack => s -> IndexedGetting i (Endo (i, a)) s a -> (i, a) s ^@?! l = ifoldrOf l (\i x _ -> (i,x)) (error "(^@?!): empty Fold") s {-# INLINE (^@?!) #-} -- | Retrieve the index of the first value targeted by a 'IndexedFold' or 'IndexedTraversal' which is equal to a given value. -- -- @ -- 'Data.List.elemIndex' ≡ 'elemIndexOf' 'folded' -- @ -- -- @ -- 'elemIndexOf' :: 'Eq' a => 'IndexedFold' i s a -> a -> s -> 'Maybe' i -- 'elemIndexOf' :: 'Eq' a => 'IndexedTraversal'' i s a -> a -> s -> 'Maybe' i -- @ elemIndexOf :: Eq a => IndexedGetting i (First i) s a -> a -> s -> Maybe i elemIndexOf l a = findIndexOf l (a ==) {-# INLINE elemIndexOf #-} -- | Retrieve the indices of the values targeted by a 'IndexedFold' or 'IndexedTraversal' which are equal to a given value. -- -- @ -- 'Data.List.elemIndices' ≡ 'elemIndicesOf' 'folded' -- @ -- -- @ -- 'elemIndicesOf' :: 'Eq' a => 'IndexedFold' i s a -> a -> s -> [i] -- 'elemIndicesOf' :: 'Eq' a => 'IndexedTraversal'' i s a -> a -> s -> [i] -- @ elemIndicesOf :: Eq a => IndexedGetting i (Endo [i]) s a -> a -> s -> [i] elemIndicesOf l a = findIndicesOf l (a ==) {-# INLINE elemIndicesOf #-} -- | Retrieve the index of the first value targeted by a 'IndexedFold' or 'IndexedTraversal' which satisfies a predicate. -- -- @ -- 'Data.List.findIndex' ≡ 'findIndexOf' 'folded' -- @ -- -- @ -- 'findIndexOf' :: 'IndexedFold' i s a -> (a -> 'Bool') -> s -> 'Maybe' i -- 'findIndexOf' :: 'IndexedTraversal'' i s a -> (a -> 'Bool') -> s -> 'Maybe' i -- @ findIndexOf :: IndexedGetting i (First i) s a -> (a -> Bool) -> s -> Maybe i findIndexOf l p = preview (l . filtered p . asIndex) {-# INLINE findIndexOf #-} -- | Retrieve the indices of the values targeted by a 'IndexedFold' or 'IndexedTraversal' which satisfy a predicate. -- -- @ -- 'Data.List.findIndices' ≡ 'findIndicesOf' 'folded' -- @ -- -- @ -- 'findIndicesOf' :: 'IndexedFold' i s a -> (a -> 'Bool') -> s -> [i] -- 'findIndicesOf' :: 'IndexedTraversal'' i s a -> (a -> 'Bool') -> s -> [i] -- @ findIndicesOf :: IndexedGetting i (Endo [i]) s a -> (a -> Bool) -> s -> [i] findIndicesOf l p = toListOf (l . filtered p . asIndex) {-# INLINE findIndicesOf #-} ------------------------------------------------------------------------------- -- Converting to Folds ------------------------------------------------------------------------------- -- | Filter an 'IndexedFold' or 'IndexedGetter', obtaining an 'IndexedFold'. 
-- -- >>> [0,0,0,5,5,5]^..traversed.ifiltered (\i a -> i <= a) -- [0,5,5,5] -- -- Compose with 'ifiltered' to filter another 'IndexedLens', 'IndexedIso', 'IndexedGetter', 'IndexedFold' (or 'IndexedTraversal') with -- access to both the value and the index. -- -- Note: As with 'filtered', this is /not/ a legal 'IndexedTraversal', unless you are very careful not to invalidate the predicate on the target! ifiltered :: (Indexable i p, Applicative f) => (i -> a -> Bool) -> Optical' p (Indexed i) f a a ifiltered p f = Indexed $ \i a -> if p i a then indexed f i a else pure a {-# INLINE ifiltered #-} -- | Obtain an 'IndexedFold' by taking elements from another -- 'IndexedFold', 'IndexedLens', 'IndexedGetter' or 'IndexedTraversal' while a predicate holds. -- -- @ -- 'itakingWhile' :: (i -> a -> 'Bool') -> 'IndexedFold' i s a -> 'IndexedFold' i s a -- 'itakingWhile' :: (i -> a -> 'Bool') -> 'IndexedTraversal'' i s a -> 'IndexedFold' i s a -- 'itakingWhile' :: (i -> a -> 'Bool') -> 'IndexedLens'' i s a -> 'IndexedFold' i s a -- 'itakingWhile' :: (i -> a -> 'Bool') -> 'IndexedGetter' i s a -> 'IndexedFold' i s a -- @ -- -- Note: Applying 'itakingWhile' to an 'IndexedLens' or 'IndexedTraversal' will still allow you to use it as a -- pseudo-'IndexedTraversal', but if you change the value of any target to one where the predicate returns -- 'False', then you will break the 'Traversal' laws and 'Traversal' fusion will no longer be sound. itakingWhile :: (Indexable i p, Profunctor q, Contravariant f, Applicative f) => (i -> a -> Bool) -> Optical' (Indexed i) q (Const (Endo (f s))) s a -> Optical' p q f s a itakingWhile p l f = (flip appEndo noEffect .# getConst) `rmap` l g where g = Indexed $ \i a -> Const . Endo $ if p i a then (indexed f i a *>) else const noEffect {-# INLINE itakingWhile #-} -- | Obtain an 'IndexedFold' by dropping elements from another 'IndexedFold', 'IndexedLens', 'IndexedGetter' or 'IndexedTraversal' while a predicate holds. -- -- @ -- 'idroppingWhile' :: (i -> a -> 'Bool') -> 'IndexedFold' i s a -> 'IndexedFold' i s a -- 'idroppingWhile' :: (i -> a -> 'Bool') -> 'IndexedTraversal'' i s a -> 'IndexedFold' i s a -- see notes -- 'idroppingWhile' :: (i -> a -> 'Bool') -> 'IndexedLens'' i s a -> 'IndexedFold' i s a -- see notes -- 'idroppingWhile' :: (i -> a -> 'Bool') -> 'IndexedGetter' i s a -> 'IndexedFold' i s a -- @ -- -- Note: As with `droppingWhile` applying 'idroppingWhile' to an 'IndexedLens' or 'IndexedTraversal' will still -- allow you to use it as a pseudo-'IndexedTraversal', but if you change the value of the first target to one -- where the predicate returns 'True', then you will break the 'Traversal' laws and 'Traversal' fusion will -- no longer be sound. idroppingWhile :: (Indexable i p, Profunctor q, Applicative f) => (i -> a -> Bool) -> Optical (Indexed i) q (Compose (State Bool) f) s t a a -> Optical p q f s t a a idroppingWhile p l f = (flip evalState True .# getCompose) `rmap` l g where g = Indexed $ \ i a -> Compose $ state $ \b -> let b' = b && p i a in (if b' then pure a else indexed f i a, b') {-# INLINE idroppingWhile #-} ------------------------------------------------------------------------------ -- Misc. 
------------------------------------------------------------------------------ skip :: a -> () skip _ = () {-# INLINE skip #-} ------------------------------------------------------------------------------ -- Folds with Reified Monoid ------------------------------------------------------------------------------ -- | Fold a value using a specified 'Fold' and 'Monoid' operations. -- This is like 'foldBy' where the 'Foldable' instance can be -- manually specified. -- -- @ -- 'foldByOf' 'folded' ≡ 'foldBy' -- @ -- -- @ -- 'foldByOf' :: 'Getter' s a -> (a -> a -> a) -> a -> s -> a -- 'foldByOf' :: 'Fold' s a -> (a -> a -> a) -> a -> s -> a -- 'foldByOf' :: 'Lens'' s a -> (a -> a -> a) -> a -> s -> a -- 'foldByOf' :: 'Traversal'' s a -> (a -> a -> a) -> a -> s -> a -- 'foldByOf' :: 'Iso'' s a -> (a -> a -> a) -> a -> s -> a -- @ -- -- >>> foldByOf both (++) [] ("hello","world") -- "helloworld" foldByOf :: Fold s a -> (a -> a -> a) -> a -> s -> a foldByOf l f z = reifyMonoid f z (foldMapOf l ReflectedMonoid) -- | Fold a value using a specified 'Fold' and 'Monoid' operations. -- This is like 'foldMapBy' where the 'Foldable' instance can be -- manually specified. -- -- @ -- 'foldMapByOf' 'folded' ≡ 'foldMapBy' -- @ -- -- @ -- 'foldMapByOf' :: 'Getter' s a -> (r -> r -> r) -> r -> (a -> r) -> s -> r -- 'foldMapByOf' :: 'Fold' s a -> (r -> r -> r) -> r -> (a -> r) -> s -> r -- 'foldMapByOf' :: 'Traversal'' s a -> (r -> r -> r) -> r -> (a -> r) -> s -> r -- 'foldMapByOf' :: 'Lens'' s a -> (r -> r -> r) -> r -> (a -> r) -> s -> r -- 'foldMapByOf' :: 'Iso'' s a -> (r -> r -> r) -> r -> (a -> r) -> s -> r -- @ -- -- >>> foldMapByOf both (+) 0 length ("hello","world") -- 10 foldMapByOf :: Fold s a -> (r -> r -> r) -> r -> (a -> r) -> s -> r foldMapByOf l f z g = reifyMonoid f z (foldMapOf l (ReflectedMonoid #. g))
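-- A few supplementary usage examples for combinators defined above
-- ('previews', 'ipreview', 'findIndexOf', 'elemIndicesOf'), offered only as a
-- minimal sketch: they assume the public "Control.Lens" interface is in scope,
-- and the literal lists and strings are made up purely for illustration.
-- Each line is runnable in GHCi after @import Control.Lens@.
--
-- >>> previews folded show [1,2,3]
-- Just "1"
--
-- >>> ipreview folded "abc"
-- Just (0,'a')
--
-- >>> findIndexOf folded even [1,3,5,6,7]
-- Just 3
--
-- >>> elemIndicesOf folded 'a' "banana"
-- [1,3,5]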
ddssff/lens
src/Control/Lens/Fold.hs
bsd-3-clause
101,174
0
17
23,476
12,481
7,444
5,037
-1
-1
{-# language CPP #-} -- | = Name -- -- VK_EXT_fragment_shader_interlock - device extension -- -- == VK_EXT_fragment_shader_interlock -- -- [__Name String__] -- @VK_EXT_fragment_shader_interlock@ -- -- [__Extension Type__] -- Device extension -- -- [__Registered Extension Number__] -- 252 -- -- [__Revision__] -- 1 -- -- [__Extension and Version Dependencies__] -- -- - Requires Vulkan 1.0 -- -- - Requires @VK_KHR_get_physical_device_properties2@ -- -- [__Contact__] -- -- - Piers Daniell -- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_EXT_fragment_shader_interlock] @pdaniell-nv%0A<<Here describe the issue or question you have about the VK_EXT_fragment_shader_interlock extension>> > -- -- == Other Extension Metadata -- -- [__Last Modified Date__] -- 2019-05-02 -- -- [__Interactions and External Dependencies__] -- -- - This extension requires -- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/EXT/SPV_EXT_fragment_shader_interlock.html SPV_EXT_fragment_shader_interlock> -- -- - This extension provides API support for -- <https://www.khronos.org/registry/OpenGL/extensions/ARB/ARB_fragment_shader_interlock.txt GL_ARB_fragment_shader_interlock> -- -- [__Contributors__] -- -- - Daniel Koch, NVIDIA -- -- - Graeme Leese, Broadcom -- -- - Jan-Harald Fredriksen, Arm -- -- - Jason Ekstrand, Intel -- -- - Jeff Bolz, NVIDIA -- -- - Ruihao Zhang, Qualcomm -- -- - Slawomir Grajewski, Intel -- -- - Spencer Fricke, Samsung -- -- == Description -- -- This extension adds support for the @FragmentShaderPixelInterlockEXT@, -- @FragmentShaderSampleInterlockEXT@, and -- @FragmentShaderShadingRateInterlockEXT@ capabilities from the -- @SPV_EXT_fragment_shader_interlock@ extension to Vulkan. -- -- Enabling these capabilities provides a critical section for fragment -- shaders to avoid overlapping pixels being processed at the same time, -- and certain guarantees about the ordering of fragment shader invocations -- of fragments of overlapping pixels. -- -- This extension can be useful for algorithms that need to access -- per-pixel data structures via shader loads and stores. Algorithms using -- this extension can access per-pixel data structures in critical sections -- without other invocations accessing the same per-pixel data. -- Additionally, the ordering guarantees are useful for cases where the API -- ordering of fragments is meaningful. For example, applications may be -- able to execute programmable blending operations in the fragment shader, -- where the destination buffer is read via image loads and the final value -- is written via image stores. 
-- -- == New Structures -- -- - Extending -- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2', -- 'Vulkan.Core10.Device.DeviceCreateInfo': -- -- - 'PhysicalDeviceFragmentShaderInterlockFeaturesEXT' -- -- == New Enum Constants -- -- - 'EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME' -- -- - 'EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION' -- -- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType': -- -- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT' -- -- == New SPIR-V Capabilities -- -- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-FragmentShaderSampleInterlockEXT FragmentShaderInterlockEXT> -- -- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-FragmentShaderPixelInterlockEXT FragmentShaderPixelInterlockEXT> -- -- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-FragmentShaderShadingRateInterlockEXT FragmentShaderShadingRateInterlockEXT> -- -- == Version History -- -- - Revision 1, 2019-05-24 (Piers Daniell) -- -- - Internal revisions -- -- == See Also -- -- 'PhysicalDeviceFragmentShaderInterlockFeaturesEXT' -- -- == Document Notes -- -- For more information, see the -- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_EXT_fragment_shader_interlock Vulkan Specification> -- -- This page is a generated document. Fixes and changes should be made to -- the generator scripts, not directly. module Vulkan.Extensions.VK_EXT_fragment_shader_interlock ( PhysicalDeviceFragmentShaderInterlockFeaturesEXT(..) , EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION , pattern EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION , EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME , pattern EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME ) where import Foreign.Marshal.Alloc (allocaBytes) import Foreign.Ptr (nullPtr) import Foreign.Ptr (plusPtr) import Vulkan.CStruct (FromCStruct) import Vulkan.CStruct (FromCStruct(..)) import Vulkan.CStruct (ToCStruct) import Vulkan.CStruct (ToCStruct(..)) import Vulkan.Zero (Zero(..)) import Data.String (IsString) import Data.Typeable (Typeable) import Foreign.Storable (Storable) import Foreign.Storable (Storable(peek)) import Foreign.Storable (Storable(poke)) import qualified Foreign.Storable (Storable(..)) import GHC.Generics (Generic) import Foreign.Ptr (Ptr) import Data.Kind (Type) import Vulkan.Core10.FundamentalTypes (bool32ToBool) import Vulkan.Core10.FundamentalTypes (boolToBool32) import Vulkan.Core10.FundamentalTypes (Bool32) import Vulkan.Core10.Enums.StructureType (StructureType) import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT)) -- | VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT - Structure -- describing fragment shader interlock features that can be supported by -- an implementation -- -- = Members -- -- This structure describes the following features: -- -- = Description -- -- If the 'PhysicalDeviceFragmentShaderInterlockFeaturesEXT' structure is -- included in the @pNext@ chain of the -- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2' -- structure passed to -- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2', -- it is filled in to indicate whether each corresponding feature is -- supported. 
'PhysicalDeviceFragmentShaderInterlockFeaturesEXT' /can/ also -- be used in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo' -- to selectively enable these features. -- -- == Valid Usage (Implicit) -- -- = See Also -- -- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_fragment_shader_interlock VK_EXT_fragment_shader_interlock>, -- 'Vulkan.Core10.FundamentalTypes.Bool32', -- 'Vulkan.Core10.Enums.StructureType.StructureType' data PhysicalDeviceFragmentShaderInterlockFeaturesEXT = PhysicalDeviceFragmentShaderInterlockFeaturesEXT { -- | #features-fragmentShaderSampleInterlock# @fragmentShaderSampleInterlock@ -- indicates that the implementation supports the -- @FragmentShaderSampleInterlockEXT@ SPIR-V capability. fragmentShaderSampleInterlock :: Bool , -- | #features-fragmentShaderPixelInterlock# @fragmentShaderPixelInterlock@ -- indicates that the implementation supports the -- @FragmentShaderPixelInterlockEXT@ SPIR-V capability. fragmentShaderPixelInterlock :: Bool , -- | #features-fragmentShaderShadingRateInterlock# -- @fragmentShaderShadingRateInterlock@ indicates that the implementation -- supports the @FragmentShaderShadingRateInterlockEXT@ SPIR-V capability. fragmentShaderShadingRateInterlock :: Bool } deriving (Typeable, Eq) #if defined(GENERIC_INSTANCES) deriving instance Generic (PhysicalDeviceFragmentShaderInterlockFeaturesEXT) #endif deriving instance Show PhysicalDeviceFragmentShaderInterlockFeaturesEXT instance ToCStruct PhysicalDeviceFragmentShaderInterlockFeaturesEXT where withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p) pokeCStruct p PhysicalDeviceFragmentShaderInterlockFeaturesEXT{..} f = do poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT) poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr) poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (fragmentShaderSampleInterlock)) poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (fragmentShaderPixelInterlock)) poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (fragmentShaderShadingRateInterlock)) f cStructSize = 32 cStructAlignment = 8 pokeZeroCStruct p f = do poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT) poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr) poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero)) poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero)) poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (zero)) f instance FromCStruct PhysicalDeviceFragmentShaderInterlockFeaturesEXT where peekCStruct p = do fragmentShaderSampleInterlock <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32)) fragmentShaderPixelInterlock <- peek @Bool32 ((p `plusPtr` 20 :: Ptr Bool32)) fragmentShaderShadingRateInterlock <- peek @Bool32 ((p `plusPtr` 24 :: Ptr Bool32)) pure $ PhysicalDeviceFragmentShaderInterlockFeaturesEXT (bool32ToBool fragmentShaderSampleInterlock) (bool32ToBool fragmentShaderPixelInterlock) (bool32ToBool fragmentShaderShadingRateInterlock) instance Storable PhysicalDeviceFragmentShaderInterlockFeaturesEXT where sizeOf ~_ = 32 alignment ~_ = 8 peek = peekCStruct poke ptr poked = pokeCStruct ptr poked (pure ()) instance Zero PhysicalDeviceFragmentShaderInterlockFeaturesEXT where zero = PhysicalDeviceFragmentShaderInterlockFeaturesEXT zero zero zero type EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION = 1 -- No documentation found for TopLevel "VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION" pattern 
EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION :: forall a . Integral a => a pattern EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION = 1 type EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME = "VK_EXT_fragment_shader_interlock" -- No documentation found for TopLevel "VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME" pattern EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a pattern EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME = "VK_EXT_fragment_shader_interlock"
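
-- The Haddock text above explains that this structure is consumed via the
-- @pNext@ chain of 'PhysicalDeviceFeatures2' or 'DeviceCreateInfo'. As a
-- minimal, illustrative sketch (not part of the generated binding), the
-- snippet below uses only names defined or imported in this module: it builds
-- the feature struct from 'zero', marshals it with 'withCStruct', and reads it
-- back with 'peekCStruct'. The helper names themselves are hypothetical.

-- | Hypothetical helper: request sample and pixel interlock, leaving the
-- shading-rate variant disabled (a record update over 'zero').
demoInterlockFeatures :: PhysicalDeviceFragmentShaderInterlockFeaturesEXT
demoInterlockFeatures = zero
  { fragmentShaderSampleInterlock = True
  , fragmentShaderPixelInterlock  = True
  }

-- | Hypothetical helper: round-trip the struct through its C representation,
-- exercising the 'ToCStruct' and 'FromCStruct' instances defined above.
demoInterlockRoundTrip :: IO ()
demoInterlockRoundTrip =
  withCStruct demoInterlockFeatures $ \p -> do
    feats <- peekCStruct p
    print feats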
expipiplus1/vulkan
src/Vulkan/Extensions/VK_EXT_fragment_shader_interlock.hs
bsd-3-clause
10,854
0
14
1,638
1,235
763
472
-1
-1
{-# LANGUAGE PatternSynonyms #-} -------------------------------------------------------------------------------- -- | -- Module : Graphics.GL.NV.HalfFloat -- Copyright : (c) Sven Panne 2019 -- License : BSD3 -- -- Maintainer : Sven Panne <[email protected]> -- Stability : stable -- Portability : portable -- -------------------------------------------------------------------------------- module Graphics.GL.NV.HalfFloat ( -- * Extension Support glGetNVHalfFloat, gl_NV_half_float, -- * Enums pattern GL_HALF_FLOAT_NV, -- * Functions glColor3hNV, glColor3hvNV, glColor4hNV, glColor4hvNV, glFogCoordhNV, glFogCoordhvNV, glMultiTexCoord1hNV, glMultiTexCoord1hvNV, glMultiTexCoord2hNV, glMultiTexCoord2hvNV, glMultiTexCoord3hNV, glMultiTexCoord3hvNV, glMultiTexCoord4hNV, glMultiTexCoord4hvNV, glNormal3hNV, glNormal3hvNV, glSecondaryColor3hNV, glSecondaryColor3hvNV, glTexCoord1hNV, glTexCoord1hvNV, glTexCoord2hNV, glTexCoord2hvNV, glTexCoord3hNV, glTexCoord3hvNV, glTexCoord4hNV, glTexCoord4hvNV, glVertex2hNV, glVertex2hvNV, glVertex3hNV, glVertex3hvNV, glVertex4hNV, glVertex4hvNV, glVertexAttrib1hNV, glVertexAttrib1hvNV, glVertexAttrib2hNV, glVertexAttrib2hvNV, glVertexAttrib3hNV, glVertexAttrib3hvNV, glVertexAttrib4hNV, glVertexAttrib4hvNV, glVertexAttribs1hvNV, glVertexAttribs2hvNV, glVertexAttribs3hvNV, glVertexAttribs4hvNV, glVertexWeighthNV, glVertexWeighthvNV ) where import Graphics.GL.ExtensionPredicates import Graphics.GL.Tokens import Graphics.GL.Functions
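
-- Illustrative sketch (not part of OpenGLRaw): guarding use of the half-float
-- entry points on runtime support. It assumes the usual OpenGLRaw conventions
-- (glGetNVHalfFloat :: MonadIO m => m Bool, vertex functions in MonadIO, and
-- GLhalfNV as a 16-bit word type from Graphics.GL.Types); the helper name and
-- the extra imports are hypothetical additions, shown in doctest style so the
-- re-export module itself is left unchanged.
--
-- > import Control.Monad (when)
-- > import Graphics.GL.Types (GLhalfNV)
-- >
-- > emitUnitVertex :: IO ()
-- > emitUnitVertex = do
-- >   ok <- glGetNVHalfFloat
-- >   -- 0x3C00 is the IEEE 754 half-precision encoding of 1.0
-- >   when ok $ glVertex3hNV 0x3C00 0x3C00 0x3C00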
haskell-opengl/OpenGLRaw
src/Graphics/GL/NV/HalfFloat.hs
bsd-3-clause
1,606
0
5
236
192
133
59
54
0
-- Copyright (c) 2016-present, Facebook, Inc. -- All rights reserved. -- -- This source code is licensed under the BSD-style license found in the -- LICENSE file in the root directory of this source tree. {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedStrings #-} module Duckling.Numeral.UK.Rules ( rules ) where import Data.HashMap.Strict (HashMap) import Data.Maybe import Data.String import Data.Text (Text) import Prelude import qualified Data.HashMap.Strict as HashMap import qualified Data.Text as Text import Duckling.Dimensions.Types import Duckling.Numeral.Helpers import Duckling.Numeral.Types (NumeralData (..)) import Duckling.Regex.Types import Duckling.Types import qualified Duckling.Numeral.Types as TNumeral twentyNinetyMap :: HashMap Text Integer twentyNinetyMap = HashMap.fromList [ ( "двадцять" , 20 ) , ( "тридцять" , 30 ) , ( "сорок" , 40 ) , ( "п‘ятдесят" , 50 ) , ( "шістдесят" , 60 ) , ( "сімдесят" , 70 ) , ( "дев‘яносто", 90 ) , ( "вісімдесят", 80 ) ] ruleInteger5 :: Rule ruleInteger5 = Rule { name = "integer (20..90)" , pattern = [ regex "(двадцять|тридцять|сорок|п‘ятдесят|шістдесят|сімдесят|вісімдесят|дев‘яносто)" ] , prod = \tokens -> case tokens of (Token RegexMatch (GroupMatch (match:_)):_) -> HashMap.lookup (Text.toLower match) twentyNinetyMap >>= integer _ -> Nothing } ruleDecimalWithThousandsSeparator :: Rule ruleDecimalWithThousandsSeparator = Rule { name = "decimal with thousands separator" , pattern = [ regex "(\\d+(,\\d\\d\\d)+\\.\\d+)" ] , prod = \tokens -> case tokens of (Token RegexMatch (GroupMatch (match:_)):_) -> parseDouble (Text.replace "," Text.empty match) >>= double _ -> Nothing } ruleDecimalNumeral :: Rule ruleDecimalNumeral = Rule { name = "decimal number" , pattern = [ regex "(\\d*\\.\\d+)" ] , prod = \tokens -> case tokens of (Token RegexMatch (GroupMatch (match:_)):_) -> parseDecimal True match _ -> Nothing } ruleInteger3 :: Rule ruleInteger3 = Rule { name = "integer 2" , pattern = [ regex "(два|дві|двоє|пара|пару|парочку|парочка)" ] , prod = \_ -> integer 2 } hundredsMap :: HashMap Text Integer hundredsMap = HashMap.fromList [ ( "сто" , 100 ) , ( "двісті" , 200 ) , ( "триста" , 300 ) , ( "чотириста" , 400 ) , ( "п‘ятсот" , 500 ) , ( "шістсот" , 600 ) , ( "сімсот" , 700 ) , ( "вісімсот" , 800 ) , ( "дев‘ятсот" , 900 ) ] ruleInteger6 :: Rule ruleInteger6 = Rule { name = "integer (100..900)" , pattern = [ regex "(сто|двісті|триста|чотириста|п‘ятсот|шістсот|сімсот|вісімсот|дев‘ятсот)" ] , prod = \tokens -> case tokens of (Token RegexMatch (GroupMatch (match:_)):_) -> HashMap.lookup (Text.toLower match) hundredsMap >>= integer _ -> Nothing } ruleNumeralsPrefixWithMinus :: Rule ruleNumeralsPrefixWithMinus = Rule { name = "numbers prefix with -, minus" , pattern = [ regex "-|мінус\\s?" 
, dimension Numeral ] , prod = \tokens -> case tokens of (_:Token Numeral nd:_) -> double (TNumeral.value nd * (-1)) _ -> Nothing } ruleNumeralsSuffixesKMG :: Rule ruleNumeralsSuffixesKMG = Rule { name = "numbers suffixes (K, M, G)" , pattern = [ dimension Numeral , regex "((к|м|г)|(К|М|Г))(?=[\\W\\$€]|$)" ] , prod = \tokens -> case tokens of (Token Numeral NumeralData{TNumeral.value = v}: Token RegexMatch (GroupMatch (match:_)): _) -> case Text.toLower match of "к" -> double $ v * 1e3 "К" -> double $ v * 1e3 "м" -> double $ v * 1e6 "М" -> double $ v * 1e6 "г" -> double $ v * 1e9 "Г" -> double $ v * 1e9 _ -> Nothing _ -> Nothing } ruleInteger7 :: Rule ruleInteger7 = Rule { name = "integer 21..99" , pattern = [ oneOf [70, 20, 60, 50, 40, 90, 30, 80] , numberBetween 1 10 ] , prod = \tokens -> case tokens of (Token Numeral NumeralData{TNumeral.value = v1}: Token Numeral NumeralData{TNumeral.value = v2}: _) -> double $ v1 + v2 _ -> Nothing } ruleInteger8 :: Rule ruleInteger8 = Rule { name = "integer 101..999" , pattern = [ oneOf [300, 600, 500, 100, 800, 200, 900, 700, 400] , numberBetween 1 100 ] , prod = \tokens -> case tokens of (Token Numeral NumeralData{TNumeral.value = v1}: Token Numeral NumeralData{TNumeral.value = v2}: _) -> double $ v1 + v2 _ -> Nothing } ruleInteger :: Rule ruleInteger = Rule { name = "integer 0" , pattern = [ regex "(нуль)" ] , prod = \_ -> integer 0 } threeNineteenMap :: HashMap Text Integer threeNineteenMap = HashMap.fromList [ ( "три" , 3 ) , ( "чотири" , 4 ) , ( "п‘ять" , 5 ) , ( "шість" , 6 ) , ( "сім" , 7 ) , ( "вісім" , 8 ) , ( "дев‘ять" , 9 ) , ( "десять" , 10 ) , ( "одинадцять" , 11 ) , ( "дванадцять" , 12 ) , ( "тринадцять" , 13 ) , ( "чотирнадцять" , 14 ) , ( "п‘ятнадцять" , 15 ) , ( "шістнадцять" , 16 ) , ( "сімнадцять" , 17 ) , ( "вісімнадцять" , 18 ) , ( "дев‘ятнадцять" , 19 ) ] ruleInteger4 :: Rule ruleInteger4 = Rule { name = "integer (3..19)" , pattern = [ regex "(три|чотирнадцять|чотири|п‘ятнадцять|п‘ять|шістнадцять|шість|сімнадцять|сім|вісімнадцять|вісім|дев‘ятнадцять|дев‘ять|десять|одинадцять|дванадцять|тринадцять)" ] , prod = \tokens -> case tokens of (Token RegexMatch (GroupMatch (match:_)):_) -> HashMap.lookup (Text.toLower match) threeNineteenMap >>= integer _ -> Nothing } ruleInteger2 :: Rule ruleInteger2 = Rule { name = "integer 1" , pattern = [ regex "(один|одна|одну|одне|одного)" ] , prod = \_ -> integer 1 } ruleNumeralDotNumeral :: Rule ruleNumeralDotNumeral = Rule { name = "number dot number" , pattern = [ dimension Numeral , regex "крапка" , Predicate $ not . hasGrain ] , prod = \tokens -> case tokens of (Token Numeral nd1:_:Token Numeral nd2:_) -> double $ TNumeral.value nd1 + decimalsToDouble (TNumeral.value nd2) _ -> Nothing } ruleIntegerWithThousandsSeparator :: Rule ruleIntegerWithThousandsSeparator = Rule { name = "integer with thousands separator ," , pattern = [ regex "(\\d{1,3}(,\\d\\d\\d){1,5})" ] , prod = \tokens -> case tokens of (Token RegexMatch (GroupMatch (match:_)): _) -> let fmt = Text.replace "," Text.empty match in parseDouble fmt >>= double _ -> Nothing } rules :: [Rule] rules = [ ruleDecimalNumeral , ruleDecimalWithThousandsSeparator , ruleInteger , ruleInteger2 , ruleInteger3 , ruleInteger4 , ruleInteger5 , ruleInteger6 , ruleInteger7 , ruleInteger8 , ruleIntegerWithThousandsSeparator , ruleNumeralDotNumeral , ruleNumeralsPrefixWithMinus , ruleNumeralsSuffixesKMG ]
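
-- Illustrative sketch (not part of Duckling): the regex rules above resolve a
-- matched word through the word-to-value maps and then combine the values,
-- e.g. ruleInteger7 adds a units value to a tens value from twentyNinetyMap,
-- so "сорок три" parses as 40 + 3 = 43. The helper below is hypothetical and
-- merely restates that lookup step using names already in scope here.
lookupTensWord :: Text -> Maybe Integer
lookupTensWord w = HashMap.lookup (Text.toLower w) twentyNinetyMap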
facebookincubator/duckling
Duckling/Numeral/UK/Rules.hs
bsd-3-clause
7,868
0
18
2,059
1,987
1,145
842
205
8
{- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 \section[RnNames]{Extracting imported and top-level names in scope} -} {-# LANGUAGE CPP, NondecreasingIndentation #-} module RnNames ( rnImports, getLocalNonValBinders, newRecordSelector, rnExports, extendGlobalRdrEnvRn, gresFromAvails, calculateAvails, reportUnusedNames, checkConName ) where #include "HsVersions.h" import DynFlags import HsSyn import TcEnv import RnEnv import RnHsDoc ( rnHsDoc ) import LoadIface ( loadSrcInterface ) import TcRnMonad import PrelNames import Module import Name import NameEnv import NameSet import Avail import FieldLabel import HscTypes import RdrName import RdrHsSyn ( setRdrNameSpace ) import Outputable import Maybes import SrcLoc import BasicTypes ( TopLevelFlag(..), StringLiteral(..) ) import ErrUtils import Util import FastString import FastStringEnv import ListSetOps import Id import Type import PatSyn import qualified GHC.LanguageExtensions as LangExt import Control.Monad import Data.Either ( partitionEithers, isRight, rights ) -- import qualified Data.Foldable as Foldable import Data.Map ( Map ) import qualified Data.Map as Map import Data.Ord ( comparing ) import Data.List ( partition, (\\), find, sortBy ) -- import qualified Data.Set as Set import System.FilePath ((</>)) import System.IO {- ************************************************************************ * * \subsection{rnImports} * * ************************************************************************ Note [Tracking Trust Transitively] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When we import a package as well as checking that the direct imports are safe according to the rules outlined in the Note [HscMain . Safe Haskell Trust Check] we must also check that these rules hold transitively for all dependent modules and packages. Doing this without caching any trust information would be very slow as we would need to touch all packages and interface files a module depends on. To avoid this we make use of the property that if a modules Safe Haskell mode changes, this triggers a recompilation from that module in the dependcy graph. So we can just worry mostly about direct imports. There is one trust property that can change for a package though without recompliation being triggered: package trust. So we must check that all packages a module tranitively depends on to be trusted are still trusted when we are compiling this module (as due to recompilation avoidance some modules below may not be considered trusted any more without recompilation being triggered). We handle this by augmenting the existing transitive list of packages a module M depends on with a bool for each package that says if it must be trusted when the module M is being checked for trust. This list of trust required packages for a single import is gathered in the rnImportDecl function and stored in an ImportAvails data structure. The union of these trust required packages for all imports is done by the rnImports function using the combine function which calls the plusImportAvails function that is a union operation for the ImportAvails type. This gives us in an ImportAvails structure all packages required to be trusted for the module we are currently compiling. Checking that these packages are still trusted (and that direct imports are trusted) is done in HscMain.checkSafeImports. See the note below, [Trust Own Package] for a corner case in this method and how its handled. 
Note [Trust Own Package] ~~~~~~~~~~~~~~~~~~~~~~~~ There is a corner case of package trust checking that the usual transitive check doesn't cover. (For how the usual check operates see the Note [Tracking Trust Transitively] below). The case is when you import a -XSafe module M and M imports a -XTrustworthy module N. If N resides in a different package than M, then the usual check works as M will record a package dependency on N's package and mark it as required to be trusted. If N resides in the same package as M though, then importing M should require its own package be trusted due to N (since M is -XSafe so doesn't create this requirement by itself). The usual check fails as a module doesn't record a package dependency of its own package. So instead we now have a bool field in a modules interface file that simply states if the module requires its own package to be trusted. This field avoids us having to load all interface files that the module depends on to see if one is trustworthy. Note [Trust Transitive Property] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ So there is an interesting design question in regards to transitive trust checking. Say I have a module B compiled with -XSafe. B is dependent on a bunch of modules and packages, some packages it requires to be trusted as its using -XTrustworthy modules from them. Now if I have a module A that doesn't use safe haskell at all and simply imports B, should A inherit all the the trust requirements from B? Should A now also require that a package p is trusted since B required it? We currently say no but saying yes also makes sense. The difference is, if a module M that doesn't use Safe Haskell imports a module N that does, should all the trusted package requirements be dropped since M didn't declare that it cares about Safe Haskell (so -XSafe is more strongly associated with the module doing the importing) or should it be done still since the author of the module N that uses Safe Haskell said they cared (so -XSafe is more strongly associated with the module that was compiled that used it). Going with yes is a simpler semantics we think and harder for the user to stuff up but it does mean that Safe Haskell will affect users who don't care about Safe Haskell as they might grab a package from Cabal which uses safe haskell (say network) and that packages imports -XTrustworthy modules from another package (say bytestring), so requires that package is trusted. The user may now get compilation errors in code that doesn't do anything with Safe Haskell simply because they are using the network package. They will have to call 'ghc-pkg trust network' to get everything working. Due to this invasive nature of going with yes we have gone with no for now. -} -- | Process Import Decls. See 'rnImportDecl' for a description of what -- the return types represent. 
-- Note: Do the non SOURCE ones first, so that we get a helpful warning -- for SOURCE ones that are unnecessary rnImports :: [LImportDecl RdrName] -> RnM ([LImportDecl Name], GlobalRdrEnv, ImportAvails, AnyHpcUsage) rnImports imports = do this_mod <- getModule let (source, ordinary) = partition is_source_import imports is_source_import d = ideclSource (unLoc d) stuff1 <- mapAndReportM (rnImportDecl this_mod) ordinary stuff2 <- mapAndReportM (rnImportDecl this_mod) source -- Safe Haskell: See Note [Tracking Trust Transitively] let (decls, rdr_env, imp_avails, hpc_usage) = combine (stuff1 ++ stuff2) return (decls, rdr_env, imp_avails, hpc_usage) where combine :: [(LImportDecl Name, GlobalRdrEnv, ImportAvails, AnyHpcUsage)] -> ([LImportDecl Name], GlobalRdrEnv, ImportAvails, AnyHpcUsage) combine = foldr plus ([], emptyGlobalRdrEnv, emptyImportAvails, False) plus (decl, gbl_env1, imp_avails1,hpc_usage1) (decls, gbl_env2, imp_avails2,hpc_usage2) = ( decl:decls, gbl_env1 `plusGlobalRdrEnv` gbl_env2, imp_avails1 `plusImportAvails` imp_avails2, hpc_usage1 || hpc_usage2 ) -- | Given a located import declaration @decl@ from @this_mod@, -- calculate the following pieces of information: -- -- 1. An updated 'LImportDecl', where all unresolved 'RdrName' in -- the entity lists have been resolved into 'Name's, -- -- 2. A 'GlobalRdrEnv' representing the new identifiers that were -- brought into scope (taking into account module qualification -- and hiding), -- -- 3. 'ImportAvails' summarizing the identifiers that were imported -- by this declaration, and -- -- 4. A boolean 'AnyHpcUsage' which is true if the imported module -- used HPC. rnImportDecl :: Module -> LImportDecl RdrName -> RnM (LImportDecl Name, GlobalRdrEnv, ImportAvails, AnyHpcUsage) rnImportDecl this_mod (L loc decl@(ImportDecl { ideclName = loc_imp_mod_name, ideclPkgQual = mb_pkg , ideclSource = want_boot, ideclSafe = mod_safe , ideclQualified = qual_only, ideclImplicit = implicit , ideclAs = as_mod, ideclHiding = imp_details })) = setSrcSpan loc $ do when (isJust mb_pkg) $ do pkg_imports <- xoptM LangExt.PackageImports when (not pkg_imports) $ addErr packageImportErr -- If there's an error in loadInterface, (e.g. interface -- file not found) we get lots of spurious errors from 'filterImports' let imp_mod_name = unLoc loc_imp_mod_name doc = ppr imp_mod_name <+> text "is directly imported" -- Check for self-import, which confuses the typechecker (Trac #9032) -- ghc --make rejects self-import cycles already, but batch-mode may not -- at least not until TcIface.tcHiBootIface, which is too late to avoid -- typechecker crashes. (Indirect self imports are not caught until -- TcIface, see #10337 tracking how to make this error better.) -- -- Originally, we also allowed 'import {-# SOURCE #-} M', but this -- caused bug #10182: in one-shot mode, we should never load an hs-boot -- file for the module we are compiling into the EPS. In principle, -- it should be possible to support this mode of use, but we would have to -- extend Provenance to support a local definition in a qualified location. -- For now, we don't support it, but see #10336 when (imp_mod_name == moduleName this_mod && (case mb_pkg of -- If we have import "<pkg>" M, then we should -- check that "<pkg>" is "this" (which is magic) -- or the name of this_mod's package. Yurgh! -- c.f. 
GHC.findModule, and Trac #9997 Nothing -> True Just (StringLiteral _ pkg_fs) -> pkg_fs == fsLit "this" || fsToUnitId pkg_fs == moduleUnitId this_mod)) (addErr (text "A module cannot import itself:" <+> ppr imp_mod_name)) -- Check for a missing import list (Opt_WarnMissingImportList also -- checks for T(..) items but that is done in checkDodgyImport below) case imp_details of Just (False, _) -> return () -- Explicit import list _ | implicit -> return () -- Do not bleat for implicit imports | qual_only -> return () | otherwise -> whenWOptM Opt_WarnMissingImportList $ addWarn (Reason Opt_WarnMissingImportList) (missingImportListWarn imp_mod_name) iface <- loadSrcInterface doc imp_mod_name want_boot (fmap sl_fs mb_pkg) -- Compiler sanity check: if the import didn't say -- {-# SOURCE #-} we should not get a hi-boot file WARN( not want_boot && mi_boot iface, ppr imp_mod_name ) do -- Issue a user warning for a redundant {- SOURCE -} import -- NB that we arrange to read all the ordinary imports before -- any of the {- SOURCE -} imports. -- -- in --make and GHCi, the compilation manager checks for this, -- and indeed we shouldn't do it here because the existence of -- the non-boot module depends on the compilation order, which -- is not deterministic. The hs-boot test can show this up. dflags <- getDynFlags warnIf NoReason (want_boot && not (mi_boot iface) && isOneShot (ghcMode dflags)) (warnRedundantSourceImport imp_mod_name) when (mod_safe && not (safeImportsOn dflags)) $ addErr (text "safe import can't be used as Safe Haskell isn't on!" $+$ ptext (sLit $ "please enable Safe Haskell through either " ++ "Safe, Trustworthy or Unsafe")) let qual_mod_name = as_mod `orElse` imp_mod_name imp_spec = ImpDeclSpec { is_mod = imp_mod_name, is_qual = qual_only, is_dloc = loc, is_as = qual_mod_name } -- filter the imports according to the import declaration (new_imp_details, gres) <- filterImports iface imp_spec imp_details -- for certain error messages, we’d like to know what could be imported -- here, if everything were imported potential_gres <- mkGlobalRdrEnv . snd <$> filterImports iface imp_spec Nothing let gbl_env = mkGlobalRdrEnv gres is_hiding | Just (True,_) <- imp_details = True | otherwise = False -- should the import be safe? mod_safe' = mod_safe || (not implicit && safeDirectImpsReq dflags) || (implicit && safeImplicitImpsReq dflags) let imv = ImportedModsVal { imv_name = qual_mod_name , imv_span = loc , imv_is_safe = mod_safe' , imv_is_hiding = is_hiding , imv_all_exports = potential_gres , imv_qualified = qual_only } let imports = (calculateAvails dflags iface mod_safe' want_boot) { imp_mods = unitModuleEnv (mi_module iface) [imv] } -- Complain if we import a deprecated module whenWOptM Opt_WarnWarningsDeprecations ( case (mi_warns iface) of WarnAll txt -> addWarn (Reason Opt_WarnWarningsDeprecations) (moduleWarn imp_mod_name txt) _ -> return () ) let new_imp_decl = L loc (decl { ideclSafe = mod_safe' , ideclHiding = new_imp_details }) return (new_imp_decl, gbl_env, imports, mi_hpc iface) -- | Calculate the 'ImportAvails' induced by an import of a particular -- interface, but without 'imp_mods'. calculateAvails :: DynFlags -> ModIface -> IsSafeImport -> IsBootInterface -> ImportAvails calculateAvails dflags iface mod_safe' want_boot = let imp_mod = mi_module iface orph_iface = mi_orphan iface has_finsts = mi_finsts iface deps = mi_deps iface trust = getSafeMode $ mi_trust iface trust_pkg = mi_trust_pkg iface -- If the module exports anything defined in this module, just -- ignore it. 
Reason: otherwise it looks as if there are two -- local definition sites for the thing, and an error gets -- reported. Easiest thing is just to filter them out up -- front. This situation only arises if a module imports -- itself, or another module that imported it. (Necessarily, -- this invoves a loop.) -- -- We do this *after* filterImports, so that if you say -- module A where -- import B( AType ) -- type AType = ... -- -- module B( AType ) where -- import {-# SOURCE #-} A( AType ) -- -- then you won't get a 'B does not export AType' message. -- Compute new transitive dependencies orphans | orph_iface = ASSERT( not (imp_mod `elem` dep_orphs deps) ) imp_mod : dep_orphs deps | otherwise = dep_orphs deps finsts | has_finsts = ASSERT( not (imp_mod `elem` dep_finsts deps) ) imp_mod : dep_finsts deps | otherwise = dep_finsts deps pkg = moduleUnitId (mi_module iface) -- Does this import mean we now require our own pkg -- to be trusted? See Note [Trust Own Package] ptrust = trust == Sf_Trustworthy || trust_pkg (dependent_mods, dependent_pkgs, pkg_trust_req) | pkg == thisPackage dflags = -- Imported module is from the home package -- Take its dependent modules and add imp_mod itself -- Take its dependent packages unchanged -- -- NB: (dep_mods deps) might include a hi-boot file -- for the module being compiled, CM. Do *not* filter -- this out (as we used to), because when we've -- finished dealing with the direct imports we want to -- know if any of them depended on CM.hi-boot, in -- which case we should do the hi-boot consistency -- check. See LoadIface.loadHiBootInterface ((moduleName imp_mod,want_boot):dep_mods deps,dep_pkgs deps,ptrust) | otherwise = -- Imported module is from another package -- Dump the dependent modules -- Add the package imp_mod comes from to the dependent packages ASSERT2( not (pkg `elem` (map fst $ dep_pkgs deps)) , ppr pkg <+> ppr (dep_pkgs deps) ) ([], (pkg, False) : dep_pkgs deps, False) in ImportAvails { imp_mods = emptyModuleEnv, -- this gets filled in later imp_orphs = orphans, imp_finsts = finsts, imp_dep_mods = mkModDeps dependent_mods, imp_dep_pkgs = map fst $ dependent_pkgs, -- Add in the imported modules trusted package -- requirements. ONLY do this though if we import the -- module as a safe import. -- See Note [Tracking Trust Transitively] -- and Note [Trust Transitive Property] imp_trust_pkgs = if mod_safe' then map fst $ filter snd dependent_pkgs else [], -- Do we require our own pkg to be trusted? -- See Note [Trust Own Package] imp_trust_own_pkg = pkg_trust_req } warnRedundantSourceImport :: ModuleName -> SDoc warnRedundantSourceImport mod_name = text "Unnecessary {-# SOURCE #-} in the import of module" <+> quotes (ppr mod_name) {- ************************************************************************ * * \subsection{importsFromLocalDecls} * * ************************************************************************ From the top-level declarations of this module produce * the lexical environment * the ImportAvails created by its bindings. Note [Top-level Names in Template Haskell decl quotes] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ See also: Note [Interactively-bound Ids in GHCi] in HscTypes Note [Looking up Exact RdrNames] in RnEnv Consider a Template Haskell declaration quotation like this: module M where f x = h [d| f = 3 |] When renaming the declarations inside [d| ...|], we treat the top level binders specially in two ways 1. We give them an Internal Name, not (as usual) an External one. This is done by RnEnv.newTopSrcBinder. 2. 
We make them *shadow* the outer bindings. See Note [GlobalRdrEnv shadowing] 3. We find out whether we are inside a [d| ... |] by testing the TH stage. This is a slight hack, because the stage field was really meant for the type checker, and here we are not interested in the fields of Brack, hence the error thunks in thRnBrack. -} extendGlobalRdrEnvRn :: [AvailInfo] -> MiniFixityEnv -> RnM (TcGblEnv, TcLclEnv) -- Updates both the GlobalRdrEnv and the FixityEnv -- We return a new TcLclEnv only because we might have to -- delete some bindings from it; -- see Note [Top-level Names in Template Haskell decl quotes] extendGlobalRdrEnvRn avails new_fixities = do { (gbl_env, lcl_env) <- getEnvs ; stage <- getStage ; isGHCi <- getIsGHCi ; let rdr_env = tcg_rdr_env gbl_env fix_env = tcg_fix_env gbl_env th_bndrs = tcl_th_bndrs lcl_env th_lvl = thLevel stage -- Delete new_occs from global and local envs -- If we are in a TemplateHaskell decl bracket, -- we are going to shadow them -- See Note [GlobalRdrEnv shadowing] inBracket = isBrackStage stage lcl_env_TH = lcl_env { tcl_rdr = delLocalRdrEnvList (tcl_rdr lcl_env) new_occs } -- See Note [GlobalRdrEnv shadowing] lcl_env2 | inBracket = lcl_env_TH | otherwise = lcl_env -- Deal with shadowing: see Note [GlobalRdrEnv shadowing] want_shadowing = isGHCi || inBracket rdr_env1 | want_shadowing = shadowNames rdr_env new_names | otherwise = rdr_env lcl_env3 = lcl_env2 { tcl_th_bndrs = extendNameEnvList th_bndrs [ (n, (TopLevel, th_lvl)) | n <- new_names ] } ; rdr_env2 <- foldlM add_gre rdr_env1 new_gres ; let fix_env' = foldl extend_fix_env fix_env new_gres gbl_env' = gbl_env { tcg_rdr_env = rdr_env2, tcg_fix_env = fix_env' } ; traceRn (text "extendGlobalRdrEnvRn 2" <+> (pprGlobalRdrEnv True rdr_env2)) ; return (gbl_env', lcl_env3) } where new_names = concatMap availNames avails new_occs = map nameOccName new_names -- If there is a fixity decl for the gre, add it to the fixity env extend_fix_env fix_env gre | Just (L _ fi) <- lookupFsEnv new_fixities (occNameFS occ) = extendNameEnv fix_env name (FixItem occ fi) | otherwise = fix_env where name = gre_name gre occ = greOccName gre new_gres :: [GlobalRdrElt] -- New LocalDef GREs, derived from avails new_gres = concatMap localGREsFromAvail avails add_gre :: GlobalRdrEnv -> GlobalRdrElt -> RnM GlobalRdrEnv -- Extend the GlobalRdrEnv with a LocalDef GRE -- If there is already a LocalDef GRE with the same OccName, -- report an error and discard the new GRE -- This establishes INVARIANT 1 of GlobalRdrEnvs add_gre env gre | not (null dups) -- Same OccName defined twice = do { addDupDeclErr (gre : dups); return env } | otherwise = return (extendGlobalRdrEnv env gre) where name = gre_name gre occ = nameOccName name dups = filter isLocalGRE (lookupGlobalRdrEnv env occ) {- ********************************************************************* * * getLocalDeclBindersd@ returns the names for an HsDecl It's used for source code. 
*** See Note [The Naming story] in HsDecls **** * * ********************************************************************* -} getLocalNonValBinders :: MiniFixityEnv -> HsGroup RdrName -> RnM ((TcGblEnv, TcLclEnv), NameSet) -- Get all the top-level binders bound the group *except* -- for value bindings, which are treated separately -- Specifically we return AvailInfo for -- * type decls (incl constructors and record selectors) -- * class decls (including class ops) -- * associated types -- * foreign imports -- * value signatures (in hs-boot files only) getLocalNonValBinders fixity_env (HsGroup { hs_valds = binds, hs_tyclds = tycl_decls, hs_fords = foreign_decls }) = do { -- Process all type/class decls *except* family instances ; let inst_decls = tycl_decls >>= group_instds ; overload_ok <- xoptM LangExt.DuplicateRecordFields ; (tc_avails, tc_fldss) <- fmap unzip $ mapM (new_tc overload_ok) (tyClGroupTyClDecls tycl_decls) ; traceRn (text "getLocalNonValBinders 1" <+> ppr tc_avails) ; envs <- extendGlobalRdrEnvRn tc_avails fixity_env ; setEnvs envs $ do { -- Bring these things into scope first -- See Note [Looking up family names in family instances] -- Process all family instances -- to bring new data constructors into scope ; (nti_availss, nti_fldss) <- mapAndUnzipM (new_assoc overload_ok) inst_decls -- Finish off with value binders: -- foreign decls and pattern synonyms for an ordinary module -- type sigs in case of a hs-boot file only ; is_boot <- tcIsHsBootOrSig ; let val_bndrs | is_boot = hs_boot_sig_bndrs | otherwise = for_hs_bndrs ; val_avails <- mapM new_simple val_bndrs ; let avails = concat nti_availss ++ val_avails new_bndrs = availsToNameSetWithSelectors avails `unionNameSet` availsToNameSetWithSelectors tc_avails flds = concat nti_fldss ++ concat tc_fldss ; traceRn (text "getLocalNonValBinders 2" <+> ppr avails) ; (tcg_env, tcl_env) <- extendGlobalRdrEnvRn avails fixity_env -- Extend tcg_field_env with new fields (this used to be the -- work of extendRecordFieldEnv) ; let field_env = extendNameEnvList (tcg_field_env tcg_env) flds envs = (tcg_env { tcg_field_env = field_env }, tcl_env) ; traceRn (text "getLocalNonValBinders 3" <+> vcat [ppr flds, ppr field_env]) ; return (envs, new_bndrs) } } where ValBindsIn _val_binds val_sigs = binds for_hs_bndrs :: [Located RdrName] for_hs_bndrs = hsForeignDeclsBinders foreign_decls -- In a hs-boot file, the value binders come from the -- *signatures*, and there should be no foreign binders hs_boot_sig_bndrs = [ L decl_loc (unLoc n) | L decl_loc (TypeSig ns _) <- val_sigs, n <- ns] -- the SrcSpan attached to the input should be the span of the -- declaration, not just the name new_simple :: Located RdrName -> RnM AvailInfo new_simple rdr_name = do{ nm <- newTopSrcBinder rdr_name ; return (avail nm) } new_tc :: Bool -> LTyClDecl RdrName -> RnM (AvailInfo, [(Name, [FieldLabel])]) new_tc overload_ok tc_decl -- NOT for type/data instances = do { let (bndrs, flds) = hsLTyClDeclBinders tc_decl ; names@(main_name : sub_names) <- mapM newTopSrcBinder bndrs ; flds' <- mapM (newRecordSelector overload_ok sub_names) flds ; let fld_env = case unLoc tc_decl of DataDecl { tcdDataDefn = d } -> mk_fld_env d names flds' _ -> [] ; return (AvailTC main_name names flds', fld_env) } -- Calculate the mapping from constructor names to fields, which -- will go in tcg_field_env. It's convenient to do this here where -- we are working with a single datatype definition. 
mk_fld_env :: HsDataDefn RdrName -> [Name] -> [FieldLabel] -> [(Name, [FieldLabel])] mk_fld_env d names flds = concatMap find_con_flds (dd_cons d) where find_con_flds (L _ (ConDeclH98 { con_name = L _ rdr , con_details = RecCon cdflds })) = [( find_con_name rdr , concatMap find_con_decl_flds (unLoc cdflds) )] find_con_flds (L _ (ConDeclGADT { con_names = rdrs , con_type = (HsIB { hsib_body = res_ty})})) = map (\ (L _ rdr) -> ( find_con_name rdr , concatMap find_con_decl_flds cdflds)) rdrs where (_tvs, _cxt, tau) = splitLHsSigmaTy res_ty cdflds = case tau of L _ (HsFunTy (L _ (HsAppsTy [L _ (HsAppPrefix (L _ (HsRecTy flds)))])) _) -> flds L _ (HsFunTy (L _ (HsRecTy flds)) _) -> flds _ -> [] find_con_flds _ = [] find_con_name rdr = expectJust "getLocalNonValBinders/find_con_name" $ find (\ n -> nameOccName n == rdrNameOcc rdr) names find_con_decl_flds (L _ x) = map find_con_decl_fld (cd_fld_names x) find_con_decl_fld (L _ (FieldOcc (L _ rdr) _)) = expectJust "getLocalNonValBinders/find_con_decl_fld" $ find (\ fl -> flLabel fl == lbl) flds where lbl = occNameFS (rdrNameOcc rdr) new_assoc :: Bool -> LInstDecl RdrName -> RnM ([AvailInfo], [(Name, [FieldLabel])]) new_assoc _ (L _ (TyFamInstD {})) = return ([], []) -- type instances don't bind new names new_assoc overload_ok (L _ (DataFamInstD d)) = do { (avail, flds) <- new_di overload_ok Nothing d ; return ([avail], flds) } new_assoc overload_ok (L _ (ClsInstD (ClsInstDecl { cid_poly_ty = inst_ty , cid_datafam_insts = adts }))) | Just (L loc cls_rdr) <- getLHsInstDeclClass_maybe inst_ty = do { cls_nm <- setSrcSpan loc $ lookupGlobalOccRn cls_rdr ; (avails, fldss) <- mapAndUnzipM (new_loc_di overload_ok (Just cls_nm)) adts ; return (avails, concat fldss) } | otherwise = return ([], []) -- Do not crash on ill-formed instances -- Eg instance !Show Int Trac #3811c new_di :: Bool -> Maybe Name -> DataFamInstDecl RdrName -> RnM (AvailInfo, [(Name, [FieldLabel])]) new_di overload_ok mb_cls ti_decl = do { main_name <- lookupFamInstName mb_cls (dfid_tycon ti_decl) ; let (bndrs, flds) = hsDataFamInstBinders ti_decl ; sub_names <- mapM newTopSrcBinder bndrs ; flds' <- mapM (newRecordSelector overload_ok sub_names) flds ; let avail = AvailTC (unLoc main_name) sub_names flds' -- main_name is not bound here! fld_env = mk_fld_env (dfid_defn ti_decl) sub_names flds' ; return (avail, fld_env) } new_loc_di :: Bool -> Maybe Name -> LDataFamInstDecl RdrName -> RnM (AvailInfo, [(Name, [FieldLabel])]) new_loc_di overload_ok mb_cls (L _ d) = new_di overload_ok mb_cls d newRecordSelector :: Bool -> [Name] -> LFieldOcc RdrName -> RnM FieldLabel newRecordSelector _ [] _ = error "newRecordSelector: datatype has no constructors!" newRecordSelector overload_ok (dc:_) (L loc (FieldOcc (L _ fld) _)) = do { selName <- newTopSrcBinder $ L loc $ field ; return $ qualFieldLbl { flSelector = selName } } where fieldOccName = occNameFS $ rdrNameOcc fld qualFieldLbl = mkFieldLabelOccs fieldOccName (nameOccName dc) overload_ok field | isExact fld = fld -- use an Exact RdrName as is to preserve the bindings -- of an already renamer-resolved field and its use -- sites. This is needed to correctly support record -- selectors in Template Haskell. See Note [Binders in -- Template Haskell] in Convert.hs and Note [Looking up -- Exact RdrNames] in RnEnv.hs. 
| otherwise = mkRdrUnqual (flSelector qualFieldLbl) {- Note [Looking up family names in family instances] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider module M where type family T a :: * type instance M.T Int = Bool We might think that we can simply use 'lookupOccRn' when processing the type instance to look up 'M.T'. Alas, we can't! The type family declaration is in the *same* HsGroup as the type instance declaration. Hence, as we are currently collecting the binders declared in that HsGroup, these binders will not have been added to the global environment yet. Solution is simple: process the type family declarations first, extend the environment, and then process the type instances. ************************************************************************ * * \subsection{Filtering imports} * * ************************************************************************ @filterImports@ takes the @ExportEnv@ telling what the imported module makes available, and filters it through the import spec (if any). Note [Dealing with imports] ~~~~~~~~~~~~~~~~~~~~~~~~~~~ For import M( ies ), we take the mi_exports of M, and make imp_occ_env :: OccEnv (Name, AvailInfo, Maybe Name) One entry for each Name that M exports; the AvailInfo is the AvailInfo exported from M that exports that Name. The situation is made more complicated by associated types. E.g. module M where class C a where { data T a } instance C Int where { data T Int = T1 | T2 } instance C Bool where { data T Int = T3 } Then M's export_avails are (recall the AvailTC invariant from Avails.hs) C(C,T), T(T,T1,T2,T3) Notice that T appears *twice*, once as a child and once as a parent. From this we construct the imp_occ_env C -> (C, C(C,T), Nothing) T -> (T, T(T,T1,T2,T3), Just C) T1 -> (T1, T(T,T1,T2,T3), Nothing) -- similarly T2,T3 If we say import M( T(T1,T2) ) then we get *two* Avails: C(T), T(T1,T2) Note that the imp_occ_env will have entries for data constructors too, although we never look up data constructors. 
-} filterImports :: ModIface -> ImpDeclSpec -- The span for the entire import decl -> Maybe (Bool, Located [LIE RdrName]) -- Import spec; True => hiding -> RnM (Maybe (Bool, Located [LIE Name]), -- Import spec w/ Names [GlobalRdrElt]) -- Same again, but in GRE form filterImports iface decl_spec Nothing = return (Nothing, gresFromAvails (Just imp_spec) (mi_exports iface)) where imp_spec = ImpSpec { is_decl = decl_spec, is_item = ImpAll } filterImports iface decl_spec (Just (want_hiding, L l import_items)) = do -- check for errors, convert RdrNames to Names items1 <- mapM lookup_lie import_items let items2 :: [(LIE Name, AvailInfo)] items2 = concat items1 -- NB the AvailInfo may have duplicates, and several items -- for the same parent; e.g N(x) and N(y) names = availsToNameSet (map snd items2) keep n = not (n `elemNameSet` names) pruned_avails = filterAvails keep all_avails hiding_spec = ImpSpec { is_decl = decl_spec, is_item = ImpAll } gres | want_hiding = gresFromAvails (Just hiding_spec) pruned_avails | otherwise = concatMap (gresFromIE decl_spec) items2 return (Just (want_hiding, L l (map fst items2)), gres) where all_avails = mi_exports iface -- See Note [Dealing with imports] imp_occ_env :: OccEnv (Name, -- the name AvailInfo, -- the export item providing the name Maybe Name) -- the parent of associated types imp_occ_env = mkOccEnv_C combine [ (nameOccName n, (n, a, Nothing)) | a <- all_avails, n <- availNames a] where -- See example in Note [Dealing with imports] -- 'combine' is only called for associated types which appear twice -- in the all_avails. In the example, we combine -- T(T,T1,T2,T3) and C(C,T) to give (T, T(T,T1,T2,T3), Just C) combine (name1, a1@(AvailTC p1 _ []), mp1) (name2, a2@(AvailTC p2 _ []), mp2) = ASSERT( name1 == name2 && isNothing mp1 && isNothing mp2 ) if p1 == name1 then (name1, a1, Just p2) else (name1, a2, Just p1) combine x y = pprPanic "filterImports/combine" (ppr x $$ ppr y) lookup_name :: RdrName -> IELookupM (Name, AvailInfo, Maybe Name) lookup_name rdr | isQual rdr = failLookupWith (QualImportError rdr) | Just succ <- mb_success = return succ | otherwise = failLookupWith BadImport where mb_success = lookupOccEnv imp_occ_env (rdrNameOcc rdr) lookup_lie :: LIE RdrName -> TcRn [(LIE Name, AvailInfo)] lookup_lie (L loc ieRdr) = do (stuff, warns) <- setSrcSpan loc $ liftM (fromMaybe ([],[])) $ run_lookup (lookup_ie ieRdr) mapM_ emit_warning warns return [ (L loc ie, avail) | (ie,avail) <- stuff ] where -- Warn when importing T(..) if T was exported abstractly emit_warning (DodgyImport n) = whenWOptM Opt_WarnDodgyImports $ addWarn (Reason Opt_WarnDodgyImports) (dodgyImportWarn n) emit_warning MissingImportList = whenWOptM Opt_WarnMissingImportList $ addWarn (Reason Opt_WarnMissingImportList) (missingImportListItem ieRdr) emit_warning BadImportW = whenWOptM Opt_WarnDodgyImports $ addWarn (Reason Opt_WarnDodgyImports) (lookup_err_msg BadImport) run_lookup :: IELookupM a -> TcRn (Maybe a) run_lookup m = case m of Failed err -> addErr (lookup_err_msg err) >> return Nothing Succeeded a -> return (Just a) lookup_err_msg err = case err of BadImport -> badImportItemErr iface decl_spec ieRdr all_avails IllegalImport -> illegalImportItemErr QualImportError rdr -> qualImportItemErr rdr -- For each import item, we convert its RdrNames to Names, -- and at the same time construct an AvailInfo corresponding -- to what is actually imported by this item. -- Returns Nothing on error. 
-- We return a list here, because in the case of an import -- item like C, if we are hiding, then C refers to *both* a -- type/class and a data constructor. Moreover, when we import -- data constructors of an associated family, we need separate -- AvailInfos for the data constructors and the family (as they have -- different parents). See Note [Dealing with imports] lookup_ie :: IE RdrName -> IELookupM ([(IE Name, AvailInfo)], [IELookupWarning]) lookup_ie ie = handle_bad_import $ do case ie of IEVar (L l n) -> do (name, avail, _) <- lookup_name n return ([(IEVar (L l name), trimAvail avail name)], []) IEThingAll (L l tc) -> do (name, avail, mb_parent) <- lookup_name tc let warns = case avail of Avail {} -- e.g. f(..) -> [DodgyImport tc] AvailTC _ subs fs | null (drop 1 subs) && null fs -- e.g. T(..) where T is a synonym -> [DodgyImport tc] | not (is_qual decl_spec) -- e.g. import M( T(..) ) -> [MissingImportList] | otherwise -> [] renamed_ie = IEThingAll (L l name) sub_avails = case avail of Avail {} -> [] AvailTC name2 subs fs -> [(renamed_ie, AvailTC name2 (subs \\ [name]) fs)] case mb_parent of Nothing -> return ([(renamed_ie, avail)], warns) -- non-associated ty/cls Just parent -> return ((renamed_ie, AvailTC parent [name] []) : sub_avails, warns) -- associated type IEThingAbs (L l tc) | want_hiding -- hiding ( C ) -- Here the 'C' can be a data constructor -- *or* a type/class, or even both -> let tc_name = lookup_name tc dc_name = lookup_name (setRdrNameSpace tc srcDataName) in case catIELookupM [ tc_name, dc_name ] of [] -> failLookupWith BadImport names -> return ([mkIEThingAbs l name | name <- names], []) | otherwise -> do nameAvail <- lookup_name tc return ([mkIEThingAbs l nameAvail], []) IEThingWith (L l rdr_tc) wc rdr_ns rdr_fs -> ASSERT2(null rdr_fs, ppr rdr_fs) do (name, AvailTC _ ns subflds, mb_parent) <- lookup_name rdr_tc -- Look up the children in the sub-names of the parent let subnames = case ns of -- The tc is first in ns, [] -> [] -- if it is there at all -- See the AvailTC Invariant in Avail.hs (n1:ns1) | n1 == name -> ns1 | otherwise -> ns case lookupChildren (map Left subnames ++ map Right subflds) rdr_ns of Nothing -> failLookupWith BadImport Just (childnames, childflds) -> case mb_parent of -- non-associated ty/cls Nothing -> return ([(IEThingWith (L l name) wc childnames childflds, AvailTC name (name:map unLoc childnames) (map unLoc childflds))], []) -- associated ty Just parent -> return ([(IEThingWith (L l name) wc childnames childflds, AvailTC name (map unLoc childnames) (map unLoc childflds)), (IEThingWith (L l name) wc childnames childflds, AvailTC parent [name] [])], []) _other -> failLookupWith IllegalImport -- could be IEModuleContents, IEGroup, IEDoc, IEDocNamed -- all errors. where mkIEThingAbs l (n, av, Nothing ) = (IEThingAbs (L l n), trimAvail av n) mkIEThingAbs l (n, _, Just parent) = (IEThingAbs (L l n), AvailTC parent [n] []) handle_bad_import m = catchIELookup m $ \err -> case err of BadImport | want_hiding -> return ([], [BadImportW]) _ -> failLookupWith err type IELookupM = MaybeErr IELookupError data IELookupWarning = BadImportW | MissingImportList | DodgyImport RdrName -- NB. 
use the RdrName for reporting a "dodgy" import data IELookupError = QualImportError RdrName | BadImport | IllegalImport failLookupWith :: IELookupError -> IELookupM a failLookupWith err = Failed err catchIELookup :: IELookupM a -> (IELookupError -> IELookupM a) -> IELookupM a catchIELookup m h = case m of Succeeded r -> return r Failed err -> h err catIELookupM :: [IELookupM a] -> [a] catIELookupM ms = [ a | Succeeded a <- ms ] {- ************************************************************************ * * \subsection{Import/Export Utils} * * ************************************************************************ -} plusAvail :: AvailInfo -> AvailInfo -> AvailInfo plusAvail a1 a2 | debugIsOn && availName a1 /= availName a2 = pprPanic "RnEnv.plusAvail names differ" (hsep [ppr a1,ppr a2]) plusAvail a1@(Avail {}) (Avail {}) = a1 plusAvail (AvailTC _ [] []) a2@(AvailTC {}) = a2 plusAvail a1@(AvailTC {}) (AvailTC _ [] []) = a1 plusAvail (AvailTC n1 (s1:ss1) fs1) (AvailTC n2 (s2:ss2) fs2) = case (n1==s1, n2==s2) of -- Maintain invariant the parent is first (True,True) -> AvailTC n1 (s1 : (ss1 `unionLists` ss2)) (fs1 `unionLists` fs2) (True,False) -> AvailTC n1 (s1 : (ss1 `unionLists` (s2:ss2))) (fs1 `unionLists` fs2) (False,True) -> AvailTC n1 (s2 : ((s1:ss1) `unionLists` ss2)) (fs1 `unionLists` fs2) (False,False) -> AvailTC n1 ((s1:ss1) `unionLists` (s2:ss2)) (fs1 `unionLists` fs2) plusAvail (AvailTC n1 ss1 fs1) (AvailTC _ [] fs2) = AvailTC n1 ss1 (fs1 `unionLists` fs2) plusAvail (AvailTC n1 [] fs1) (AvailTC _ ss2 fs2) = AvailTC n1 ss2 (fs1 `unionLists` fs2) plusAvail a1 a2 = pprPanic "RnEnv.plusAvail" (hsep [ppr a1,ppr a2]) -- | trims an 'AvailInfo' to keep only a single name trimAvail :: AvailInfo -> Name -> AvailInfo trimAvail (Avail b n) _ = Avail b n trimAvail (AvailTC n ns fs) m = case find ((== m) . flSelector) fs of Just x -> AvailTC n [] [x] Nothing -> ASSERT( m `elem` ns ) AvailTC n [m] [] -- | filters 'AvailInfo's by the given predicate filterAvails :: (Name -> Bool) -> [AvailInfo] -> [AvailInfo] filterAvails keep avails = foldr (filterAvail keep) [] avails -- | filters an 'AvailInfo' by the given predicate filterAvail :: (Name -> Bool) -> AvailInfo -> [AvailInfo] -> [AvailInfo] filterAvail keep ie rest = case ie of Avail _ n | keep n -> ie : rest | otherwise -> rest AvailTC tc ns fs -> let ns' = filter keep ns fs' = filter (keep . flSelector) fs in if null ns' && null fs' then rest else AvailTC tc ns' fs' : rest -- | Given an import\/export spec, construct the appropriate 'GlobalRdrElt's. gresFromIE :: ImpDeclSpec -> (LIE Name, AvailInfo) -> [GlobalRdrElt] gresFromIE decl_spec (L loc ie, avail) = gresFromAvail prov_fn avail where is_explicit = case ie of IEThingAll (L _ name) -> \n -> n == name _ -> \_ -> True prov_fn name = Just (ImpSpec { is_decl = decl_spec, is_item = item_spec }) where item_spec = ImpSome { is_explicit = is_explicit name, is_iloc = loc } {- Note [Children for duplicate record fields] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider the module {-# LANGUAGE DuplicateRecordFields #-} module M (F(foo, MkFInt, MkFBool)) where data family F a data instance F Int = MkFInt { foo :: Int } data instance F Bool = MkFBool { foo :: Bool } The `foo` in the export list refers to *both* selectors! For this reason, lookupChildren builds an environment that maps the FastString to a list of items, rather than a single item. 
-} mkChildEnv :: [GlobalRdrElt] -> NameEnv [GlobalRdrElt] mkChildEnv gres = foldr add emptyNameEnv gres where add gre env = case gre_par gre of FldParent p _ -> extendNameEnv_Acc (:) singleton env p gre ParentIs p -> extendNameEnv_Acc (:) singleton env p gre NoParent -> env PatternSynonym -> env findPatSyns :: [GlobalRdrElt] -> [GlobalRdrElt] findPatSyns gres = foldr add [] gres where add g@(GRE { gre_par = PatternSynonym }) ps = g:ps add _ ps = ps findChildren :: NameEnv [a] -> Name -> [a] findChildren env n = lookupNameEnv env n `orElse` [] lookupChildren :: [Either Name FieldLabel] -> [Located RdrName] -> Maybe ([Located Name], [Located FieldLabel]) -- (lookupChildren all_kids rdr_items) maps each rdr_item to its -- corresponding Name all_kids, if the former exists -- The matching is done by FastString, not OccName, so that -- Cls( meth, AssocTy ) -- will correctly find AssocTy among the all_kids of Cls, even though -- the RdrName for AssocTy may have a (bogus) DataName namespace -- (Really the rdr_items should be FastStrings in the first place.) lookupChildren all_kids rdr_items = do xs <- mapM doOne rdr_items return (fmap concat (partitionEithers xs)) where doOne (L l r) = case (lookupFsEnv kid_env . occNameFS . rdrNameOcc) r of Just [Left n] -> Just (Left (L l n)) Just rs | all isRight rs -> Just (Right (map (L l) (rights rs))) _ -> Nothing -- See Note [Children for duplicate record fields] kid_env = extendFsEnvList_C (++) emptyFsEnv [(either (occNameFS . nameOccName) flLabel x, [x]) | x <- all_kids] classifyGREs :: [GlobalRdrElt] -> ([Name], [FieldLabel]) classifyGREs = partitionEithers . map classifyGRE classifyGRE :: GlobalRdrElt -> Either Name FieldLabel classifyGRE gre = case gre_par gre of FldParent _ Nothing -> Right (FieldLabel (occNameFS (nameOccName n)) False n) FldParent _ (Just lbl) -> Right (FieldLabel lbl True n) _ -> Left n where n = gre_name gre -- | Combines 'AvailInfo's from the same family -- 'avails' may have several items with the same availName -- E.g import Ix( Ix(..), index ) -- will give Ix(Ix,index,range) and Ix(index) -- We want to combine these; addAvail does that nubAvails :: [AvailInfo] -> [AvailInfo] nubAvails avails = nameEnvElts (foldl add emptyNameEnv avails) where add env avail = extendNameEnv_C plusAvail env (availName avail) avail {- ************************************************************************ * * \subsection{Export list processing} * * ************************************************************************ Processing the export list. You might think that we should record things that appear in the export list as ``occurrences'' (using @addOccurrenceName@), but you'd be wrong. We do check (here) that they are in scope, but there is no need to slurp in their actual declaration (which is what @addOccurrenceName@ forces). Indeed, doing so would big trouble when compiling @PrelBase@, because it re-exports @GHC@, which includes @takeMVar#@, whose type includes @ConcBase.StateAndSynchVar#@, and so on... Note [Exports of data families] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Suppose you see (Trac #5306) module M where import X( F ) data instance F Int = FInt What does M export? AvailTC F [FInt] or AvailTC F [F,FInt]? The former is strictly right because F isn't defined in this module. But then you can never do an explicit import of M, thus import M( F( FInt ) ) because F isn't exported by M. Nor can you import FInt alone from here import M( FInt ) because we don't have syntax to support that. (It looks like an import of the type FInt.) 
At one point I implemented a compromise: * When constructing exports with no export list, or with module M( module M ), we add the parent to the exports as well. * But not when you see module M( f ), even if f is a class method with a parent. * Nor when you see module M( module N ), with N /= M. But the compromise seemed too much of a hack, so we backed it out. You just have to use an explicit export list: module M( F(..) ) where ... -} type ExportAccum -- The type of the accumulating parameter of -- the main worker function in rnExports = ([LIE Name], -- Export items with Names ExportOccMap, -- Tracks exported occurrence names [AvailInfo]) -- The accumulated exported stuff -- Not nub'd! emptyExportAccum :: ExportAccum emptyExportAccum = ([], emptyOccEnv, []) type ExportOccMap = OccEnv (Name, IE RdrName) -- Tracks what a particular exported OccName -- in an export list refers to, and which item -- it came from. It's illegal to export two distinct things -- that have the same occurrence name rnExports :: Bool -- False => no 'module M(..) where' header at all -> Maybe (Located [LIE RdrName]) -- Nothing => no explicit export list -> TcGblEnv -> RnM (Maybe [LIE Name], TcGblEnv) -- Complains if two distinct exports have same OccName -- Warns about identical exports. -- Complains about exports items not in scope rnExports explicit_mod exports tcg_env@(TcGblEnv { tcg_mod = this_mod, tcg_rdr_env = rdr_env, tcg_imports = imports }) = unsetWOptM Opt_WarnWarningsDeprecations $ -- Do not report deprecations arising from the export -- list, to avoid bleating about re-exporting a deprecated -- thing (especially via 'module Foo' export item) do { -- If the module header is omitted altogether, then behave -- as if the user had written "module Main(main) where..." -- EXCEPT in interactive mode, when we behave as if he had -- written "module Main where ..." -- Reason: don't want to complain about 'main' not in scope -- in interactive mode ; dflags <- getDynFlags ; let real_exports | explicit_mod = exports | ghcLink dflags == LinkInMemory = Nothing | otherwise = Just (noLoc [noLoc (IEVar (noLoc main_RDR_Unqual))]) -- ToDo: the 'noLoc' here is unhelpful if 'main' -- turns out to be out of scope ; (rn_exports, avails) <- exports_from_avail real_exports rdr_env imports this_mod ; traceRn (ppr avails) ; let final_avails = nubAvails avails -- Combine families final_ns = availsToNameSetWithSelectors final_avails ; traceRn (text "rnExports: Exports:" <+> ppr final_avails) ; let new_tcg_env = (tcg_env { tcg_exports = final_avails, tcg_rn_exports = case tcg_rn_exports tcg_env of Nothing -> Nothing Just _ -> rn_exports, tcg_dus = tcg_dus tcg_env `plusDU` usesOnly final_ns }) ; return (rn_exports, new_tcg_env) } exports_from_avail :: Maybe (Located [LIE RdrName]) -- Nothing => no explicit export list -> GlobalRdrEnv -> ImportAvails -> Module -> RnM (Maybe [LIE Name], [AvailInfo]) exports_from_avail Nothing rdr_env _imports _this_mod -- The same as (module M) where M is the current module name, -- so that's how we handle it, except we also export the data family -- when a data instance is exported. 
= let avails = [ fix_faminst $ availFromGRE gre | gre <- globalRdrEnvElts rdr_env , isLocalGRE gre ] in return (Nothing, avails) where -- #11164: when we define a data instance -- but not data family, re-export the family -- Even though we don't check whether this is actually a data family -- only data families can locally define subordinate things (`ns` here) -- without locally defining (and instead importing) the parent (`n`) fix_faminst (AvailTC n ns flds) | not (n `elem` ns) = AvailTC n (n:ns) flds fix_faminst avail = avail exports_from_avail (Just (L _ rdr_items)) rdr_env imports this_mod = do (ie_names, _, exports) <- foldlM do_litem emptyExportAccum rdr_items return (Just ie_names, exports) where do_litem :: ExportAccum -> LIE RdrName -> RnM ExportAccum do_litem acc lie = setSrcSpan (getLoc lie) (exports_from_item acc lie) -- Maps a parent to its in-scope children kids_env :: NameEnv [GlobalRdrElt] kids_env = mkChildEnv (globalRdrEnvElts rdr_env) pat_syns :: [GlobalRdrElt] pat_syns = findPatSyns (globalRdrEnvElts rdr_env) imported_modules = [ imv_name imv | xs <- moduleEnvElts $ imp_mods imports, imv <- xs ] exports_from_item :: ExportAccum -> LIE RdrName -> RnM ExportAccum exports_from_item acc@(ie_names, occs, exports) (L loc (IEModuleContents (L lm mod))) | let earlier_mods = [ mod | (L _ (IEModuleContents (L _ mod))) <- ie_names ] , mod `elem` earlier_mods -- Duplicate export of M = do { warnIf (Reason Opt_WarnDuplicateExports) True (dupModuleExport mod) ; return acc } | otherwise = do { let { exportValid = (mod `elem` imported_modules) || (moduleName this_mod == mod) ; gre_prs = pickGREsModExp mod (globalRdrEnvElts rdr_env) ; new_exports = map (availFromGRE . fst) gre_prs ; names = map (gre_name . fst) gre_prs ; all_gres = foldr (\(gre1,gre2) gres -> gre1 : gre2 : gres) [] gre_prs } ; checkErr exportValid (moduleNotImported mod) ; warnIf (Reason Opt_WarnDodgyExports) (exportValid && null gre_prs) (nullModuleExport mod) ; traceRn (text "efa" <+> (ppr mod $$ ppr all_gres)) ; addUsedGREs all_gres ; occs' <- check_occs (IEModuleContents (noLoc mod)) occs names -- This check_occs not only finds conflicts -- between this item and others, but also -- internally within this item. That is, if -- 'M.x' is in scope in several ways, we'll have -- several members of mod_avails with the same -- OccName. 
; traceRn (vcat [ text "export mod" <+> ppr mod , ppr new_exports ]) ; return (L loc (IEModuleContents (L lm mod)) : ie_names, occs', new_exports ++ exports) } exports_from_item acc@(lie_names, occs, exports) (L loc ie) | isDoc ie = do new_ie <- lookup_doc_ie ie return (L loc new_ie : lie_names, occs, exports) | otherwise = do (new_ie, avail) <- lookup_ie ie if isUnboundName (ieName new_ie) then return acc -- Avoid error cascade else do occs' <- check_occs ie occs (availNames avail) return (L loc new_ie : lie_names, occs', avail : exports) ------------- lookup_ie :: IE RdrName -> RnM (IE Name, AvailInfo) lookup_ie (IEVar (L l rdr)) = do (name, avail) <- lookupGreAvailRn rdr return (IEVar (L l name), avail) lookup_ie (IEThingAbs (L l rdr)) = do (name, avail) <- lookupGreAvailRn rdr return (IEThingAbs (L l name), avail) lookup_ie ie@(IEThingAll n) = do (n, avail, flds) <- lookup_ie_all ie n let name = unLoc n return (IEThingAll n, AvailTC name (name:avail) flds) lookup_ie ie@(IEThingWith l wc sub_rdrs _) = do (lname, subs, avails, flds) <- lookup_ie_with ie l sub_rdrs (_, all_avail, all_flds) <- case wc of NoIEWildcard -> return (lname, [], []) IEWildcard _ -> lookup_ie_all ie l let name = unLoc lname return (IEThingWith lname wc subs [], AvailTC name (name : avails ++ all_avail) (flds ++ all_flds)) lookup_ie _ = panic "lookup_ie" -- Other cases covered earlier lookup_ie_with :: IE RdrName -> Located RdrName -> [Located RdrName] -> RnM (Located Name, [Located Name], [Name], [FieldLabel]) lookup_ie_with ie (L l rdr) sub_rdrs = do name <- lookupGlobalOccRn rdr let gres = findChildren kids_env name mchildren = lookupChildren (map classifyGRE (gres ++ pat_syns)) sub_rdrs addUsedKids rdr gres if isUnboundName name then return (L l name, [], [name], []) else case mchildren of Nothing -> do addErr (exportItemErr ie) return (L l name, [], [name], []) Just (non_flds, flds) -> do addUsedKids rdr gres return (L l name, non_flds , map unLoc non_flds , map unLoc flds) lookup_ie_all :: IE RdrName -> Located RdrName -> RnM (Located Name, [Name], [FieldLabel]) lookup_ie_all ie (L l rdr) = do name <- lookupGlobalOccRn rdr let gres = findChildren kids_env name (non_flds, flds) = classifyGREs gres addUsedKids rdr gres warnDodgyExports <- woptM Opt_WarnDodgyExports when (null gres) $ if isTyConName name then when warnDodgyExports $ addWarn (Reason Opt_WarnDodgyExports) (dodgyExportWarn name) else -- This occurs when you export T(..), but -- only import T abstractly, or T is a synonym. 
addErr (exportItemErr ie) return (L l name, non_flds, flds) ------------- lookup_doc_ie :: IE RdrName -> RnM (IE Name) lookup_doc_ie (IEGroup lev doc) = do rn_doc <- rnHsDoc doc return (IEGroup lev rn_doc) lookup_doc_ie (IEDoc doc) = do rn_doc <- rnHsDoc doc return (IEDoc rn_doc) lookup_doc_ie (IEDocNamed str) = return (IEDocNamed str) lookup_doc_ie _ = panic "lookup_doc_ie" -- Other cases covered earlier -- In an export item M.T(A,B,C), we want to treat the uses of -- A,B,C as if they were M.A, M.B, M.C -- Happily pickGREs does just the right thing addUsedKids :: RdrName -> [GlobalRdrElt] -> RnM () addUsedKids parent_rdr kid_gres = addUsedGREs (pickGREs parent_rdr kid_gres) isDoc :: IE RdrName -> Bool isDoc (IEDoc _) = True isDoc (IEDocNamed _) = True isDoc (IEGroup _ _) = True isDoc _ = False ------------------------------- check_occs :: IE RdrName -> ExportOccMap -> [Name] -> RnM ExportOccMap check_occs ie occs names -- 'names' are the entities specifed by 'ie' = foldlM check occs names where check occs name = case lookupOccEnv occs name_occ of Nothing -> return (extendOccEnv occs name_occ (name, ie)) Just (name', ie') | name == name' -- Duplicate export -- But we don't want to warn if the same thing is exported -- by two different module exports. See ticket #4478. -> do { warnIf (Reason Opt_WarnDuplicateExports) (not (dupExport_ok name ie ie')) (dupExportWarn name_occ ie ie') ; return occs } | otherwise -- Same occ name but different names: an error -> do { global_env <- getGlobalRdrEnv ; addErr (exportClashErr global_env name' name ie' ie) ; return occs } where name_occ = nameOccName name dupExport_ok :: Name -> IE RdrName -> IE RdrName -> Bool -- The Name is exported by both IEs. Is that ok? -- "No" iff the name is mentioned explicitly in both IEs -- or one of the IEs mentions the name *alone* -- "Yes" otherwise -- -- Examples of "no": module M( f, f ) -- module M( fmap, Functor(..) ) -- module M( module Data.List, head ) -- -- Example of "yes" -- module M( module A, module B ) where -- import A( f ) -- import B( f ) -- -- Example of "yes" (Trac #2436) -- module M( C(..), T(..) ) where -- class C a where { data T a } -- instace C Int where { data T Int = TInt } -- -- Example of "yes" (Trac #2436) -- module Foo ( T ) where -- data family T a -- module Bar ( T(..), module Foo ) where -- import Foo -- data instance T Int = TInt dupExport_ok n ie1 ie2 = not ( single ie1 || single ie2 || (explicit_in ie1 && explicit_in ie2) ) where explicit_in (IEModuleContents _) = False -- module M explicit_in (IEThingAll r) = nameOccName n == rdrNameOcc (unLoc r) -- T(..) 
explicit_in _ = True single (IEVar {}) = True single (IEThingAbs {}) = True single _ = False {- ********************************************************* * * \subsection{Unused names} * * ********************************************************* -} reportUnusedNames :: Maybe (Located [LIE RdrName]) -- Export list -> TcGblEnv -> RnM () reportUnusedNames _export_decls gbl_env = do { traceRn ((text "RUN") <+> (ppr (tcg_dus gbl_env))) ; warnUnusedImportDecls gbl_env ; warnUnusedTopBinds unused_locals ; warnMissingSignatures gbl_env } where used_names :: NameSet used_names = findUses (tcg_dus gbl_env) emptyNameSet -- NB: currently, if f x = g, we only treat 'g' as used if 'f' is used -- Hence findUses -- Collect the defined names from the in-scope environment defined_names :: [GlobalRdrElt] defined_names = globalRdrEnvElts (tcg_rdr_env gbl_env) -- Note that defined_and_used, defined_but_not_used -- are both [GRE]; that's why we need defined_and_used -- rather than just used_names _defined_and_used, defined_but_not_used :: [GlobalRdrElt] (_defined_and_used, defined_but_not_used) = partition (gre_is_used used_names) defined_names kids_env = mkChildEnv defined_names -- This is done in mkExports too; duplicated work gre_is_used :: NameSet -> GlobalRdrElt -> Bool gre_is_used used_names (GRE {gre_name = name}) = name `elemNameSet` used_names || any (\ gre -> gre_name gre `elemNameSet` used_names) (findChildren kids_env name) -- A use of C implies a use of T, -- if C was brought into scope by T(..) or T(C) -- Filter out the ones that are -- (a) defined in this module, and -- (b) not defined by a 'deriving' clause -- The latter have an Internal Name, so we can filter them out easily unused_locals :: [GlobalRdrElt] unused_locals = filter is_unused_local defined_but_not_used is_unused_local :: GlobalRdrElt -> Bool is_unused_local gre = isLocalGRE gre && isExternalName (gre_name gre) {- ********************************************************* * * \subsection{Unused imports} * * ********************************************************* This code finds which import declarations are unused. The specification and implementation notes are here: http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/UnusedImports -} type ImportDeclUsage = ( LImportDecl Name -- The import declaration , [AvailInfo] -- What *is* used (normalised) , [Name] ) -- What is imported but *not* used warnUnusedImportDecls :: TcGblEnv -> RnM () warnUnusedImportDecls gbl_env = do { uses <- readMutVar (tcg_used_gres gbl_env) ; let user_imports = filterOut (ideclImplicit . unLoc) (tcg_rn_imports gbl_env) -- This whole function deals only with *user* imports -- both for warning about unnecessary ones, and for -- deciding the minimal ones rdr_env = tcg_rdr_env gbl_env fld_env = mkFieldEnv rdr_env ; let usage :: [ImportDeclUsage] usage = findImportUsage user_imports uses ; traceRn (vcat [ text "Uses:" <+> ppr uses , text "Import usage" <+> ppr usage]) ; whenWOptM Opt_WarnUnusedImports $ mapM_ (warnUnusedImport Opt_WarnUnusedImports fld_env) usage ; whenGOptM Opt_D_dump_minimal_imports $ printMinimalImports usage } -- | Warn the user about top level binders that lack type signatures. 
warnMissingSignatures :: TcGblEnv -> RnM () warnMissingSignatures gbl_env = do { let exports = availsToNameSet (tcg_exports gbl_env) sig_ns = tcg_sigs gbl_env -- We use sig_ns to exclude top-level bindings that are generated by GHC binds = collectHsBindsBinders $ tcg_binds gbl_env pat_syns = tcg_patsyns gbl_env -- Warn about missing signatures -- Do this only when we we have a type to offer ; warn_missing_sigs <- woptM Opt_WarnMissingSignatures ; warn_only_exported <- woptM Opt_WarnMissingExportedSignatures ; warn_pat_syns <- woptM Opt_WarnMissingPatternSynonymSignatures ; let add_sig_warns | warn_only_exported = add_warns Opt_WarnMissingExportedSignatures | warn_missing_sigs = add_warns Opt_WarnMissingSignatures | warn_pat_syns = add_warns Opt_WarnMissingPatternSynonymSignatures | otherwise = return () add_warns flag = when warn_pat_syns (mapM_ add_pat_syn_warn pat_syns) >> when (warn_missing_sigs || warn_only_exported) (mapM_ add_bind_warn binds) where add_pat_syn_warn p = add_warn (patSynName p) (pprPatSynType p) add_bind_warn id = do { env <- tcInitTidyEnv -- Why not use emptyTidyEnv? ; let name = idName id (_, ty) = tidyOpenType env (idType id) ty_msg = ppr ty ; add_warn name ty_msg } add_warn name ty_msg = when (name `elemNameSet` sig_ns && export_check name) (addWarnAt (Reason flag) (getSrcSpan name) (get_msg name ty_msg)) export_check name = not warn_only_exported || name `elemNameSet` exports get_msg name ty_msg = sep [ text "Top-level binding with no type signature:", nest 2 $ pprPrefixName name <+> dcolon <+> ty_msg ] ; add_sig_warns } {- Note [The ImportMap] ~~~~~~~~~~~~~~~~~~~~ The ImportMap is a short-lived intermediate data struture records, for each import declaration, what stuff brought into scope by that declaration is actually used in the module. The SrcLoc is the location of the END of a particular 'import' declaration. Why *END*? Because we don't want to get confused by the implicit Prelude import. Consider (Trac #7476) the module import Foo( foo ) main = print foo There is an implicit 'import Prelude(print)', and it gets a SrcSpan of line 1:1 (just the point, not a span). If we use the *START* of the SrcSpan to identify the import decl, we'll confuse the implicit import Prelude with the explicit 'import Foo'. So we use the END. It's just a cheap hack; we could equally well use the Span too. The AvailInfos are the things imported from that decl (just a list, not normalised). -} type ImportMap = Map SrcLoc [AvailInfo] -- See [The ImportMap] findImportUsage :: [LImportDecl Name] -> [GlobalRdrElt] -> [ImportDeclUsage] findImportUsage imports used_gres = map unused_decl imports where import_usage :: ImportMap import_usage = foldr extendImportMap Map.empty used_gres unused_decl decl@(L loc (ImportDecl { ideclHiding = imps })) = (decl, nubAvails used_avails, nameSetElems unused_imps) where used_avails = Map.lookup (srcSpanEnd loc) import_usage `orElse` [] -- srcSpanEnd: see Note [The ImportMap] used_names = availsToNameSetWithSelectors used_avails used_parents = mkNameSet [n | AvailTC n _ _ <- used_avails] unused_imps -- Not trivial; see eg Trac #7454 = case imps of Just (False, L _ imp_ies) -> foldr (add_unused . 
unLoc) emptyNameSet imp_ies _other -> emptyNameSet -- No explicit import list => no unused-name list add_unused :: IE Name -> NameSet -> NameSet add_unused (IEVar (L _ n)) acc = add_unused_name n acc add_unused (IEThingAbs (L _ n)) acc = add_unused_name n acc add_unused (IEThingAll (L _ n)) acc = add_unused_all n acc add_unused (IEThingWith (L _ p) wc ns fs) acc = add_wc_all (add_unused_with p xs acc) where xs = map unLoc ns ++ map (flSelector . unLoc) fs add_wc_all = case wc of NoIEWildcard -> id IEWildcard _ -> add_unused_all p add_unused _ acc = acc add_unused_name n acc | n `elemNameSet` used_names = acc | otherwise = acc `extendNameSet` n add_unused_all n acc | n `elemNameSet` used_names = acc | n `elemNameSet` used_parents = acc | otherwise = acc `extendNameSet` n add_unused_with p ns acc | all (`elemNameSet` acc1) ns = add_unused_name p acc1 | otherwise = acc1 where acc1 = foldr add_unused_name acc ns -- If you use 'signum' from Num, then the user may well have -- imported Num(signum). We don't want to complain that -- Num is not itself mentioned. Hence the two cases in add_unused_with. extendImportMap :: GlobalRdrElt -> ImportMap -> ImportMap -- For each of a list of used GREs, find all the import decls that brought -- it into scope; choose one of them (bestImport), and record -- the RdrName in that import decl's entry in the ImportMap extendImportMap gre imp_map = add_imp gre (bestImport (gre_imp gre)) imp_map where add_imp :: GlobalRdrElt -> ImportSpec -> ImportMap -> ImportMap add_imp gre (ImpSpec { is_decl = imp_decl_spec }) imp_map = Map.insertWith add decl_loc [avail] imp_map where add _ avails = avail : avails -- add is really just a specialised (++) decl_loc = srcSpanEnd (is_dloc imp_decl_spec) -- For srcSpanEnd see Note [The ImportMap] avail = availFromGRE gre warnUnusedImport :: WarningFlag -> NameEnv (FieldLabelString, Name) -> ImportDeclUsage -> RnM () warnUnusedImport flag fld_env (L loc decl, used, unused) | Just (False,L _ []) <- ideclHiding decl = return () -- Do not warn for 'import M()' | Just (True, L _ hides) <- ideclHiding decl , not (null hides) , pRELUDE_NAME == unLoc (ideclName decl) = return () -- Note [Do not warn about Prelude hiding] | null used = addWarnAt (Reason flag) loc msg1 -- Nothing used; drop entire decl | null unused = return () -- Everything imported is used; nop | otherwise = addWarnAt (Reason flag) loc msg2 -- Some imports are unused where msg1 = vcat [pp_herald <+> quotes pp_mod <+> pp_not_used, nest 2 (text "except perhaps to import instances from" <+> quotes pp_mod), text "To import instances alone, use:" <+> text "import" <+> pp_mod <> parens Outputable.empty ] msg2 = sep [pp_herald <+> quotes sort_unused, text "from module" <+> quotes pp_mod <+> pp_not_used] pp_herald = text "The" <+> pp_qual <+> text "import of" pp_qual | ideclQualified decl = text "qualified" | otherwise = Outputable.empty pp_mod = ppr (unLoc (ideclName decl)) pp_not_used = text "is redundant" ppr_possible_field n = case lookupNameEnv fld_env n of Just (fld, p) -> ppr p <> parens (ppr fld) Nothing -> ppr n -- Print unused names in a deterministic (lexicographic) order sort_unused = pprWithCommas ppr_possible_field $ sortBy (comparing nameOccName) unused {- Note [Do not warn about Prelude hiding] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We do not warn about import Prelude hiding( x, y ) because even if nothing else from Prelude is used, it may be essential to hide x,y to avoid name-shadowing warnings. 
Example (Trac #9061) import Prelude hiding( log ) f x = log where log = () Note [Printing minimal imports] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To print the minimal imports we walk over the user-supplied import decls, and simply trim their import lists. NB that * We do *not* change the 'qualified' or 'as' parts! * We do not disard a decl altogether; we might need instances from it. Instead we just trim to an empty import list -} printMinimalImports :: [ImportDeclUsage] -> RnM () -- See Note [Printing minimal imports] printMinimalImports imports_w_usage = do { imports' <- mapM mk_minimal imports_w_usage ; this_mod <- getModule ; dflags <- getDynFlags ; liftIO $ do { h <- openFile (mkFilename dflags this_mod) WriteMode ; printForUser dflags h neverQualify (vcat (map ppr imports')) } -- The neverQualify is important. We are printing Names -- but they are in the context of an 'import' decl, and -- we never qualify things inside there -- E.g. import Blag( f, b ) -- not import Blag( Blag.f, Blag.g )! } where mkFilename dflags this_mod | Just d <- dumpDir dflags = d </> basefn | otherwise = basefn where basefn = moduleNameString (moduleName this_mod) ++ ".imports" mk_minimal (L l decl, used, unused) | null unused , Just (False, _) <- ideclHiding decl = return (L l decl) | otherwise = do { let ImportDecl { ideclName = L _ mod_name , ideclSource = is_boot , ideclPkgQual = mb_pkg } = decl ; iface <- loadSrcInterface doc mod_name is_boot (fmap sl_fs mb_pkg) ; let lies = map (L l) (concatMap (to_ie iface) used) ; return (L l (decl { ideclHiding = Just (False, L l lies) })) } where doc = text "Compute minimal imports for" <+> ppr decl to_ie :: ModIface -> AvailInfo -> [IE Name] -- The main trick here is that if we're importing all the constructors -- we want to say "T(..)", but if we're importing only a subset we want -- to say "T(A,B,C)". So we have to find out what the module exports. to_ie _ (Avail _ n) = [IEVar (noLoc n)] to_ie _ (AvailTC n [m] []) | n==m = [IEThingAbs (noLoc n)] to_ie iface (AvailTC n ns fs) = case [(xs,gs) | AvailTC x xs gs <- mi_exports iface , x == n , x `elem` xs -- Note [Partial export] ] of [xs] | all_used xs -> [IEThingAll (noLoc n)] | otherwise -> [IEThingWith (noLoc n) NoIEWildcard (map noLoc (filter (/= n) ns)) (map noLoc fs)] -- Note [Overloaded field import] _other | all_non_overloaded fs -> map (IEVar . noLoc) $ ns ++ map flSelector fs | otherwise -> [IEThingWith (noLoc n) NoIEWildcard (map noLoc (filter (/= n) ns)) (map noLoc fs)] where fld_lbls = map flLabel fs all_used (avail_occs, avail_flds) = all (`elem` ns) avail_occs && all (`elem` fld_lbls) (map flLabel avail_flds) all_non_overloaded = all (not . flIsOverloaded) {- Note [Partial export] ~~~~~~~~~~~~~~~~~~~~~ Suppose we have module A( op ) where class C a where op :: a -> a module B where import A f = ..op... Then the minimal import for module B is import A( op ) not import A( C( op ) ) which we would usually generate if C was exported from B. Hence the (x `elem` xs) test when deciding what to generate. Note [Overloaded field import] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ On the other hand, if we have {-# LANGUAGE DuplicateRecordFields #-} module A where data T = MkT { foo :: Int } module B where import A f = ...foo... then the minimal import for module B must be import A ( T(foo) ) because when DuplicateRecordFields is enabled, field selectors are not in scope without their enclosing datatype. 
************************************************************************ * * \subsection{Errors} * * ************************************************************************ -} qualImportItemErr :: RdrName -> SDoc qualImportItemErr rdr = hang (text "Illegal qualified name in import item:") 2 (ppr rdr) badImportItemErrStd :: ModIface -> ImpDeclSpec -> IE RdrName -> SDoc badImportItemErrStd iface decl_spec ie = sep [text "Module", quotes (ppr (is_mod decl_spec)), source_import, text "does not export", quotes (ppr ie)] where source_import | mi_boot iface = text "(hi-boot interface)" | otherwise = Outputable.empty badImportItemErrDataCon :: OccName -> ModIface -> ImpDeclSpec -> IE RdrName -> SDoc badImportItemErrDataCon dataType_occ iface decl_spec ie = vcat [ text "In module" <+> quotes (ppr (is_mod decl_spec)) <+> source_import <> colon , nest 2 $ quotes datacon <+> text "is a data constructor of" <+> quotes dataType , text "To import it use" , nest 2 $ quotes (text "import") <+> ppr (is_mod decl_spec) <> parens_sp (dataType <> parens_sp datacon) , text "or" , nest 2 $ quotes (text "import") <+> ppr (is_mod decl_spec) <> parens_sp (dataType <> text "(..)") ] where datacon_occ = rdrNameOcc $ ieName ie datacon = parenSymOcc datacon_occ (ppr datacon_occ) dataType = parenSymOcc dataType_occ (ppr dataType_occ) source_import | mi_boot iface = text "(hi-boot interface)" | otherwise = Outputable.empty parens_sp d = parens (space <> d <> space) -- T( f,g ) badImportItemErr :: ModIface -> ImpDeclSpec -> IE RdrName -> [AvailInfo] -> SDoc badImportItemErr iface decl_spec ie avails = case find checkIfDataCon avails of Just con -> badImportItemErrDataCon (availOccName con) iface decl_spec ie Nothing -> badImportItemErrStd iface decl_spec ie where checkIfDataCon (AvailTC _ ns _) = case find (\n -> importedFS == nameOccNameFS n) ns of Just n -> isDataConName n Nothing -> False checkIfDataCon _ = False availOccName = nameOccName . availName nameOccNameFS = occNameFS . nameOccName importedFS = occNameFS . 
rdrNameOcc $ ieName ie illegalImportItemErr :: SDoc illegalImportItemErr = text "Illegal import item" dodgyImportWarn :: RdrName -> SDoc dodgyImportWarn item = dodgyMsg (text "import") item dodgyExportWarn :: Name -> SDoc dodgyExportWarn item = dodgyMsg (text "export") item dodgyMsg :: (OutputableBndr n, HasOccName n) => SDoc -> n -> SDoc dodgyMsg kind tc = sep [ text "The" <+> kind <+> ptext (sLit "item") <+> quotes (ppr (IEThingAll (noLoc tc))) <+> text "suggests that", quotes (ppr tc) <+> text "has (in-scope) constructors or class methods,", text "but it has none" ] exportItemErr :: IE RdrName -> SDoc exportItemErr export_item = sep [ text "The export item" <+> quotes (ppr export_item), text "attempts to export constructors or class methods that are not visible here" ] exportClashErr :: GlobalRdrEnv -> Name -> Name -> IE RdrName -> IE RdrName -> MsgDoc exportClashErr global_env name1 name2 ie1 ie2 = vcat [ text "Conflicting exports for" <+> quotes (ppr occ) <> colon , ppr_export ie1' name1' , ppr_export ie2' name2' ] where occ = nameOccName name1 ppr_export ie name = nest 3 (hang (quotes (ppr ie) <+> text "exports" <+> quotes (ppr name)) 2 (pprNameProvenance (get_gre name))) -- get_gre finds a GRE for the Name, so that we can show its provenance get_gre name = case lookupGRE_Name global_env name of (gre:_) -> gre [] -> pprPanic "exportClashErr" (ppr name) get_loc name = greSrcSpan (get_gre name) (name1', ie1', name2', ie2') = if get_loc name1 < get_loc name2 then (name1, ie1, name2, ie2) else (name2, ie2, name1, ie1) addDupDeclErr :: [GlobalRdrElt] -> TcRn () addDupDeclErr [] = panic "addDupDeclErr: empty list" addDupDeclErr gres@(gre : _) = addErrAt (getSrcSpan (last sorted_names)) $ -- Report the error at the later location vcat [text "Multiple declarations of" <+> quotes (ppr (nameOccName name)), -- NB. print the OccName, not the Name, because the -- latter might not be in scope in the RdrEnv and so will -- be printed qualified. text "Declared at:" <+> vcat (map (ppr . nameSrcLoc) sorted_names)] where name = gre_name gre sorted_names = sortWith nameSrcLoc (map gre_name gres) dupExportWarn :: OccName -> IE RdrName -> IE RdrName -> SDoc dupExportWarn occ_name ie1 ie2 = hsep [quotes (ppr occ_name), text "is exported by", quotes (ppr ie1), text "and", quotes (ppr ie2)] dupModuleExport :: ModuleName -> SDoc dupModuleExport mod = hsep [text "Duplicate", quotes (text "Module" <+> ppr mod), text "in export list"] moduleNotImported :: ModuleName -> SDoc moduleNotImported mod = text "The export item `module" <+> ppr mod <> text "' is not imported" nullModuleExport :: ModuleName -> SDoc nullModuleExport mod = text "The export item `module" <+> ppr mod <> ptext (sLit "' exports nothing") missingImportListWarn :: ModuleName -> SDoc missingImportListWarn mod = text "The module" <+> quotes (ppr mod) <+> ptext (sLit "does not have an explicit import list") missingImportListItem :: IE RdrName -> SDoc missingImportListItem ie = text "The import item" <+> quotes (ppr ie) <+> ptext (sLit "does not have an explicit import list") moduleWarn :: ModuleName -> WarningTxt -> SDoc moduleWarn mod (WarningTxt _ txt) = sep [ text "Module" <+> quotes (ppr mod) <> ptext (sLit ":"), nest 2 (vcat (map (ppr . sl_fs . unLoc) txt)) ] moduleWarn mod (DeprecatedTxt _ txt) = sep [ text "Module" <+> quotes (ppr mod) <+> text "is deprecated:", nest 2 (vcat (map (ppr . sl_fs . 
unLoc) txt)) ] packageImportErr :: SDoc packageImportErr = text "Package-qualified imports are not enabled; use PackageImports" -- This data decl will parse OK -- data T = a Int -- treating "a" as the constructor. -- It is really hard to make the parser spot this malformation. -- So the renamer has to check that the constructor is legal -- -- We can get an operator as the constructor, even in the prefix form: -- data T = :% Int Int -- from interface files, which always print in prefix form checkConName :: RdrName -> TcRn () checkConName name = checkErr (isRdrDataCon name) (badDataCon name) badDataCon :: RdrName -> SDoc badDataCon name = hsep [text "Illegal data constructor name", quotes (ppr name)]
vikraman/ghc
compiler/rename/RnNames.hs
bsd-3-clause
88,632
14
30
27,977
16,569
8,574
7,995
-1
-1
{-|
Copyright   : (c) Dave Laing, 2017
License     : BSD3
Maintainer  : [email protected]
Stability   : experimental
Portability : non-portable
-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Fragment.SystemF (
    module X
  , SystemFTag
  ) where

import Ast
import Rules.Type.Infer.Common
import Rules.Term

import Fragment.SystemF.Ast as X
import Fragment.SystemF.Helpers as X

import Fragment.KiBase.Ast.Kind
import Fragment.KiArr.Ast.Kind
import Fragment.TyArr.Ast.Type
import Fragment.TyAll.Ast.Type
import Fragment.TyAll.Ast.Error
import Fragment.TmLam.Ast.Term
import Fragment.TmApp.Ast.Term

import Fragment.SystemF.Rules.Type.Infer.Common
import Fragment.SystemF.Rules.Term

data SystemFTag

instance AstIn SystemFTag where
  type KindList SystemFTag = '[KiFBase, KiFArr]
  type TypeList SystemFTag = '[TyFArr, TyFAll]
  type TypeSchemeList SystemFTag = '[]
  type PatternList SystemFTag = '[]
  type TermList SystemFTag = '[TmFLam, TmFApp, TmFSystemF]

instance EvalRules e SystemFTag where
  type EvalConstraint ki ty pt tm a e SystemFTag =
    SystemFEvalConstraint ki ty pt tm a
  evalInput _ _ = systemFEvalRules

instance MkInferType i => InferTypeRules i SystemFTag where
  type InferTypeConstraint e w s r m ki ty pt tm a i SystemFTag =
    SystemFInferTypeConstraint e w s r m ki ty pt tm a i
  type InferTypeErrorList ki ty pt tm a i SystemFTag =
    '[ ErrExpectedTyAll ki ty a ]
  type InferTypeWarningList ki ty pt tm a i SystemFTag =
    '[]
  inferTypeInput m i _ = systemFInferTypeInput m i
dalaing/type-systems
src/Fragment/SystemF.hs
bsd-3-clause
1,627
0
8
281
405
235
170
-1
-1
{-# LANGUAGE UnicodeSyntax #-}
module Util where

import Semantics (Term(..), Context, Binding)

printtm ∷ Context → Term → IO ()
printtm ctx (TmVar n) =
  if length ctx == n
    then case (indexToName ctx n) of
           Just s  → putStrLn s
           Nothing → putStrLn "[bad index]"
    else putStrLn "[bad index]"
printtm ctx (TmAbs x t) =
  let (ctx', x') = pickFreshName ctx x
      out = concat [ "(lambda "
                   , show x
                   , ". "
                   , show ctx'
                   , show t
                   , show ")"]
  in putStrLn out
printtm ctx (TmApp t₁ t₂) =
  let out = concat [ "("
                   , show ctx
                   , show t₁
                   , show ctx
                   , show t₁
                   , ")" ]
  in putStrLn out

pickFreshName ∷ Context → String → (Context, String )
pickFreshName ctx x =
  if isNameBound ctx x
    then pickFreshName ctx (x ++ "'")
    else (((x, NameBind) : ctx), x)

isNameBound ∷ Context → String → Bool
isNameBound [] x = False
isNameBound ((y, _):ys) x =
  if y == x
    then True
    else isNameBound ys x

indexToName ∷ Context → Int → Maybe String
indexToName ctx x =
  -- TODO: There is probably a function that looks up
  -- from a list and returns Nothing if index is out
  -- of bounds.
  if x > (ctxlength ctx) - 1
    then Nothing
    else let (xn,_) = ctx !! x in Just xn

-- `ctxlength` is merely an alas for `length` that we create
-- for the sake of consistency with TAPL.
ctxlength ∷ Context → Int
ctxlength = length
ayberkt/TAPL
src/Untyped/Util.hs
bsd-3-clause
2,226
10
11
1,183
492
260
232
41
3
{-# LANGUAGE TypeFamilies, PackageImports #-}

module Main (main) where

import Control.Applicative ((<$>), (<*>))
import Control.Monad (forever)
import Control.Concurrent (forkIO)
import Data.Ratio (numerator)
import Data.Time (UTCTime(..), getCurrentTime, toModifiedJulianDay)
import Data.HandleLike (HandleLike(..))
import System.IO (Handle, openFile, IOMode(..))
import System.Environment (getArgs)
import System.Directory (createDirectoryIfMissing)
import System.FilePath ((</>), (<.>))
import System.Posix.Process (getProcessID)
import Network (listenOn, accept)
import "crypto-random" Crypto.Random (CPRG(..))
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import qualified Data.ByteString.Base64 as BASE64
import qualified Crypto.Hash.SHA256 as SHA256

import TestServer (server, ValidateHandle(..))
import CommandLine (readOptions)
import TestRandom (StdGen)

main :: IO ()
main = do
    (prt, cs, rsa, ec, mcs, td) <- readOptions =<< getArgs
    createDirectoryIfMissing False td
    let g = cprgCreate undefined :: StdGen
    soc <- listenOn prt
    forever $ do
        (h, _, _) <- accept soc
        fp <- (td </>) <$> createName
        writeFile (fp <.> "cs") $ show cs ++ "\n"
        cl <- openFile (fp <.> "clt") WriteMode
        sv <- openFile (fp <.> "srv") WriteMode
        forkIO $ server g (DebugHandle h cl sv) cs rsa ec mcs

data DebugHandle = DebugHandle Handle Handle Handle deriving Show

instance ValidateHandle DebugHandle where
    validate (DebugHandle h _ _) = validate h

instance HandleLike DebugHandle where
    type HandleMonad DebugHandle = IO
    hlPut (DebugHandle h _ sv) = (>>) <$> BS.hPut sv <*> hlPut h
    hlGet (DebugHandle h cl _) n =
        hlGet h n >>= (>>) <$> BS.hPut cl <*> return
    hlClose (DebugHandle h cl sv) = hlClose `mapM_` [h, cl, sv]
    hlDebug (DebugHandle h _ _) = hlDebug h

createName :: IO FilePath
createName = do
    now <- getCurrentTime
    pid <- getProcessID
    return . BSC.unpack . sub '/' '-' . BS.take 12
        . BASE64.encode . SHA256.hash . BSC.pack $ concat [
            show . toModifiedJulianDay $ utctDay now,
            show . numerator . toRational $ utctDayTime now,
            show pid ]

sub :: Char -> Char -> BS.ByteString -> BS.ByteString
sub pre pst bs
    | Just (c, bs') <- BSC.uncons bs =
        BSC.cons (if c == pre then pst else c) $ sub pre pst bs'
    | otherwise = BS.empty
YoshikuniJujo/forest
subprojects/tls-analysis/server/makeTestFiles.hs
bsd-3-clause
2,297
10
14
385
883
481
402
58
2
{-# LANGUAGE CPP #-}
module Main where

import Data.Typeable

import Tct.Core
import qualified Tct.Core.Common.Parser as P
import qualified Tct.Core.Common.Pretty as PP
import qualified Tct.Core.Data as T
import Tct.Core.Interactive

import Tct.Trs.Data
import qualified Tct.Trs.Data.DependencyGraph as DG
import qualified Tct.Trs.Data.Mode as M
import qualified Tct.Trs.Data.Problem as Prob
import qualified Tct.Trs.Data.RuleSelector as RS
import qualified Tct.Trs.Data.Trs as Trs
import Tct.Trs.Interactive
import Tct.Trs.Processor

import Certify
import DC
import RC

import qualified Debug.Trace as T

main :: IO ()
main = tm `setModeWith` defaultTctConfig
#ifdef NOTRECOMPILE
  { recompile = False }
#endif
  -- { defaultSolver = Just ("minismt",["-v2","-m", "-neg", "-ib", "4", "-ob", "6"]) }
  { defaultSolver = Just ("yices-smt2",[]) }

tm :: M.TrsMode
tm = M.trsMode
  `withStrategies`
    [ T.SD certifySD
    , T.SD runtimeSD
    , T.SD derivationalSD ]
  `withDefaultStrategy` T.deflFun competitionSD

competitionSD = strategy "competition" (OneTuple $ some timArg `T.optional` Nothing) competition
  where timArg = nat `withName` "timeout" `withHelp` ["timeout"]

competition mto =
  timeoutRelative mto 100 $ withProblem $ \p ->
    if Prob.isRCProblem p
      then runtime' Best mto
      else derivational

-- trace :: String -> TrsStrategy -> TrsStrategy
-- trace s st = T.trace s $ withProblem $ \ prob ->
--   T.trace s $ T.trace (PP.display $ PP.pretty prob) st

-- timArg = nat `withName` "timeout" `withHelp` ["timeout"]
-- degArg = nat `withName` "degree" `withHelp` ["max degree"]
ComputationWithBoundedResources/tct-config-mischel
tct-trs.hs
bsd-3-clause
1,787
0
10
449
342
219
123
38
2
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module      : Graphics.GL.NV.TransformFeedback2
-- Copyright   : (c) Sven Panne 2019
-- License     : BSD3
--
-- Maintainer  : Sven Panne <[email protected]>
-- Stability   : stable
-- Portability : portable
--
--------------------------------------------------------------------------------

module Graphics.GL.NV.TransformFeedback2 (
  -- * Extension Support
  glGetNVTransformFeedback2,
  gl_NV_transform_feedback2,
  -- * Enums
  pattern GL_TRANSFORM_FEEDBACK_BINDING_NV,
  pattern GL_TRANSFORM_FEEDBACK_BUFFER_ACTIVE_NV,
  pattern GL_TRANSFORM_FEEDBACK_BUFFER_PAUSED_NV,
  pattern GL_TRANSFORM_FEEDBACK_NV,
  -- * Functions
  glBindTransformFeedbackNV,
  glDeleteTransformFeedbacksNV,
  glDrawTransformFeedbackNV,
  glGenTransformFeedbacksNV,
  glIsTransformFeedbackNV,
  glPauseTransformFeedbackNV,
  glResumeTransformFeedbackNV
) where

import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
haskell-opengl/OpenGLRaw
src/Graphics/GL/NV/TransformFeedback2.hs
bsd-3-clause
1,068
0
5
131
90
64
26
18
0
{-# LANGUAGE EmptyDataDecls, TypeSynonymInstances #-}
{-# OPTIONS_GHC -fcontext-stack43 #-}
module Games.Chaos2010.Database.Board_highlights where

import Games.Chaos2010.Database.Fields
import Database.HaskellDB.DBLayout

type Board_highlights =
  Record (HCons (LVPair X (Expr (Maybe Int)))
          (HCons (LVPair Y (Expr (Maybe Int)))
           (HCons (LVPair Sprite (Expr (Maybe String))) HNil)))

board_highlights :: Table Board_highlights
board_highlights = baseTable "board_highlights"
JakeWheat/Chaos-2010
Games/Chaos2010/Database/Board_highlights.hs
bsd-3-clause
509
0
17
86
132
72
60
12
1
class AAM aam where
  type Time (aam :: *) :: *
  type Addr (aam :: *) :: *
  tzero :: aam -> Time aam
  tick :: aam -> Call -> Time aam -> Time aam
  alloc :: aam -> Name -> Time aam -> Addr aam

type Env aam = Map Name (Addr aam)
type Store aam = Map (Addr aam) (Val aam)
data Val aam = LitV Lit | Clo [Name] Call (Env aam)
type StateSpace aam = Maybe (Call, Env aam, Store aam, Time aam)
davdar/quals
writeup-old/sections/03AAMByExample/02AbstractStateSpace/01AAM.hs
bsd-3-clause
391
2
10
98
211
107
104
-1
-1
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
module Main(main) where

import Dirs(FunName, funHash, listAllFuns)
import Predicates(funLevelPreds, lpredCalls)
import StorageBackend(localStorage, Readable, noStorage, disableWrite)
import SiteState

import qualified Data.Map as Map
import Data.List(mapAccumL)
import Text.PrettyPrint
import Control.Exception

main :: IO ()
main =
  do siteState <- newSiteState (disableWrite localStorage) noStorage
     cg <- callGraph siteState
     print (export cg)

-- | Information about a single function.
data Node = Node
  { fun      :: FunName
  , stage    :: Int
  , calls    :: [FunName]
  , calledBy :: [FunName]
  } deriving Show

-- | JSON representation of a call-graph.
export :: [Node] -> Doc
export = block one "{" "}"
  where
  one pre Node { .. } =
    hang (pre <+> exportFun fun <> colon) 2 $
      vcat [ text "{ \"stage\": " <+> int stage
           , text ", \"calls\":" <+> block oneFun "[" "]" calls
           , text ", \"calledBy\":" <+> block oneFun "[" "]" calledBy
           , text "}"
           ]

  oneFun pre f = pre <+> exportFun f

  exportFun = text . show . funHash

  block _ start end [] = text (start ++ end)
  block how start end xs =
    vcat (zipWith how (map text (start : repeat ",")) xs) $$ text end

-- | Construct the call-graph for all functions that we know.
callGraph :: Readable (Local s) => SiteState s -> IO [Node]
callGraph siteState =
  do fs <- listAllFuns siteState
     cs <- mapM (getCalls siteState) fs
     let missing = [ f | c <- cs, f <- c, not (f `elem` fs) ]
         xs      = zip missing (repeat []) ++ zip fs cs
         invMap  = Map.fromListWith (++) [ (g,[f]) | (f,gs) <- xs, g <- gs ]
         getCalledBy f = Map.findWithDefault [] f invMap
         stage = 0 -- Temporary
     return $ setStages [ Node { calledBy = getCalledBy fun , .. } | (fun,calls) <- xs ]

-- | Assumes no loops in the graph (i.e., no recursive functions).
setStages :: [Node] -> [Node]
setStages nodes0 = snd (mapAccumL go Map.empty nodes0)
  where
  nodeMap = Map.fromList [ (fun n, n) | n <- nodes0 ]

  go done nd =
    case Map.lookup (fun nd) done of
      Just s  -> (done, s)
      Nothing ->
        let (done1, nodes) = mapAccumL go' done (calls nd)
            newStage       = maximum (0 : map ((+ 1) . stage) nodes)
            node           = nd { stage = newStage }
        in (Map.insert (fun nd) node done1, node)

  go' done nodeId =
    case Map.lookup nodeId nodeMap of
      Just node -> go done node
      Nothing   -> error ("getStages: Missing node " ++ show nodeId)

-- | What functions are called by this function.
-- This does not use the `depends` infracstrucutre, it is extracted
-- directly from the `holes.hs` file.
getCalls :: Readable (Local s) => SiteState s -> FunName -> IO [FunName]
getCalls siteState fun =
  do ps <- funLevelPreds siteState fun
     return (Map.keys (lpredCalls ps))
  `catch` \SomeException {} -> return []
GaloisInc/verification-game
web-prover/exes/ComputeCallGraph.hs
bsd-3-clause
3,235
0
20
998
1,039
543
496
-1
-1
module Anatomy.Parser.Base where

import Control.Monad.Identity
import Text.Parsec
import Atomo.Types (ParserState)
import Atomo.Lexer.Base (TaggedToken)

import Anatomy.Types

type Parser = ParsecT [TaggedAToken] ParserState Identity

withToken :: (AToken -> Maybe a) -> Parser a
withToken f =
    -- TODO: showAToken
    tokenPrim (show . taToken) (\_ t _ -> taLocation t) (f . taToken)

chunk :: Parser String
chunk = withToken $ \t ->
    case t of
        ATokChunk s -> Just s
        _ -> Nothing

keyword :: Parser ([String], [TaggedAToken])
keyword = withToken $ \t ->
    case t of
        ATokKeyword ns ts -> Just (ns, ts)
        _ -> Nothing

single :: Parser String
single = withToken $ \t ->
    case t of
        ATokSingle n -> Just n
        _ -> Nothing

atomo :: Parser [TaggedToken]
atomo = withToken $ \t ->
    case t of
        ATokAtomo ts -> Just ts
        _ -> Nothing

nested :: Parser [TaggedAToken]
nested = withToken $ \t ->
    case t of
        ATokNested ts -> Just ts
        _ -> Nothing

definition :: Parser ([TaggedToken], [[TaggedToken]], [TaggedToken])
definition = withToken $ \t ->
    case t of
        ATokDefinition th cs r -> Just (th, cs, r)
        _ -> Nothing
vito/atomo-anatomy
src/Anatomy/Parser/Base.hs
bsd-3-clause
1,217
0
11
327
451
243
208
40
2
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}

import Data.Aeson (decode, encode, parseJSON, toJSON)
import Data.Maybe (fromJust)
import Data.Yaml (decodeFile)
import Kubernetes.KubeConfig (AuthInfo (..), Cluster (..), Config,
                              Context (..), getAuthInfo, getCluster,
                              getContext)
import Test.Hspec

main :: IO ()
main = do
  config :: Config <- fromJust <$> decodeFile "test/testdata/kubeconfig.yaml"
  hspec $ do
    describe "FromJSON and ToJSON instances" $ do
      it "roundtrips successfully" $ do
        decode (encode (toJSON config)) `shouldBe` Just config
    describe "getContext" $ do
      it "returns the correct context" $ do
        getContext config `shouldBe` (Right (Context "cluster-aaa" "user-aaa" Nothing))
    describe "getCluster" $ do
      it "returns the correct cluster" $ do
        server <$> getCluster config `shouldBe` (Right "https://aaa.example.com")
    describe "getAuthInfo" $ do
      it "returns the correct authInfo" $ do
        fst <$> getAuthInfo config `shouldBe` (Right "user-aaa")
denibertovic/haskell
kubeconfig/test/Spec.hs
bsd-3-clause
1,221
0
21
372
311
157
154
25
1
{-# LANGUAGE PatternGuards #-} module Idris.Delaborate (bugaddr, delab, delab', delabMV, delabTy, delabTy', pshow, pprintErr) where -- Convert core TT back into high level syntax, primarily for display -- purposes. import Util.Pretty import Idris.AbsSyntax import Idris.Core.TT import Idris.Core.Evaluate import Idris.ErrReverse import Data.List (intersperse) import Debug.Trace bugaddr = "https://github.com/idris-lang/Idris-dev/issues" delab :: IState -> Term -> PTerm delab i tm = delab' i tm False False delabMV :: IState -> Term -> PTerm delabMV i tm = delab' i tm False True delabTy :: IState -> Name -> PTerm delabTy i n = case lookupTy n (tt_ctxt i) of (ty:_) -> case lookupCtxt n (idris_implicits i) of (imps:_) -> delabTy' i imps ty False False _ -> delabTy' i [] ty False False delab' :: IState -> Term -> Bool -> Bool -> PTerm delab' i t f mvs = delabTy' i [] t f mvs delabTy' :: IState -> [PArg] -- ^ implicit arguments to type, if any -> Term -> Bool -- ^ use full names -> Bool -- ^ Don't treat metavariables specially -> PTerm delabTy' ist imps tm fullname mvs = de [] imps tm where un = fileFC "(val)" de env _ (App f a) = deFn env f [a] de env _ (V i) | i < length env = PRef un (snd (env!!i)) | otherwise = PRef un (sUN ("v" ++ show i ++ "")) de env _ (P _ n _) | n == unitTy = PTrue un | n == unitCon = PTrue un | n == falseTy = PFalse un | Just n' <- lookup n env = PRef un n' | otherwise = case lookup n (idris_metavars ist) of Just (Just _, mi, _) -> mkMVApp n [] _ -> PRef un n de env _ (Bind n (Lam ty) sc) = PLam n (de env [] ty) (de ((n,n):env) [] sc) de env (PImp _ _ _ _ _ _:is) (Bind n (Pi ty) sc) = PPi impl n (de env [] ty) (de ((n,n):env) is sc) de env (PConstraint _ _ _ _:is) (Bind n (Pi ty) sc) = PPi constraint n (de env [] ty) (de ((n,n):env) is sc) de env (PTacImplicit _ _ _ tac _ _:is) (Bind n (Pi ty) sc) = PPi (tacimpl tac) n (de env [] ty) (de ((n,n):env) is sc) de env _ (Bind n (Pi ty) sc) = PPi expl n (de env [] ty) (de ((n,n):env) [] sc) de env _ (Bind n (Let ty val) sc) = PLet n (de env [] ty) (de env [] val) (de ((n,n):env) [] sc) de env _ (Bind n (Hole ty) sc) = de ((n, sUN "[__]"):env) [] sc de env _ (Bind n (Guess ty val) sc) = de ((n, sUN "[__]"):env) [] sc de env _ (Bind n _ sc) = de ((n,n):env) [] sc de env _ (Constant i) = PConstant i de env _ Erased = Placeholder de env _ Impossible = Placeholder de env _ (TType i) = PType dens x | fullname = x dens ns@(NS n _) = case lookupCtxt n (idris_implicits ist) of [_] -> n -- just one thing [] -> n -- metavariables have no implicits _ -> ns dens n = n deFn env (App f a) args = deFn env f (a:args) deFn env (P _ n _) [l,r] | n == pairTy = PPair un (de env [] l) (de env [] r) | n == eqCon = PRefl un (de env [] r) | n == sUN "lazy" = de env [] r deFn env (P _ n _) [ty, Bind x (Lam _) r] | n == sUN "Exists" = PDPair un (PRef un x) (de env [] ty) (de ((x,x):env) [] (instantiate (P Bound x ty) r)) deFn env (P _ n _) [_,_,l,r] | n == pairCon = PPair un (de env [] l) (de env [] r) | n == eqTy = PEq un (de env [] l) (de env [] r) | n == sUN "Ex_intro" = PDPair un (de env [] l) Placeholder (de env [] r) deFn env (P _ n _) args | not mvs = case lookup n (idris_metavars ist) of Just (Just _, mi, _) -> mkMVApp n (drop mi (map (de env []) args)) _ -> mkPApp n (map (de env []) args) | otherwise = mkPApp n (map (de env []) args) deFn env f args = PApp un (de env [] f) (map pexp (map (de env []) args)) mkMVApp n [] = PMetavar n mkMVApp n args = PApp un (PMetavar n) (map pexp args) mkPApp n args | [imps] <- lookupCtxt n (idris_implicits ist) 
= PApp un (PRef un n) (zipWith imp (imps ++ repeat (pexp undefined)) args) | otherwise = PApp un (PRef un n) (map pexp args) imp (PImp p m l n _ d) arg = PImp p m l n arg d imp (PExp p l _ d) arg = PExp p l arg d imp (PConstraint p l _ d) arg = PConstraint p l arg d imp (PTacImplicit p l n sc _ d) arg = PTacImplicit p l n sc arg d -- | How far to indent sub-errors errorIndent :: Int errorIndent = 8 -- | Actually indent a sub-error - no line at end because a newline can end -- multiple layers of indent indented :: Doc a -> Doc a indented = nest errorIndent . (line <>) pprintTerm :: IState -> PTerm -> Doc OutputAnnotation pprintTerm ist = prettyImp (opt_showimp (idris_options ist)) pshow :: IState -> Err -> String pshow ist err = displayDecorated (consoleDecorate ist) . renderPretty 1.0 80 . fmap (fancifyAnnots ist) $ pprintErr ist err pprintErr :: IState -> Err -> Doc OutputAnnotation pprintErr i err = pprintErr' i (fmap (errReverse i) err) pprintErr' i (Msg s) = text s pprintErr' i (InternalMsg s) = vsep [ text "INTERNAL ERROR:" <+> text s, text "This is probably a bug, or a missing error message.", text ("Please consider reporting at " ++ bugaddr) ] pprintErr' i (CantUnify _ x y e sc s) = text "Can't unify" <> indented (pprintTerm i (delab i x)) <$> text "with" <> indented (pprintTerm i (delab i y)) <> case e of Msg "" -> empty _ -> line <> line <> text "Specifically:" <> indented (pprintErr' i e) <> if (opt_errContext (idris_options i)) then text $ showSc i sc else empty pprintErr' i (CantConvert x y env) = text "Can't convert" <> indented (pprintTerm i (delab i x)) <> text "with" <> indented (pprintTerm i (delab i y)) <> if (opt_errContext (idris_options i)) then text (showSc i env) else empty pprintErr' i (CantSolveGoal x env) = text "Can't solve goal " <> indented (pprintTerm i (delab i x)) <> if (opt_errContext (idris_options i)) then text (showSc i env) else empty pprintErr' i (UnifyScope n out tm env) = text "Can't unify" <> indented (annName n) <+> text "with" <> indented (pprintTerm i (delab i tm)) <+> text "as" <> indented (annName out) <> text "is not in scope" <> if (opt_errContext (idris_options i)) then text (showSc i env) else empty pprintErr' i (CantInferType t) = text "Can't infer type for" <+> text t pprintErr' i (NonFunctionType f ty) = pprintTerm i (delab i f) <+> text "does not have a function type" <+> parens (pprintTerm i (delab i ty)) pprintErr' i (NotEquality tm ty) = pprintTerm i (delab i tm) <+> text "does not have an equality type" <+> parens (pprintTerm i (delab i ty)) pprintErr' i (TooManyArguments f) = text "Too many arguments for" <+> annName f pprintErr' i (CantIntroduce ty) = text "Can't use lambda here: type is" <+> pprintTerm i (delab i ty) pprintErr' i (InfiniteUnify x tm env) = text "Unifying" <+> annName' x (showbasic x) <+> text "and" <+> pprintTerm i (delab i tm) <+> text "would lead to infinite value" <> if (opt_errContext (idris_options i)) then text (showSc i env) else empty pprintErr' i (NotInjective p x y) = text "Can't verify injectivity of" <+> pprintTerm i (delab i p) <+> text " when unifying" <+> pprintTerm i (delab i x) <+> text "and" <+> pprintTerm i (delab i y) pprintErr' i (CantResolve c) = text "Can't resolve type class" <+> pprintTerm i (delab i c) pprintErr' i (CantResolveAlts as) = text "Can't disambiguate name:" <+> cat (punctuate (comma <> space) (map text as)) pprintErr' i (NoTypeDecl n) = text "No type declaration for" <+> annName n pprintErr' i (NoSuchVariable n) = text "No such variable" <+> annName n pprintErr' i 
(IncompleteTerm t) = text "Incomplete term" <+> pprintTerm i (delab i t) pprintErr' i UniverseError = text "Universe inconsistency" pprintErr' i ProgramLineComment = text "Program line next to comment" pprintErr' i (Inaccessible n) = annName n <+> text "is not an accessible pattern variable" pprintErr' i (NonCollapsiblePostulate n) = text "The return type of postulate" <+> annName n <+> text "is not collapsible" pprintErr' i (AlreadyDefined n) = annName n<+> text "is already defined" pprintErr' i (ProofSearchFail e) = pprintErr' i e pprintErr' i (NoRewriting tm) = text "rewrite did not change type" <+> pprintTerm i (delab i tm) pprintErr' i (At f e) = annotate (AnnFC f) (text (show f)) <> colon <> pprintErr' i e pprintErr' i (Elaborating s n e) = text "When elaborating" <+> text s <> annName' n (showqual i n) <> colon <$> pprintErr' i e pprintErr' i (ProviderError msg) = text ("Type provider error: " ++ msg) pprintErr' i (LoadingFailed fn e) = text "Loading" <+> text fn <+> text "failed:" <+> pprintErr' i e pprintErr' i (ReflectionError parts orig) = let parts' = map (hsep . map showPart) parts in vsep parts' <> if (opt_origerr (idris_options i)) then line <> line <> text "Original error:" <$> indented (pprintErr' i orig) else empty where showPart :: ErrorReportPart -> Doc OutputAnnotation showPart (TextPart str) = text str showPart (NamePart n) = annName n showPart (TermPart tm) = pprintTerm i (delab i tm) showPart (SubReport rs) = indented . hsep . map showPart $ rs pprintErr' i (ReflectionFailed msg err) = text "When attempting to perform error reflection, the following internal error occurred:" <> indented (pprintErr' i err) <> text ("This is probably a bug. Please consider reporting it at " ++ bugaddr) annName :: Name -> Doc OutputAnnotation annName n = annName' n (show n) annName' :: Name -> String -> Doc OutputAnnotation annName' n str = annotate (AnnName n Nothing Nothing) (text str) showSc i [] = "" showSc i xs = "\n\nIn context:\n" ++ showSep "\n" (map showVar (reverse xs)) where showVar (x, y) = "\t" ++ showbasic x ++ " : " ++ show (delab i y) showqual :: IState -> Name -> String showqual i n = showName (Just i) [] False False (dens n) where dens ns@(NS n _) = case lookupCtxt n (idris_implicits i) of [_] -> n -- just one thing _ -> ns dens n = n showbasic n@(UN _) = show n showbasic (MN _ s) = str s showbasic (NS n s) = showSep "." (map str (reverse s)) ++ "." ++ showbasic n showbasic (SN s) = show s
ctford/Idris-Elba-dev
src/Idris/Delaborate.hs
bsd-3-clause
10,915
0
18
3,231
4,705
2,308
2,397
208
31
module Sound.Synthesis.Constant (
    τ
  , ƒ0
  , ƒNyquist
  ) where

τ :: Double -> Double
τ = 2 * pi

ƒ0 :: Double
ƒ0 = 44100

ƒNyquist :: Double
ƒNyquist = ƒ0 / 2
pskrz/Synthesis
src/Sound/Synthesis/Constant.hs
bsd-3-clause
179
0
5
49
60
36
24
10
1
module Cardano.Wallet.Kernel.DB.BlockContext (
    -- * Block context
    BlockContext(..)
  , blockContextSucceeds
    -- ** Lenses
  , bcSlotId
  , bcHash
  , bcPrevMain
    -- * Construction
  , mainBlockContext
  ) where

import Universum

import Control.Lens (lazy, makeLenses, strict)
import Data.SafeCopy (base, deriveSafeCopy)
import Formatting (bprint, build, (%))
import qualified Formatting.Buildable

import qualified Pos.Chain.Block as Core
import Pos.Chain.Genesis (GenesisHash)
import qualified Pos.Core as Core

import Cardano.Wallet.Kernel.DB.InDb
import Cardano.Wallet.Kernel.NodeStateAdaptor
import qualified Cardano.Wallet.Kernel.Util.Strict as Strict

{-------------------------------------------------------------------------------
  Block context
-------------------------------------------------------------------------------}

-- | Information about where a block is placed in the chain
data BlockContext = BlockContext {
      -- | Slot ID of this block
      _bcSlotId   :: !(InDb Core.SlotId)

      -- | Header hash of this block
    , _bcHash     :: !(InDb Core.HeaderHash)

      -- | Header hash of the previous /main/ block
      --
      -- NOTE: Since this is used in 'applyBlock' to check whether or not
      -- this block fits onto the chain, and we only apply main blocks,
      -- it is important that if the raw block's previous pointer to an EBB,
      -- we do some work to figure out what the previous /main/ block was.
      -- See 'mostRecentMainBlock'.
    , _bcPrevMain :: !(Strict.Maybe (InDb Core.HeaderHash))
    } deriving Eq

makeLenses ''BlockContext
deriveSafeCopy 1 'base ''BlockContext

-- | Check if one checkpoint succeeds another
--
-- The second argument is a 'Maybe', because the first checkpoint in an account
-- will have no context. The first argument is /not/ a 'Maybe' because /ONLY/
-- the first checkpoint in an account can have no context.
blockContextSucceeds :: BlockContext -> Maybe BlockContext -> Bool
_ `blockContextSucceeds` Nothing  = True
a `blockContextSucceeds` (Just b) =
    case a ^. bcPrevMain . lazy of
      Nothing   -> False -- Previous checkpoint must have been the initial one
      Just prev -> prev == b ^. bcHash

{-------------------------------------------------------------------------------
  Construction
-------------------------------------------------------------------------------}

mainBlockContext :: (NodeConstraints, MonadIO m, MonadCatch m)
                 => GenesisHash -> Core.MainBlock -> WithNodeState m BlockContext
mainBlockContext genesisHash mb = do
    mPrev <- view strict <$> mostRecentMainBlock genesisHash (mb ^. Core.mainBlockPrevBlock)
    return BlockContext {
        _bcSlotId   = InDb $ mb ^. Core.mainBlockSlot
      , _bcHash     = InDb $ Core.headerHash mb
      , _bcPrevMain = (InDb . Core.headerHash) <$> mPrev
      }

{-------------------------------------------------------------------------------
  Pretty-printing
-------------------------------------------------------------------------------}

instance Buildable BlockContext where
    build BlockContext{..} = bprint
      ( "BlockContext "
      % "{ slotId " % build
      % ", hash " % build
      % ", prev " % build
      % "}"
      )
      _bcSlotId
      _bcHash
      _bcPrevMain
input-output-hk/pos-haskell-prototype
wallet/src/Cardano/Wallet/Kernel/DB/BlockContext.hs
mit
3,365
0
14
726
529
306
223
-1
-1
module MyLines where

firstSen = "Tyger, Tyger, burning bright\n"
secondSen = "In the forests of the night\n"
thirdSen = "What immortal hand or eye\n"
fourthSen = "Could frame thy fearful\
            \ symmetry?"

sentences = firstSen ++ secondSen ++ thirdSen ++ fourthSen

myLines :: String -> [String]
myLines s = go s []
  where go s acc =
          case s of
            [] -> acc
            _  -> takeWhile (/= '\n') s :
                  go (dropWhile (== '\n') $ dropWhile (/= '\n') s) acc

myLines' :: Char -> String -> [String]
myLines' c s = go c s []
  where go c s acc =
          case s of
            [] -> acc
            _  -> takeWhile (/= c) s :
                  go c (dropWhile (== c) $ dropWhile (/= c) s) acc

shouldEqual =
  [ "Tyger, Tyger, burning bright"
  , "In the forests of the night"
  , "What immortal hand or eye"
  , "Could frame thy fearful symmetry?"
  ]

main :: IO ()
main =
  print $ "Is myLines working? "
          ++ show (myLines sentences == shouldEqual)
          ++ " Is myLines' working? "
          ++ show (myLines' '\n' sentences == shouldEqual)
brodyberg/Notes
ProjectRosalind.hsproj/LearnHaskell/lib/HaskellBook/MyLinesChapter9.hs
mit
1,108
0
15
363
325
172
153
33
2
{-# LANGUAGE FlexibleContexts #-}

module Propellor.Property.Postfix where

import Propellor
import qualified Propellor.Property.Apt as Apt
import qualified Propellor.Property.File as File
import qualified Propellor.Property.Service as Service
import qualified Propellor.Property.User as User

import qualified Data.Map as M
import Data.List
import Data.Char

installed :: Property NoInfo
installed = Apt.serviceInstalledRunning "postfix"

restarted :: Property NoInfo
restarted = Service.restarted "postfix"

reloaded :: Property NoInfo
reloaded = Service.reloaded "postfix"

-- | Configures postfix as a satellite system, which
-- relays all mail through a relay host, which defaults to smtp.domain.
--
-- The smarthost may refuse to relay mail on to other domains, without
-- futher coniguration/keys. But this should be enough to get cron job
-- mail flowing to a place where it will be seen.
satellite :: Property NoInfo
satellite = check (not <$> mainCfIsSet "relayhost") setup
    `requires` installed
  where
    setup = trivial $ property "postfix satellite system" $ do
        hn <- asks hostName
        let (_, domain) = separate (== '.') hn
        ensureProperties
            [ Apt.reConfigure "postfix"
                [ ("postfix/main_mailer_type", "select", "Satellite system")
                , ("postfix/root_address", "string", "root")
                , ("postfix/destinations", "string", " ")
                , ("postfix/mailname", "string", hn)
                ]
            , mainCf ("relayhost", domain)
                `onChange` reloaded
            ]

-- | Sets up a file by running a property (which the filename is passed
-- to). If the setup property makes a change, postmap will be run on the
-- file, and postfix will be reloaded.
mappedFile :: Combines (Property x) (Property NoInfo)
    => FilePath
    -> (FilePath -> Property x)
    -> Property (CInfo x NoInfo)
mappedFile f setup = setup f
    `onChange` cmdProperty "postmap" [f]

-- | Run newaliases command, which should be done after changing
-- </etc/aliases>.
newaliases :: Property NoInfo
newaliases = trivial $ cmdProperty "newaliases" []

-- | The main config file for postfix.
mainCfFile :: FilePath
mainCfFile = "/etc/postfix/main.cf"

-- | Sets a main.cf name=value pair. Does not reload postfix immediately.
mainCf :: (String, String) -> Property NoInfo
mainCf (name, value) = check notset set
    `describe` ("postfix main.cf " ++ setting)
  where
    setting = name ++ "=" ++ value
    notset = (/= Just value) <$> getMainCf name
    set = cmdProperty "postconf" ["-e", setting]

-- | Gets a man.cf setting.
getMainCf :: String -> IO (Maybe String)
getMainCf name = parse . lines <$> readProcess "postconf" [name]
  where
    parse (l:_) = Just $ case separate (== '=') l of
        (_, (' ':v)) -> v
        (_, v) -> v
    parse [] = Nothing

-- | Checks if a main.cf field is set. A field that is set to
-- the empty string is considered not set.
mainCfIsSet :: String -> IO Bool
mainCfIsSet name = do
    v <- getMainCf name
    return $ v /= Nothing && v /= Just ""

-- | Parses main.cf, and removes any initial configuration lines that are
-- overridden to other values later in the file.
--
-- For example, to add some settings, removing any old settings:
--
-- > mainCf `File.containsLines`
-- >     [ "# I like bars."
-- >     , "foo = bar"
-- >     ] `onChange` dedupMainCf
--
-- Note that multiline configurations that continue onto the next line
-- are not currently supported.
dedupMainCf :: Property NoInfo
dedupMainCf = File.fileProperty "postfix main.cf dedupped" dedupCf mainCfFile

dedupCf :: [String] -> [String]
dedupCf ls =
    let parsed = map parse ls
    in dedup [] (keycounts $ rights parsed) parsed
  where
    parse l
        | "#" `isPrefixOf` l = Left l
        | "=" `isInfixOf` l =
            let (k, v) = separate (== '=') l
            in Right ((filter (not . isSpace) k), v)
        | otherwise = Left l
    fmt k v = k ++ " =" ++ v

    keycounts = M.fromListWith (+) . map (\(k, _v) -> (k, (1 :: Integer)))

    dedup c _ [] = reverse c
    dedup c kc ((Left v):rest) = dedup (v:c) kc rest
    dedup c kc ((Right (k, v)):rest) = case M.lookup k kc of
        Just n | n > 1 -> dedup c (M.insert k (n - 1) kc) rest
        _ -> dedup (fmt k v:c) kc rest

-- | Installs saslauthd and configures it for postfix, authenticating
-- against PAM.
--
-- Does not configure postfix to use it; eg smtpd_sasl_auth_enable = yes
-- needs to be set to enable use. See
-- https://wiki.debian.org/PostfixAndSASL
saslAuthdInstalled :: Property NoInfo
saslAuthdInstalled = setupdaemon
    `requires` Service.running "saslauthd"
    `requires` postfixgroup
    `requires` dirperm
    `requires` Apt.installed ["sasl2-bin"]
    `requires` smtpdconf
  where
    setupdaemon = "/etc/default/saslauthd" `File.containsLines`
        [ "START=yes"
        , "OPTIONS=\"-c -m " ++ dir ++ "\""
        ]
        `onChange` Service.restarted "saslauthd"
    smtpdconf = "/etc/postfix/sasl/smtpd.conf" `File.containsLines`
        [ "pwcheck_method: saslauthd"
        , "mech_list: PLAIN LOGIN"
        ]
    dirperm = check (not <$> doesDirectoryExist dir) $
        cmdProperty "dpkg-statoverride"
            [ "--add", "root", "sasl", "710", dir ]
    postfixgroup = "postfix" `User.hasGroup` "sasl"
        `onChange` restarted
    dir = "/var/spool/postfix/var/run/saslauthd"
avengerpenguin/propellor
src/Propellor/Property/Postfix.hs
bsd-2-clause
5,063
74
16
927
1,371
755
616
97
4
module HCP.Topup
  ( nodif_brain_mask
  , outprefix
  , fieldcoef
  , movpar_txt
  , rules
  ) where

import Development.Shake
import Development.Shake.FilePath
import FSL (extractVol_, getDim4)
import qualified HCP.Preprocessing as Preprocessing
import Text.Printf

outdir :: [Char]
outdir = "hcp-output/2_topup"

nodif_brain :: FilePath
nodif_brain = outdir </> "nodif_brain.nii.gz"

nodif_brain_mask :: FilePath
nodif_brain_mask= outdir </> "nodif_brain_mask.nii.gz"

hifib0 :: FilePath
hifib0 = outdir </> "hifib0.nii.gz"

outprefix :: FilePath
outprefix = outdir </> "topup_Pos_Neg_b0"

fieldcoef :: [Char]
fieldcoef = outprefix ++ "_fieldcoef.nii.gz"

movpar_txt :: [Char]
movpar_txt = outprefix ++ "_movpar.txt"

topupcfg :: [Char]
topupcfg = "b02b0.cnf"

rules :: Rules ()
rules = do
  [nodif_brain, nodif_brain_mask] *>> \_ -> do
    need [hifib0]
    command [] "bet" [hifib0, nodif_brain, "-m", "-f", "0.2"]

  hifib0 %> \_ -> do
    need [Preprocessing.posb0s
         ,Preprocessing.negb0s
         ,Preprocessing.acqparams_txt
         ,fieldcoef
         ,movpar_txt]
    dimt <- (+1) <$> getDim4 Preprocessing.posb0s
    withTempFile $ \posb01 ->
      withTempFile $ \negb01 -> do
        extractVol_ posb01 Preprocessing.posb0s 1
        extractVol_ negb01 Preprocessing.negb0s 1
        command_ [] "applytopup" [printf "--imain=%s,%s" posb01 negb01
                                 ,"--topup=" ++ outprefix
                                 ,"--datain="++Preprocessing.acqparams_txt
                                 ,"--inindex=1,"++ show dimt
                                 ,"--out="++hifib0]

  [fieldcoef, movpar_txt] *>> \_ -> do
    need [Preprocessing.posnegb0s
         ,Preprocessing.acqparams_txt
         ,topupcfg]
    command [] "topup" ["--imain="++Preprocessing.posnegb0s
                       ,"--datain="++Preprocessing.acqparams_txt
                       ,"--config="++topupcfg
                       ,"--out=" ++ outprefix
                       ,"-v"]
pnlbwh/test-tensormasking
pipeline-lib/Pipeline/HCP/old/Topup.hs
bsd-3-clause
2,144
0
20
696
486
268
218
62
1
{-# LANGUAGE OverloadedStrings #-} -- | -- Module : Language.Ava.Base.Parser -- Copyright : (c) 2016 Owain Lewis -- -- License : BSD-style -- Maintainer : [email protected] -- Stability : experimental -- Portability : GHC -- -- module Language.Ava.Base.Parser ( parseInteger , parseDouble , parseBoolean , parseString , parseList , parseWord , parseQuotation , parseMany , readExpr , AvaParseError ) where import Text.Parsec import Text.Parsec.Text (Parser) import Data.Bifunctor (bimap) import qualified Data.Text as T import Language.Ava.Base.AST as AST import qualified Language.Ava.Base.Lexer as Lexer type AvaParseError = String ------------------------------------------------------------- readExpr :: Parser a -> T.Text -> Either AvaParseError a readExpr p input = bimap show id (parse p "<stdin>" input) parseMany :: T.Text -> Either AvaParseError [AST.Value] parseMany = readExpr $ manyTill parseExpr eof ------------------------------------------------------------- parseInteger :: Parser AST.Prim parseInteger = do digits <- Lexer.whiteSpace *> (many1 digit) <* Lexer.whiteSpace return $ AST.Integer (read digits) parseDouble :: Parser AST.Prim parseDouble = AST.Double <$> Lexer.float parseNumber :: Parser AST.Prim parseNumber = try parseDouble <|> parseInteger parseBoolean :: Parser AST.Prim parseBoolean = parseTrue <|> parseFalse where parseTrue = (Lexer.reserved "true") >> return (AST.Boolean True) parseFalse = (Lexer.reserved "false") >> return (AST.Boolean False) parseString :: Parser AST.Prim parseString = AST.String . T.unpack <$> Lexer.stringLiteral parseList :: Parser AST.Prim parseList = AST.List <$> Lexer.brackets (Lexer.commaSep parseExpr) parseQuotation :: Parser AST.Prim parseQuotation = (\xs -> AST.Quotation $ xs) <$> (Lexer.braces $ many parseExpr) parseWord :: Parser AST.Prim parseWord = AST.Word . T.unpack <$> Lexer.identifier parsePrim :: Parser AST.Prim parsePrim = parseNumber <|> parseQuotation <|> parseList <|> parseBoolean <|> parseWord -------------------------------------------------- -- Control flow -------------------------------------------------- parseDefine :: Parser AST.Value parseDefine = do Lexer.reserved "define" name <- Lexer.identifier forms <- Lexer.braces (many parseExpr) return $ AST.Define (T.unpack name) forms parseLet :: Parser AST.Value parseLet = do Lexer.reserved "let" name <- Lexer.identifier Lexer.lexeme (char '=') expr <- parseExpr return $ AST.Let (T.unpack name) expr parseExpr :: Parser AST.Value parseExpr = (try parseLet <|> parseDefine) <|> (AST.Prim <$> parsePrim)
owainlewis/seven
src/Language/Ava/Base/Parser.hs
bsd-3-clause
2,647
0
11
407
733
390
343
61
1
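A small driver sketch for the parser above; parseMany and the error type are taken from the module, while the sample program text and the use of OverloadedStrings for the Text argument are assumptions made for the example:

{-# LANGUAGE OverloadedStrings #-}
-- Sketch only: feeds a tiny source string to parseMany and reports how
-- many top-level forms came back (a let-binding, a list and a word).
import Language.Ava.Base.Parser (parseMany)

main :: IO ()
main =
  case parseMany "let x = 42 [1, 2, 3] reverse" of
    Left err -> putStrLn ("parse error: " ++ err)
    Right vs -> putStrLn ("parsed " ++ show (length vs) ++ " top-level forms")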
{-# LANGUAGE StaticPointers #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveDataTypeable #-} module Main where import GHC.StaticPtr import GHC.Word import GHC.Generics import Data.Data import Data.Binary import Data.ByteString fact :: Int -> Int fact 0 = 1 fact n = n * fact (n - 1) main = do let sptr :: StaticPtr (Int -> Int) sptr = static fact print $ staticPtrInfo sptr print $ deRefStaticPtr sptr 10 -- --------------------------------------------------------------------- type StaticKey1 = Fingerprint -- Defined in GHC.Fingerprint. data Fingerprint = Fingerprint {-# UNPACK #-} !Word64 {-# UNPACK #-} !Word64 deriving (Generic, Typeable) staticKey :: StaticPtr a -> StaticKey1 staticKey = undefined
mpickering/ghc-exactprint
tests/examples/ghc710/StaticPointers.hs
bsd-3-clause
736
0
12
126
181
98
83
23
1
-- | Display game data on the screen using one of the available frontends -- (determined at compile time with cabal flags). module Game.LambdaHack.Client.UI.DrawClient ( ColorMode(..) , draw ) where import Control.Exception.Assert.Sugar import qualified Data.EnumMap.Strict as EM import qualified Data.EnumSet as ES import Data.List import Data.Maybe import Data.Ord import Data.Text (Text) import qualified Data.Text as T import Game.LambdaHack.Client.Bfs import Game.LambdaHack.Client.CommonClient import Game.LambdaHack.Client.MonadClient import Game.LambdaHack.Client.State import Game.LambdaHack.Client.UI.Animation import qualified Game.LambdaHack.Common.Ability as Ability import Game.LambdaHack.Common.Actor as Actor import Game.LambdaHack.Common.ActorState import qualified Game.LambdaHack.Common.Color as Color import qualified Game.LambdaHack.Common.Dice as Dice import Game.LambdaHack.Common.Faction import Game.LambdaHack.Common.Item import Game.LambdaHack.Common.ItemDescription import Game.LambdaHack.Common.ItemStrongest import qualified Game.LambdaHack.Common.Kind as Kind import Game.LambdaHack.Common.Level import Game.LambdaHack.Common.Misc import Game.LambdaHack.Common.MonadStateRead import Game.LambdaHack.Common.Msg import Game.LambdaHack.Common.Perception import Game.LambdaHack.Common.Point import qualified Game.LambdaHack.Common.PointArray as PointArray import Game.LambdaHack.Common.Request import Game.LambdaHack.Common.State import qualified Game.LambdaHack.Common.Tile as Tile import Game.LambdaHack.Common.Time import Game.LambdaHack.Common.Vector import qualified Game.LambdaHack.Content.ItemKind as IK import Game.LambdaHack.Content.ModeKind import qualified Game.LambdaHack.Content.TileKind as TK -- | Color mode for the display. data ColorMode = ColorFull -- ^ normal, with full colours | ColorBW -- ^ black+white only -- TODO: split up and generally rewrite. -- | Draw the whole screen: level map and status area. -- Pass at most a single page if overlay of text unchanged -- to the frontends to display separately or overlay over map, -- depending on the frontend. draw :: MonadClient m => ColorMode -> LevelId -> Maybe Point -> Maybe Point -> Maybe (PointArray.Array BfsDistance, Maybe [Point]) -> (Text, Maybe Text) -> (Text, Maybe Text) -> Overlay -> m SingleFrame draw dm drawnLevelId cursorPos tgtPos bfsmpathRaw (cursorDesc, mcursorHP) (targetDesc, mtargetHP) sfTop = do cops <- getsState scops mleader <- getsClient _sleader s <- getState cli@StateClient{ stgtMode, seps, sexplored , smarkVision, smarkSmell, smarkSuspect, swaitTimes } <- getClient per <- getPerFid drawnLevelId let Kind.COps{[email protected]{okind=tokind, ouniqGroup}} = cops (lvl@Level{lxsize, lysize, lsmell, ltime}) = sdungeon s EM.! 
-- | Display game data on the screen using one of the available frontends
-- (determined at compile time with cabal flags).
module Game.LambdaHack.Client.UI.DrawClient
  ( ColorMode(..)
  , draw
  ) where

import Control.Exception.Assert.Sugar
import qualified Data.EnumMap.Strict as EM
import qualified Data.EnumSet as ES
import Data.List
import Data.Maybe
import Data.Ord
import Data.Text (Text)
import qualified Data.Text as T

import Game.LambdaHack.Client.Bfs
import Game.LambdaHack.Client.CommonClient
import Game.LambdaHack.Client.MonadClient
import Game.LambdaHack.Client.State
import Game.LambdaHack.Client.UI.Animation
import qualified Game.LambdaHack.Common.Ability as Ability
import Game.LambdaHack.Common.Actor as Actor
import Game.LambdaHack.Common.ActorState
import qualified Game.LambdaHack.Common.Color as Color
import qualified Game.LambdaHack.Common.Dice as Dice
import Game.LambdaHack.Common.Faction
import Game.LambdaHack.Common.Item
import Game.LambdaHack.Common.ItemDescription
import Game.LambdaHack.Common.ItemStrongest
import qualified Game.LambdaHack.Common.Kind as Kind
import Game.LambdaHack.Common.Level
import Game.LambdaHack.Common.Misc
import Game.LambdaHack.Common.MonadStateRead
import Game.LambdaHack.Common.Msg
import Game.LambdaHack.Common.Perception
import Game.LambdaHack.Common.Point
import qualified Game.LambdaHack.Common.PointArray as PointArray
import Game.LambdaHack.Common.Request
import Game.LambdaHack.Common.State
import qualified Game.LambdaHack.Common.Tile as Tile
import Game.LambdaHack.Common.Time
import Game.LambdaHack.Common.Vector
import qualified Game.LambdaHack.Content.ItemKind as IK
import Game.LambdaHack.Content.ModeKind
import qualified Game.LambdaHack.Content.TileKind as TK

-- | Color mode for the display.
data ColorMode =
    ColorFull  -- ^ normal, with full colours
  | ColorBW    -- ^ black+white only

-- TODO: split up and generally rewrite.
-- | Draw the whole screen: level map and status area.
-- Pass at most a single page if overlay of text unchanged
-- to the frontends to display separately or overlay over map,
-- depending on the frontend.
draw :: MonadClient m
     => ColorMode -> LevelId
     -> Maybe Point -> Maybe Point
     -> Maybe (PointArray.Array BfsDistance, Maybe [Point])
     -> (Text, Maybe Text) -> (Text, Maybe Text) -> Overlay
     -> m SingleFrame
draw dm drawnLevelId cursorPos tgtPos bfsmpathRaw
     (cursorDesc, mcursorHP) (targetDesc, mtargetHP) sfTop = do
  cops <- getsState scops
  mleader <- getsClient _sleader
  s <- getState
  cli@StateClient{ stgtMode, seps, sexplored
                 , smarkVision, smarkSmell, smarkSuspect, swaitTimes }
    <- getClient
  per <- getPerFid drawnLevelId
  let Kind.COps{cotile=cotile@Kind.Ops{okind=tokind, ouniqGroup}} = cops
      (lvl@Level{lxsize, lysize, lsmell, ltime}) = sdungeon s EM.!
drawnLevelId (bl, mblid, mbpos) = case (cursorPos, mleader) of (Just cursor, Just leader) -> let Actor{bpos, blid} = getActorBody leader s in if blid /= drawnLevelId then ( [cursor], Just blid, Just bpos ) else ( fromMaybe [] $ bla lxsize lysize seps bpos cursor , Just blid , Just bpos ) _ -> ([], Nothing, Nothing) mpath = maybe Nothing (\(_, mp) -> if null bl || mblid /= Just drawnLevelId then Nothing else mp) bfsmpathRaw actorsHere = actorAssocs (const True) drawnLevelId s cursorHere = find (\(_, m) -> cursorPos == Just (Actor.bpos m)) actorsHere shiftedBTrajectory = case cursorHere of Just (_, Actor{btrajectory = Just p, bpos = prPos}) -> trajectoryToPath prPos (fst p) _ -> [] unknownId = ouniqGroup "unknown space" dis pos0 = let tile = lvl `at` pos0 tk = tokind tile floorBag = EM.findWithDefault EM.empty pos0 $ lfloor lvl (itemSlots, _) = sslots cli bagItemSlots = EM.filter (`EM.member` floorBag) itemSlots floorIids = EM.elems bagItemSlots -- first slot will be shown sml = EM.findWithDefault timeZero pos0 lsmell smlt = sml `timeDeltaToFrom` ltime viewActor aid Actor{bsymbol, bcolor, bhp, bproj} | Just aid == mleader = (symbol, inverseVideo) | otherwise = (symbol, Color.defAttr {Color.fg = bcolor}) where symbol | bhp <= 0 && not bproj = '%' | otherwise = bsymbol rainbow p = Color.defAttr {Color.fg = toEnum $ fromEnum p `rem` 14 + 1} -- smarkSuspect is an optional overlay, so let's overlay it -- over both visible and invisible tiles. vcolor | smarkSuspect && Tile.isSuspect cotile tile = Color.BrCyan | vis = TK.tcolor tk | otherwise = TK.tcolor2 tk fgOnPathOrLine = case (vis, Tile.isWalkable cotile tile) of _ | tile == unknownId -> Color.BrBlack _ | Tile.isSuspect cotile tile -> Color.BrCyan (True, True) -> Color.BrGreen (True, False) -> Color.BrRed (False, True) -> Color.Green (False, False) -> Color.Red atttrOnPathOrLine = if Just pos0 == cursorPos then inverseVideo {Color.fg = fgOnPathOrLine} else Color.defAttr {Color.fg = fgOnPathOrLine} (char, attr0) = case find (\(_, m) -> pos0 == Actor.bpos m) actorsHere of _ | isJust stgtMode && (elem pos0 bl || elem pos0 shiftedBTrajectory) -> ('*', atttrOnPathOrLine) -- line takes precedence over path _ | isJust stgtMode && maybe False (elem pos0) mpath -> (';', Color.defAttr {Color.fg = fgOnPathOrLine}) Just (aid, m) -> viewActor aid m _ | smarkSmell && sml > ltime -> (timeDeltaToDigit smellTimeout smlt, rainbow pos0) | otherwise -> case floorIids of [] -> (TK.tsymbol tk, Color.defAttr {Color.fg = vcolor}) iid : _ -> viewItem $ getItemBody iid s vis = ES.member pos0 $ totalVisible per a = case dm of ColorBW -> Color.defAttr ColorFull -> if smarkVision && vis then attr0 {Color.bg = Color.Blue} else attr0 in Color.AttrChar a char widthX = 80 widthTgt = 39 widthStats = widthX - widthTgt addAttr t = map (Color.AttrChar Color.defAttr) (T.unpack t) arenaStatus = drawArenaStatus (ES.member drawnLevelId sexplored) lvl widthStats displayPathText mp mt = let (plen, llen) = case (mp, bfsmpathRaw, mbpos) of (Just target, Just (bfs, _), Just bpos) | mblid == Just drawnLevelId -> (fromMaybe 0 (accessBfs bfs target), chessDist bpos target) _ -> (0, 0) pText | plen == 0 = "" | otherwise = "p" <> tshow plen lText | llen == 0 = "" | otherwise = "l" <> tshow llen text = fromMaybe (pText <+> lText) mt in if T.null text then "" else " " <> text -- The indicators must fit, they are the actual information. pathCsr = displayPathText cursorPos mcursorHP trimTgtDesc n t = assert (not (T.null t) && n > 2) $ if T.length t <= n then t else let ellipsis = "..." 
fitsPlusOne = T.take (n - T.length ellipsis + 1) t fits = if T.last fitsPlusOne == ' ' then T.init fitsPlusOne else let lw = T.words fitsPlusOne in T.unwords $ init lw in fits <> ellipsis cursorText = let n = widthTgt - T.length pathCsr - 8 in (if isJust stgtMode then "x-hair>" else "X-hair:") <+> trimTgtDesc n cursorDesc cursorGap = T.replicate (widthTgt - T.length pathCsr - T.length cursorText) " " cursorStatus = addAttr $ cursorText <> cursorGap <> pathCsr minLeaderStatusWidth = 19 -- covers 3-digit HP selectedStatus <- drawSelected drawnLevelId (widthStats - minLeaderStatusWidth) leaderStatus <- drawLeaderStatus swaitTimes (widthStats - length selectedStatus) damageStatus <- drawLeaderDamage (widthStats - length leaderStatus - length selectedStatus) nameStatus <- drawPlayerName (widthStats - length leaderStatus - length selectedStatus - length damageStatus) let statusGap = addAttr $ T.replicate (widthStats - length leaderStatus - length selectedStatus - length damageStatus - length nameStatus) " " -- The indicators must fit, they are the actual information. pathTgt = displayPathText tgtPos mtargetHP targetText = let n = widthTgt - T.length pathTgt - 8 in "Target:" <+> trimTgtDesc n targetDesc targetGap = T.replicate (widthTgt - T.length pathTgt - T.length targetText) " " targetStatus = addAttr $ targetText <> targetGap <> pathTgt sfBottom = [ encodeLine $ arenaStatus ++ cursorStatus , encodeLine $ selectedStatus ++ nameStatus ++ statusGap ++ damageStatus ++ leaderStatus ++ targetStatus ] fLine y = encodeLine $ let f l x = let ac = dis $ Point x y in ac : l in foldl' f [] [lxsize-1,lxsize-2..0] sfLevel = -- fully evaluated let f l y = let !line = fLine y in line : l in foldl' f [] [lysize-1,lysize-2..0] sfBlank = False return $! SingleFrame{..} inverseVideo :: Color.Attr inverseVideo = Color.Attr { Color.fg = Color.bg Color.defAttr , Color.bg = Color.fg Color.defAttr } -- Comfortably accomodates 3-digit level numbers and 25-character -- level descriptions (currently enforced max). 
drawArenaStatus :: Bool -> Level -> Int -> [Color.AttrChar] drawArenaStatus explored Level{ldepth=AbsDepth ld, ldesc, lseen, lclear} width = let addAttr t = map (Color.AttrChar Color.defAttr) (T.unpack t) seenN = 100 * lseen `div` max 1 lclear seenTxt | explored || seenN >= 100 = "all" | otherwise = T.justifyLeft 3 ' ' (tshow seenN <> "%") lvlN = T.justifyLeft 2 ' ' (tshow ld) seenStatus = "[" <> seenTxt <+> "seen] " in addAttr $ T.justifyLeft width ' ' $ T.take 29 (lvlN <+> T.justifyLeft 26 ' ' ldesc) <+> seenStatus drawLeaderStatus :: MonadClient m => Int -> Int -> m [Color.AttrChar] drawLeaderStatus waitT width = do mleader <- getsClient _sleader s <- getState let addAttr t = map (Color.AttrChar Color.defAttr) (T.unpack t) addColor c t = map (Color.AttrChar $ Color.Attr c Color.defBG) (T.unpack t) maxLeaderStatusWidth = 23 -- covers 3-digit HP and 2-digit Calm (calmHeaderText, hpHeaderText) = if width < maxLeaderStatusWidth then ("C", "H") else ("Calm", "HP") case mleader of Just leader -> do activeItems <- activeItemsClient leader let (darkL, bracedL, hpDelta, calmDelta, ahpS, bhpS, acalmS, bcalmS) = let b@Actor{bhp, bcalm} = getActorBody leader s amaxHP = sumSlotNoFilter IK.EqpSlotAddMaxHP activeItems amaxCalm = sumSlotNoFilter IK.EqpSlotAddMaxCalm activeItems in ( not (actorInAmbient b s) , braced b, bhpDelta b, bcalmDelta b , tshow $ max 0 amaxHP, tshow (bhp `divUp` oneM) , tshow $ max 0 amaxCalm, tshow (bcalm `divUp` oneM)) -- This is a valuable feedback for the otherwise hard to observe -- 'wait' command. slashes = ["/", "|", "\\", "|"] slashPick = slashes !! (max 0 (waitT - 1) `mod` length slashes) checkDelta ResDelta{..} | resCurrentTurn < 0 || resPreviousTurn < 0 = addColor Color.BrRed -- alarming news have priority | resCurrentTurn > 0 || resPreviousTurn > 0 = addColor Color.BrGreen | otherwise = addAttr -- only if nothing at all noteworthy calmAddAttr = checkDelta calmDelta darkPick | darkL = "." | otherwise = ":" calmHeader = calmAddAttr $ calmHeaderText <> darkPick calmText = bcalmS <> (if darkL then slashPick else "/") <> acalmS bracePick | bracedL = "}" | otherwise = ":" hpAddAttr = checkDelta hpDelta hpHeader = hpAddAttr $ hpHeaderText <> bracePick hpText = bhpS <> (if bracedL then slashPick else "/") <> ahpS return $! calmHeader <> addAttr (T.justifyRight 6 ' ' calmText <> " ") <> hpHeader <> addAttr (T.justifyRight 6 ' ' hpText <> " ") Nothing -> return $! addAttr $ calmHeaderText <> ": --/-- " <> hpHeaderText <> ": --/-- " drawLeaderDamage :: MonadClient m => Int -> m [Color.AttrChar] drawLeaderDamage width = do mleader <- getsClient _sleader let addColor t = map (Color.AttrChar $ Color.Attr Color.BrCyan Color.defBG) (T.unpack t) stats <- case mleader of Just leader -> do actorSk <- actorSkillsClient leader b <- getsState $ getActorBody leader localTime <- getsState $ getLocalTime (blid b) allAssocs <- fullAssocsClient leader [CEqp, COrgan] let activeItems = map snd allAssocs calm10 = calmEnough10 b $ map snd allAssocs forced = assert (not $ bproj b) False permitted = permittedPrecious calm10 forced preferredPrecious = either (const False) id . permitted strongest = strongestMelee False localTime allAssocs strongestPreferred = filter (preferredPrecious . snd . 
snd) strongest damage = case strongestPreferred of _ | EM.findWithDefault 0 Ability.AbMelee actorSk <= 0 -> "0" [] -> "0" (_average, (_, itemFull)) : _ -> let getD :: IK.Effect -> Maybe Dice.Dice -> Maybe Dice.Dice getD (IK.Hurt dice) acc = Just $ dice + fromMaybe 0 acc getD (IK.Burn dice) acc = Just $ dice + fromMaybe 0 acc getD _ acc = acc mdice = case itemDisco itemFull of Just ItemDisco{itemAE=Just ItemAspectEffect{jeffects}} -> foldr getD Nothing jeffects Just ItemDisco{itemKind} -> foldr getD Nothing (IK.ieffects itemKind) Nothing -> Nothing tdice = case mdice of Nothing -> "0" Just dice -> tshow dice bonus = sumSlotNoFilter IK.EqpSlotAddHurtMelee activeItems unknownBonus = unknownMelee activeItems tbonus = if bonus == 0 then if unknownBonus then "+?" else "" else (if bonus > 0 then "+" else "") <> tshow bonus <> if unknownBonus then "%?" else "%" in tdice <> tbonus return $! damage Nothing -> return "" return $! if T.null stats || T.length stats >= width then [] else addColor $ stats <> " " -- TODO: colour some texts using the faction's colour drawSelected :: MonadClient m => LevelId -> Int -> m [Color.AttrChar] drawSelected drawnLevelId width = do mleader <- getsClient _sleader selected <- getsClient sselected side <- getsClient sside allOurs <- getsState $ filter ((== side) . bfid) . EM.elems . sactorD ours <- getsState $ filter (not . bproj . snd) . actorAssocs (== side) drawnLevelId let viewOurs (aid, Actor{bsymbol, bcolor, bhp}) = let cattr = Color.defAttr {Color.fg = bcolor} sattr | Just aid == mleader = inverseVideo | ES.member aid selected = -- TODO: in the future use a red rectangle instead -- of background and mark them on the map, too; -- also, perhaps blink all selected on the map, -- when selection changes if bcolor /= Color.Blue then cattr {Color.bg = Color.Blue} else cattr {Color.bg = Color.Magenta} | otherwise = cattr in Color.AttrChar sattr $ if bhp > 0 then bsymbol else '%' maxViewed = width - 2 star = let sattr = case ES.size selected of 0 -> Color.defAttr {Color.fg = Color.BrBlack} n | n == length ours -> Color.defAttr {Color.bg = Color.Blue} _ -> Color.defAttr char = if length ours > maxViewed then '$' else '*' in Color.AttrChar sattr char viewed = map viewOurs $ take maxViewed $ sortBy (comparing keySelected) ours addAttr t = map (Color.AttrChar Color.defAttr) (T.unpack t) -- Don't show anything if the only actor in the dungeon is the leader. -- He's clearly highlighted on the level map, anyway. party = if length allOurs == 1 && length ours == 1 || null ours then [] else [star] ++ viewed ++ addAttr " " return $! party drawPlayerName :: MonadClient m => Int -> m [Color.AttrChar] drawPlayerName width = do let addAttr t = map (Color.AttrChar Color.defAttr) (T.unpack t) side <- getsClient sside fact <- getsState $ (EM.! side) . sfactionD let nameN n t = let fitWords [] = [] fitWords l@(_ : rest) = if sum (map T.length l) + length l - 1 > n then fitWords rest else l in T.unwords $ reverse $ fitWords $ reverse $ T.words t ourName = nameN (width - 1) $ fname $ gplayer fact return $! if T.null ourName || T.length ourName >= width then [] else addAttr $ ourName <> " "
Concomitant/LambdaHack
Game/LambdaHack/Client/UI/DrawClient.hs
bsd-3-clause
18,861
0
32
6,432
5,270
2,735
2,535
-1
-1
{-# LANGUAGE CPP #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE ViewPatterns #-} module Optical.Patterns ( -- * Points pattern P0 , pattern P1 , pattern P2 , pattern P3 , pattern P4 #if __GLASGOW_HASKELL__ >= 709 -- Numbers , pattern NaN , pattern Infinity #endif ) where import Linear.Affine import Linear pattern P0 :: Point V0 a pattern P0 = P V0 pattern P1 :: a -> Point V1 a pattern P1 x = P (V1 x) pattern P2 :: a -> a -> Point V2 a pattern P2 x y = P (V2 x y) pattern P3 :: a -> a -> a -> Point V3 a pattern P3 x y z = P (V3 x y z) pattern P4 :: a -> a -> a -> a -> Point V4 a pattern P4 x y z w = P (V4 x y z w) #if __GLASGOW_HASKELL__ >= 709 #if __GLASGOW_HASKELL__ >= 800 pattern NaN :: RealFloat a => a #else pattern NaN :: () => RealFloat a => a #endif pattern NaN <- (isNaN -> True) where NaN = 0/0 #if __GLASGOW_HASKELL__ >= 800 pattern Infinity :: RealFloat a => a #else pattern Infinity :: () => RealFloat a => a #endif pattern Infinity <- (isInfinite -> True) where Infinity = 1/0 #endif
cchalmers/optical
src/Optical/Patterns.hs
bsd-3-clause
1,044
0
10
251
359
191
168
22
0
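A usage sketch for the pattern synonyms above; Point and V2 come from the linear package, as in the module's own imports, and the NaN pattern requires the GHC version guarded by the CPP in the module:

-- Sketch only: P2 works both as a pattern and as a constructor, so 2D
-- points can be built and taken apart without unwrapping P/V2 by hand.
import Linear (V2)
import Linear.Affine (Point)
import Optical.Patterns

swap2 :: Point V2 a -> Point V2 a
swap2 (P2 x y) = P2 y x

main :: IO ()
main = do
  print (swap2 (P2 1 2 :: Point V2 Int))
  print (isNaN (NaN :: Double))  -- NaN is built as 0/0 by the synonym above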
module Main where import Types import Instances main :: IO () main = return ()
mkloczko/derive-storable-plugin
test/ids/TypeSynonym/Main.hs
mit
81
0
6
17
30
17
13
5
1
-- -- Copyright (c) 2011 Citrix Systems, Inc. -- -- This program is free software; you can redistribute it and/or modify -- it under the terms of the GNU General Public License as published by -- the Free Software Foundation; either version 2 of the License, or -- (at your option) any later version. -- -- This program is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -- GNU General Public License for more details. -- -- You should have received a copy of the GNU General Public License -- along with this program; if not, write to the Free Software -- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -- module Migrations.M_8 ( migration ) where import UpgradeEngine migration = Migration { sourceVersion = 8 , targetVersion = 9 , actions = act } act :: IO () act = do xformVmJSON $ upgradeManifests -- Derives the appliance type from the installed manifest upgradeManifests :: JSValue -> JSValue upgradeManifests tree = jsModify manifest "/backend/appliance_manifest" $ tree where app_type True _ = "dynamic" app_type False n | n > 1 = "hybrid" | otherwise = "static" shared (Just (JSBool x)) = x shared _ = error "expected appliance shared boolean" num_disks (Just (JSArray dms)) = length dms num_disks _ = error "expected array of disk manifests" manifest tree = jsSet "/appliance/imgType" (jsBoxString $ app_type s n) $ tree where s = shared $ jsGet "/appliance/shared" tree n = num_disks $ jsGet "/disk_manifests" tree
jean-edouard/manager
upgrade-db/Migrations/M_8.hs
gpl-2.0
1,799
0
11
485
270
146
124
25
4
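The decision encoded in app_type above, restated as a standalone function so the three outcomes are easy to see; the JSON plumbing through UpgradeEngine is deliberately left out, only the pure rule is shown:

-- Sketch only: shared appliances become "dynamic"; unshared ones are
-- "hybrid" when they have more than one disk manifest, else "static".
applianceImgType :: Bool -> Int -> String
applianceImgType True  _ = "dynamic"
applianceImgType False n
  | n > 1     = "hybrid"
  | otherwise = "static"

main :: IO ()
main = print (map (uncurry applianceImgType) [(True, 3), (False, 3), (False, 1)])
-- prints ["dynamic","hybrid","static"]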
{-# LANGUAGE ImportQualifiedPost #-} -- If 'ImportQualifiedPost' is enabled 'qualified' can appear in -- postpositive position. import Prelude qualified main = Prelude.undefined
sdiehl/ghc
testsuite/tests/module/mod181.hs
bsd-3-clause
181
1
5
25
20
10
10
-1
-1
{-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} import AppCache import Routes import Yesod.Core instance Yesod App mkYesodDispatch "App" resourcesApp getHomeR :: Handler String getHomeR = return "Hello" getSomethingR :: Handler String getSomethingR = return "Hello" getAppCacheR :: Handler AppCache getAppCacheR = $(appCache resourcesApp) main :: IO () main = warp 3000 App
ygale/yesod
demo/appcache/Main.hs
mit
518
3
7
122
120
54
66
17
1
module Main(main) where

import Memo1

testMemo = do
  let keys  = [ [1..n] | n <- [1..1000] ]
      keys2 = [ [n,n-1..1] | n <- [1..1000] ]
      mlength = memo length
  putStr (show (map mlength (keys ++ keys ++ keys2 ++ keys2)))
  putStr (show (mlength [1..100000]))

-- mlength will memoize itself over each element of 'keys', returning
-- the memoized result the second time around.  Then we move onto
-- keys2, and while we're doing this the first lot of memo table
-- entries can be purged.  Finally, we do a large computation
-- (length [1..100000]) to allow time for the memo table to be fully
-- purged.

main = testMemo
beni55/ghcjs
test/pkg/base/memo001.hs
mit
636
0
15
141
157
87
70
9
1
module H1.H1 (main) where last' :: [a] -> a last' [] = error "no last element for empty" last' (x: []) = x last' (x:xs) = last' xs lastbutone :: [a] -> a lastbutone [] = error "no next-to-last for empty" lastbutone [x] = error "no next-to-last for singleton" lastbutone (x:[y]) = x lastbutone (x:xs) = lastbutone xs nth :: Int -> [a] -> a nth _ [] = error "index out of bounds for list" nth 0 (x:xs) = x nth n (x:xs) = nth (n - 1) xs len' :: [a] -> Int len' [] = 0 len' (x:[]) = 1 len' (x:xs) = 1 + len' xs rev :: [a] -> [a] rev [] = [] rev (x:[]) = [x] rev (x:xs) = rev xs ++ [x] isPal :: Eq a => [a] -> Bool isPal xs = xs == (rev xs) main :: IO () main = do print $ last' "asdf" print $ lastbutone "asdf" print $ nth 1 "asdf" print $ len' "asdf" print $ rev "asdf" print $ isPal "asdfdsa"
ublubu/euler
H1/H1.hs
mit
810
0
8
196
470
240
230
33
1
module Codec.Binary.Base64 (encode) where import Control.Applicative import Control.Monad import qualified Data.Binary.Get as BY import qualified Data.Binary.Bits.Get as BI import Data.Bits (shiftL) import qualified Data.ByteString.Lazy as BL import Data.Monoid ((<>)) import Data.Word encode :: BL.ByteString -> BL.ByteString encode = BY.runGet getB64 where getB64 = do slices <- many $ (Slice <$> takeGroups [grp 4 6 0] <*> return 0) <|> (Slice <$> takeGroups [grp 2 6 0, grp 1 4 2] <*> return 1) <|> (Slice <$> takeGroups [grp 1 6 0, grp 1 2 4] <*> return 2) return $ BL.concat (map encodeSlice slices) takeGroups :: [BI.BitGet [a]] -> BY.Get [a] takeGroups = fmap concat . BI.runBitGet . sequence grp :: Int -> Int -> Int -> BI.BitGet [Word8] grp n sz sh = replicateM n $ flip shiftL sh <$> BI.getWord8 sz encodeSlice :: Slice -> BL.ByteString encodeSlice (Slice bs p) = encBs <> padding where encBs = BL.pack . map encodeWord $ bs padding = BL.pack $ replicate p 61 encodeWord w | w < 26 = w + 65 -- A-Z | w < 52 = (w-26) + 97 -- a-z | w < 62 = (w-52) + 48 -- 0-9 | w == 62 = 43 -- + | otherwise = 47 -- / data Slice = Slice [Word8] Int
begriffs/lancelot
src/Codec/Binary/Base64.hs
mit
1,277
0
18
349
529
279
250
33
1
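A quick check of the encoder above against the usual RFC 4648 test vectors; the lazy Char8 helpers are only used here to build and print the ByteStrings:

-- Sketch only: "Man" -> "TWFu", "Ma" -> "TWE=", "M" -> "TQ==".
import qualified Codec.Binary.Base64 as B64
import qualified Data.ByteString.Lazy.Char8 as BLC

main :: IO ()
main = mapM_ (BLC.putStrLn . B64.encode . BLC.pack) ["Man", "Ma", "M"]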
{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE AutoDeriveTypeable #-} {-# LANGUAGE DeriveDataTypeable #-} -- There are lots of pattern synpnyms, and little would be gained by adding -- the type signatures. {-# OPTIONS_GHC -fno-warn-missing-pattern-synonym-signatures #-} {-# OPTIONS_GHC -fno-warn-missing-signatures #-} module IHaskell.Display.Widgets.Layout.Common where import qualified IHaskell.Display.Widgets.Singletons as S pattern AlignContent = S.SLAlignContent pattern AlignItems = S.SLAlignItems pattern AlignSelf = S.SLAlignSelf pattern Border = S.SLBorder pattern Bottom = S.SLBottom pattern Display = S.SLDisplay pattern Flex = S.SLFlex pattern FlexFlow = S.SLFlexFlow pattern GridArea = S.SLGridArea pattern GridAutoColumns = S.SLGridAutoColumns pattern GridAutoFlow = S.SLGridAutoFlow pattern GridAutoRows = S.SLGridAutoRows pattern GridColumn = S.SLGridColumn pattern GridGap = S.SLGridGap pattern GridRow = S.SLGridRow pattern GridTemplateAreas = S.SLGridTemplateAreas pattern GridTemplateColumns = S.SLGridTemplateColumns pattern GridTemplateRows = S.SLGridTemplateRows pattern Height = S.SLHeight pattern JustifyContent = S.SLJustifyContent pattern JustifyItems = S.SLJustifyItems pattern Left = S.SLLeft pattern Margin = S.SLMargin pattern MaxHeight = S.SLMaxHeight pattern MaxWidth = S.SLMaxWidth pattern MinHeight = S.SLMinHeight pattern MinWidth = S.SLMinWidth pattern Order = S.SLOrder pattern Overflow = S.SLOverflow pattern OverflowX = S.SLOverflowX pattern OverflowY = S.SLOverflowY pattern Padding = S.SLPadding pattern Right = S.SLRight pattern Top = S.SLTop pattern Visibility = S.SLVisibility pattern Width = S.SLWidth -- TODO: This should be implemented with static type checking, so it's -- easier to verify at compile-time. "The Haskell Way". -- But a lot of these fields have common values. ¿Maybe doing some kind -- of singleton for the CSS fields? ¿Maybe appending the type like -- InheritOverflow / InheritVisible / InheritGrid... -- In the meantime we'll use arrays of strings and some runtime verification cssProps :: [String] cssProps = ["inherit", "initial", "unset"] alignContentProps = ["flex-start", "flex-end", "center", "space-between", "space-around", "space-evenly", "stretch"] ++ cssProps alignItemProps = ["flex-start", "flex-end", "center", "baseline", "stretch"] ++ cssProps alignSelfProps = ["auto", "flex-start", "flex-end", "center", "baseline", "stretch"] ++ cssProps gridAutoFlowProps = ["column", "row", "row dense", "column dense"] ++ cssProps justifyContentProps = ["flex-start", "flex-end", "center", "space-between", "space-around"] ++ cssProps justifyItemsProps = ["flex-start", "flex-end", "center"] ++ cssProps overflowProps = ["visible", "hidden", "scroll", "auto"] ++ cssProps visibilityProps = ["visible", "hidden"] ++ cssProps
gibiansky/IHaskell
ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Layout/Common.hs
mit
2,934
0
6
355
598
338
260
57
1
module Rebase.Data.Sequence ( module Data.Sequence ) where import Data.Sequence
nikita-volkov/rebase
library/Rebase/Data/Sequence.hs
mit
83
0
5
12
20
13
7
4
0
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, RankNTypes #-} module Jatek.Interact where import Control.Lens import Control.Arrow (first, second) import Control.Monad (ap, liftM) import Control.Monad.State.Strict import System.IO (hFlush, stdout) import System.Random import Jatek.Actor -- InteractT is a monad transformer that includes two categories of state: RNG -- state (Random) and user state (Stateful). This is to improve usability; it -- doesn't give capabilities that a regular StateT wouldn't. -- Its purpose is to represent, "a gamelike process, not necessarily tied to a -- specific game". The "holy grail" is to be able to compose games together and -- build larger games out of subgames ("mechanics"). -- i : player ID type (often Int). -- u : user state. This shouldn't be folded into the monad m, because the -- function liftI won't "lens" the state of a subgame into a larger game. -- s : server message (e.g. a changing view from the game). -- c : client message (e.g. a player's action). -- m : monad being transformed. -- a : what the interaction returns. -- Related concept: every game has an associated State type and a View type. The -- View type is what part of the game each player can see. A simple InteractT -- use case might have the server sending views directly. data InteractT i u c s m a = Terminal a | Talk [(i, s)] ([(i, c)] -> InteractT i u c s m a) | Stateful (u -> (InteractT i u c s m a, u)) | M (m (InteractT i u c s m a)) instance (Monad m, Show a, Show s, Show i) => Show (InteractT i u c s m a) where show (Terminal a) = "Terminal " ++ show a show (Talk msgs cont) = "Talk " ++ (show msgs) ++ " (λ)" show (Stateful cont) = "Stateful (λ)" show (M _) = "M (..)" instance (Monad m) => Functor (InteractT i u c s m) where fmap = liftM instance (Monad m) => Applicative (InteractT i u c s m) where pure = return (<*>) = ap instance (Monad m) => Monad (InteractT i u c s m) where return = Terminal (Terminal a) >>= k = k a (Talk msgs cont) >>= k = Talk msgs (\cs -> (cont cs) >>= k) (Stateful cont) >>= k = Stateful (\u -> first (flip (>>=) k) $ cont u) (M ma) >>= k = M $ fmap (flip (>>=) k) ma instance (Monad m) => MonadState u (InteractT i u c s m) where get = Stateful (\u -> (Terminal u , u)) put u = Stateful (\_ -> (Terminal (), u)) instance MonadTrans (InteractT i u c s) where lift ma = M $ Terminal <$> ma instance MonadIO (InteractT i u c s IO) where liftIO = lift talk :: [(i, s)] -> ([(i, c)] -> InteractT i u c s m a) -> InteractT i u c s m a talk = Talk procure :: [i] -> (i -> s) -> (i -> c -> a) -> InteractT i u c s m [(i, a)] procure ids sf cf = Talk (map (\i -> (i, sf i)) ids) (\ics -> Terminal $ map (\(i, c) -> (i, cf i c)) ics) send :: [(i, s)] -> InteractT i u c s m () send msgs = Talk msgs (\_ -> Terminal ()) -- WARNING: In stateful monads, e.g. m = IO, this *can* side-effect the System. -- That's by design. runInteractT :: (Monad m, Ord i) => InteractT i u c s m a -> System i m s c -> u -> m (a, u) runInteractT intx system st = case intx of Terminal a -> return (a, st) Talk send cont -> sync system send >>= (\(intx1, sys1) -> runInteractT (cont intx1) sys1 st) Stateful cont -> let (intx1, st1) = cont st in runInteractT intx1 system st1 M cont -> cont >>= (\intx1 -> runInteractT intx1 system st) liftInteractT :: (Monad m, Ord i) => (c' -> c) -> (s -> s') -> Lens' u' u -> InteractT i u c s m a -> InteractT i u' c' s' m a liftInteractT cf sf uLens intx = case intx of Terminal a -> Terminal a Talk send cont -> Talk (map (second sf) send) (liftInteractT cf sf uLens . 
cont . map (second cf)) Stateful cont -> Stateful (\u' -> let (intx1, u1) = cont (view uLens u') in (liftInteractT cf sf uLens intx1, set uLens u1 u')) M cont -> M $ fmap (liftInteractT cf sf uLens) cont
michaelochurch/jatek
Jatek/Interact.hs
mit
4,024
0
17
1,029
1,494
789
705
66
4
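A small interaction value built from the combinators above; the concrete player-id, state and message types are invented for the example, and actually running it would still need a System value from Jatek.Actor, which is not shown in this module:

-- Sketch only: ask every player for a bid, keep the best bid in the
-- user state, and announce it.  The choices i=Int (players), u=Int
-- (best bid so far), c=Int (client reply), s=Msg (server message) are
-- assumptions made up for the example.
import Control.Monad.State.Strict (modify)
import Jatek.Interact

data Msg = AskBid | Announce Int

auctionRound :: Monad m => [Int] -> InteractT Int Int Int Msg m Int
auctionRound players = do
  bids <- procure players (const AskBid) (\_ bid -> bid)
  let best = maximum (map snd bids)   -- assumes at least one player
  modify (max best)                   -- via the MonadState instance above
  send [(p, Announce best) | p <- players]
  return best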
module GosperSpec where import Test.Hspec import Gosper spec :: Spec spec = do describe "getList" $ do it "normal" $ do (take 10 $ getList 1) `shouldBe` [1,2,4,8,16,32,64,128,256,512] it "error" $ do getList 0 `shouldBe` [] main :: IO() main = do hspec spec
czchen/haskell-gosper
test/GosperSpec.hs
mit
311
0
16
97
139
74
65
13
1
module Data.Mole.Core where import Control.Concurrent import Control.Concurrent.STM import Control.Monad import Control.Monad.Trans.Maybe import Data.Map (Map) import qualified Data.Map as M import Data.Set (Set) import qualified Data.Set as S import Data.ByteString (ByteString) import qualified Data.Text as T import Data.Maybe import Data.Time import Data.Mole.Types import Data.Mole.Builder.External import System.Environment import System.IO (hFlush, stdout) import qualified Network.Kraken as K padL :: Int -> String -> String padL n s | length s < n = s ++ replicate (n - length s) ' ' | otherwise = s newHandle :: Config -> IO Handle newHandle config = do st <- newTVarIO $ State Nothing (return ()) M.empty l <- newTMVarIO () msgs <- newTQueueIO void $ forkIO $ forever $ do (Message time aId msg) <- atomically $ readTQueue msgs putStrLn $ mconcat [ formatTime defaultTimeLocale "%H:%M:%S" time , " [ " <> take 24 (padL 24 (T.unpack $ unAssetId aId)) <> " ] " , msg ] hFlush stdout e <- newTQueueIO void $ forkIO $ forever $ do join $ atomically $ readTQueue e kH <- runMaybeT $ do apiKey <- MaybeT $ lookupEnv "KRAKEN_API_KEY" apiSecret <- MaybeT $ lookupEnv "KRAKEN_API_SECRET" MaybeT $ Just <$> K.newHandle (K.Config apiKey apiSecret) let h = Handle st msgs e kH l -- This background thread periodically checks if there are any assets -- marked as dirty and forks a build thread for each. tId <- forkIO $ forever $ do -- Get a list of dirty assets. Those are the ones which we need to -- rebuild. The check runs in a STM transaction, and will block until -- at least one asset is dirty. Much efficient, wow. dirtyAssetIds <- atomically $ do s <- readTVar st let assetIds = M.keys $ M.filter ((==) Dirty . arsState) (assets s) if length assetIds == 0 then retry else return assetIds forM_ dirtyAssetIds $ \aId -> do -- First we have to mark the asset as being built. This is to avoid -- forking two or more build threads for the same asset. 
markBuilding h aId forkIO $ do assetDef <- lookupAssetDefinition config h aId case assetDef of Nothing -> do -- failBuild h aId (AssetNotFound aId) logMessage h aId $ "Asset not found, treating as external: " ++ show aId buildAsset h aId $ AssetDefinition (externalBuilder $ PublicIdentifier $ unAssetId aId) id (\_ _ _ -> return ()) Just ad -> do -- logMessage h aId $ "Building" buildAsset h aId ad atomically $ modifyTVar st (\s -> s { dispatcherThreadId = Just tId }) return h logMessage :: Handle -> AssetId -> String -> IO () logMessage h aId msg = do now <- getCurrentTime atomically $ writeTQueue (messages h) (Message now aId msg) adjustAssetRuntimeState :: Handle -> AssetId -> (AssetRuntimeState -> AssetRuntimeState) -> IO () adjustAssetRuntimeState h aId f = atomically $ do modifyTVar (state h) $ \s -> s { assets = M.adjust f aId (assets s) } insertAssetRuntimeStateWith :: Handle -> AssetId -> (AssetRuntimeState -> AssetRuntimeState -> AssetRuntimeState) -> AssetRuntimeState -> IO () insertAssetRuntimeStateWith h aId f d = atomically $ do modifyTVar (state h) $ \s -> s { assets = M.insertWith f aId d (assets s) } updateMetadata :: Handle -> AssetId -> Set FilePath -> Set AssetId -> ByteString -> Map AssetId PublicIdentifier -> IO () updateMetadata h aId src ds fp rd = adjustAssetRuntimeState h aId $ \ars -> ars { arsSources = src , arsDependencySet = ds , arsSource = Just (fp, rd) } buildIfNecessary :: Handle -> AssetId -> IO () buildIfNecessary h aId = insertAssetRuntimeStateWith h aId adj (assetRuntimeState Dirty) where adj _ ars = case arsState ars of Building _ -> ars Completed _ -> ars _ -> ars { arsState = Dirty } markDirty :: Handle -> AssetId -> IO () markDirty h aId = insertAssetRuntimeStateWith h aId f (assetRuntimeState Dirty) where f _ ars = ars { arsState = Dirty } markBuilding :: Handle -> AssetId -> IO () markBuilding h aId = do s <- Building <$> getCurrentTime insertAssetRuntimeStateWith h aId (\_ ars -> ars { arsState = s }) (assetRuntimeState s) failBuild :: Handle -> AssetId -> Error -> IO () failBuild h aId err = do logMessage h aId $ "Failure: " ++ show err adjustAssetRuntimeState h aId $ \ars -> ars { arsState = Failed err } rebuildReverseDependencies h aId finishBuilding :: Handle -> AssetId -> Result -> IO () finishBuilding h aId res = do now <- getCurrentTime let diff (Building t0) = diffUTCTime now t0 diff _ = fromIntegral (0 :: Int) adjustAssetRuntimeState h aId $ \ars -> ars { arsState = Completed (diff $ arsState ars) , arsResult = Just res } mbArs <- atomically $ do s <- readTVar (state h) return $ M.lookup aId (assets s) case mbArs of Nothing -> return () Just ars -> case arsState ars of Completed td -> logMessage h aId $ "Build time: " ++ show td _ -> return () -- Go through all reverse dependencies and mark them as dirty. rebuildReverseDependencies h aId isBuilding :: AssetState -> Bool isBuilding (Building _) = True isBuilding _ = False isFailed :: AssetState -> Bool isFailed (Failed _) = True isFailed _ = False rebuildReverseDependencies :: Handle -> AssetId -> IO () rebuildReverseDependencies h aId = do s <- atomically $ readTVar (state h) forM_ (M.toList $ assets s) $ \(aId', ars) -> do when ((not $ isBuilding $ arsState ars) && S.member aId (arsDependencySet ars)) $ do markDirty h aId' -- | Wait until the set of assets is built, and return the corresponding -- results. If any of the assets fails to build (for whatever reason), then -- immediately abort and return the reason. 
require :: Handle -> Set AssetId -> IO (Either Error (Map AssetId Result)) require h assetIds = do -- Mark assets as dirty if they are not comleted yet. forM_ assetIds $ \dep -> do buildIfNecessary h dep -- Wait for the dependencies to have completed building. atomically $ do s <- readTVar (state h) -- All dependencies which are relevant. let allDependencies = M.filterWithKey (\aId _ -> S.member aId assetIds) (assets s) -- The dependencies which are completed and for which we have a result. let completedDependencies = flip M.mapMaybe allDependencies $ \ars -> case (arsState ars, arsResult ars) of (Completed _, Just res) -> Just res _ -> Nothing -- A more accurate check would be 'assetIds == M.keysSet completedDependencies'. -- Though comparing the length is probably faster. if length completedDependencies == length assetIds then return $ Right $ completedDependencies else if any (isFailed . arsState) (M.elems allDependencies) then return $ Left DependencyFailed else retry assetsByPublicIdentifier :: State -> PublicIdentifier -> [(AssetId, Result)] assetsByPublicIdentifier st pubId = filter (\(_,res) -> publicIdentifier res == pubId) $ catMaybes $ map f $ M.assocs $ assets st where f (aId, AssetRuntimeState (Completed _) _ _ _ (Just res)) = Just (aId, res) f _ = Nothing assetByPublicIdentifier :: State -> PublicIdentifier -> Maybe Result assetByPublicIdentifier st pubId = lookup pubId $ catMaybes $ map f $ M.elems $ assets st where f (AssetRuntimeState (Completed _) _ _ _ (Just res)) = Just (publicIdentifier res, res) f _ = Nothing lookupAssetDefinition :: Config -> Handle -> AssetId -> IO (Maybe AssetDefinition) lookupAssetDefinition config h aId = case M.lookup aId (assetDefinitions config) of Just ad -> return $ Just ad Nothing -> autoDiscovery config h aId buildAsset :: Handle -> AssetId -> AssetDefinition -> IO () buildAsset h aId ad = do Builder src depSet cont fp <- createBuilder ad h aId eitherResolvedDeps <- require h depSet case eitherResolvedDeps of Left e -> failBuild h aId e Right resolvedDeps -> do let sourceDeps = M.map publicIdentifier resolvedDeps -- First check if we actually need to rebuild the asset. If the source -- fingerprint is still the same then we can skip directly to 'Completed'. needsRebuild <- atomically $ do s <- readTVar (state h) return $ case M.lookup aId (assets s) of Just (AssetRuntimeState _ _ _ (Just (sfp, srd)) (Just _)) -> sfp /= fp || sourceDeps /= srd _ -> True -- Eagerly update the metadata, even if we don't have to rebuild the asset. -- When deciding whether to rebuild the asset or not, the only thing that -- matters is the fingerprint. But the builder may have an updated or more -- accurate set of dependencies now, and we do want to update that. updateMetadata h aId src depSet fp sourceDeps if not needsRebuild then do logMessage h aId $ "Skip" now <- getCurrentTime let diff (Building t0) = diffUTCTime now t0 diff _ = fromIntegral (0 :: Int) adjustAssetRuntimeState h aId $ \ars -> ars { arsState = Completed (diff $ arsState ars) } else do -- logger lock $ "Got all dependencies of " ++ show aId -- logger lock $ resolvedDeps case cont (M.map publicIdentifier resolvedDeps) of Left e -> failBuild h aId e Right result1@(Result pub _) -> do let result = result1 { publicIdentifier = transformPublicIdentifier ad pub } -- logger lock $ "Pub: " ++ (publicIdentifier result) -- logger lock $ res atomically $ writeTQueue (emitStream h) $ emitResult ad h aId result finishBuilding h aId result
wereHamster/mole
src/Data/Mole/Core.hs
mit
10,804
0
28
3,418
2,993
1,476
1,517
178
6
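The dispatcher loop above blocks on STM retry until some asset is marked dirty; here is that pattern on its own, detached from the module's Handle and State types:

-- Sketch only: block until the map holds at least one Dirty entry, then
-- hand the dirty keys back.  `retry` suspends the transaction until one
-- of the TVars it read changes.
import Control.Concurrent.STM
import qualified Data.Map as M

data St = Dirty | Building | Done deriving (Eq, Show)

waitForDirty :: TVar (M.Map String St) -> IO [String]
waitForDirty tv = atomically $ do
  m <- readTVar tv
  let dirty = M.keys (M.filter (== Dirty) m)
  if null dirty then retry else return dirty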
{-# LANGUAGE DataKinds, ConstraintKinds, KindSignatures, GADTs #-} {-# LANGUAGE FlexibleInstances, FlexibleContexts, MultiParamTypeClasses #-} {-# LANGUAGE UndecidableInstances, ScopedTypeVariables #-} {-# LANGUAGE StandaloneDeriving #-} module Control.OperationalTransformation.Properties ( ArbitraryFor (..) -- , TestableOTSystem -- , ArbitraryOTSystem , Nat (..), One, Two, Three , DocHistory (..) , ConcurrentDocHistories (..) , prop_compose_assoc , prop_apply_functorial , prop_transform_apply_comm , prop_transform_comm , prop_transform_compose_compat_l , prop_transform_compose_compat_r , prop_transform_functorial ) where import Control.OperationalTransformation import Test.QuickCheck hiding (Result, reason) import Test.QuickCheck.Property import Control.Applicative ((<$>), (<*>)) {- type ArbitraryOTSystem doc op = ( OTSystem doc op, OTComposableOperation op , Arbitrary doc, ArbitraryFor doc op --Arbitrary (GenOp doc op) , Show doc, Eq doc, Show op, Eq op ) -} type TestableOTSystem doc op = ( OTSystem doc op, OTComposableOperation op --, Arbitrary doc, ArbitraryFor doc op --Arbitrary (GenOp doc op) , Show doc, Eq doc, Show op, Eq op ) class ArbitraryFor a b where arbitraryFor :: a -> Gen b genOp :: (OTSystem doc op, ArbitraryFor doc op) => doc -> Gen (op, doc) genOp doc = do op <- arbitraryFor doc case apply op doc of Left err -> fail err Right doc' -> return (op, doc') data Nat = Z | S !Nat deriving (Eq, Show) type One = S Z type Two = S One type Three = S Two data DocHistory doc op :: Nat -> * where -- | Last state LS :: doc -> DocHistory doc op Z -- | Snapshot SS :: doc -> op -> DocHistory doc op n -> DocHistory doc op (S n) deriving instance (Show doc, Show op) => Show (DocHistory doc op n) deriving instance (Eq doc, Eq op) => Eq (DocHistory doc op n) data ConcurrentDocHistories doc op n k = CDH (DocHistory doc op n) (DocHistory doc op k) deriving instance (Show doc, Show op) => Show (ConcurrentDocHistories doc op n k) deriving instance (Eq doc, Eq op) => Eq (ConcurrentDocHistories doc op n k) {- getCurrentState :: DocHistory doc op n -> doc getCurrentState (LS doc) = doc getCurrentState (SS _ _ dh) = getCurrentState dh snocDocHistory :: DocHistory doc op n -> op -> doc -> DocHistory doc op (S n) snocDocHistory (LS doc) op doc' = SS doc op (LS doc') snocDocHistory (SS doc op dh) op' doc' = SS doc op (snocDocHistory dh op' doc') -} instance ArbitraryFor doc (DocHistory doc op Z) where arbitraryFor = return . LS instance (OTSystem doc op, ArbitraryFor doc op, ArbitraryFor doc (DocHistory doc op n)) => ArbitraryFor doc (DocHistory doc op (S n)) where arbitraryFor doc = do (op, doc') <- genOp doc SS doc op <$> arbitraryFor doc' instance (Arbitrary doc, ArbitraryFor doc (DocHistory doc op n)) => Arbitrary (DocHistory doc op n) where arbitrary = (arbitrary :: Gen doc) >>= arbitraryFor instance (ArbitraryFor doc (DocHistory doc op n), ArbitraryFor doc (DocHistory doc op k)) => ArbitraryFor doc (ConcurrentDocHistories doc op n k) where arbitraryFor doc = CDH <$> arbitraryFor doc <*> arbitraryFor doc instance (Arbitrary doc, ArbitraryFor doc (ConcurrentDocHistories doc op n k)) => Arbitrary (ConcurrentDocHistories doc op n k) where arbitrary = (arbitrary :: Gen doc) >>= arbitraryFor (==?) :: (Eq a, Show a) => a -> a -> Result a ==? 
b | a == b = succeeded | otherwise = failed { reason = "expected " ++ show a ++ " to be " ++ show b } eitherResult :: Either String a -> (a -> Result) -> Result eitherResult (Left err) _ = failed { reason = err } eitherResult (Right a) f = f a eitherProperty :: Either String a -> (a -> Property) -> Property eitherProperty (Left err) _ = property $ failed { reason = err } eitherProperty (Right res) prop = prop res prop_compose_assoc :: TestableOTSystem doc op => DocHistory doc op Three -> Result prop_compose_assoc (SS _doc a (SS _ b (SS _ c _))) = eitherResult (compose a b) $ \ab -> eitherResult (compose ab c) $ \abc1 -> eitherResult (compose b c) $ \bc -> eitherResult (compose a bc) $ \abc2 -> abc1 ==? abc2 -- | @(b ∘ a)(d) = a(b(d))@ where /a/ and /b/ are two consecutive operations -- and /d/ is the initial document. prop_apply_functorial :: TestableOTSystem doc op => DocHistory doc op Two -> Result prop_apply_functorial (SS doc a (SS _ b (LS _))) = eitherResult (apply a doc) $ \doc' -> eitherResult (apply b doc') $ \doc''1 -> eitherResult (compose a b) $ \ab -> eitherResult (apply ab doc) $ \doc''2 -> doc''1 ==? doc''2 -- | @b'(a(d)) = a'(b(d))@ where /a/ and /b/ are random operations, /d/ is the -- initial document and @(a', b') = transform(a, b)@. prop_transform_apply_comm :: TestableOTSystem doc op => ConcurrentDocHistories doc op One One -> Result prop_transform_apply_comm (CDH (SS _ a (LS docA)) (SS _ b (LS docB))) = eitherResult (transform a b) $ \(a', b') -> eitherResult (apply a' docB) $ \doc''1 -> eitherResult (apply b' docA) $ \doc''2 -> doc''1 ==? doc''2 -- | @b' ∘ a = a' ∘ b@ where /a/ and /b/ are random operations and -- @(a', b') = transform(a, b)@. Note that this is a stronger property than -- 'prop_transform_apply_comm', because 'prop_transform_comm' and -- 'prop_apply_functorial' imply 'prop_transform_apply_comm'. prop_transform_comm :: TestableOTSystem doc op => ConcurrentDocHistories doc op One One -> Result prop_transform_comm (CDH (SS _ a _) (SS _ b _)) = eitherResult (transform a b) $ \(a', b') -> eitherResult (compose a b') $ \ab' -> eitherResult (compose b a') $ \ba' -> ab' ==? ba' -- | Transformation is compatible with composition on the left. That is, if we -- have two consecutive operations /a/ and /b/ and a concurrent operation /c/, -- then it doesn't make a difference whether we transform /c/ against /a/ and -- then against /b/ or transform /c/ against the composition of /a/ and /b/. -- In other terms, @c'_1 = c'_2@ where @(_, c'_1) = transform(b ∘ a, c)@, -- @(_, c') = transform(a, c)@ and @(_, c'_2) = transform(b, c')@. prop_transform_compose_compat_l :: (OTSystem doc op, OTComposableOperation op, Arbitrary doc, Show op, Eq op) => (doc -> Gen op) -> Property prop_transform_compose_compat_l genOperation = property $ do doc <- arbitrary a <- genOperation doc c <- genOperation doc return $ eitherProperty (apply a doc) $ \doc' -> property $ do b <- genOperation doc' let res = (,) <$> (snd <$> (compose a b >>= flip transform c)) <*> (snd <$> (transform a c >>= transform b . snd)) return $ eitherProperty res $ \(c'_1, c'_2) -> property $ c'_1 ==? c'_2 -- | Transformation is compatible with composition on the /right/. 
prop_transform_compose_compat_r :: (OTSystem doc op, OTComposableOperation op, Arbitrary doc, Show op, Eq op) => (doc -> Gen op) -> Property prop_transform_compose_compat_r genOperation = property $ do doc <- arbitrary a <- genOperation doc c <- genOperation doc return $ eitherProperty (apply a doc) $ \doc' -> property $ do b <- genOperation doc' let res = (,) <$> (fst <$> (compose a b >>= transform c)) <*> (fst <$> (transform c a >>= flip transform b . fst)) return $ eitherProperty res $ \(c'_1, c'_2) -> property $ c'_1 ==? c'_2 -- second functor axiom (F(f . g) = Ff . Fg) for F = transform c prop_transform_functorial :: TestableOTSystem doc op => ConcurrentDocHistories doc op One Two -> Result prop_transform_functorial (CDH (SS _ c _) (SS _ a (SS _ b _))) = eitherResult (compose a b) $ \ab -> eitherResult (transform c ab) $ \(_c''1, abPrimed1) -> eitherResult (transform c a) $ \(c', a') -> eitherResult (transform c' b) $ \(_c''2, b') -> eitherResult (compose a' b') $ \abPrimed2 -> abPrimed1 ==? abPrimed2
Operational-Transformation/ot.hs
src/Control/OperationalTransformation/Properties.hs
mit
7,823
0
22
1,569
2,397
1,242
1,155
143
2
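The commutativity law tested above, instantiated on a deliberately trivial operation type so the equations can be checked by hand; plain functions stand in for the package's type classes, whose exact method placement is not visible in this module:

-- Sketch only: documents are Ints, operations are increments.  With
-- transform' a b = (a, b), TP1 (b'(a(d)) == a'(b(d))) holds because
-- addition commutes.
type Doc = Int
type Op  = Int

apply' :: Op -> Doc -> Doc
apply' op d = d + op

transform' :: Op -> Op -> (Op, Op)
transform' a b = (a, b)

tp1 :: Doc -> Op -> Op -> Bool
tp1 d a b =
  let (a', b') = transform' a b
  in  apply' b' (apply' a d) == apply' a' (apply' b d)

main :: IO ()
main = print (and [tp1 d a b | d <- [-3 .. 3], a <- [-3 .. 3], b <- [-3 .. 3]])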
{-# htermination (\\) :: Eq a => [[a]] -> [[a]] -> [[a]] #-} import List
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/List_BACKSLBACKSL_4.hs
mit
73
0
3
15
5
3
2
1
0
{-# OPTIONS_GHC -fno-warn-orphans #-} module Graphics.Urho3D.Graphics.RenderSurface( RenderSurface , renderSurfaceContext ) where import qualified Language.C.Inline as C import qualified Language.C.Inline.Cpp as C import Graphics.Urho3D.Graphics.Internal.RenderSurface import Data.Monoid C.context (C.cppCtx <> renderSurfaceCntx ) C.include "<Urho3D/Graphics/RenderSurface.h>" C.using "namespace Urho3D" renderSurfaceContext :: C.Context renderSurfaceContext = renderSurfaceCntx
Teaspot-Studio/Urho3D-Haskell
src/Graphics/Urho3D/Graphics/RenderSurface.hs
mit
491
0
8
53
94
57
37
-1
-1
{- | Automatic generators for ADTs. - - Cf. A PropEr Integration of Types and Function Specifications with PBT, - by Manolis Papadakis and Konstantinos Sagonas. - - Naming conventions: - - @v@, variables; @c@: constructors; (value level) - - @tc@, type constructors; @tv@: type variables; - - @funNameS@, action within a state monad. - - TODO: add weights to alternatives - - Possible extension: higher kinded types (kind <-> generator type), -} {-# LANGUAGE RecordWildCards, TemplateHaskell, RankNTypes, DeriveFunctor, FlexibleContexts #-} module Core.Rigidify.Generator where import Control.Applicative import Control.Lens hiding (Choice) import Control.Monad.Except import Control.Monad.State import Control.Monad.Reader import Control.Monad.Random import Data.Either import Data.Function import Data.List import Data.Ratio import Data.Set (Set) import Data.Map (Map) import qualified Data.Set as Set import qualified Data.Map as Map import Common.Types import Common.Util -- * Types data Void type Type = TcType -- | Partial values with typed holes. -- TODO merge with ZCSet data DataPat' c b = DPCon c [DataPat' c b] | DPLeaf b deriving (Eq, Ord, Show, Functor) type DataPat c tc tv = DataPat' c (Type tc tv) -- | Fully defined values. data DataTree c = DCon c [DataTree c] | DInt Int deriving (Eq, Ord, Show, Functor) -- | Generators (wrapped in a monadic action) -- parameterized by other generators, indexed by type @i@. type GenWithSubGen i m x = Map i (m x) -> m x type DataGen' c tv m = GenWithSubGen tv m (DataTree c) -- | Generators of fully defined values, that act in the monad @m@. data DataGen c tv m = DataGen { genArgs :: [tv] -- ^ Sorted list of type variables (indices expected to map to a generator). , genData :: DataGen' c tv m } -- | Mapping from type constructors @tc@ to data generators. type DataGenMap c tc tv m = Map tc (DataGen c tv m) -- * "Sized" actions type Sized = ReaderT Int unsized :: Monad m => m a -> Sized m a unsized = lift scale :: Monad m => (Int -> Int) -> Sized m a -> Sized m a scale = local resize :: Monad m => Int -> Sized m a -> Sized m a resize = local . const sized :: Monad m => (Int -> m a) -> Sized m a sized = ReaderT sized' :: Monad m => (Int -> Sized m a) -> Sized m a sized' = (ask >>=) runSized :: Monad m => Int -> Sized m a -> m a runSized = flip runReaderT countLeaves :: DataPat c tc tv -> Int countLeaves (DPCon _ ts) = sum (countLeaves <$> ts) countLeaves (DPLeaf _) = 1 -- * Weighted choices -- | All inliner functions assume the sum of the weights to be 1 -- (simplifying intermediate explicit renormalization). type Weight = Ratio Int -- Take the product of weights. combine :: ([a] -> b) -> [(Weight, a)] -> (Weight, b) combine f was = let (ws, as) = unzip was in (product ws, f as) scaleWeights :: Weight -> [(Weight, a)] -> [(Weight, a)] scaleWeights s = map (\(w, a) -> (s * w, a)) intWeights :: [(Weight, a)] -> [(Int, a)] intWeights was = let (ws, as) = unzip was wLCM = foldl' lcm 1 . map denominator $ ws in zip ((\w -> numerator w * (wLCM `div` denominator w)) <$> ws) as always :: a -> [(Weight, a)] always a = [(1, a)] -- * Create generators from partial values with typed holes -- | Create a parameterized generator for a type constructor, -- given a view of generators of all types in the program to handle recursive -- calls. -- -- The generator chooses a partial value and recursively fills in the holes. 
mkDataGen :: (MonadRandom m, Ord tc, Ord tv) => DataGenMap c tc tv (Sized m) -- ^ Sized generators -> tc -> DataGen c tv m -> [tv] -- ^ @T::tc, [u,v,w]::[tv]@ represents of the type "@T u v w@". -> Choice (Int, DataPat c tc tv) -- ^ Alternatives tagged with number of revursive references. -> DataGen c tv (Sized m) mkDataGen g tc baseGen vs alts = mkDataGenM g k tc alts asTree localGen where k gen = DataGen vs $ \argGen -> sized $ \size -> if size == 0 then genData baseGen $ Map.map (runSized 0) argGen else runSized size (gen argGen) asTree = snd localGen (0, _) = id localGen (1, _) = scale (subtract 1) localGen (leafCount, _) = scale (`div` leafCount) -- | Create a base generator, same method. mkBaseGen :: (MonadRandom m, Ord tc, Ord tv) => DataGenMap c tc tv m -> tc -> [tv] -> Choice (DataPat c tc tv) -> DataGen c tv m mkBaseGen baseg tc vs alts = mkDataGenM baseg k tc alts id (const id) where k gen = DataGen vs gen -- | An implementation shared by the two functions above. mkDataGenM :: (MonadRandom m, Ord tc, Ord tv) => DataGenMap c tc tv m -> (DataGen' c tv m -> DataGen c tv m) -> tc -> Choice a -- ^ A @DataTree c@ with auxiliary information. -> (a -> DataPat c tc tv) -> (a -> m (DataTree c) -> m (DataTree c)) -- ^ Use that information to update parameters (e.g., size) of recursive calls -> DataGen c tv m mkDataGenM g k tc alts asTree localGen = k $ \ argGen -> do choose alts >>= \a -> localGen a (dataPatToGen (typedGen g argGen) (asTree a)) -- | Helper. dataPatToGen :: MonadRandom m => (b -> m (DataTree c)) -- ^ what to do at the leaves -> DataPat' c b -> m (DataTree c) dataPatToGen leafGen (DPCon c args) = liftM (DCon c) $ mapM (dataPatToGen leafGen) args dataPatToGen leafGen (DPLeaf b) = leafGen b -- | Generate a value of a given type. typedGen :: (MonadRandom m, Ord tc, Ord tv) => Map tc (DataGen c tv m) -> Map tv (m (DataTree c)) -> Type tc tv -> m (DataTree c) typedGen g argGen (TcVar v) = argGen Map.! v typedGen g argGen (TcCon c _ args) = let cGen = g Map.! c cArgGen = Map.fromList . zip (genArgs cGen) $ typedGen g argGen <$> args in genData cGen cArgGen typedGen _ _ _ = error "typedGen" -- * Inlining -- Each type is inlined with a separate initial state from others. -- | Our specialization of the state monad with exceptions. type Inliner tc a = ExceptT tc (State (InlState tc)) a -- | The state keeps track of recursive types that we have failed -- to inline, so we do not need to recheck. data InlState tc = InlState { _noInline :: Set tc } makeLenses ''InlState inlineData :: (Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> Set tc -> Map tc [(Int, DataPat c tc tv)] inlineData env baseTcs = Map.map intWeights . Map.mapWithKey (inlineTc env baseTcs) $ tyConEnv env -- | Inline a type definition. -- -- By this, we mean to unfold partial values as much as possible, -- potentially increasing their number, but decreasing the number of -- lookups of auxiliary generators as well as calls to a RNG. inlineTc :: (Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> Set tc -> tc -> ([tv], [c]) -> [(Weight, DataPat c tc tv)] inlineTc env baseTcs tc (vs, _) = let targs = TcVar <$> vs Right r = evalState (runExceptT $ inlineS' env Set.empty tc targs) (InlState (Set.insert tc baseTcs)) in r -- | Try to inline a definition. 
inlineS :: (Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> Set tc -> Type tc tv -> Inliner tc [(Weight, DataPat c tc tv)] inlineS _env _stack ty@(TcVar _) = return $ always (DPLeaf ty) inlineS env stack ty@(TcCon tc _ targs) = do s <- get -- This needs to be checked first, to treat correctly -- references to the root type we are inlining. if tc `Set.member` _noInline s then return $ always (DPLeaf ty) -- An attempt at unfolding of a recursive type has been detected. -- Unwind back the the previous call on that type. else if tc `Set.member` stack then throwError tc else do let stack' = Set.insert tc stack catchError (inlineS' env stack' tc targs) $ \ tc' -> if tc == tc' then do noInline %= Set.insert tc return $ always (DPLeaf ty) else throwError tc' inlineS _ _ _ = error "inlineS: Not implemented." -- | Helper. inlineS' :: (Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> Set tc -> tc -> [Type tc tv] -> Inliner tc [(Weight, DataPat c tc tv)] inlineS' env stack tc targs = do let (vs, cs) = tyConEnv env Map.! tc sub = mkSub vs targs alts' <- forM cs $ \c -> do let Forall _ ty = conEnv env Map.! c cargsTy = fst (unFun $ subst sub ty) cargs' <- forM cargsTy $ inlineS env stack return $ combine (DPCon c) <$> sequence cargs' return . scaleWeights (1 % length cs) $ concat alts' -- * Inlining for base cases -- Base cases are also inlined, but within the same state instance, -- to preserve "consistency" of base cases. -- | Types are ranked depending on how well founded their base cases are. data TypeRank = Simple -- ^ The type has clear base cases, is built-in, or can -- rely just on other @Simple@ types (inductively). | DependsVar -- ^ Clauses depend on some of the type variables. | Cyclic -- ^ Cyclic type dependency with no base case. deriving (Eq, Ord, Show) -- | Specialized state monad. type BaseInliner c tc tv = State (BaseInlState c tc tv) -- | Remember inlinings and ranks. data BaseInlState c tc tv = BaseInlState { _inlined :: Map tc (TypeRank, [(Weight, DataPat c tc tv)]) } makeLenses ''BaseInlState -- | Inline all type definitions at once. inlineBase :: (Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> BaseInlState c tc tv -> Map tc [(Int, DataPat c tc tv)] inlineBase env initState = let inl tc (vs, _) = inlineBaseS' env Set.empty tc (TcVar <$> vs) inlined = evalState (Map.traverseWithKey inl (tyConEnv env)) initState in Map.map (intWeights . snd) inlined -- | Inline the definition of a type constructor applied to variables, -- adding the result to the environment if it succeeds. inlineBaseTcS :: (Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> Set tc -> tc -> BaseInliner c tc tv (TypeRank, [(Weight, DataPat c tc tv)]) inlineBaseTcS env stack tc = do s <- get remember s $ do let stack' = Set.insert tc stack (vs, cs) = tyConEnv env Map.! tc mixedAlts <- forM cs $ \c -> do let Forall _ ty = conEnv env Map.! c (cargs, _) = unFun ty case cargs of -- Failed attempts are left unrecorded, as they may later succeed -- when the corresponding type constructor is at the root instead. [] -> return (Simple, always (DPCon c [])) _ -> do cargs' <- forM cargs $ inlineBaseS env stack' return $ priorityCon c cargs' return $ filterSimplest mixedAlts where -- If it has already been inlined previously, return the memoized value. remember s m = case Map.lookup tc (_inlined s) of Just alts -> return alts Nothing -> do alts <- m case fst alts of Cyclic -> return () _ -> inlined %= Map.insert tc alts return alts -- | Combine all alternatives of constructor arguments. 
priorityCon :: c -> [(TypeRank, [(Weight, DataPat c tc tv)])] -> (TypeRank, [(Weight, DataPat c tc tv)]) priorityCon c cargs = (maximum (fst <$> cargs), combine (DPCon c) <$> sequence (snd <$> cargs)) -- | Keep the alternatives of lowest rank. filterSimplest :: [(TypeRank, [(Weight, DataPat c tc tv)])] -> (TypeRank, [(Weight, DataPat c tc tv)]) filterSimplest alts = let rank = minimum $ fst <$> alts alts' = snd <$> filter ((rank ==) . fst) alts in (rank, scaleWeights (1 % length alts') . concat $ alts') inlineBaseS :: (Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> Set tc -> Type tc tv -> BaseInliner c tc tv (TypeRank, [(Weight, DataPat c tc tv)]) inlineBaseS _env _stack ty@(TcVar v) = return (DependsVar, always (DPLeaf ty)) inlineBaseS env stack ty@(TcCon tc _ targs) = inlineBaseS' env stack tc targs inlineBaseS _ _ _ = error "inlineBaseS" inlineBaseS' :: (Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> Set tc -> tc -> [Type tc tv] -> BaseInliner c tc tv (TypeRank, [(Weight, DataPat c tc tv)]) inlineBaseS' env stack tc targs = do if tc `Set.member` stack then return (Cyclic, []) else do r@(rk, alts) <- inlineBaseTcS env stack tc case rk of Simple -> return r Cyclic -> return (Cyclic, []) DependsVar -> do let (vs, _) = tyConEnv env Map.! tc sub = mkSub vs targs (filterSimplest <$>) . forM alts $ \(w, alt) -> (scaleWeights w <$>) <$> inlineBaseDataS env stack sub alt -- | Unfold holes in a partial value. inlineBaseDataS env stack sub (DPCon c cargs) = priorityCon c <$> mapM (inlineBaseDataS env stack sub) cargs inlineBaseDataS env stack sub (DPLeaf ty) = inlineBaseS env stack (subst sub ty) -- | Create generators for user-defined types, provided via the environment. -- Generators for built-in types must be given from outside. createDataGen :: (MonadRandom m, Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> Map tc (DataGen c tv (Sized m)) -- ^ Built-in generators -> Map tc (DataGen c tv m) -- ^ Base case generators -> (Map tc [(Int, DataPat c tc tv)], Map tc [(Int, DataPat c tc tv)]) -- ^ Inlinings -> Map tc (DataGen c tv (Sized m)) createDataGen env g baseGen (inlined, inlinedBase) = fix $ \g' -> Map.union g $ Map.mapWithKey (mkGen g') (baseGen `mapZip` (tyConEnv env `mapZip'` inlined)) where mkGen g' tc (baseGen, (vs, alts)) = let alts' = map (\(w, a) -> (w, (countLeaves a, a))) alts in mkDataGen g' tc baseGen vs alts' -- | Base generators. createBaseGen :: (MonadRandom m, Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> DataGenMap c tc tv (Sized m) -> Map tc [(Int, DataPat c tc tv)] -> DataGenMap c tc tv m createBaseGen env g inlinedBase = fix $ \g' -> Map.union (size0 g) $ Map.mapWithKey (uncurry . mkBaseGen g') (tyConEnv env `mapZip'` inlinedBase) where size0 = Map.map $ \DataGen{..} -> DataGen genArgs $ runSized 0 . genData . (unsized <$>) -- | Inline definitions inlineDataGen :: (Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> Map tc (DataGen c tv (Sized m)) -> (Map tc [(Int, DataPat c tc tv)], Map tc [(Int, DataPat c tc tv)]) inlineDataGen env g = let inlinedBase = inlineBaseGen env g inlined = inlineData env (Map.keysSet g) in (inlined, inlinedBase) inlineBaseGen :: (Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> Map tc (DataGen c tv m) -> Map tc [(Int, DataPat c tc tv)] inlineBaseGen env g = let initState = BaseInlState . flip Map.mapWithKey g $ \ tc _ -> let (vs, _) = tyConEnv env Map.! 
tc in (Simple, always (DPLeaf (TcCon tc (length vs) (TcVar <$> vs)))) in inlineBase env initState make :: (MonadRandom m, Ord c, Ord tc, Ord tv) => TcEnv v c tc tv -> BuiltIn c tc tv -> ( DataGenMap c tc tv (Sized m), DataGenMap c tc tv m , Map tc [(Int, DataPat c tc tv)], Map tc [(Int, DataPat c tc tv)] ) make env bi = (gen, baseGen, inlined, inlinedBase) where gBuiltIn = mkGBuiltIn bi inl@(inlined, inlinedBase) = inlineDataGen env gBuiltIn baseGen = createBaseGen env gBuiltIn inlinedBase gen = createDataGen env gBuiltIn baseGen inl mkGBuiltIn BuiltIn{..} = Map.fromList [ (biInt, DataGen [] (const . sized $ \n -> getRandomR (-n, n) >>= return . DInt)) , (biList, DataGen [biListArg] (\subgen -> sized' $ \n -> do let m = intSquareRoot n l <- getRandomR (0, m) xs <- replicateM l $ resize m (subgen Map.! biListArg) return $ foldl' (\ t x -> DCon biListCons [x, t]) (DCon biListNil []) xs)) ] countNodes :: DataTree c -> Int countNodes (DCon _ ts) = 1 + sum (map countNodes ts) countNodes (DInt _) = 1
QuickChick/Luck
luck/src/Core/Rigidify/Generator.hs
mit
15,317
0
23
3,574
5,582
2,902
2,680
-1
-1
{-# LANGUAGE CPP, NoImplicitPrelude #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Safe #-}
#endif
module Data.Tuple.Compat
  ( fst
  , snd
  , curry
  , uncurry
  , swap
#if MIN_VERSION_ghc_prim(0,7,0)
  , Solo(..)
#endif
  ) where

import Data.Tuple

#if !(MIN_VERSION_base(4,16,0)) && MIN_VERSION_ghc_prim(0,7,0)
import GHC.Tuple (Solo(..))
#endif
haskell-compat/base-compat
base-compat/src/Data/Tuple/Compat.hs
mit
352
0
6
59
58
41
17
8
0
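A minimal usage sketch for the compat re-exports above; it assumes the Data.Tuple.Compat module shown is on the search path and only uses re-exports that do not depend on the CPP branches.

-- Usage sketch (assumption: Data.Tuple.Compat above is importable as-is).
import qualified Data.Tuple.Compat as T

main :: IO ()
main = do
  print (T.swap (1 :: Int, "one"))     -- ("one",1)
  print (T.uncurry (+) (2 :: Int, 3))  -- 5
  print (T.curry fst 'a' 'b')          -- 'a'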
{-# LANGUAGE GADTs #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FlexibleInstances, FlexibleContexts #-} {-# LANGUAGE CPP #-} {-# LANGUAGE ConstraintKinds #-} --{-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE TypeOperators, DataKinds, KindSignatures, PolyKinds #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE ScopedTypeVariables #-} module Grammar where import Data.Proxy import GHC.TypeLits import GHC.Prim import Data.Vinyl data ConstraintList (c :: k -> Constraint) (l :: [k]) where TNil :: ConstraintList c '[] TCons :: c a => ConstraintList c l -> ConstraintList c (a ': l) class ConstrainAll (c :: k -> Constraint) (l :: [k]) where reify :: ConstraintList c l instance ConstrainAll c '[] where reify = TNil instance (c a, ConstrainAll c l) => ConstrainAll c (a ': l) where reify = TCons reify -- only nullary Type constructors class (KnownSymbol s, ConstrainAll Constructor (Constructors s)) => Type s where type Constructors s :: [Symbol] constructors :: proxy s -> ConstraintList Constructor (Constructors s) constructors _ = reify class (KnownSymbol s, ConstrainAll Type (Types s)) => Constructor (s :: Symbol) where type Types s :: [Symbol] types :: proxy s -> ConstraintList Type (Types s) types _ = reify -- values type Product = Rec data Sum (f :: k -> *) (l :: [k]) where First :: f a -> Sum f (a ': l) Next :: Sum f l -> Sum f (a ': l) data Value s where Value :: Type s => Sum Cons (Constructors s) -> Value s data Cons s where Cons :: Constructor s => Product Value (Types s) -> Cons s instance Show (Value s) where show (Value sum) = f proof sum where proof = reify :: ConstraintList Constructor (Constructors s) f :: ConstraintList Constructor l -> Sum Cons l -> String f (TCons _) (First cons) = symbolVal cons ++ show cons f (TCons proof') (Next sum') = f proof' sum' instance Show (Cons s) where show (Cons prod) = f prod where f :: Product Value l -> String f RNil = "" f (val :& prod') = " (" ++ show val ++ ")" ++ f prod' -- classes to make writing values easier class Contains (l :: [k]) (a :: k) where contain :: f a -> Sum f l instance Contains (a ': l) a where contain = First instance {-# OVERLAPPABLE #-} Contains l a => Contains (t ': l) a where contain = Next . contain #define P(s) (Proxy::Proxy (s)) type family Curried (f :: k -> *) (l :: [k]) (r :: *) where Curried f '[] r = r Curried f (a ': l) r = f a -> Curried f l r class Curry (l :: [k]) where curry' :: (Rec f l -> a) -> Curried f l a instance Curry '[] where curry' f = f RNil instance Curry l => Curry (a ': l) where curry' f fa = curry' (\args -> f $ fa :& args) -- example ADT/CFG -- could reduce verbosity with Template Haskell instance Type "Nat" where type Constructors "Nat" = '["Z", "S"] instance Constructor "Z" where type Types "Z" = '[] instance Constructor "S" where type Types "S" = '["Nat"] #define C(s) (curry' (Value . contain . (Cons :: Product Value (Types "s") -> Cons "s"))) zero :: Value "Nat" zero = C(Z) -- cpphs buggy? doesn't work if C(S) C(Z) are on the same line one :: Value "Nat" one = C(S) C(Z) two :: Value "Nat" two = C(S) one
vladfi1/hs-misc
GrammarOld.hs
mit
3,236
0
11
710
1,196
640
556
78
1
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.AST.Binders
-- Copyright : (c) 2013-14 Phil Freeman, (c) 2014 Gary Burgess, and other contributors
-- License : MIT
--
-- Maintainer : Phil Freeman <[email protected]>
-- Stability : experimental
-- Portability :
--
-- | Case binders
--
-----------------------------------------------------------------------------

{-# LANGUAGE DeriveDataTypeable #-}

module Language.PureScript.AST.Binders where

import qualified Data.Data as D

import Language.PureScript.AST.SourcePos
import Language.PureScript.Names
import Language.PureScript.Comments
import Language.PureScript.Types

-- |
-- Data type for binders
--
data Binder
  -- |
  -- Wildcard binder
  --
  = NullBinder
  -- |
  -- A binder which matches a boolean literal
  --
  | BooleanBinder Bool
  -- |
  -- A binder which matches a string literal
  --
  | StringBinder String
  -- |
  -- A binder which matches a character literal
  --
  | CharBinder Char
  -- |
  -- A binder which matches a numeric literal
  --
  | NumberBinder (Either Integer Double)
  -- |
  -- A binder which binds an identifier
  --
  | VarBinder Ident
  -- |
  -- A binder which matches a data constructor
  --
  | ConstructorBinder (Qualified ProperName) [Binder]
  -- |
  -- A binder which matches a record and binds its properties
  --
  | ObjectBinder [(String, Binder)]
  -- |
  -- A binder which matches an array and binds its elements
  --
  | ArrayBinder [Binder]
  -- |
  -- A binder which binds its input to an identifier
  --
  | NamedBinder Ident Binder
  -- |
  -- A binder with source position information
  --
  | PositionedBinder SourceSpan [Comment] Binder
  -- |
  -- A binder with a type annotation
  --
  | TypedBinder Type Binder
  deriving (Show, Read, Eq, D.Data, D.Typeable)

-- |
-- Collect all names introduced in binders in an expression
--
binderNames :: Binder -> [Ident]
binderNames = go []
  where
  go ns (VarBinder name) = name : ns
  go ns (ConstructorBinder _ bs) = foldl go ns bs
  go ns (ObjectBinder bs) = foldl go ns (map snd bs)
  go ns (ArrayBinder bs) = foldl go ns bs
  go ns (NamedBinder name b) = go (name : ns) b
  go ns (PositionedBinder _ _ b) = go ns b
  go ns (TypedBinder _ b) = go ns b
  go ns _ = ns
michaelficarra/purescript
src/Language/PureScript/AST/Binders.hs
mit
2,312
0
9
492
439
264
175
30
8
module EmailConfig where

data Configuration = Configuration {
    host     :: String,
    port     :: Maybe Int,
    username :: String,
    password :: String
  } deriving (Show, Read)

-- conf :: Configuration
-- conf = Configuration "smtp.gmail.com" Nothing "[email protected]" "password"

-- saveConfig :: IO ()
-- saveConfig = writeFile "config.txt" $ show conf

readConfig :: IO Configuration
readConfig = do
  c <- readFile "email.config"
  let config = read c :: Configuration
  return config
Radivarig/EmailNotifications
src/EmailConfig.hs
mit
505
0
10
107
99
55
44
12
1
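A small round-trip sketch for the module above; the host, port, and credentials are placeholders, not values from the original sample.

-- Round-trip sketch, assuming the EmailConfig module above is importable;
-- all concrete values below are placeholders.
import EmailConfig

main :: IO ()
main = do
  let conf = Configuration "smtp.example.com" (Just 587) "user" "secret"
  writeFile "email.config" (show conf)  -- the file name readConfig expects
  cfg <- readConfig
  print (host cfg, port cfg)            -- ("smtp.example.com",Just 587)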
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE EmptyDataDeriving #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE UndecidableSuperClasses #-} {-# LANGUAGE OverloadedLabels #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeApplications #-} {-# LANGUAGE ScopedTypeVariables #-} {-# OPTIONS_GHC -Wno-unused-imports #-} {-# OPTIONS_GHC -Wno-dodgy-exports #-} {-# OPTIONS_GHC -Wno-unused-matches #-} {-# OPTIONS_GHC -Wno-orphans #-} {-# OPTIONS_GHC -Wno-unticked-promoted-constructors #-} {-# OPTIONS_GHC -Wno-name-shadowing #-} module Capnp.Gen.Capnp.Persistent.New where import qualified Capnp.Repr as R import qualified Capnp.Repr.Parsed as RP import qualified Capnp.New.Basics as Basics import qualified GHC.OverloadedLabels as OL import qualified Capnp.GenHelpers.New as GH import qualified Capnp.New.Classes as C import qualified GHC.Generics as Generics import qualified Capnp.GenHelpers.New.Rpc as GH import qualified Prelude as Std_ import qualified Data.Word as Std_ import qualified Data.Int as Std_ import Prelude ((<$>), (<*>), (>>=)) data Persistent sturdyRef owner type instance (R.ReprFor (Persistent sturdyRef owner)) = (R.Ptr (Std_.Just R.Cap)) instance (C.HasTypeId (Persistent sturdyRef owner)) where typeId = 14468694717054801553 instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.Parse (Persistent sturdyRef owner) (GH.Client (Persistent sturdyRef owner))) where parse = GH.parseCap encode = GH.encodeCap instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (GH.Export (Persistent sturdyRef owner)) where type Server (Persistent sturdyRef owner) = (Persistent'server_ sturdyRef owner) methodHandlerTree _ s_ = (GH.MethodHandlerTree (C.typeId @((Persistent sturdyRef owner))) [(GH.toUntypedMethodHandler ((persistent'save @(sturdyRef) @(owner)) s_))] []) class (Persistent'server_ sturdyRef owner s_) where {-# MINIMAL persistent'save #-} persistent'save :: s_ -> (GH.MethodHandler (Persistent'SaveParams sturdyRef owner) (Persistent'SaveResults sturdyRef owner)) persistent'save _ = GH.methodUnimplemented instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (GH.HasMethod "save" (Persistent sturdyRef owner) (Persistent'SaveParams sturdyRef owner) (Persistent'SaveResults sturdyRef owner)) where methodByLabel = (GH.Method 14468694717054801553 0) data Persistent'SaveParams sturdyRef owner type instance (R.ReprFor (Persistent'SaveParams sturdyRef owner)) = (R.Ptr (Std_.Just R.Struct)) instance (C.HasTypeId (Persistent'SaveParams sturdyRef owner)) where typeId = 17829674341603767205 instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.TypedStruct (Persistent'SaveParams sturdyRef owner)) where numStructWords = 0 numStructPtrs = 1 instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.Allocate (Persistent'SaveParams sturdyRef owner)) where type AllocHint (Persistent'SaveParams sturdyRef owner) = () new _ = C.newTypedStruct instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.EstimateAlloc (Persistent'SaveParams sturdyRef owner) (C.Parsed (Persistent'SaveParams sturdyRef owner))) instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.AllocateList (Persistent'SaveParams sturdyRef owner)) where type ListAllocHint (Persistent'SaveParams sturdyRef owner) = Std_.Int newList = C.newTypedStructList instance 
((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.EstimateListAlloc (Persistent'SaveParams sturdyRef owner) (C.Parsed (Persistent'SaveParams sturdyRef owner))) data instance C.Parsed (Persistent'SaveParams sturdyRef owner) = Persistent'SaveParams {sealFor :: (RP.Parsed owner)} deriving(Generics.Generic) deriving instance ((Std_.Show (RP.Parsed sturdyRef)) ,(Std_.Show (RP.Parsed owner))) => (Std_.Show (C.Parsed (Persistent'SaveParams sturdyRef owner))) deriving instance ((Std_.Eq (RP.Parsed sturdyRef)) ,(Std_.Eq (RP.Parsed owner))) => (Std_.Eq (C.Parsed (Persistent'SaveParams sturdyRef owner))) instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.Parse (Persistent'SaveParams sturdyRef owner) (C.Parsed (Persistent'SaveParams sturdyRef owner))) where parse raw_ = (Persistent'SaveParams <$> (GH.parseField #sealFor raw_)) instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.Marshal (Persistent'SaveParams sturdyRef owner) (C.Parsed (Persistent'SaveParams sturdyRef owner))) where marshalInto raw_ Persistent'SaveParams{..} = (do (GH.encodeField #sealFor sealFor raw_) (Std_.pure ()) ) instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (GH.HasField "sealFor" GH.Slot (Persistent'SaveParams sturdyRef owner) owner) where fieldByLabel = (GH.ptrField 0) data Persistent'SaveResults sturdyRef owner type instance (R.ReprFor (Persistent'SaveResults sturdyRef owner)) = (R.Ptr (Std_.Just R.Struct)) instance (C.HasTypeId (Persistent'SaveResults sturdyRef owner)) where typeId = 13215893102637674431 instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.TypedStruct (Persistent'SaveResults sturdyRef owner)) where numStructWords = 0 numStructPtrs = 1 instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.Allocate (Persistent'SaveResults sturdyRef owner)) where type AllocHint (Persistent'SaveResults sturdyRef owner) = () new _ = C.newTypedStruct instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.EstimateAlloc (Persistent'SaveResults sturdyRef owner) (C.Parsed (Persistent'SaveResults sturdyRef owner))) instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.AllocateList (Persistent'SaveResults sturdyRef owner)) where type ListAllocHint (Persistent'SaveResults sturdyRef owner) = Std_.Int newList = C.newTypedStructList instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.EstimateListAlloc (Persistent'SaveResults sturdyRef owner) (C.Parsed (Persistent'SaveResults sturdyRef owner))) data instance C.Parsed (Persistent'SaveResults sturdyRef owner) = Persistent'SaveResults {sturdyRef :: (RP.Parsed sturdyRef)} deriving(Generics.Generic) deriving instance ((Std_.Show (RP.Parsed sturdyRef)) ,(Std_.Show (RP.Parsed owner))) => (Std_.Show (C.Parsed (Persistent'SaveResults sturdyRef owner))) deriving instance ((Std_.Eq (RP.Parsed sturdyRef)) ,(Std_.Eq (RP.Parsed owner))) => (Std_.Eq (C.Parsed (Persistent'SaveResults sturdyRef owner))) instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.Parse (Persistent'SaveResults sturdyRef owner) (C.Parsed (Persistent'SaveResults sturdyRef owner))) where parse raw_ = (Persistent'SaveResults <$> (GH.parseField #sturdyRef raw_)) instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (C.Marshal (Persistent'SaveResults sturdyRef owner) (C.Parsed (Persistent'SaveResults sturdyRef owner))) where marshalInto raw_ Persistent'SaveResults{..} = (do (GH.encodeField #sturdyRef sturdyRef raw_) (Std_.pure ()) ) instance ((GH.TypeParam sturdyRef) ,(GH.TypeParam owner)) => (GH.HasField "sturdyRef" GH.Slot 
(Persistent'SaveResults sturdyRef owner) sturdyRef) where fieldByLabel = (GH.ptrField 0) data RealmGateway internalRef externalRef internalOwner externalOwner type instance (R.ReprFor (RealmGateway internalRef externalRef internalOwner externalOwner)) = (R.Ptr (Std_.Just R.Cap)) instance (C.HasTypeId (RealmGateway internalRef externalRef internalOwner externalOwner)) where typeId = 9583422979879616212 instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.Parse (RealmGateway internalRef externalRef internalOwner externalOwner) (GH.Client (RealmGateway internalRef externalRef internalOwner externalOwner))) where parse = GH.parseCap encode = GH.encodeCap instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (GH.Export (RealmGateway internalRef externalRef internalOwner externalOwner)) where type Server (RealmGateway internalRef externalRef internalOwner externalOwner) = (RealmGateway'server_ internalRef externalRef internalOwner externalOwner) methodHandlerTree _ s_ = (GH.MethodHandlerTree (C.typeId @((RealmGateway internalRef externalRef internalOwner externalOwner))) [(GH.toUntypedMethodHandler ((realmGateway'import_ @(internalRef) @(externalRef) @(internalOwner) @(externalOwner)) s_)) ,(GH.toUntypedMethodHandler ((realmGateway'export @(internalRef) @(externalRef) @(internalOwner) @(externalOwner)) s_))] []) class (RealmGateway'server_ internalRef externalRef internalOwner externalOwner s_) where {-# MINIMAL realmGateway'import_,realmGateway'export #-} realmGateway'import_ :: s_ -> (GH.MethodHandler (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) (Persistent'SaveResults internalRef internalOwner)) realmGateway'import_ _ = GH.methodUnimplemented realmGateway'export :: s_ -> (GH.MethodHandler (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) (Persistent'SaveResults externalRef externalOwner)) realmGateway'export _ = GH.methodUnimplemented instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (GH.HasMethod "import_" (RealmGateway internalRef externalRef internalOwner externalOwner) (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) (Persistent'SaveResults internalRef internalOwner)) where methodByLabel = (GH.Method 9583422979879616212 0) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (GH.HasMethod "export" (RealmGateway internalRef externalRef internalOwner externalOwner) (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) (Persistent'SaveResults externalRef externalOwner)) where methodByLabel = (GH.Method 9583422979879616212 1) data RealmGateway'import'params internalRef externalRef internalOwner externalOwner type instance (R.ReprFor (RealmGateway'import'params internalRef externalRef internalOwner externalOwner)) = (R.Ptr (Std_.Just R.Struct)) instance (C.HasTypeId (RealmGateway'import'params internalRef externalRef internalOwner externalOwner)) where typeId = 17348653140467603277 instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.TypedStruct (RealmGateway'import'params internalRef externalRef internalOwner externalOwner)) where numStructWords = 0 numStructPtrs = 2 instance 
((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.Allocate (RealmGateway'import'params internalRef externalRef internalOwner externalOwner)) where type AllocHint (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) = () new _ = C.newTypedStruct instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.EstimateAlloc (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) (C.Parsed (RealmGateway'import'params internalRef externalRef internalOwner externalOwner))) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.AllocateList (RealmGateway'import'params internalRef externalRef internalOwner externalOwner)) where type ListAllocHint (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) = Std_.Int newList = C.newTypedStructList instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.EstimateListAlloc (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) (C.Parsed (RealmGateway'import'params internalRef externalRef internalOwner externalOwner))) data instance C.Parsed (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) = RealmGateway'import'params {cap :: (RP.Parsed (Persistent externalRef externalOwner)) ,params :: (RP.Parsed (Persistent'SaveParams internalRef internalOwner))} deriving(Generics.Generic) deriving instance ((Std_.Show (RP.Parsed internalRef)) ,(Std_.Show (RP.Parsed externalRef)) ,(Std_.Show (RP.Parsed internalOwner)) ,(Std_.Show (RP.Parsed externalOwner))) => (Std_.Show (C.Parsed (RealmGateway'import'params internalRef externalRef internalOwner externalOwner))) deriving instance ((Std_.Eq (RP.Parsed internalRef)) ,(Std_.Eq (RP.Parsed externalRef)) ,(Std_.Eq (RP.Parsed internalOwner)) ,(Std_.Eq (RP.Parsed externalOwner))) => (Std_.Eq (C.Parsed (RealmGateway'import'params internalRef externalRef internalOwner externalOwner))) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.Parse (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) (C.Parsed (RealmGateway'import'params internalRef externalRef internalOwner externalOwner))) where parse raw_ = (RealmGateway'import'params <$> (GH.parseField #cap raw_) <*> (GH.parseField #params raw_)) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.Marshal (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) (C.Parsed (RealmGateway'import'params internalRef externalRef internalOwner externalOwner))) where marshalInto raw_ RealmGateway'import'params{..} = (do (GH.encodeField #cap cap raw_) (GH.encodeField #params params raw_) (Std_.pure ()) ) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (GH.HasField "cap" GH.Slot (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) (Persistent externalRef externalOwner)) where fieldByLabel = (GH.ptrField 0) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (GH.HasField "params" 
GH.Slot (RealmGateway'import'params internalRef externalRef internalOwner externalOwner) (Persistent'SaveParams internalRef internalOwner)) where fieldByLabel = (GH.ptrField 1) data RealmGateway'export'params internalRef externalRef internalOwner externalOwner type instance (R.ReprFor (RealmGateway'export'params internalRef externalRef internalOwner externalOwner)) = (R.Ptr (Std_.Just R.Struct)) instance (C.HasTypeId (RealmGateway'export'params internalRef externalRef internalOwner externalOwner)) where typeId = 17055027933458834346 instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.TypedStruct (RealmGateway'export'params internalRef externalRef internalOwner externalOwner)) where numStructWords = 0 numStructPtrs = 2 instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.Allocate (RealmGateway'export'params internalRef externalRef internalOwner externalOwner)) where type AllocHint (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) = () new _ = C.newTypedStruct instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.EstimateAlloc (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) (C.Parsed (RealmGateway'export'params internalRef externalRef internalOwner externalOwner))) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.AllocateList (RealmGateway'export'params internalRef externalRef internalOwner externalOwner)) where type ListAllocHint (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) = Std_.Int newList = C.newTypedStructList instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.EstimateListAlloc (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) (C.Parsed (RealmGateway'export'params internalRef externalRef internalOwner externalOwner))) data instance C.Parsed (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) = RealmGateway'export'params {cap :: (RP.Parsed (Persistent internalRef internalOwner)) ,params :: (RP.Parsed (Persistent'SaveParams externalRef externalOwner))} deriving(Generics.Generic) deriving instance ((Std_.Show (RP.Parsed internalRef)) ,(Std_.Show (RP.Parsed externalRef)) ,(Std_.Show (RP.Parsed internalOwner)) ,(Std_.Show (RP.Parsed externalOwner))) => (Std_.Show (C.Parsed (RealmGateway'export'params internalRef externalRef internalOwner externalOwner))) deriving instance ((Std_.Eq (RP.Parsed internalRef)) ,(Std_.Eq (RP.Parsed externalRef)) ,(Std_.Eq (RP.Parsed internalOwner)) ,(Std_.Eq (RP.Parsed externalOwner))) => (Std_.Eq (C.Parsed (RealmGateway'export'params internalRef externalRef internalOwner externalOwner))) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (C.Parse (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) (C.Parsed (RealmGateway'export'params internalRef externalRef internalOwner externalOwner))) where parse raw_ = (RealmGateway'export'params <$> (GH.parseField #cap raw_) <*> (GH.parseField #params raw_)) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam 
externalOwner)) => (C.Marshal (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) (C.Parsed (RealmGateway'export'params internalRef externalRef internalOwner externalOwner))) where marshalInto raw_ RealmGateway'export'params{..} = (do (GH.encodeField #cap cap raw_) (GH.encodeField #params params raw_) (Std_.pure ()) ) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (GH.HasField "cap" GH.Slot (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) (Persistent internalRef internalOwner)) where fieldByLabel = (GH.ptrField 0) instance ((GH.TypeParam internalRef) ,(GH.TypeParam externalRef) ,(GH.TypeParam internalOwner) ,(GH.TypeParam externalOwner)) => (GH.HasField "params" GH.Slot (RealmGateway'export'params internalRef externalRef internalOwner externalOwner) (Persistent'SaveParams externalRef externalOwner)) where fieldByLabel = (GH.ptrField 1)
zenhack/haskell-capnp
gen/lib/Capnp/Gen/Capnp/Persistent/New.hs
mit
20,493
122
18
3,385
3,726
2,363
1,363
-1
-1
module Main where

import Types

cooperateF _ = return Cooperate

cooperate :: Strategy
cooperate = S ("cooperate", cooperateF)

main = dilemmaMain cooperate
barkmadley/etd-retreat-2014-hteam
src/Cooperate/Main.hs
mit
157
0
6
25
45
25
20
6
1
-- | -- Module : Crypto.Noise.Internal -- Copyright : (c) Austin Seipp 2014 -- License : MIT -- -- Maintainer : [email protected] -- Stability : unstable -- Portability : portable -- -- Noise internal module. -- module Crypto.Noise.Internal where import Data.Bits import Data.Monoid import Data.Serialize import System.Random.MWC import Data.ByteString (ByteString) import qualified Data.ByteString as B import qualified Data.ByteString.Char8 as B8 import Data.Word import qualified Crypto.DH.Curve25519 as Curve25519 import Crypto.Encrypt.Stream.ChaCha20 (ChaCha20) import qualified Crypto.Encrypt.Stream.ChaCha20 as ChaCha20 import qualified Crypto.Hash.SHA as SHA import Crypto.Key import qualified Crypto.MAC.Poly1305 as Poly1305 import Crypto.Nonce import System.Crypto.Random (randombytes) import Crypto.Noise.Key -------------------------------------------------------------------------------- -- Types data Header = Header { _hdrPK :: PublicKey Noise , _hdrEncSenderPK :: ByteString , _hdrMAC :: Poly1305.Auth } instance Serialize Header where put (Header (PublicKey pk) espk (Poly1305.Auth mac)) = do putByteString pk putByteString espk putByteString mac get = do pk <- getBytes 32 espk <- getBytes 32 mac <- getBytes 16 return (Header (PublicKey pk) espk (Poly1305.Auth mac)) data Ciphertext = Ciphertext { _ctData :: ByteString , _ctMAC :: Poly1305.Auth } data Box = Box { _boxHdr :: Header , _boxCt :: Ciphertext } newtype Chain = Chain { _chainBS :: ByteString } deriving (Eq, Show, Ord) -------------------------------------------------------------------------------- -- Internal APIs header :: Chain -- Chain input -> KeyPair -- Ephemeral keys -> Maybe KeyPair -- Sender keys -> PublicKey Noise -- Receiver public key -> (Header, Chain) header (Chain chain) (ephPK, ephSK) senderKeys recvPK = (Header ephPK encSendPK mac, key2) where sendPK = maybe ephPK fst senderKeys -- Key exchange dh1 = curve25519 ephSK recvPK dh2 = maybe dh1 (\(_,s) -> curve25519 s recvPK) senderKeys -- Key derivation, step one key1 = SecretKey $ nhash (txt <> dh1 <> chain) where txt = B8.pack "Noise_Box_KDF1" -- Block encryption, key derivation step two (chainTmp, macKey, encSendPK) = ( B.take 32 block , B.take 32 $ B.drop 32 block , B.drop 64 block) where block = ncipher datum key1 datum = B.replicate 64 0x0 <> (unPublicKey sendPK) -- MAC derivation mac = Poly1305.authenticate (SecretKey macKey) dataBlock where dataBlock = (unPublicKey ephPK) <> encSendPK -- Key derivation, step three key2 = Chain $ nhash (txt <> dh2 <> chainTmp) where txt = B8.pack "Noise_Box_KDF2" ciphertext :: Chain -> Word32 -> Maybe Header -> ByteString -> IO (Ciphertext, Chain) ciphertext (Chain chain) pad hdr plaintext = do padding <- randombytes (fromIntegral pad) let (chainOut, macKey, encText) = ( B.take 32 block , B.take 32 $ B.drop 32 block , B.drop 64 block) where block = ncipher datum (SecretKey chain) datum = B.replicate 64 0x0 <> encode pad <> padding <> plaintext mac = Poly1305.authenticate (SecretKey macKey) dataBlock where dataBlock = maybe B.empty encode hdr <> encText return (Ciphertext encText mac, Chain chainOut) ciphertext_ :: Chain -> Word32 -> Maybe Header -> ByteString -> IO (ByteString, Chain) ciphertext_ chain pad hdr pt = do (Ciphertext et (Poly1305.Auth m), chainOut) <- ciphertext chain pad hdr pt return (et <> m, chainOut) boxInternal :: Maybe Chain -> KeyPair -- ^ Ephemeral keys -> Maybe KeyPair -- ^ Sender keys -> PublicKey Noise -- ^ Receiver public key -> Word32 -- ^ Padding length -> ByteString -- ^ Plaintext -> IO (Box, Chain) 
-- ^ Output boxInternal chain eph sender recvPK pad plaintext = do let (hdr, key2) = header chainIn eph sender recvPK (ct, chainOut) <- ciphertext key2 pad (Just hdr) plaintext return (Box hdr ct, chainOut) where chainIn = maybe (Chain chain1) id chain where chain1 = B8.pack "Noise_Box_IV" <> B.replicate 32 0x0 box_ :: Maybe Chain -> KeyPair -> Maybe KeyPair -- ^ Sender keys -> PublicKey Noise -- ^ Receiver public key -> Word32 -- ^ Padding length -> ByteString -- ^ Plaintext -> IO (ByteString, Chain) box_ chain eph sender recvPK pad plaintext = do (Box hdr ct, chainOut) <- boxInternal chain eph sender recvPK pad plaintext let (Header (PublicKey hdrPK) hdrESPK (Poly1305.Auth hdrMAC)) = hdr (Ciphertext ct' (Poly1305.Auth ctMAC)) = ct result = hdrPK <> hdrESPK <> hdrMAC <> ct' <> ctMAC return (result, chainOut) openCiphertext :: Chain -> Maybe ByteString -> ByteString -> Maybe (ByteString, Chain) openCiphertext (Chain chain) hdr ct = verifyMAC where (ctxt, ctMAC) = B.splitAt (B.length ct - 16) ct body = maybe B.empty id hdr <> ctxt -- Block encryption, key derivation step two (chainOut, macKey2, plaintxt) = ( B.take 32 block , B.take 32 $ B.drop 32 block , B.drop 64 block) where block = ncipher (B.replicate 64 0x0 <> ctxt) (SecretKey chain) -- MAC verification verifyMAC :: Maybe (ByteString, Chain) verifyMAC = case Poly1305.verify (SecretKey macKey2) a body of True -> case decode (B.take 4 plaintxt) of Left _ -> Nothing Right x -> Just $ ( B.drop (fromIntegral (x :: Word32) + 4) plaintxt , Chain chainOut ) False -> Nothing where a = Poly1305.Auth ctMAC open_ :: Maybe Chain -> SecretKey Noise -- ^ Receiver secret key -> Maybe (PublicKey Noise) -- ^ Sender public key (optional) -> ByteString -> Maybe (ByteString, Chain) open_ chain recvSK sendPK encText = verifyMAC >> verifyDecKey sendPK >> verifyCT where ephPK = PublicKey $ B.take 32 encText encSenderPK = B.take 32 $ B.drop 32 encText hdrMAC = Poly1305.Auth $ B.take 16 $ B.drop 64 encText chainIn = maybe chain1 _chainBS chain where chain1 = B8.pack "Noise_Box_IV" <> B.replicate 32 0x0 -- Key exchange dh1 = curve25519 recvSK ephPK dh2 = maybe dh1 (curve25519 recvSK) sendPK -- Key derivation, step one key1 = SecretKey $ nhash (txt <> dh1 <> chainIn) where txt = B8.pack "Noise_Box_KDF1" -- Block encryption, key derivation step two (chainTmp, macKey) = ( B.take 32 block , B.drop 32 block) where block = ncipher (B.replicate 64 0x0) key1 -- MAC verification verifyMAC :: Maybe Bool verifyMAC = case Poly1305.verify (SecretKey macKey) hdrMAC (B.take 64 encText) of True -> Just True False -> Nothing senderKey = PublicKey $ B.drop 64 (ncipher block key1) where block = chainTmp <> macKey <> encSenderPK -- Sender key verification verifyDecKey :: Maybe (PublicKey Noise) -> Maybe Bool verifyDecKey Nothing | equalPK senderKey ephPK = Just True | otherwise = Nothing verifyDecKey (Just pk) | equalPK senderKey pk = Just True | otherwise = Nothing -- Key derivation, step three key2 = Chain $ nhash (txt <> dh2 <> chainTmp) where txt = B8.pack "Noise_Box_KDF2" verifyCT :: Maybe (ByteString, Chain) verifyCT = openCiphertext key2 (Just $ B.take 80 encText) (B.drop 80 encText) -------------------------------------------------------------------------------- -- Utils randRange :: (Word32, Word32) -> IO Word32 randRange rng = withSystemRandom (\x -> uniformR rng x :: IO Word32) {-# INLINE randRange #-} encodeBE32 :: Word32 -> ByteString encodeBE32 = encode decodeBE32 :: ByteString -> Word32 decodeBE32 x | B.length x /= 4 = error "Invalid length!" 
| otherwise = either (error "Could not decode length!") id (decode x) equalPK :: PublicKey t -> PublicKey t -> Bool equalPK (PublicKey x) (PublicKey y) = equalBS x y equalBS :: ByteString -> ByteString -> Bool equalBS x y = B.length x == B.length y && 0 == sum (B.zipWith xor x y) curve25519 :: SecretKey Noise -> PublicKey Noise -> ByteString curve25519 = Curve25519.curve25519 nhash :: ByteString -> ByteString nhash = B.take 32 . SHA.sha512 ncipher :: ByteString -> SecretKey ChaCha20 -> ByteString ncipher inp key = ChaCha20.encrypt nonce inp key where nonce = Nonce $ B.pack [0,0,0,0,0,0,0,0]
thoughtpolice/hs-noise
src/Crypto/Noise/Internal.hs
mit
9,161
0
19
2,681
2,653
1,380
1,273
203
3
module Fib where

-- https://www.codewars.com/kata/fibonacci

fib :: Int -> Int
fib 0 = 0
fib 1 = 1
fib n = (fib $ n - 1) + (fib $ n - 2)
cojoj/Codewars
Haskell/Codewars.hsproj/Fib.hs
mit
141
0
8
37
64
35
29
5
1
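A tiny driver for the naive fib above; the double recursion is exponential, so keep n small.

-- Driver sketch, assuming the Fib module above is importable.
import Fib (fib)

main :: IO ()
main = print (map fib [0 .. 10])  -- [0,1,1,2,3,5,8,13,21,34,55]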
import Fasta (splitStr, formTuple, removeRosalind)
import Data.List
import Data.Function

lcstr xs ys = maximumBy (compare `on` length) . concat $
              [f xs' ys | xs' <- tails xs] ++ [f xs ys' | ys' <- drop 1 $ tails ys]
  where f xs ys = scanl g [] $ zip xs ys
        g z (x, y) = if x == y then z ++ [x] else []

commonSubStr = nub . words . unwords . noFalseLst

zzz xs = zip (bbb xs (aaa xs xs)) (aaa xs xs)

aaa [] _ = []
aaa (x:xs) xx = map (lcstr x) (filter (/= x) xx) ++ aaa xs xx

bbb _ [] = []
bbb xx (y:ys) = [map (y `isInfixOf`) xx] ++ bbb xx ys

noFalse (xx@(x:xs),y)
  | not $ False `elem` xx = y
  | otherwise = ""

noFalseLst [] = []
noFalseLst (x:xs) = [noFalse x] ++ noFalseLst xs

main = do
  input <- readFile "rosalind_lcsm.txt"
  let s = splitStr input
      tmp = formTuple $ lines s
      tmp'= nub $ removeRosalind tmp
      res = commonSubStr $ zzz tmp'
  putStrLn $ res!!0
forgit/Rosalind
lcsm.hs
gpl-2.0
941
0
11
273
503
257
246
25
2
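A quick sanity check of the longest-common-substring helper above, run from GHCi with the script loaded; the Rosalind input file is not needed for this, and the strings are made up for illustration.

-- Illustrative GHCi session for lcstr from the script above
-- (hypothetical inputs, not taken from the original Rosalind data):
--
--   ghci> lcstr "xabcdy" "zabcdw"
--   "abcd"
--   ghci> lcstr "aaa" "bbb"
--   ""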
{-# LANGUAGE OverloadedStrings #-} module IntegrationSpec (main, spec) where import Test.Hspec import Test.QuickCheck import Prelude as P import Data.Torrent import Data.Maybe import Data.Text as T import Data.ByteString as BS import Data.ByteString.Lazy as BSL import System.FilePath.Posix import Crypto.Random.AESCtr as AESCtr import Data.IP import Data.Serialize as DS import Data.Map.Strict as Map import Data.Binary as Bin import Crypto.Random import Crypto.Curve25519 import Data.Byteable import qualified Network.BitTorrent.ClientControl as BT import qualified Network.BitTorrent.ClientControl.UTorrent as UT import System.IO import System.Log.Logger import Control.Concurrent import Control.Concurrent.Async import Control.Concurrent.STM import Control.Concurrent.STM.TChan import Control.Exception.Base import Control.Monad import Control.Monad.Trans.Resource import Control.Monad.IO.Class import Data.Conduit as DC import Data.Conduit.List as DC import Data.Conduit.Binary as CBin import Data.Conduit.Network import qualified Network.BitTorrent.Shepherd as Tracker import Network.TCP.Proxy.Server as Proxy hiding (UnsupportedFeature, logger) import Network.TCP.Proxy.Socks4 as Socks4 import Network.BitSmuggler.Proxy.Client (proxyClient) import Network.BitSmuggler.Proxy.Server (proxyServer) import Network.BitSmuggler.Common as Common import Network.BitSmuggler.Common as Protocol import Network.BitSmuggler.Utils import Network.BitSmuggler.TorrentFile import Network.BitSmuggler.Crypto as Crypto import Network.BitSmuggler.Server as Server import Network.BitSmuggler.Client as Client import Network.BitSmuggler.FileCache as Cache import Network.BitSmuggler.TorrentClientProc as Proc import Network.BitSmuggler.Protocol main :: IO () main = hspec spec spec :: Spec spec = do describe "bit-smuggler" $ do it "proxies data between 1 client and 1 server" $ do P.putStrLn "wtf" runClientServer clientChunkExchange serverChunkExchange [bigFile] return () {- Integration test for bitsmuggler with 1 server and 1 client both running on the same machine -} testRoot = "test-data/integration-test/" data TestFile = TestFile {metadata :: (FilePath, Text, Int), fileDataPath :: String} --1 gb file bigFile = TestFile bigTestFile bigTestDataFile bigTestFile = (testRoot </> "contactFile/testFileBig.torrent" , "ef967fc9d342a4ba5c4604c7b9f7b28e9e740b2f" , 69) bigTestDataFile = testRoot </> "contactFile/testFileBig.txt" -- 100 mb file smallFile = TestFile smallTestFile smallTestDataFile smallTestFile = (testRoot </> "contactFile/testFile.torrent" , "f921dd6548298527d40757fb264de07f7a47767f" , 23456) smallTestDataFile = testRoot </> "contactFile/testFile.txt" makePaths prefix = P.map ((testRoot </> prefix) </> ) ["cache", "utorrent-client"] localhostIP = IPv4 $ toIPv4 [127,0,0,1] runClientServer clientProto serverProto testFiles = runResourceT $ do liftIO $ updateGlobalLogger logger (setLevel DEBUG) liftIO $ updateGlobalLogger Tracker.logger (setLevel DEBUG) liftIO $ debugM logger "running integration test" let [serverCache, serverUTClientPath] = makePaths "server" let [clientCache, clientUTClientPath] = makePaths "client" contacts <- forM testFiles $ \testFile -> liftIO $ makeContactFile (metadata testFile) (serverDesc, serverSk) <- liftIO $ makeServerDescriptor contacts localhostIP -- launch the tracker trackEvents <- liftIO $ newTChanIO tracker <- allocAsync $ async $ Tracker.runTracker $ Tracker.Config { Tracker.listenPort = 6666 , Tracker.events = Just trackEvents} liftIO $ waitFor (== Tracker.Booting) trackEvents serverDone <- 
liftIO $ newGate clientDone <- liftIO $ newGate allocAsync $ async $ runServer (\c -> serverProto c `finally` (atomically $ openGate serverDone)) serverUTClientPath serverCache contacts (serverDesc, serverSk) liftIO $ debugM logger "booted server.." -- liftIO $ threadDelay $ 10 ^ 9 liftIO $ waitFor (\(Tracker.AnnounceEv a) -> True) trackEvents liftIO $ debugM logger "tracker got announce from the server" liftIO $ debugM logger "running client now" allocAsync $ async $ runClient (\ c -> clientProto c `finally` (atomically $ openGate clientDone)) clientUTClientPath clientCache serverDesc liftIO $ atomically $ goThroughGate clientDone liftIO $ atomically $ goThroughGate serverDone -- liftIO $ threadDelay $ 10 ^ 9 liftIO $ debugM logger "finished running integration test" return () -- UTORRENT based client and server runClient protocol torrentProcPath cachePath serverDesc = do proc <- uTorrentProc torrentProcPath let btC = clientBTClientConfig { btProc = proc , outgoingRedirects = redirectToRev (serverAddr serverDesc) serverBTClientConfig } Client.clientConnect (ClientConfig btC serverDesc cachePath) protocol runServer protocol torrentProcPath cachePath contacts (serverDesc, serverSk) = do proc <- uTorrentProc torrentProcPath let btC = serverBTClientConfig { btProc = proc , outgoingRedirects = redirectToRev (serverAddr serverDesc) clientBTClientConfig } Server.listen (ServerConfig serverSk btC contacts cachePath) protocol -- were are configuring the proxies to redirect the bittorrent traffic -- to the reverse proxy port -- so that we don't need to play with iptables redirectToRev ip conf = Map.fromList [((Right ip, pubBitTorrentPort conf),(Right ip, revProxyPort conf))] chunks = [ BS.replicate 1000 99, BS.replicate (10 ^ 4) 200 , BS.concat [BS.replicate (10 ^ 4) 39, BS.replicate (10 ^ 4) 40] , BS.replicate (10 ^ 4) 173 , BS.replicate (10 ^ 3) 201 , BS.replicate (10 ^ 3) 202] P.++ smallChunks smallChunks = P.take 10 $ P.map (BS.replicate (10 ^ 2)) $ P.cycle [1..255] -- TODO: reabilitate those to use the new connData serverChunkExchange c = do infoM logger "server ping pongs some chunks with the client.." (connSource c) =$ serverChunks (P.zip chunks [1..]) $$ (connSink c) return () serverChunks [] = return () serverChunks ((chunk, i) : cs) = do upstream <- await case upstream of (Just bigBlock) -> do liftIO $ bigBlock `shouldBe` chunk liftIO $ debugM logger $ "server received big chunk succesfully " P.++ (show i) DC.yield chunk serverChunks cs Nothing -> (liftIO $ debugM logger "terminated from upstream") >> return () clientChunks [] = return () clientChunks ((chunk, i) : cs) = do DC.yield chunk -- send first, recieve after upstream <- await case upstream of (Just bigBlock) -> do liftIO $ bigBlock `shouldBe` chunk liftIO $ debugM logger $ "server received big chunk succesfully " P.++ (show i) clientChunks cs Nothing -> (liftIO $ debugM logger "terminated from upstream") >> return () clientChunkExchange c = do infoM logger "client ping pongs some chunks with the server.." 
(connSource c) =$ clientChunks (P.zip chunks [1..]) $$ (connSink c) return () makeContactFile (filePath, infoHash, seed) = do Right t <- fmap readTorrent $ BSL.readFile $ filePath return $ FakeFile {seed = seed, torrentFile = t , infoHash = fromJust $ textToInfoHash infoHash} makeServerDescriptor contacts ip = do let cprg = cprgCreate $ createTestEntropyPool "leSeed" :: AESRNG let (skBytes, next2) = cprgGenerate Crypto.keySize cprg let serverSkWord = (fromRight $ DS.decode skBytes :: Key) let serverPk = derivePublicKey (fromBytes $ toBytes serverSkWord) let serverPkWord = (fromRight $ DS.decode (toBytes serverPk) :: Key) return $ (ServerDescriptor ip contacts serverPkWord , serverSkWord) initIntegrationTestCaches testFile = do let serverCache = P.head $ makePaths "server" let clientCache = P.head $ makePaths "client" initFileCache serverCache testFile initFileCache clientCache testFile initFileCache cachePath testFile = do let (tpath, ih, seed) = metadata testFile fHandle <- openFile (fileDataPath testFile) ReadMode cache <- Cache.load cachePath Cache.put cache (fromJust $ textToInfoHash ih) $ sourceHandle fHandle hClose fHandle Cache.close cache uTorrentConnect host port = UT.makeUTorrentConn host port ("admin", "") waitFor cond chan = do n <- atomically $ readTChan chan if (cond n) then return n else waitFor cond chan clientBTClientConfig = BTClientConfig { pubBitTorrentPort = 5881 , socksProxyPort = 2001 , revProxyPort = 2002 , cmdPort = 8000 -- port on which it's receiving commands -- host, port, (uname, password) , connectToClient = uTorrentConnect } serverBTClientConfig = BTClientConfig { pubBitTorrentPort = 7881 , socksProxyPort = 3001 , revProxyPort = 3002 , cmdPort = 9000 -- port on which it's receiving commands -- host, port, (uname, password) , connectToClient = uTorrentConnect }
danoctavian/bit-smuggler
BitSmuggler/test/integration/IntegrationSpec.hs
gpl-2.0
9,256
0
15
1,958
2,419
1,283
1,136
195
2
{-|
Module: Utils
Description: Utility functions.
Copyright: (c) Taran Lynn, 2015
License: GPL-2

This module contains utility functions not tied to the other modules.
-}
module Utils where

-- | Tries to read a value from a string.
maybeRead :: Read a => String -> Maybe a
maybeRead str = case reads str of
                  [(x, "")] -> Just x
                  _ -> Nothing
lambda-11235/funcmap
src/Utils.hs
gpl-2.0
376
0
9
99
64
34
30
5
2
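A few quick checks of maybeRead, assuming the Utils module above is importable as-is.

-- Checks for maybeRead (assumption: Utils above is on the module path).
import Utils (maybeRead)

main :: IO ()
main = do
  print (maybeRead "42"   :: Maybe Int)     -- Just 42
  print (maybeRead "42x"  :: Maybe Int)     -- Nothing (trailing garbage)
  print (maybeRead "3.14" :: Maybe Double)  -- Just 3.14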
{-# LANGUAGE NoImplicitPrelude, TemplateHaskellQuotes, OverloadedStrings, PostfixOperators #-}
{-# LANGUAGE NoMonomorphismRestriction, FlexibleContexts #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures -fno-warn-partial-type-signatures #-}
{-# OPTIONS_GHC -O0 -fno-cse -fno-full-laziness #-} -- preserve "lexical" sharing for observed sharing
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}

{-| TODO

"the import statements header" is a contiguous region,
"every declaration" is a noncontiguous set of regions.

you can navigate between regions, and you can perform actions on regions
(e.g. jump to the start of it, copy it, et cetera ), just like anything highlighted

-}
module Commands.Plugins.Spiros.Language.Haskell where

import Commands.Plugins.Spiros.Extra
import Commands.Mixins.DNS13OSX9

import Control.Applicative
import Prelude.Spiros

derivingStatement = "deriving" -: derivingStrategy

derivingStrategy = ["stock", "newtype", "anyclass"]

haskellDeclaration = ["data", "newtype", "type", "class", "instance"]
sboosali/commands-spiros
config/Commands/Plugins/Spiros/Language/Haskell.hs
gpl-2.0
1,026
0
5
119
82
56
26
13
1
module StringPrimitives ( charPrimitives, strPrimitives ) where

import IError
import LispVal
import Primitives

import Control.Monad.Error
import Data.Char

charPrimitives :: [(String, [LispVal] -> ThrowsLispError LispVal)]
charPrimitives = [("char?", isChar),
                  ("char=?", charBoolBinop (==)),
                  ("char<?", charBoolBinop (<)),
                  ("char>?", charBoolBinop (>)),
                  ("char<=?", charBoolBinop (<=)),
                  ("char>=?", charBoolBinop (>=)),
                  ("char->integer", charToInt),
                  ("integer->char", intToChar)]

isChar :: [LispVal] -> ThrowsLispError LispVal
isChar [LispChar c] = return $ LispBool True
isChar [_] = return $ LispBool False
isChar args = throwError $ NumArgs 1 args

charBoolBinop = boolBinop unpackChar

unpackChar :: LispVal -> ThrowsLispError Char
unpackChar (LispChar c) = return c
unpackChar other = throwError $ TypeMismatch "char" other

charToInt :: [LispVal] -> ThrowsLispError LispVal
charToInt [LispChar c] = return . LispNumber . fromIntegral $ ord c
charToInt [arg] = throwError $ TypeMismatch "char" arg
charToInt args = throwError $ NumArgs 1 args

intToChar :: [LispVal] -> ThrowsLispError LispVal
intToChar [LispNumber n]
    | n < 256 = return . LispChar . chr $ fromIntegral n
    | otherwise = throwError $ InvalidArgument 1 "integer out of range"
intToChar [arg] = throwError $ TypeMismatch "number" arg
intToChar args = throwError $ NumArgs 1 args

strPrimitives :: [(String, [LispVal] -> ThrowsLispError LispVal)]
strPrimitives = [("string?", isStr),
                 ("make-string", makeStr),
                 ("string-length", strLength),
                 ("string-ref", strRef),
                 ("string=?", strBoolBinop (==)),
                 ("string<?", strBoolBinop (<)),
                 ("string>?", strBoolBinop (>)),
                 ("string<=?", strBoolBinop (<=)),
                 ("string>=?", strBoolBinop (>=))]

isStr :: [LispVal] -> ThrowsLispError LispVal
isStr [LispString s _] = return $ LispBool True
isStr [_] = return $ LispBool False
isStr args = throwError $ NumArgs 1 args

makeStr :: [LispVal] -> ThrowsLispError LispVal
makeStr [ln@(LispNumber n)] = makeStr [ln, LispChar '\0']
makeStr [LispNumber n, LispChar c] = return $ LispString (genString n c) True
    where genString 0 c = []
          genString n c = [c] ++ genString (n - 1) c
makeStr [LispNumber n, arg] = throwError $ TypeMismatch "char" arg
makeStr [arg, _] = throwError $ TypeMismatch "integer" arg
makeStr [arg] = throwError $ TypeMismatch "integer" arg
makeStr args = throwError $ NumArgs 1 args

strLength :: [LispVal] -> ThrowsLispError LispVal
strLength [LispString s _] = return . LispNumber . fromIntegral $ length s
strLength [arg] = throwError $ TypeMismatch "string" arg
strLength args = throwError $ NumArgs 1 args

strRef :: [LispVal] -> ThrowsLispError LispVal
strRef [LispString s _, LispNumber n]
    | 0 <= n && n < fromIntegral (length s) = return . LispChar $ s !! fromIntegral n
    | otherwise = throwError $ InvalidArgument 2 "index out of range"
strRef [LispString s _, arg] = throwError $ TypeMismatch "number" arg
strRef [arg, _] = throwError $ TypeMismatch "string" arg
strRef args = throwError $ NumArgs 2 args

strBoolBinop = boolBinop unpackStr

unpackStr :: LispVal -> ThrowsLispError String
unpackStr (LispString s _) = return s
unpackStr (LispNumber s) = return $ show s
unpackStr (LispBool s) = return $ show s
unpackStr other = throwError $ TypeMismatch "string" other
mhrheaume/hskme
StringPrimitives.hs
gpl-2.0
3,286
38
11
526
1,278
671
607
77
2
module H9 (pack ) where

packWith :: (Eq a) => [a] -> [a] -> [[a]]
packWith [] acc = [acc]
packWith (x:xs) [] = packWith xs [x]
packWith (x:xs) acc = if x == head acc
                      then packWith xs (x:acc)
                      else acc : packWith xs [x]

pack :: (Eq a) => [a] -> [[a]]
pack [] = []
pack xs = packWith xs []
hsinhuang/codebase
h99/H9.hs
gpl-2.0
313
0
9
90
195
106
89
10
2
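A usage sketch for pack (problem 9 of the Ninety-Nine Haskell Problems), assuming the H9 module above is importable as-is.

-- Usage sketch (assumption: H9 above is on the module path).
import H9 (pack)

main :: IO ()
main = do
  print (pack "aaaabccaadeeee")  -- ["aaaa","b","cc","aa","d","eeee"]
  print (pack ([] :: [Int]))     -- []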
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, OverloadedStrings #-} module Sound.Tidal.UI where {- UI.hs - Tidal's main 'user interface' functions, for transforming patterns, building on the Core ones. Copyright (C) 2020, Alex McLean and contributors This library is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this library. If not, see <http://www.gnu.org/licenses/>. -} import Prelude hiding ((<*), (*>)) import Data.Char (digitToInt, isDigit, ord) import Data.Bits (testBit, Bits, xor, shiftL, shiftR) import Data.Fixed (mod') import Data.Ratio ((%)) import Data.List (sort, sortOn, findIndices, elemIndex, groupBy, transpose, intercalate, findIndex) import Data.Maybe (isJust, fromJust, fromMaybe, mapMaybe) import qualified Data.Text as T import qualified Data.Map.Strict as Map import Data.Bool (bool) import Sound.Tidal.Bjorklund (bjorklund) import Sound.Tidal.Core import qualified Sound.Tidal.Params as P import Sound.Tidal.Pattern import Sound.Tidal.Utils ------------------------------------------------------------------------ -- * UI -- | Randomisation -- cf. George Marsaglia (2003). "Xorshift RNGs". Journal of Statistical Software 8:14. -- https://www.jstatsoft.org/article/view/v008i14 xorwise :: Int -> Int xorwise x = let a = xor (shiftL x 13) x b = xor (shiftR a 17) a in xor (shiftL b 5) b -- stretch 300 cycles over the range of [0,2**29 == 536870912) then apply the xorshift algorithm timeToIntSeed :: RealFrac a => a -> Int timeToIntSeed = xorwise . truncate . (* 536870912) . snd . (properFraction :: (RealFrac a => a -> (Int,a))) . (/ 300) intSeedToRand :: Fractional a => Int -> a intSeedToRand = (/ 536870912) . realToFrac . (`mod` 536870912) timeToRand :: (RealFrac a, Fractional b) => a -> b timeToRand = intSeedToRand . timeToIntSeed timeToRands :: (RealFrac a, Fractional b) => a -> Int -> [b] timeToRands t n = timeToRands' (timeToIntSeed t) n timeToRands' :: Fractional a => Int -> Int -> [a] timeToRands' seed n | n <= 0 = [] | otherwise = (intSeedToRand seed) : (timeToRands' (xorwise seed) (n-1)) {-| `rand` generates a continuous pattern of (pseudo-)random numbers between `0` and `1`. @ sound "bd*8" # pan rand @ pans bass drums randomly @ sound "sn sn ~ sn" # gain rand @ makes the snares' randomly loud and quiet. Numbers coming from this pattern are 'seeded' by time. So if you reset time (via `cps (-1)`, then `cps 1.1` or whatever cps you want to restart with) the random pattern will emit the exact same _random_ numbers again. 
In cases where you need two different random patterns, you can shift one of them around to change the time from which the _random_ pattern is read, note the difference: @ jux (# gain rand) $ sound "sn sn ~ sn" # gain rand @ and with the juxed version shifted backwards for 1024 cycles: @ jux (# ((1024 <~) $ gain rand)) $ sound "sn sn ~ sn" # gain rand @ -} rand :: Fractional a => Pattern a rand = Pattern (\(State a@(Arc s e) _) -> [Event (Context []) Nothing a (realToFrac $ (timeToRand ((e + s)/2) :: Double))]) -- | Boolean rand - a continuous stream of true/false values, with a 50/50 chance. brand :: Pattern Bool brand = _brandBy 0.5 -- | Boolean rand with probability as input, e.g. brandBy 0.25 is 25% chance of being true. brandBy :: Pattern Double -> Pattern Bool brandBy probpat = innerJoin $ (\prob -> _brandBy prob) <$> probpat _brandBy :: Double -> Pattern Bool _brandBy prob = fmap (< prob) rand {- | Just like `rand` but for whole numbers, `irand n` generates a pattern of (pseudo-) random whole numbers between `0` to `n-1` inclusive. Notably used to pick a random samples from a folder: @ d1 $ segment 4 $ n (irand 5) # sound "drum" @ -} irand :: Num a => Pattern Int -> Pattern a irand = (>>= _irand) _irand :: Num a => Int -> Pattern a _irand i = fromIntegral . (floor :: Double -> Int) . (* fromIntegral i) <$> rand {- | 1D Perlin (smooth) noise, works like rand but smoothly moves between random values each cycle. `perlinWith` takes a pattern as the RNG's "input" instead of automatically using the cycle count. @ d1 $ s "arpy*32" # cutoff (perlinWith (saw * 4) * 2000) @ will generate a smooth random pattern for the cutoff frequency which will repeat every cycle (because the saw does) The `perlin` function uses the cycle count as input and can be used much like @rand@. -} perlinWith :: Fractional a => Pattern Double -> Pattern a perlinWith p = fmap realToFrac $ (interp) <$> (p-pa) <*> (timeToRand <$> pa) <*> (timeToRand <$> pb) where pa = (fromIntegral :: Int -> Double) . floor <$> p pb = (fromIntegral :: Int -> Double) . (+1) . floor <$> p interp x a b = a + smootherStep x * (b-a) smootherStep x = 6.0 * x**5 - 15.0 * x**4 + 10.0 * x**3 perlin :: Fractional a => Pattern a perlin = perlinWith (sig fromRational) {- `perlin2With` is Perlin noise with a 2-dimensional input. This can be useful for more control over how the randomness repeats (or doesn't). @ d1 $ s "[supersaw:-12*32]" # lpf (rangex 60 5000 $ perlin2With (cosine*2) (sine*2)) # lpq 0.3 @ will generate a smooth random cutoff pattern that repeats every cycle without any reversals or discontinuities (because the 2D path is a circle). `perlin2` only needs one input because it uses the cycle count as the second input. -} perlin2With :: Pattern Double -> Pattern Double -> Pattern Double perlin2With x y = (/2) . (+1) $ interp2 <$> xfrac <*> yfrac <*> dota <*> dotb <*> dotc <*> dotd where fl = fmap ((fromIntegral :: Int -> Double) . floor) ce = fmap ((fromIntegral :: Int -> Double) . (+1) . 
floor) xfrac = x - fl x yfrac = y - fl y randAngle a b = 2 * pi * timeToRand (a + 0.0001 * b) pcos x' y' = cos $ randAngle <$> x' <*> y' psin x' y' = sin $ randAngle <$> x' <*> y' dota = pcos (fl x) (fl y) * xfrac + psin (fl x) (fl y) * yfrac dotb = pcos (ce x) (fl y) * (xfrac - 1) + psin (ce x) (fl y) * yfrac dotc = pcos (fl x) (ce y) * xfrac + psin (fl x) (ce y) * (yfrac - 1) dotd = pcos (ce x) (ce y) * (xfrac - 1) + psin (ce x) (ce y) * (yfrac - 1) interp2 x' y' a b c d = (1.0 - s x') * (1.0 - s y') * a + s x' * (1.0 - s y') * b + (1.0 - s x') * s y' * c + s x' * s y' * d s x' = 6.0 * x'**5 - 15.0 * x'**4 + 10.0 * x'**3 perlin2 :: Pattern Double -> Pattern Double perlin2 = perlin2With (sig fromRational) {- | Randomly picks an element from the given list @ sound "superpiano(3,8)" # note (choose ["a", "e", "g", "c"]) @ plays a melody randomly choosing one of the four notes \"a\", \"e\", \"g\", \"c\". -} choose :: [a] -> Pattern a choose = chooseBy rand chooseBy :: Pattern Double -> [a] -> Pattern a chooseBy _ [] = silence chooseBy f xs = (xs !!!) . floor <$> range 0 (fromIntegral $ length xs) f {- | Like @choose@, but works on an a list of tuples of values and weights @ sound "superpiano(3,8)" # note (wchoose [("a",1), ("e",0.5), ("g",2), ("c",1)]) @ In the above example, the "a" and "c" notes are twice as likely to play as the "e" note, and half as likely to play as the "g" note. -} wchoose :: [(a,Double)] -> Pattern a wchoose = wchooseBy rand wchooseBy :: Pattern Double -> [(a,Double)] -> Pattern a wchooseBy pat pairs = match <$> pat where match r = values !! head (findIndices (> (r*total)) cweights) cweights = scanl1 (+) (map snd pairs) values = map fst pairs total = sum $ map snd pairs {- | Similar to `degrade` `degradeBy` allows you to control the percentage of events that are removed. For example, to remove events 90% of the time: @ d1 $ slow 2 $ degradeBy 0.9 $ sound "[[[feel:5*8,feel*3] feel:3*8], feel*4]" # accelerate "-6" # speed "2" @ -} degradeBy :: Pattern Double -> Pattern a -> Pattern a degradeBy = tParam _degradeBy _degradeBy :: Double -> Pattern a -> Pattern a _degradeBy = _degradeByUsing rand -- Useful for manipulating random stream, e.g. to change 'seed' _degradeByUsing :: Pattern Double -> Double -> Pattern a -> Pattern a _degradeByUsing prand x p = fmap fst $ filterValues ((> x) . snd) $ (,) <$> p <* prand unDegradeBy :: Pattern Double -> Pattern a -> Pattern a unDegradeBy = tParam _unDegradeBy _unDegradeBy :: Double -> Pattern a -> Pattern a _unDegradeBy x p = fmap fst $ filterValues ((<= x) . snd) $ (,) <$> p <* rand degradeOverBy :: Int -> Pattern Double -> Pattern a -> Pattern a degradeOverBy i tx p = unwrap $ (\x -> fmap fst $ filterValues ((> x) . snd) $ (,) <$> p <* fastRepeatCycles i rand) <$> slow (fromIntegral i) tx {- | Use @sometimesBy@ to apply a given function "sometimes". For example, the following code results in `density 2` being applied about 25% of the time: @ d1 $ sometimesBy 0.25 (density 2) $ sound "bd*8" @ There are some aliases as well: @ sometimes = sometimesBy 0.5 often = sometimesBy 0.75 rarely = sometimesBy 0.25 almostNever = sometimesBy 0.1 almostAlways = sometimesBy 0.9 @ -} sometimesBy :: Pattern Double -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a sometimesBy x f pat = overlay (degradeBy x pat) (f $ unDegradeBy x pat) sometimesBy' :: Pattern Double -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a sometimesBy' x f pat = overlay (degradeBy x pat) (unDegradeBy x $ f pat) -- | @sometimes@ is an alias for sometimesBy 0.5. 
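--
-- As an illustrative sketch (not from the original docs), the following
-- adds a vowel effect to roughly half of the events:
--
-- > d1 $ sometimes (# vowel "a") $ sound "bd sn*2 cp hh*4"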
sometimes :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
sometimes = sometimesBy 0.5

sometimes' :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
sometimes' = sometimesBy' 0.5

-- | @often@ is an alias for sometimesBy 0.75.
often :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
often = sometimesBy 0.75

often' :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
often' = sometimesBy' 0.75

-- | @rarely@ is an alias for sometimesBy 0.25.
rarely :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
rarely = sometimesBy 0.25

rarely' :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
rarely' = sometimesBy' 0.25

-- | @almostNever@ is an alias for sometimesBy 0.1.
almostNever :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
almostNever = sometimesBy 0.1

almostNever' :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
almostNever' = sometimesBy' 0.1

-- | @almostAlways@ is an alias for sometimesBy 0.9.
almostAlways :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
almostAlways = sometimesBy 0.9

almostAlways' :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
almostAlways' = sometimesBy' 0.9

never :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
never = flip const

always :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
always = id

{- | @someCyclesBy@ is a cycle-by-cycle version of @sometimesBy@. It has a
`someCycles = someCyclesBy 0.5` alias.
-}
someCyclesBy :: Pattern Double -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
someCyclesBy pd f pat = innerJoin $ (\d -> _someCyclesBy d f pat) <$> pd

_someCyclesBy :: Double -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
_someCyclesBy x = when test
  where test c = timeToRand (fromIntegral c :: Double) < x

somecyclesBy :: Pattern Double -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
somecyclesBy = someCyclesBy

someCycles :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
someCycles = someCyclesBy 0.5

somecycles :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
somecycles = someCycles

{- | `degrade` randomly removes events from a pattern 50% of the time:

@
d1 $ slow 2 $ degrade $ sound "[[[feel:5*8,feel*3] feel:3*8], feel*4]"
   # accelerate "-6"
   # speed "2"
@

The shorthand syntax for `degrade` is a question mark: `?`. Using `?`
will allow you to randomly remove events from a portion of a pattern:

@
d1 $ slow 2 $ sound "bd ~ sn bd ~ bd? [sn bd?] ~"
@

You can also use `?` to randomly remove events from entire sub-patterns:

@
d1 $ slow 2 $ sound "[[[feel:5*8,feel*3] feel:3*8]?, feel*4]"
@
-}
degrade :: Pattern a -> Pattern a
degrade = _degradeBy 0.5

{- | `brak` is a function from patterns of any type to a pattern of the same
type; it makes the pattern sound a bit like a breakbeat.

Example:

@
d1 $ sound (brak "bd sn kurt")
@
-}
brak :: Pattern a -> Pattern a
brak = when ((== 1) . (`mod` 2)) (((1%4) `rotR`) . (\x -> fastcat [x, silence]))

{- | Divides a pattern into a given number of subdivisions, plays the
subdivisions in order, but increments the starting subdivision each cycle. The
pattern wraps to the first subdivision after the last subdivision is played.

Example:

@
d1 $ iter 4 $ sound "bd hh sn cp"
@

This will produce the following over four cycles:

@
bd hh sn cp
hh sn cp bd
sn cp bd hh
cp bd hh sn
@

There is also `iter'`, which shifts the pattern in the opposite direction.
-}
iter :: Pattern Int -> Pattern c -> Pattern c
iter = tParam _iter

_iter :: Int -> Pattern a -> Pattern a
_iter n p = slowcat $ map (\i -> (fromIntegral i % fromIntegral n) `rotL` p) [0 ..
(n-1)] -- | @iter'@ is the same as @iter@, but decrements the starting -- subdivision instead of incrementing it. iter' :: Pattern Int -> Pattern c -> Pattern c iter' = tParam _iter' _iter' :: Int -> Pattern a -> Pattern a _iter' n p = slowcat $ map (\i -> (fromIntegral i % fromIntegral n) `rotR` p) [0 .. (n-1)] -- | @palindrome p@ applies @rev@ to @p@ every other cycle, so that -- the pattern alternates between forwards and backwards. palindrome :: Pattern a -> Pattern a palindrome p = slowAppend p (rev p) -- | Composing patterns {- | The function @seqP@ allows you to define when a sound within a list starts and ends. The code below contains three separate patterns in a `stack`, but each has different start times (zero cycles, eight cycles, and sixteen cycles, respectively). All patterns stop after 128 cycles: @ d1 $ seqP [ (0, 128, sound "bd bd*2"), (8, 128, sound "hh*2 [sn cp] cp future*4"), (16, 128, sound (samples "arpy*8" (run 16))) ] @ -} seqP :: [(Time, Time, Pattern a)] -> Pattern a seqP ps = stack $ map (\(s, e, p) -> playFor s e (sam s `rotR` p)) ps -- | Degrades a pattern over the given time. fadeOut :: Time -> Pattern a -> Pattern a fadeOut dur p = innerJoin $ (`_degradeBy` p) <$> _slow dur envL -- | Alternate version to @fadeOut@ where you can provide the time from which the fade starts fadeOutFrom :: Time -> Time -> Pattern a -> Pattern a fadeOutFrom from dur p = innerJoin $ (`_degradeBy` p) <$> (from `rotR` _slow dur envL) -- | 'Undegrades' a pattern over the given time. fadeIn :: Time -> Pattern a -> Pattern a fadeIn dur p = innerJoin $ (`_degradeBy` p) <$> _slow dur envLR -- | Alternate version to @fadeIn@ where you can provide the time from -- which the fade in starts fadeInFrom :: Time -> Time -> Pattern a -> Pattern a fadeInFrom from dur p = innerJoin $ (`_degradeBy` p) <$> (from `rotR` _slow dur envLR) {- | The 'spread' function allows you to take a pattern transformation which takes a parameter, such as `slow`, and provide several parameters which are switched between. In other words it 'spreads' a function across several values. Taking a simple high hat loop as an example: @ d1 $ sound "ho ho:2 ho:3 hc" @ We can slow it down by different amounts, such as by a half: @ d1 $ slow 2 $ sound "ho ho:2 ho:3 hc" @ Or by four thirds (i.e. speeding it up by a third; `4%3` means four over three): @ d1 $ slow (4%3) $ sound "ho ho:2 ho:3 hc" @ But if we use `spread`, we can make a pattern which alternates between the two speeds: @ d1 $ spread slow [2,4%3] $ sound "ho ho:2 ho:3 hc" @ Note that if you pass ($) as the function to spread values over, you can put functions as the list of values. For example: @ d1 $ spread ($) [density 2, rev, slow 2, striate 3, (# speed "0.8")] $ sound "[bd*2 [~ bd]] [sn future]*2 cp jvbass*4" @ Above, the pattern will have these transforms applied to it, one at a time, per cycle: * cycle 1: `density 2` - pattern will increase in speed * cycle 2: `rev` - pattern will be reversed * cycle 3: `slow 2` - pattern will decrease in speed * cycle 4: `striate 3` - pattern will be granualized * cycle 5: `(# speed "0.8")` - pattern samples will be played back more slowly After `(# speed "0.8")`, the transforms will repeat and start at `density 2` again. -} spread :: (a -> t -> Pattern b) -> [a] -> t -> Pattern b spread f xs p = slowcat $ map (`f` p) xs slowspread :: (a -> t -> Pattern b) -> [a] -> t -> Pattern b slowspread = spread {- | @fastspread@ works the same as @spread@, but the result is squashed into a single cycle. 
If you gave four values to @spread@, then the result would seem to
speed up by a factor of four. Compare these two:

d1 $ spread chop [4,64,32,16] $ sound "ho ho:2 ho:3 hc"

d1 $ fastspread chop [4,64,32,16] $ sound "ho ho:2 ho:3 hc"

There is also @slowspread@, which is an alias of @spread@.
-}
fastspread :: (a -> t -> Pattern b) -> [a] -> t -> Pattern b
fastspread f xs p = fastcat $ map (`f` p) xs

{- | There's a version of this function, `spread'` (pronounced "spread prime"),
which takes a *pattern* of parameters, instead of a list:

@
d1 $ spread' slow "2 4%3" $ sound "ho ho:2 ho:3 hc"
@

This is quite a messy area of Tidal - due to a slight difference of
implementation this sounds completely different! One advantage of
using `spread'` though is that you can provide polyphonic parameters, e.g.:

@
d1 $ spread' slow "[2 4%3, 3]" $ sound "ho ho:2 ho:3 hc"
@
-}
spread' :: Monad m => (a -> b -> m c) -> m a -> b -> m c
spread' f vpat pat = vpat >>= \v -> f v pat

{- | `spreadChoose f xs p` is similar to `slowspread` but picks values from
`xs` at random, rather than cycling through them in order. It has a
shorter alias `spreadr`.
-}
spreadChoose :: (t -> t1 -> Pattern b) -> [t] -> t1 -> Pattern b
spreadChoose f vs p = do v <- _segment 1 (choose vs)
                         f v p

spreadr :: (t -> t1 -> Pattern b) -> [t] -> t1 -> Pattern b
spreadr = spreadChoose

{-| Decide whether to apply one or another function depending on the result of
a test function that is passed the current cycle as a number.

@
d1 $ ifp ((== 0).(flip mod 2)) (striate 4) (# coarse "24 48") $ sound "hh hc"
@

This will apply `striate 4` for every _even_ cycle and apply
`# coarse "24 48"` for every _odd_ one.

Detail: as you can see, the test function is arbitrary and does not rely on
anything Tidal-specific. In fact it uses only plain Haskell functionality: it
calculates the modulo of 2 of the current cycle, which is either 0 (for even
cycles) or 1. It then compares this value against 0 and returns the result,
which is either `True` or `False`. This is what the first part of `ifp`'s
signature, `(Int -> Bool)`, signifies: a function that takes a whole number
and returns either `True` or `False`.
-}
ifp :: (Int -> Bool) -> (Pattern a -> Pattern a) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
ifp test f1 f2 p = splitQueries $ p {query = q}
  where q a | test (floor $ start $ arc a) = query (f1 p) a
            | otherwise = query (f2 p) a

-- | @wedge t p p'@ combines patterns @p@ and @p'@ by squashing the
-- @p@ into the portion of each cycle given by @t@, and @p'@ into the
-- remainder of each cycle.
wedge :: Pattern Time -> Pattern a -> Pattern a -> Pattern a
wedge pt pa pb = innerJoin $ (\t -> _wedge t pa pb) <$> pt

_wedge :: Time -> Pattern a -> Pattern a -> Pattern a
_wedge 0 _ p' = p'
_wedge 1 p _ = p
_wedge t p p' = overlay (_fastGap (1/t) p) (t `rotR` _fastGap (1/(1-t)) p')

{- | @whenmod@ has a similar form and behavior to `every`, but requires an
additional number. It applies the function to the pattern when the
remainder of the current loop number divided by the first parameter
is greater than or equal to the second parameter.
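In other words (a rough reading, not from the original docs), for whole-number
arguments @whenmod a b f@ behaves much like @when ((>= b) . (`mod` a)) f@,
re-testing the cycle number on every cycle.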
For example the following makes every other block of four loops twice as dense: @ d1 $ whenmod 8 4 (density 2) (sound "bd sn kurt") @ -} whenmod :: Pattern Time -> Pattern Time -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a whenmod a b f pat = innerJoin $ (\a' b' -> _whenmod a' b' f pat) <$> a <*> b _whenmod :: Time -> Time -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a _whenmod a b = whenT (\t -> ((t `mod'` a) >= b )) {- | @ superimpose f p = stack [p, f p] @ `superimpose` plays a modified version of a pattern at the same time as the original pattern, resulting in two patterns being played at the same time. @ d1 $ superimpose (density 2) $ sound "bd sn [cp ht] hh" d1 $ superimpose ((# speed "2") . (0.125 <~)) $ sound "bd sn cp hh" @ -} superimpose :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a superimpose f p = stack [p, f p] {- | @trunc@ truncates a pattern so that only a fraction of the pattern is played. The following example plays only the first quarter of the pattern: @ d1 $ trunc 0.25 $ sound "bd sn*2 cp hh*4 arpy bd*2 cp bd*2" @ -} trunc :: Pattern Time -> Pattern a -> Pattern a trunc = tParam _trunc _trunc :: Time -> Pattern a -> Pattern a _trunc t = compress (0, t) . zoomArc (Arc 0 t) {- | @linger@ is similar to `trunc` but the truncated part of the pattern loops until the end of the cycle. @ d1 $ linger 0.25 $ sound "bd sn*2 cp hh*4 arpy bd*2 cp bd*2" @ If you give it a negative number, it will linger on the last part of the pattern, instead of the start of it. E.g. to linger on the last quarter: @ d1 $ linger (-0.25) $ sound "bd sn*2 cp hh*4 arpy bd*2 cp bd*2" @ -} linger :: Pattern Time -> Pattern a -> Pattern a linger = tParam _linger _linger :: Time -> Pattern a -> Pattern a _linger n p | n < 0 = _fast (1/n) $ zoomArc (Arc (1 + n) 1) p | otherwise = _fast (1/n) $ zoomArc (Arc 0 n) p {- | Use `within` to apply a function to only a part of a pattern. For example, to apply `density 2` to only the first half of a pattern: @ d1 $ within (0, 0.5) (density 2) $ sound "bd*2 sn lt mt hh hh hh hh" @ Or, to apply `(# speed "0.5") to only the last quarter of a pattern: @ d1 $ within (0.75, 1) (# speed "0.5") $ sound "bd*2 sn lt mt hh hh hh hh" @ -} within :: (Time, Time) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a within (s, e) f p = stack [filterWhen (\t -> cyclePos t >= s && cyclePos t < e) $ f p, filterWhen (\t -> not $ cyclePos t >= s && cyclePos t < e) p ] withinArc :: Arc -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a withinArc (Arc s e) = within (s, e) {- | For many cases, @within'@ will function exactly as within. The difference between the two occurs when applying functions that change the timing of notes such as 'fast' or '<~'. within first applies the function to all notes in the cycle, then keeps the results in the specified interval, and then combines it with the old cycle (an "apply split combine" paradigm). within' first keeps notes in the specified interval, then applies the function to these notes, and then combines it with the old cycle (a "split apply combine" paradigm). 
For example, whereas using the standard version of within @ d1 $ within (0, 0.25) (fast 2) $ sound "bd hh cp sd" @ sounds like: @ d1 $ sound "[bd hh] hh cp sd" @ using this alternative version, within' @ d1 $ within' (0, 0.25) (fast 2) $ sound "bd hh cp sd" @ sounds like: @ d1 $ sound "[bd bd] hh cp sd" @ -} within' :: (Time, Time) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a within' a@(s, e) f p = stack [ filterWhen (\t -> cyclePos t >= s && cyclePos t < e) $ compress a $ f $ zoom a p , filterWhen (\t -> not $ cyclePos t >= s && cyclePos t < e) p ] revArc :: (Time, Time) -> Pattern a -> Pattern a revArc a = within a rev {- | You can use the @e@ function to apply a Euclidean algorithm over a complex pattern, although the structure of that pattern will be lost: @ d1 $ e 3 8 $ sound "bd*2 [sn cp]" @ In the above, three sounds are picked from the pattern on the right according to the structure given by the `e 3 8`. It ends up picking two `bd` sounds, a `cp` and missing the `sn` entirely. These types of sequences use "Bjorklund's algorithm", which wasn't made for music but for an application in nuclear physics, which is exciting. More exciting still is that it is very similar in structure to the one of the first known algorithms written in Euclid's book of elements in 300 BC. You can read more about this in the paper [The Euclidean Algorithm Generates Traditional Musical Rhythms](http://cgm.cs.mcgill.ca/~godfried/publications/banff.pdf) by Toussaint. Some examples from this paper are included below, including rotation in some cases. @ - (2,5) : A thirteenth century Persian rhythm called Khafif-e-ramal. - (3,4) : The archetypal pattern of the Cumbia from Colombia, as well as a Calypso rhythm from Trinidad. - (3,5,2) : Another thirteenth century Persian rhythm by the name of Khafif-e-ramal, as well as a Rumanian folk-dance rhythm. - (3,7) : A Ruchenitza rhythm used in a Bulgarian folk-dance. - (3,8) : The Cuban tresillo pattern. - (4,7) : Another Ruchenitza Bulgarian folk-dance rhythm. - (4,9) : The Aksak rhythm of Turkey. - (4,11) : The metric pattern used by Frank Zappa in his piece titled Outside Now. - (5,6) : Yields the York-Samai pattern, a popular Arab rhythm. - (5,7) : The Nawakhat pattern, another popular Arab rhythm. - (5,8) : The Cuban cinquillo pattern. - (5,9) : A popular Arab rhythm called Agsag-Samai. - (5,11) : The metric pattern used by Moussorgsky in Pictures at an Exhibition. - (5,12) : The Venda clapping pattern of a South African children’s song. - (5,16) : The Bossa-Nova rhythm necklace of Brazil. - (7,8) : A typical rhythm played on the Bendir (frame drum). - (7,12) : A common West African bell pattern. - (7,16,14) : A Samba rhythm necklace from Brazil. - (9,16) : A rhythm necklace used in the Central African Republic. - (11,24,14) : A rhythm necklace of the Aka Pygmies of Central Africa. - (13,24,5) : Another rhythm necklace of the Aka Pygmies of the upper Sangha. 
@ -} euclid :: Pattern Int -> Pattern Int -> Pattern a -> Pattern a euclid = tParam2 _euclid _euclid :: Int -> Int -> Pattern a -> Pattern a _euclid n k a = fastcat $ fmap (bool silence a) $ bjorklund (n,k) {- | `euclidfull n k pa pb` stacks @e n k pa@ with @einv n k pb@ -} euclidFull :: Pattern Int -> Pattern Int -> Pattern a -> Pattern a -> Pattern a euclidFull n k pa pb = stack [ euclid n k pa, euclidInv n k pb ] _euclidBool :: Int -> Int -> Pattern Bool _euclidBool n k = fastFromList $ bjorklund (n,k) _euclid' :: Int -> Int -> Pattern a -> Pattern a _euclid' n k p = fastcat $ map (\x -> if x then p else silence) (bjorklund (n,k)) euclidOff :: Pattern Int -> Pattern Int -> Pattern Int -> Pattern a -> Pattern a euclidOff = tParam3 _euclidOff eoff :: Pattern Int -> Pattern Int -> Pattern Int -> Pattern a -> Pattern a eoff = euclidOff _euclidOff :: Int -> Int -> Int -> Pattern a -> Pattern a _euclidOff _ 0 _ _ = silence _euclidOff n k s p = (rotL $ fromIntegral s%fromIntegral k) (_euclid n k p) euclidOffBool :: Pattern Int -> Pattern Int -> Pattern Int -> Pattern Bool -> Pattern Bool euclidOffBool = tParam3 _euclidOffBool _euclidOffBool :: Int -> Int -> Int -> Pattern Bool -> Pattern Bool _euclidOffBool _ 0 _ _ = silence _euclidOffBool n k s p = ((fromIntegral s % fromIntegral k) `rotL`) ((\a b -> if b then a else not a) <$> _euclidBool n k <*> p) distrib :: [Pattern Int] -> Pattern a -> Pattern a distrib ps p = do p' <- sequence ps _distrib p' p _distrib :: [Int] -> Pattern a -> Pattern a _distrib xs p = boolsToPat (foldr distrib' (replicate (last xs) True) (reverse $ layers xs)) p where distrib' :: [Bool] -> [Bool] -> [Bool] distrib' [] _ = [] distrib' (_:a) [] = False : distrib' a [] distrib' (True:a) (x:b) = x : distrib' a b distrib' (False:a) b = False : distrib' a b layers = map bjorklund . (zip<*>tail) boolsToPat a b' = flip const <$> filterValues (== True) (fastFromList a) <*> b' {- | `euclidInv` fills in the blanks left by `e` - @e 3 8 "x"@ -> @"x ~ ~ x ~ ~ x ~"@ @euclidInv 3 8 "x"@ -> @"~ x x ~ x x ~ x"@ -} euclidInv :: Pattern Int -> Pattern Int -> Pattern a -> Pattern a euclidInv = tParam2 _euclidInv _euclidInv :: Int -> Int -> Pattern a -> Pattern a _euclidInv n k a = fastcat $ fmap (bool a silence) $ bjorklund (n,k) index :: Real b => b -> Pattern b -> Pattern c -> Pattern c index sz indexpat pat = spread' (zoom' $ toRational sz) (toRational . (*(1-sz)) <$> indexpat) pat where zoom' tSz s = zoomArc (Arc s (s+tSz)) {- -- | @prrw f rot (blen, vlen) beatPattern valuePattern@: pattern rotate/replace. prrw :: (a -> b -> c) -> Int -> (Time, Time) -> Pattern a -> Pattern b -> Pattern c prrw f rot (blen, vlen) beatPattern valuePattern = let ecompare (_,e1,_) (_,e2,_) = compare (fst e1) (fst e2) beats = sortBy ecompare $ arc beatPattern (0, blen) values = fmap thd' . sortBy ecompare $ arc valuePattern (0, vlen) cycles = blen * (fromIntegral $ lcm (length beats) (length values) `div` (length beats)) in _slow cycles $ stack $ zipWith (\( _, (start, end), v') v -> (start `rotR`) $ densityGap (1 / (end - start)) $ pure (f v' v)) (sortBy ecompare $ arc (_fast cycles $ beatPattern) (0, blen)) (drop (rot `mod` length values) $ cycle values) -- | @prr rot (blen, vlen) beatPattern valuePattern@: pattern rotate/replace. prr :: Int -> (Time, Time) -> Pattern String -> Pattern b -> Pattern b prr = prrw $ flip const {-| @preplace (blen, plen) beats values@ combines the timing of @beats@ with the values of @values@. 
Other ways of saying this are: * sequential convolution * @values@ quantized to @beats@. Examples: @ d1 $ sound $ preplace (1,1) "x [~ x] x x" "bd sn" d1 $ sound $ preplace (1,1) "x(3,8)" "bd sn" d1 $ sound $ "x(3,8)" <~> "bd sn" d1 $ sound "[jvbass jvbass:5]*3" |+| (shape $ "1 1 1 1 1" <~> "0.2 0.9") @ It is assumed the pattern fits into a single cycle. This works well with pattern literals, but not always with patterns defined elsewhere. In those cases use @preplace@ and provide desired pattern lengths: @ let p = slow 2 $ "x x x" d1 $ sound $ preplace (2,1) p "bd sn" @ -} preplace :: (Time, Time) -> Pattern String -> Pattern b -> Pattern b preplace = preplaceWith $ flip const -- | @prep@ is an alias for preplace. prep :: (Time, Time) -> Pattern String -> Pattern b -> Pattern b prep = preplace preplace1 :: Pattern String -> Pattern b -> Pattern b preplace1 = preplace (1, 1) preplaceWith :: (a -> b -> c) -> (Time, Time) -> Pattern a -> Pattern b -> Pattern c preplaceWith f (blen, plen) = prrw f 0 (blen, plen) prw :: (a -> b -> c) -> (Time, Time) -> Pattern a -> Pattern b -> Pattern c prw = preplaceWith preplaceWith1 :: (a -> b -> c) -> Pattern a -> Pattern b -> Pattern c preplaceWith1 f = prrw f 0 (1, 1) prw1 :: (a -> b -> c) -> Pattern a -> Pattern b -> Pattern c prw1 = preplaceWith1 (<~>) :: Pattern String -> Pattern b -> Pattern b (<~>) = preplace (1, 1) -- | @protate len rot p@ rotates pattern @p@ by @rot@ beats to the left. -- @len@: length of the pattern, in cycles. -- Example: @d1 $ every 4 (protate 2 (-1)) $ slow 2 $ sound "bd hh hh hh"@ protate :: Time -> Int -> Pattern a -> Pattern a protate len rot p = prrw (flip const) rot (len, len) p p prot :: Time -> Int -> Pattern a -> Pattern a prot = protate prot1 :: Int -> Pattern a -> Pattern a prot1 = protate 1 {-| The @<<~@ operator rotates a unit pattern to the left, similar to @<~@, but by events rather than linear time. The timing of the pattern remains constant: @ d1 $ (1 <<~) $ sound "bd ~ sn hh" -- will become d1 $ sound "sn ~ hh bd" @ -} (<<~) :: Int -> Pattern a -> Pattern a (<<~) = protate 1 -- | @~>>@ is like @<<~@ but for shifting to the right. (~>>) :: Int -> Pattern a -> Pattern a (~>>) = (<<~) . (0-) -- | @pequal cycles p1 p2@: quickly test if @p1@ and @p2@ are the same. pequal :: Ord a => Time -> Pattern a -> Pattern a -> Bool pequal cycles p1 p2 = (sort $ arc p1 (0, cycles)) == (sort $ arc p2 (0, cycles)) -} -- | @rot n p@ rotates the values in a pattern @p@ by @n@ beats to the left. -- Example: @d1 $ every 4 (rot 2) $ slow 2 $ sound "bd hh hh hh"@ rot :: Ord a => Pattern Int -> Pattern a -> Pattern a rot = tParam _rot -- Calculates a whole cycle, rotates it, then constrains events to the original query arc _rot :: Ord a => Int -> Pattern a -> Pattern a _rot i pat = splitQueries $ pat {query = \st -> f st (query pat (st {arc = wholeCycle (arc st)}))} where -- TODO maybe events with the same arc (part+whole) should be -- grouped together in the rotation? 
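        -- Queries a whole cycle, defrags and sorts its events, rotates their
        -- values by i positions, then constrains the result to the query arc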
f st es = constrainEvents (arc st) $ shiftValues $ sort $ defragParts es shiftValues es | i >= 0 = zipWith (\e s -> e {value = s}) es (drop i $ cycle $ map value es) | otherwise = zipWith (\e s -> e{value = s}) es (drop (length es - abs i) $ cycle $ map value es) wholeCycle (Arc s _) = Arc (sam s) (nextSam s) constrainEvents :: Arc -> [Event a] -> [Event a] constrainEvents a es = mapMaybe (constrainEvent a) es constrainEvent :: Arc -> Event a -> Maybe (Event a) constrainEvent a e = do p' <- subArc (part e) a return e {part = p'} -- | @segment n p@: 'samples' the pattern @p@ at a rate of @n@ -- events per cycle. Useful for turning a continuous pattern into a -- discrete one. segment :: Pattern Time -> Pattern a -> Pattern a segment = tParam _segment _segment :: Time -> Pattern a -> Pattern a _segment n p = _fast n (pure id) <* p -- | @discretise@: the old (deprecated) name for 'segment' discretise :: Pattern Time -> Pattern a -> Pattern a discretise = segment -- | @randcat ps@: does a @slowcat@ on the list of patterns @ps@ but -- randomises the order in which they are played. randcat :: [Pattern a] -> Pattern a randcat ps = spread' rotL (_segment 1 $ (%1) . fromIntegral <$> (_irand (length ps) :: Pattern Int)) (slowcat ps) wrandcat :: [(Pattern a, Double)] -> Pattern a wrandcat ps = unwrap $ wchooseBy (segment 1 rand) ps -- @fromNote p@: converts a pattern of human-readable pitch names -- into pitch numbers. For example, @"cs2"@ will be parsed as C Sharp -- in the 2nd octave with the result of @11@, and @"b-3"@ as -- @-25@. Pitches can be decorated using: -- -- * s = Sharp, a half-step above (@"gs-1"@) -- * f = Flat, a half-step below (@"gf-1"@) -- * n = Natural, no decoration (@"g-1" and "gn-1"@ are equivalent) -- * ss = Double sharp, a whole step above (@"gss-1"@) -- * ff = Double flat, a whole step below (@"gff-1"@) -- -- Note that TidalCycles now assumes that middle C is represented by -- the value 0, rather than the previous value of 60. This function -- is similar to previously available functions @tom@ and @toMIDI@, -- but the default octave is now 0 rather than 5. {- definition moved to Parse.hs .. toMIDI :: Pattern String -> Pattern Int toMIDI p = fromJust <$> (filterValues (isJust) (noteLookup <$> p)) where noteLookup :: String -> Maybe Int noteLookup [] = Nothing noteLookup s | not (last s `elem` ['0' .. '9']) = noteLookup (s ++ "0") | not (isLetter (s !! 1)) = noteLookup((head s):'n':(tail s)) | otherwise = parse s parse x = (\a b c -> a+b+c) <$> pc x <*> sym x <*> Just(12*digitToInt (last x)) pc x = lookup (head x) [('c',0),('d',2),('e',4),('f',5),('g',7),('a',9),('b',11)] sym x = lookup (init (tail x)) [("s",1),("f",-1),("n",0),("ss",2),("ff",-2)] -} -- @tom p@: Alias for @toMIDI@. -- tom = toMIDI {- | The `fit` function takes a pattern of integer numbers, which are used to select values from the given list. What makes this a bit strange is that only a given number of values are selected each cycle. For example: @ d1 $ sound (fit 3 ["bd", "sn", "arpy", "arpy:1", "casio"] "0 [~ 1] 2 1") @ The above fits three samples into the pattern, i.e. for the first cycle this will be `"bd"`, `"sn"` and `"arpy"`, giving the result `"bd [~ sn] arpy sn"` (note that we start counting at zero, so that `0` picks the first value). The following cycle the *next* three values in the list will be picked, i.e. `"arpy:1"`, `"casio"` and `"bd"`, giving the pattern `"arpy:1 [~ casio] bd casio"` (note that the list wraps round here). 
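As an illustrative sketch (not from the original docs), the same idea works
with note numbers, stepping a melody through a pool of six pitches, three at
a time:

@
d1 $ n (fit 3 [0,2,7,9,11,5] "0 [~ 1] 2 1") # sound "arpy"
@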
-} _fit :: Int -> [a] -> Pattern Int -> Pattern a _fit perCycle xs p = (xs !!!) <$> (p {query = map (\e -> fmap (+ pos e) e) . query p}) where pos e = perCycle * floor (start $ part e) fit :: Pattern Int -> [a] -> Pattern Int -> Pattern a fit pint xs p = (tParam func) pint (xs,p) where func i (xs',p') = _fit i xs' p' permstep :: RealFrac b => Int -> [a] -> Pattern b -> Pattern a permstep nSteps things p = unwrap $ (\n -> fastFromList $ concatMap (\x -> replicate (fst x) (snd x)) $ zip (ps !! floor (n * fromIntegral (length ps - 1))) things) <$> _segment 1 p where ps = permsort (length things) nSteps deviance avg xs = sum $ map (abs . (avg-) . fromIntegral) xs permsort n total = map fst $ sortOn snd $ map (\x -> (x,deviance (fromIntegral total / (fromIntegral n :: Double)) x)) $ perms n total perms 0 _ = [] perms 1 n = [[n]] perms n total = concatMap (\x -> map (x:) $ perms (n-1) (total-x)) [1 .. (total-(n-1))] -- | @struct a b@: structures pattern @b@ in terms of the pattern of -- boolean values @a@. Only @True@ values in the boolean pattern are -- used. struct :: Pattern Bool -> Pattern a -> Pattern a struct ps pv = filterJust $ (\a b -> if a then Just b else Nothing ) <$> ps <* pv -- | @substruct a b@: similar to @struct@, but each event in pattern @a@ gets replaced with pattern @b@, compressed to fit the timespan of the event. substruct :: Pattern Bool -> Pattern b -> Pattern b substruct s p = p {query = f} where f st = concatMap ((\a' -> queryArc (compressArcTo a' p) a') . wholeOrPart) $ filter value $ query s st randArcs :: Int -> Pattern [Arc] randArcs n = do rs <- mapM (\x -> pure (toRational x / toRational n) <~ choose [1 :: Int,2,3]) [0 .. (n-1)] let rats = map toRational rs total = sum rats pairs = pairUp $ accumulate $ map (/total) rats return pairs where pairUp [] = [] pairUp xs = Arc 0 (head xs) : pairUp' xs pairUp' [] = [] pairUp' [_] = [] pairUp' [a, _] = [Arc a 1] pairUp' (a:b:xs) = Arc a b: pairUp' (b:xs) -- TODO - what does this do? Something for @stripe@ .. randStruct :: Int -> Pattern Int randStruct n = splitQueries $ Pattern {query = f} where f st = map (\(a,b,c) -> Event (Context []) (Just a) (fromJust b) c) $ filter (\(_,x,_) -> isJust x) as where as = map (\(i, Arc s' e') -> (Arc (s' + sam s) (e' + sam s), subArc (Arc s e) (Arc (s' + sam s) (e' + sam s)), i)) $ enumerate $ value $ head $ queryArc (randArcs n) (Arc (sam s) (nextSam s)) (Arc s e) = arc st -- TODO - what does this do? substruct' :: Pattern Int -> Pattern a -> Pattern a substruct' s p = p {query = \st -> concatMap (f st) (query s st)} where f st (Event c (Just a') _ i) = map (\e -> e {context = combineContexts [c, context e]}) $ queryArc (compressArcTo a' (inside (pure $ 1/toRational(length (queryArc s (Arc (sam (start $ arc st)) (nextSam (start $ arc st)))))) (rotR (toRational i)) p)) a' -- Ignore analog events (ones without wholes) f _ _ = [] -- | @stripe n p@: repeats pattern @p@, @n@ times per cycle. So -- similar to @fast@, but with random durations. The repetitions will -- be continguous (touching, but not overlapping) and the durations -- will add up to a single cycle. @n@ can be supplied as a pattern of -- integers. stripe :: Pattern Int -> Pattern a -> Pattern a stripe = tParam _stripe _stripe :: Int -> Pattern a -> Pattern a _stripe = substruct' . 
randStruct -- | @slowstripe n p@: The same as @stripe@, but the result is also -- @n@ times slower, so that the mean average duration of the stripes -- is exactly one cycle, and every @n@th stripe starts on a cycle -- boundary (in indian classical terms, the @sam@). slowstripe :: Pattern Int -> Pattern a -> Pattern a slowstripe n = slow (toRational <$> n) . stripe n -- Lindenmayer patterns, these go well with the step sequencer -- general rule parser (strings map to strings) parseLMRule :: String -> [(String,String)] parseLMRule s = map (splitOn ':') commaSplit where splitOn sep str = splitAt (fromJust $ elemIndex sep str) $ filter (/= sep) str commaSplit = map T.unpack $ T.splitOn (T.pack ",") $ T.pack s -- specific parser for step sequencer (chars map to string) -- ruleset in form "a:b,b:ab" parseLMRule' :: String -> [(Char, String)] parseLMRule' str = map fixer $ parseLMRule str where fixer (c,r) = (head c, r) {- | returns the `n`th iteration of a [Lindenmayer System](https://en.wikipedia.org/wiki/L-system) with given start sequence. for example: @ lindenmayer 1 "a:b,b:ab" "ab" -> "bab" @ -} lindenmayer :: Int -> String -> String -> String lindenmayer _ _ [] = [] lindenmayer 1 r (c:cs) = fromMaybe [c] (lookup c $ parseLMRule' r) ++ lindenmayer 1 r cs lindenmayer n r s = iterate (lindenmayer 1 r) s !! n {- | @lindenmayerI@ converts the resulting string into a a list of integers with @fromIntegral@ applied (so they can be used seamlessly where floats or rationals are required) -} lindenmayerI :: Num b => Int -> String -> String -> [b] lindenmayerI n r s = fmap (fromIntegral . digitToInt) $ lindenmayer n r s {- | @runMarkov n tmat xi seed@ generates a Markov chain (as a list) of length @n@ using the transition matrix @tmat@ starting from initial state @xi@, starting with random numbers generated from @seed@ Each entry in the chain is the index of state (starting from zero). Each row of the matrix will be automatically normalized. For example: @ runMarkov 8 [[2,3], [1,3]] 0 0 @ will produce a two-state chain 8 steps long, from initial state @0@, where the transition probability from state 0->0 is 2/5, 0->1 is 3/5, 1->0 is 1/4, and 1->1 is 3/4. -} runMarkov :: Int -> [[Double]] -> Int -> Time -> [Int] runMarkov n tp xi seed = reverse $ (iterate (markovStep $ renorm) [xi])!! (n-1) where markovStep tp' xs = (fromJust $ findIndex (r <=) $ scanl1 (+) (tp'!!(head xs))) : xs where r = timeToRand $ seed + (fromIntegral . length) xs / fromIntegral n renorm = [ map (/ sum x) x | x <- tp ] {- @markovPat n xi tp@ generates a one-cycle pattern of @n@ steps in a Markov chain starting from state @xi@ with transition matrix @tp@. Each row of the transition matrix is automatically normalized. For example: @ tidal> markovPat 8 1 [[3,5,2], [4,4,2], [0,1,0]] (0>⅛)|1 (⅛>¼)|2 (¼>⅜)|1 (⅜>½)|1 (½>⅝)|2 (⅝>¾)|1 (¾>⅞)|1 (⅞>1)|0 @ -} markovPat :: Pattern Int -> Pattern Int -> [[Double]] -> Pattern Int markovPat = tParam2 _markovPat _markovPat :: Int -> Int -> [[Double]] -> Pattern Int _markovPat n xi tp = splitQueries $ Pattern (\(State a@(Arc s _) _) -> queryArc (listToPat $ runMarkov n tp xi (sam s)) a) {-| Removes events from second pattern that don't start during an event from first. Consider this, kind of messy rhythm without any rests. 
@ d1 $ sound (slowcat ["sn*8", "[cp*4 bd*4, hc*5]"]) # n (run 8) @ If we apply a mask to it @ d1 $ s (mask ("1 1 1 ~ 1 1 ~ 1" :: Pattern Bool) (slowcat ["sn*8", "[cp*4 bd*4, bass*5]"] )) # n (run 8) @ Due to the use of `slowcat` here, the same mask is first applied to `"sn*8"` and in the next cycle to `"[cp*4 bd*4, hc*5]". You could achieve the same effect by adding rests within the `slowcat` patterns, but mask allows you to do this more easily. It kind of keeps the rhythmic structure and you can change the used samples independently, e.g. @ d1 $ s (mask ("1 ~ 1 ~ 1 1 ~ 1") (slowcat ["can*8", "[cp*4 sn*4, jvbass*16]"] )) # n (run 8) @ -} mask :: Pattern Bool -> Pattern a -> Pattern a mask b p = const <$> p <* (filterValues id b) -- | TODO: refactor towards union enclosingArc :: [Arc] -> Arc enclosingArc [] = Arc 0 1 enclosingArc as = Arc (minimum (map start as)) (maximum (map stop as)) stretch :: Pattern a -> Pattern a -- TODO - should that be whole or part? stretch p = splitQueries $ p {query = q} where q st = query (zoomArc (cycleArc $ enclosingArc $ map wholeOrPart $ query p (st {arc = Arc (sam s) (nextSam s)})) p) st where s = start $ arc st {- | `fit'` is a generalization of `fit`, where the list is instead constructed by using another integer pattern to slice up a given pattern. The first argument is the number of cycles of that latter pattern to use when slicing. It's easier to understand this with a few examples: @ d1 $ sound (fit' 1 2 "0 1" "1 0" "bd sn") @ So what does this do? The first `1` just tells it to slice up a single cycle of `"bd sn"`. The `2` tells it to select two values each cycle, just like the first argument to `fit`. The next pattern `"0 1"` is the "from" pattern which tells it how to slice, which in this case means `"0"` maps to `"bd"`, and `"1"` maps to `"sn"`. The next pattern `"1 0"` is the "to" pattern, which tells it how to rearrange those slices. So the final result is the pattern `"sn bd"`. A more useful example might be something like @ d1 $ fit' 1 4 (run 4) "[0 3*2 2 1 0 3*2 2 [1*8 ~]]/2" $ chop 4 $ (sound "breaks152" # unit "c") @ which uses `chop` to break a single sample into individual pieces, which `fit'` then puts into a list (using the `run 4` pattern) and reassembles according to the complicated integer pattern. -} fit' :: Pattern Time -> Int -> Pattern Int -> Pattern Int -> Pattern a -> Pattern a fit' cyc n from to p = squeezeJoin $ _fit n mapMasks to where mapMasks = [stretch $ mask (const True <$> filterValues (== i) from') p' | i <- [0..n-1]] p' = density cyc p from' = density cyc from {-| @chunk n f p@ treats the given pattern @p@ as having @n@ chunks, and applies the function @f@ to one of those sections per cycle, running from left to right. @ d1 $ chunk 4 (density 4) $ sound "cp sn arpy [mt lt]" @ -} _chunk :: Int -> (Pattern b -> Pattern b) -> Pattern b -> Pattern b _chunk n f p = cat [withinArc (Arc (i % fromIntegral n) ((i+1) % fromIntegral n)) f p | i <- [0 .. fromIntegral n - 1]] chunk :: Pattern Int -> (Pattern b -> Pattern b) -> Pattern b -> Pattern b chunk npat f p = innerJoin $ (\n -> _chunk n f p) <$> npat -- deprecated (renamed to chunk) runWith :: Int -> (Pattern b -> Pattern b) -> Pattern b -> Pattern b runWith = _chunk {-| @chunk'@ works much the same as `chunk`, but runs from right to left. -} -- this was throwing a parse error when I ran it in tidal whenever I changed the function name.. 
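-- An illustrative sketch (not from the original docs): with @chunk'@ the
-- affected quarter moves from right to left over successive cycles:
--
-- > d1 $ chunk' 4 (fast 2) $ sound "bd sn:2 [~ bd] sn:2"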
_chunk' :: Integral a => a -> (Pattern b -> Pattern b) -> Pattern b -> Pattern b _chunk' n f p = do i <- _slow (toRational n) $ rev $ run (fromIntegral n) withinArc (Arc (i % fromIntegral n) ((i+)1 % fromIntegral n)) f p chunk' :: Integral a1 => Pattern a1 -> (Pattern a2 -> Pattern a2) -> Pattern a2 -> Pattern a2 chunk' npat f p = innerJoin $ (\n -> _chunk' n f p) <$> npat _inside :: Time -> (Pattern a1 -> Pattern a) -> Pattern a1 -> Pattern a _inside n f p = _fast n $ f (_slow n p) inside :: Pattern Time -> (Pattern a1 -> Pattern a) -> Pattern a1 -> Pattern a inside np f p = innerJoin $ (\n -> _inside n f p) <$> np _outside :: Time -> (Pattern a1 -> Pattern a) -> Pattern a1 -> Pattern a _outside n = _inside (1/n) outside :: Pattern Time -> (Pattern a1 -> Pattern a) -> Pattern a1 -> Pattern a outside np f p = innerJoin $ (\n -> _outside n f p) <$> np loopFirst :: Pattern a -> Pattern a loopFirst p = splitQueries $ p {query = f} where f st = map (\(Event c w p' v) -> Event c (plus <$> w) (plus p') v) $ query p (st {arc = minus $ arc st}) where minus = fmap (subtract (sam s)) plus = fmap (+ sam s) s = start $ arc st timeLoop :: Pattern Time -> Pattern a -> Pattern a timeLoop n = outside n loopFirst seqPLoop :: [(Time, Time, Pattern a)] -> Pattern a seqPLoop ps = timeLoop (pure $ maxT - minT) $ minT `rotL` seqP ps where minT = minimum $ map (\(x,_,_) -> x) ps maxT = maximum $ map (\(_,x,_) -> x) ps {- | @toScale@ lets you turn a pattern of notes within a scale (expressed as a list) to note numbers. For example `toScale [0, 4, 7] "0 1 2 3"` will turn into the pattern `"0 4 7 12"`. It assumes your scale fits within an octave; to change this use `toScale' size`. Example: `toScale' 24 [0,4,7,10,14,17] (run 8)` turns into `"0 4 7 10 14 17 24 28"` -} toScale' :: Num a => Int -> [a] -> Pattern Int -> Pattern a toScale' _ [] = const silence toScale' o s = fmap noteInScale where octave x = x `div` length s noteInScale x = (s !!! x) + fromIntegral (o * octave x) toScale :: Num a => [a] -> Pattern Int -> Pattern a toScale = toScale' 12 {- | `swingBy x n` divides a cycle into `n` slices and delays the notes in the second half of each slice by `x` fraction of a slice . @swing@ is an alias for `swingBy (1%3)` -} swingBy :: Pattern Time -> Pattern Time -> Pattern a -> Pattern a swingBy x n = inside n (withinArc (Arc 0.5 1) (x ~>)) swing :: Pattern Time -> Pattern a -> Pattern a swing = swingBy (pure $ 1%3) {- | `cycleChoose` is like `choose` but only picks a new item from the list once each cycle -} cycleChoose :: [a] -> Pattern a cycleChoose = segment 1 . choose {- | Internal function used by shuffle and scramble -} _rearrangeWith :: Pattern Int -> Int -> Pattern a -> Pattern a _rearrangeWith ipat n pat = innerJoin $ (\i -> _fast nT $ _repeatCycles n $ pats !! i) <$> ipat where pats = map (\i -> zoom (fromIntegral i / nT, fromIntegral (i+1) / nT) pat) [0 .. (n-1)] nT :: Time nT = fromIntegral n {- | `shuffle n p` evenly divides one cycle of the pattern `p` into `n` parts, and returns a random permutation of the parts each cycle. For example, `shuffle 3 "a b c"` could return `"a b c"`, `"a c b"`, `"b a c"`, `"b c a"`, `"c a b"`, or `"c b a"`. But it will **never** return `"a a a"`, because that is not a permutation of the parts. -} shuffle :: Pattern Int -> Pattern a -> Pattern a shuffle = tParam _shuffle _shuffle :: Int -> Pattern a -> Pattern a _shuffle n = _rearrangeWith (randrun n) n {- | `scramble n p` is like `shuffle` but randomly selects from the parts of `p` instead of making permutations. 
For example, `scramble 3 "a b c"` will randomly select 3 parts from `"a"` `"b"` and `"c"`, possibly repeating a single part. -} scramble :: Pattern Int -> Pattern a -> Pattern a scramble = tParam _scramble _scramble :: Int -> Pattern a -> Pattern a _scramble n = _rearrangeWith (_segment (fromIntegral n) $ _irand n) n randrun :: Int -> Pattern Int randrun 0 = silence randrun n' = splitQueries $ Pattern (\(State a@(Arc s _) _) -> events a $ sam s) where events a seed = mapMaybe toEv $ zip arcs shuffled where shuffled = map snd $ sortOn fst $ zip rs [0 .. (n'-1)] rs = timeToRands seed n' :: [Double] arcs = zipWith Arc fractions (tail fractions) fractions = map (+ (sam $ start a)) [0, 1 / fromIntegral n' .. 1] toEv (a',v) = do a'' <- subArc a a' return $ Event (Context []) (Just a') a'' v ur :: Time -> Pattern String -> [(String, Pattern a)] -> [(String, Pattern a -> Pattern a)] -> Pattern a ur t outer_p ps fs = _slow t $ unwrap $ adjust <$> timedValues (getPat . split <$> outer_p) where split = wordsBy (==':') getPat (s:xs) = (match s, transform xs) -- TODO - check this really can't happen.. getPat _ = error "can't happen?" match s = fromMaybe silence $ lookup s ps' ps' = map (fmap (_fast t)) ps adjust (a, (p, f)) = f a p transform (x:_) a = transform' x a transform _ _ = id transform' str (Arc s e) p = s `rotR` inside (pure $ 1/(e-s)) (matchF str) p matchF str = fromMaybe id $ lookup str fs timedValues = withEvent (\(Event c (Just a) a' v) -> Event c (Just a) a' (a,v)) . filterDigital inhabit :: [(String, Pattern a)] -> Pattern String -> Pattern a inhabit ps p = squeezeJoin $ (\s -> fromMaybe silence $ lookup s ps) <$> p {- | @spaceOut xs p@ repeats a pattern @p@ at different durations given by the list of time values in @xs@ -} spaceOut :: [Time] -> Pattern a -> Pattern a spaceOut xs p = _slow (toRational $ sum xs) $ stack $ map (`compressArc` p) spaceArcs where markOut :: Time -> [Time] -> [Arc] markOut _ [] = [] markOut offset (x:xs') = Arc offset (offset+x):markOut (offset+x) xs' spaceArcs = map (\(Arc a b) -> Arc (a/s) (b/s)) $ markOut 0 xs s = sum xs -- | @flatpat@ takes a Pattern of lists and pulls the list elements as -- separate Events flatpat :: Pattern [a] -> Pattern a flatpat p = p {query = concatMap (\(Event c b b' xs) -> map (Event c b b') xs) . query p} -- | @layer@ takes a Pattern of lists and pulls the list elements as -- separate Events layer :: [a -> Pattern b] -> a -> Pattern b layer fs p = stack $ map ($ p) fs -- | @arpeggiate@ finds events that share the same timespan, and spreads -- them out during that timespan, so for example @arpeggiate "[bd,sn]"@ -- gets turned into @"bd sn"@. Useful for creating arpeggios/broken chords. arpeggiate :: Pattern a -> Pattern a arpeggiate = arpWith id -- | Shorthand alias for arpeggiate arpg :: Pattern a -> Pattern a arpg = arpeggiate arpWith :: ([EventF (ArcF Time) a] -> [EventF (ArcF Time) b]) -> Pattern a -> Pattern b arpWith f p = withEvents munge p where munge es = concatMap (spreadOut . f) (groupBy (\a b -> whole a == whole b) $ sortOn whole es) spreadOut xs = mapMaybe (\(n, x) -> shiftIt n (length xs) x) $ enumerate xs shiftIt n d (Event c (Just (Arc s e)) a' v) = do a'' <- subArc (Arc newS newE) a' return (Event c (Just $ Arc newS newE) a'' v) where newS = s + (dur * fromIntegral n) newE = newS + dur dur = (e - s) / fromIntegral d -- TODO ignoring analog events.. Should we just leave them as-is? 
shiftIt _ _ _ = Nothing arp :: Pattern String -> Pattern a -> Pattern a arp = tParam _arp _arp :: String -> Pattern a -> Pattern a _arp name p = arpWith f p where f = fromMaybe id $ lookup name arps arps :: [(String, [a] -> [a])] arps = [("up", id), ("down", reverse), ("updown", \x -> init x ++ init (reverse x)), ("downup", \x -> init (reverse x) ++ init x), ("up&down", \x -> x ++ reverse x), ("down&up", \x -> reverse x ++ x), ("converge", converge), ("diverge", reverse . converge), ("disconverge", \x -> converge x ++ tail (reverse $ converge x)), ("pinkyup", pinkyup), ("pinkyupdown", \x -> init (pinkyup x) ++ init (reverse $ pinkyup x)), ("thumbup", thumbup), ("thumbupdown", \x -> init (thumbup x) ++ init (reverse $ thumbup x)) ] converge [] = [] converge (x:xs) = x : converge' xs converge' [] = [] converge' xs = last xs : converge (init xs) pinkyup xs = concatMap (:[pinky]) $ init xs where pinky = last xs thumbup xs = concatMap (\x -> [thumb,x]) $ tail xs where thumb = head xs {- TODO ! -- | @fill@ 'fills in' gaps in one pattern with events from another. For example @fill "bd" "cp ~ cp"@ would result in the equivalent of `"~ bd ~"`. This only finds gaps in a resulting pattern, in other words @"[bd ~, sn]"@ doesn't contain any gaps (because @sn@ covers it all), and @"bd ~ ~ sn"@ only contains a single gap that bridges two steps. fill :: Pattern a -> Pattern a -> Pattern a fill p' p = struct (splitQueries $ p {query = q}) p' where q st = removeTolerance (s,e) $ invert (s-tolerance, e+tolerance) $ query p (st {arc = (s-tolerance, e+tolerance)}) where (s,e) = arc st invert (s,e) es = map arcToEvent $ foldr remove [(s,e)] (map part es) remove (s,e) xs = concatMap (remove' (s, e)) xs remove' (s,e) (s',e') | s > s' && e < e' = [(s',s),(e,e')] -- inside | s > s' && s < e' = [(s',s)] -- cut off right | e > s' && e < e' = [(e,e')] -- cut off left | s <= s' && e >= e' = [] -- swallow | otherwise = [(s',e')] -- miss arcToEvent a = ((a,a),"x") removeTolerance (s,e) es = concatMap (expand) $ map (withPart f) es where f a = concatMap (remove' (e,e+tolerance)) $ remove' (s-tolerance,s) a expand ((a,xs),c) = map (\x -> ((a,x),c)) xs tolerance = 0.01 -} -- Repeats each event @n@ times within its arc ply :: Pattern Rational -> Pattern a -> Pattern a ply = tParam _ply _ply :: Rational -> Pattern a -> Pattern a _ply n pat = squeezeJoin $ (_fast n . pure) <$> pat -- Like ply, but applies a function each time. The applications are compounded. plyWith :: (Ord t, Num t) => Pattern t -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a plyWith np f p = innerJoin $ (\n -> _plyWith n f p) <$> np _plyWith :: (Ord t, Num t) => t -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a _plyWith numPat f p = arpeggiate $ compound numPat where compound n | n <= 1 = p | otherwise = overlay p (f $ compound $ n-1) -- | Syncopates a rhythm, shifting each event halfway into its arc (aka timespan), e.g. @"a b [c d] e"@ becomes the equivalent of @"[~ a] [~ b] [[~ c] [~ d]] [~ e]"@ press :: Pattern a -> Pattern a press = _pressBy 0.5 -- | Like @press@, but allows you to specify the amount in which each event is shifted. @pressBy 0.5@ is the same as @press@, while @pressBy (1/3)@ shifts each event by a third of its arc. pressBy :: Pattern Time -> Pattern a -> Pattern a pressBy = tParam _pressBy _pressBy :: Time -> Pattern a -> Pattern a _pressBy r pat = squeezeJoin $ (compressTo (r,1) . pure) <$> pat -- | Uses the first (binary) pattern to switch between the following -- two patterns. 
The resulting structure comes from the source patterns, not the -- binary pattern. See also @stitch@. sew :: Pattern Bool -> Pattern a -> Pattern a -> Pattern a sew pb a b = overlay (mask pb a) (mask (inv pb) b) -- | Uses the first (binary) pattern to switch between the following -- two patterns. The resulting structure comes from the binary -- pattern, not the source patterns. See also @sew@. stitch :: Pattern Bool -> Pattern a -> Pattern a -> Pattern a stitch pb a b = overlay (struct pb a) (struct (inv pb) b) -- | A binary pattern is used to conditionally apply a function to a -- source pattern. The function is applied when a @True@ value is -- active, and the pattern is let through unchanged when a @False@ -- value is active. No events are let through where no binary values -- are active. while :: Pattern Bool -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a while b f pat = sew b (f pat) pat stutter :: Integral i => i -> Time -> Pattern a -> Pattern a stutter n t p = stack $ map (\i -> (t * fromIntegral i) `rotR` p) [0 .. (n-1)] echo, triple, quad, double :: Time -> Pattern a -> Pattern a echo = stutter (2 :: Int) triple = stutter (3 :: Int) quad = stutter (4 :: Int) double = echo {- | The `jux` function creates strange stereo effects, by applying a function to a pattern, but only in the right-hand channel. For example, the following reverses the pattern on the righthand side: @ d1 $ slow 32 $ jux (rev) $ striateBy 32 (1/16) $ sound "bev" @ When passing pattern transforms to functions like [jux](#jux) and [every](#every), it's possible to chain multiple transforms together with `.`, for example this both reverses and halves the playback speed of the pattern in the righthand channel: @ d1 $ slow 32 $ jux ((# speed "0.5") . rev) $ striateBy 32 (1/16) $ sound "bev" @ -} jux :: (Pattern ValueMap -> Pattern ValueMap) -> Pattern ValueMap -> Pattern ValueMap jux = juxBy 1 juxcut :: (Pattern ValueMap -> Pattern ValueMap) -> Pattern ValueMap -> Pattern ValueMap juxcut f p = stack [p # P.pan (pure 0) # P.cut (pure (-1)), f $ p # P.pan (pure 1) # P.cut (pure (-2)) ] juxcut' :: [t -> Pattern ValueMap] -> t -> Pattern ValueMap juxcut' fs p = stack $ map (\n -> ((fs !! n) p |+ P.cut (pure $ 1-n)) # P.pan (pure $ fromIntegral n / fromIntegral l)) [0 .. l-1] where l = length fs {- | In addition to `jux`, `jux'` allows using a list of pattern transform. resulting patterns from each transformation will be spread via pan from left to right. For example: @ d1 $ jux' [iter 4, chop 16, id, rev, palindrome] $ sound "bd sn" @ will put `iter 4` of the pattern to the far left and `palindrome` to the far right. In the center the original pattern will play and mid left mid right the chopped and the reversed version will appear. One could also write: @ d1 $ stack [ iter 4 $ sound "bd sn" # pan "0", chop 16 $ sound "bd sn" # pan "0.25", sound "bd sn" # pan "0.5", rev $ sound "bd sn" # pan "0.75", palindrome $ sound "bd sn" # pan "1", ] @ -} jux' :: [t -> Pattern ValueMap] -> t -> Pattern ValueMap jux' fs p = stack $ map (\n -> (fs !! n) p |+ P.pan (pure $ fromIntegral n / fromIntegral l)) [0 .. l-1] where l = length fs -- | Multichannel variant of `jux`, _not sure what it does_ jux4 :: (Pattern ValueMap -> Pattern ValueMap) -> Pattern ValueMap -> Pattern ValueMap jux4 f p = stack [p # P.pan (pure (5/8)), f $ p # P.pan (pure (1/8))] {- | With `jux`, the original and effected versions of the pattern are panned hard left and right (i.e., panned at 0 and 1). 
This can be a bit much, especially when listening on headphones. The variant `juxBy` has an additional parameter, which brings the channel closer to the centre. For example: @ d1 $ juxBy 0.5 (density 2) $ sound "bd sn:1" @ In the above, the two versions of the pattern would be panned at 0.25 and 0.75, rather than 0 and 1. -} juxBy :: Pattern Double -> (Pattern ValueMap -> Pattern ValueMap) -> Pattern ValueMap -> Pattern ValueMap juxBy n f p = stack [p |+ P.pan 0.5 |- P.pan (n/2), f $ p |+ P.pan 0.5 |+ P.pan (n/2)] pick :: String -> Int -> String pick name n = name ++ ":" ++ show n -- samples "jvbass [~ latibro] [jvbass [latibro jvbass]]" ((1%2) `rotL` slow 6 "[1 6 8 7 3]") samples :: Applicative f => f String -> f Int -> f String samples p p' = pick <$> p <*> p' samples' :: Applicative f => f String -> f Int -> f String samples' p p' = flip pick <$> p' <*> p {- scrumple :: Time -> Pattern a -> Pattern a -> Pattern a scrumple o p p' = p'' -- overlay p (o `rotR` p'') where p'' = Pattern $ \a -> concatMap (\((s,d), vs) -> map (\x -> ((s,d), snd x ) ) (arc p' (s,s)) ) (arc p a) -} spreadf :: [a -> Pattern b] -> a -> Pattern b spreadf = spread ($) stackwith :: Unionable a => Pattern a -> [Pattern a] -> Pattern a stackwith p ps | null ps = silence | otherwise = stack $ map (\(i, p') -> p' # ((fromIntegral i % l) `rotL` p)) (zip [0::Int ..] ps) where l = fromIntegral $ length ps {- cross f p p' = Pattern $ \t -> concat [filter flt $ arc p t, filter (not . flt) $ arc p' t ] ] where flt = f . cyclePos . fst . fst -} {- | `range` will take a pattern which goes from 0 to 1 (like `sine`), and range it to a different range - between the first and second arguments. In the below example, `range 1 1.5` shifts the range of `sine1` from 0 - 1 to 1 - 1.5. @ d1 $ jux (iter 4) $ sound "arpy arpy:2*2" |+ speed (slow 4 $ range 1 1.5 sine1) @ -} range :: Num a => Pattern a -> Pattern a -> Pattern a -> Pattern a range fromP toP p = (\from to v -> ((v * (to-from)) + from)) <$> fromP *> toP *> p _range :: (Functor f, Num b) => b -> b -> f b -> f b _range from to p = (+ from) . (* (to-from)) <$> p {- | `rangex` is an exponential version of `range`, good for using with frequencies. Do *not* use negative numbers or zero as arguments! -} rangex :: (Functor f, Floating b) => b -> b -> f b -> f b rangex from to p = exp <$> _range (log from) (log to) p off :: Pattern Time -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a off tp f p = innerJoin $ (\tv -> _off tv f p) <$> tp _off :: Time -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a _off t f p = superimpose (f . (t `rotR`)) p offadd :: Num a => Pattern Time -> Pattern a -> Pattern a -> Pattern a offadd tp pn p = off tp (+pn) p -- | Step sequencing step :: String -> String -> Pattern String step s cs = fastcat $ map f cs where f c | c == 'x' = pure s | isDigit c = pure $ s ++ ":" ++ [c] | otherwise = silence steps :: [(String, String)] -> Pattern String steps = stack . map (uncurry step) -- | like `step`, but allows you to specify an array of strings to use for 0,1,2... step' :: [String] -> String -> Pattern String step' ss cs = fastcat $ map f cs where f c | c == 'x' = pure $ head ss | isDigit c = pure $ ss !! digitToInt c | otherwise = silence ghost'' :: Time -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a ghost'' a f p = superimpose (((a*2.5) `rotR`) . f) $ superimpose (((a*1.5) `rotR`) . f) p ghost' :: Time -> Pattern ValueMap -> Pattern ValueMap ghost' a p = ghost'' a ((|*| P.gain (pure 0.7)) . (|> P.end (pure 0.2)) . 

ghost'' :: Time -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
ghost'' a f p = superimpose (((a*2.5) `rotR`) . f) $ superimpose (((a*1.5) `rotR`) . f) p

ghost' :: Time -> Pattern ValueMap -> Pattern ValueMap
ghost' a p = ghost'' a ((|*| P.gain (pure 0.7)) . (|> P.end (pure 0.2)) . (|*| P.speed (pure 1.25))) p

ghost :: Pattern ValueMap -> Pattern ValueMap
ghost = ghost' 0.125

{- | tabby - A more literal weaving than the `weave` function, give the
number of 'threads' per cycle and two patterns, and this function will
weave them together using a plain (aka 'tabby') weave, with a simple
over/under structure
-}
tabby :: Int -> Pattern a -> Pattern a -> Pattern a
tabby nInt p p' = stack [maskedWarp, maskedWeft]
  where
    n = fromIntegral nInt
    weft = concatMap (const [[0..n-1], reverse [0..n-1]]) [0 .. (n `div` 2) - 1]
    warp = transpose weft
    thread xs p'' = _slow (n%1) $ fastcat $ map (\i -> zoomArc (Arc (i%n) ((i+1)%n)) p'') (concat xs)
    weftP = thread weft p'
    warpP = thread warp p
    maskedWeft = mask (every 2 rev $ _fast (n % 2) $ fastCat [silence, pure True]) weftP
    maskedWarp = mask (every 2 rev $ _fast (n % 2) $ fastCat [pure True, silence]) warpP

-- | chooses between a list of patterns, using a pattern of floats (from 0-1)
select :: Pattern Double -> [Pattern a] -> Pattern a
select = tParam _select

_select :: Double -> [Pattern a] -> Pattern a
_select f ps = ps !! floor (max 0 (min 1 f) * fromIntegral (length ps - 1))

-- | chooses between a list of functions, using a pattern of floats (from 0-1)
selectF :: Pattern Double -> [Pattern a -> Pattern a] -> Pattern a -> Pattern a
selectF pf ps p = innerJoin $ (\f -> _selectF f ps p) <$> pf

_selectF :: Double -> [Pattern a -> Pattern a] -> Pattern a -> Pattern a
_selectF f ps p = (ps !! floor (max 0 (min 0.999999 f) * fromIntegral (length ps))) p

-- | chooses between a list of functions, using a pattern of integers
pickF :: Pattern Int -> [Pattern a -> Pattern a] -> Pattern a -> Pattern a
pickF pInt fs pat = innerJoin $ (\i -> _pickF i fs pat) <$> pInt

_pickF :: Int -> [Pattern a -> Pattern a] -> Pattern a -> Pattern a
_pickF i fs p = (fs !!! i) p

-- | @contrast p f f' p'@ splits controlpattern @p'@ in two, applying
-- the function @f@ to one and @f'@ to the other. This depends on
-- whether events in it contain values matching with those in @p@.
-- For example in @contrast (# crush 3) (# vowel "a") (n "1") $ n "0 1" # s "bd sn" # speed 3@,
-- the first event will have the vowel effect applied and the second
-- will have the crush applied.
contrast :: (ControlPattern -> ControlPattern) -> (ControlPattern -> ControlPattern) -> ControlPattern -> ControlPattern -> ControlPattern
contrast = contrastBy (==)

contrastBy :: (a -> Value -> Bool) -> (ControlPattern -> Pattern b) -> (ControlPattern -> Pattern b) -> Pattern (Map.Map String a) -> Pattern (Map.Map String Value) -> Pattern b
contrastBy comp f f' p p' = overlay (f matched) (f' unmatched)
  where matches = matchManyToOne (flip $ Map.isSubmapOfBy comp) p p'
        matched :: ControlPattern
        matched = filterJust $ (\(t, a) -> if t then Just a else Nothing) <$> matches
        unmatched :: ControlPattern
        unmatched = filterJust $ (\(t, a) -> if not t then Just a else Nothing) <$> matches

contrastRange :: (ControlPattern -> Pattern a) -> (ControlPattern -> Pattern a) -> Pattern (Map.Map String (Value, Value)) -> ControlPattern -> Pattern a
contrastRange = contrastBy f
  where f (VI s, VI e) (VI v) = v >= s && v <= e
        f (VF s, VF e) (VF v) = v >= s && v <= e
        f (VN s, VN e) (VN v) = v >= s && v <= e
        f (VS s, VS e) (VS v) = v == s && v == e
        f _ _ = False
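
-- Editor's sketch (not part of the original module): building the range
-- pattern that `contrastRange` expects. The helper name
-- `exampleContrastN` and the use of the "n" control key are assumptions
-- for illustration only.
exampleContrastN :: (ControlPattern -> ControlPattern) -> (ControlPattern -> ControlPattern) -> ControlPattern -> ControlPattern
exampleContrastN f g = contrastRange f g (pure $ Map.singleton "n" (VN 0, VN 3))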

-- | Like @contrast@, but one function is given, and applied to events with matching controls.
fix :: (ControlPattern -> ControlPattern) -> ControlPattern -> ControlPattern -> ControlPattern
fix f = contrast f id

-- | Like @contrast@, but one function is given, and applied to events
-- with controls which don't match.
unfix :: (ControlPattern -> ControlPattern) -> ControlPattern -> ControlPattern -> ControlPattern
unfix = contrast id

fixRange :: (ControlPattern -> Pattern ValueMap) -> Pattern (Map.Map String (Value, Value)) -> ControlPattern -> ControlPattern
fixRange f = contrastRange f id

unfixRange :: (ControlPattern -> Pattern ValueMap) -> Pattern (Map.Map String (Value, Value)) -> ControlPattern -> ControlPattern
unfixRange = contrastRange id

-- | limit values in a Pattern (or other Functor) to n equally spaced
-- divisions of 1.
quantise :: (Functor f, RealFrac b) => b -> f b -> f b
quantise n = fmap ((/n) . (fromIntegral :: RealFrac b => Int -> b) . round . (*n))

-- quantise but with floor
qfloor :: (Functor f, RealFrac b) => b -> f b -> f b
qfloor n = fmap ((/n) . (fromIntegral :: RealFrac b => Int -> b) . floor . (*n))

qceiling :: (Functor f, RealFrac b) => b -> f b -> f b
qceiling n = fmap ((/n) . (fromIntegral :: RealFrac b => Int -> b) . ceiling . (*n))

qround :: (Functor f, RealFrac b) => b -> f b -> f b
qround = quantise

-- | Inverts all the values in a boolean pattern
inv :: Functor f => f Bool -> f Bool
inv = (not <$>)

-- | Serialises a pattern so there's only one event playing at any one
-- time, making it 'monophonic'. Events which start/end earlier are given priority.
mono :: Pattern a -> Pattern a
mono p = Pattern $ \(State a cm) -> flatten $ query p (State a cm) where
  flatten :: [Event a] -> [Event a]
  flatten = mapMaybe constrainPart . truncateOverlaps . sortOn whole
  truncateOverlaps [] = []
  truncateOverlaps (e:es) = e : truncateOverlaps (mapMaybe (snip e) es)
  -- TODO - decide what to do about analog events..
  snip a b | start (wholeOrPart b) >= stop (wholeOrPart a) = Just b
           | stop (wholeOrPart b) <= stop (wholeOrPart a) = Nothing
           | otherwise = Just b {whole = Just $ Arc (stop $ wholeOrPart a) (stop $ wholeOrPart b)}
  constrainPart :: Event a -> Maybe (Event a)
  constrainPart e = do a <- subArc (wholeOrPart e) (part e)
                       return $ e {part = a}
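
-- Editor's sketch (not part of the original module): composing two of
-- the helpers above. `quantisedMono` is a hypothetical name; it snaps
-- values to n equal divisions of 1 and then thins the result down to
-- one event at a time.
quantisedMono :: Double -> Pattern Double -> Pattern Double
quantisedMono n = mono . quantise n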

-- serialize the given pattern
-- find the middle of the query's arc and use that to query the serialized pattern. We should get either no events or a single event back
-- if we don't get any events, return nothing
-- if we get an event, get the stop of its arc, and use that to query the serialized pattern, to see if there's an adjoining event
-- if there isn't, return the event as-is.
-- if there is, check where we are in the 'whole' of the event, and use that to tween between the values of the event and the next event
-- smooth :: Pattern Double -> Pattern Double

-- TODO - test this with analog events
smooth :: Fractional a => Pattern a -> Pattern a
smooth p = Pattern $ \st@(State a cm) -> tween st a $ query monoP (State (midArc a) cm)
  where
    midArc a = Arc (mid (start a, stop a)) (mid (start a, stop a))
    tween _ _ [] = []
    tween st queryA (e:_) = maybe [e {whole = Just queryA, part = queryA}] (tween' queryA) (nextV st)
      where aStop = Arc (wholeStop e) (wholeStop e)
            nextEs st' = query monoP (st' {arc = aStop})
            nextV st' | null (nextEs st') = Nothing
                      | otherwise = Just $ value (head (nextEs st'))
            tween' queryA' v =
              [ Event { context = context e
                      , whole = Just queryA'
                      , part = queryA'
                      , value = value e + ((v - value e) * pc)} ]
            pc | delta' (wholeOrPart e) == 0 = 0
               | otherwise = fromRational $ (eventPartStart e - wholeStart e) / delta' (wholeOrPart e)
            delta' a = stop a - start a
    monoP = mono p

-- | Looks up values from a list of tuples, in order to swap values in the given pattern
swap :: Eq a => [(a, b)] -> Pattern a -> Pattern b
swap things p = filterJust $ (`lookup` things) <$> p

{- snowball |
   snowball takes a function that can combine patterns (like '+'),
   a function that transforms a pattern (like 'slow'),
   a depth, and a starting pattern.
   It will then transform the pattern and combine it with the last
   transformation until the depth is reached. This is like putting an
   effect (like a filter) in the feedback of a delay line - each echo is
   more effected.

   d1 $ note (scale "hexDorian" $ snowball 8 (+) (slow 2 . rev) "0 ~ . -1 . 5 3 4 . ~ -2") # s "gtr"
-}
snowball :: Int -> (Pattern a -> Pattern a -> Pattern a) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
snowball depth combinationFunction f pattern = cat $ take depth $ scanl combinationFunction pattern $ drop 1 $ iterate f pattern

{- @soak@ |
   applies a function to a pattern and cats the resulting pattern, then
   continues applying the function until the depth is reached. This can
   be used to create a pattern that wanders away from the original
   pattern by continually adding random numbers.

   d1 $ note (scale "hexDorian" $ soak 8 (+ (range (-1) 1 $ irand 2)) "0 1 . 2 3 4") # s "gtr"
-}
soak :: Int -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
soak depth f pattern = cat $ take depth $ iterate f pattern

deconstruct :: Int -> Pattern String -> String
deconstruct n p = intercalate " " $ map showStep $ toList p
  where
    showStep :: [String] -> String
    showStep [] = "~"
    showStep [x] = x
    showStep xs = "[" ++ (intercalate ", " xs) ++ "]"
    toList :: Pattern a -> [[a]]
    toList pat = map (\(s,e) -> map value $ queryArc (_segment n' pat) (Arc s e)) arcs
      where breaks = [0, (1/n') ..]
            arcs = zip (take n breaks) (drop 1 breaks)
            n' = fromIntegral n

{- @bite@ n ipat pat |
  slices a pattern `pat` into `n` pieces, then uses the `ipat` pattern
  of integers to index into those slices. So `bite 4 "0 2*2" (run 8)`
  is the same as `"[0 1] [4 5]*2"`.
-}
bite :: Pattern Int -> Pattern Int -> Pattern a -> Pattern a
bite npat ipat pat = innerJoin $ (\n -> _bite n ipat pat) <$> npat

_bite :: Int -> Pattern Int -> Pattern a -> Pattern a
_bite n ipat pat = squeezeJoin $ zoompat <$> ipat
  where zoompat i = zoom (i'/(fromIntegral n), (i'+1)/(fromIntegral n)) pat
           where i' = fromIntegral $ i `mod` n
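
-- Editor's sketch (not part of the original module): `soak` applied to
-- `_bite`. `biteDrift` is a hypothetical name; every repetition
-- re-slices the previous result into four pieces and reorders them, so
-- the pattern drifts further from the original on each pass.
biteDrift :: Pattern a -> Pattern a
biteDrift = soak 4 (_bite 4 (listToPat [0, 2, 1, 3]))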

{- @squeeze@ ipat pats |
  uses a pattern of integers to index into a list of patterns.
-}
squeeze :: Pattern Int -> [Pattern a] -> Pattern a
squeeze _ [] = silence
squeeze ipat pats = squeezeJoin $ (pats !!!) <$> ipat

squeezeJoinUp :: Pattern (ControlPattern) -> ControlPattern
squeezeJoinUp pp = pp {query = q}
  where q st = concatMap (f st) (query (filterDigital pp) st)
        f st (Event c (Just w) p v) =
          mapMaybe (munge c w p) $ query (compressArc (cycleArc w) (v |* P.speed (pure $ fromRational $ 1/(stop w - start w)))) st {arc = p}
        -- already ignoring analog events, but for completeness..
        f _ _ = []
        munge co oWhole oPart (Event ci (Just iWhole) iPart v) =
          do w' <- subArc oWhole iWhole
             p' <- subArc oPart iPart
             return (Event (combineContexts [ci,co]) (Just w') p' v)
        munge _ _ _ _ = Nothing

_chew :: Int -> Pattern Int -> ControlPattern -> ControlPattern
_chew n ipat pat = (squeezeJoinUp $ zoompat <$> ipat) |/ P.speed (pure $ fromIntegral n)
  where zoompat i = zoom (i'/(fromIntegral n), (i'+1)/(fromIntegral n)) (pat)
           where i' = fromIntegral $ i `mod` n

-- TODO maybe _chew could pattern the first parameter directly..
chew :: Pattern Int -> Pattern Int -> ControlPattern -> ControlPattern
chew npat ipat pat = innerJoin $ (\n -> _chew n ipat pat) <$> npat

__binary :: Data.Bits.Bits b => Int -> b -> [Bool]
__binary n num = map (testBit num) $ reverse [0 .. n-1]

_binary :: Data.Bits.Bits b => Int -> b -> Pattern Bool
_binary n num = listToPat $ __binary n num

_binaryN :: Int -> Pattern Int -> Pattern Bool
_binaryN n p = squeezeJoin $ _binary n <$> p

binaryN :: Pattern Int -> Pattern Int -> Pattern Bool
binaryN n p = tParam _binaryN n p

binary :: Pattern Int -> Pattern Bool
binary = binaryN 8

ascii :: Pattern String -> Pattern Bool
ascii p = squeezeJoin $ (listToPat . concatMap (__binary 8 . ord)) <$> p

grain :: Pattern Double -> Pattern Double -> ControlPattern
grain s w = P.begin b # P.end e
  where b = s
        e = s + w

-- | For specifying a boolean pattern according to a list of offsets
-- (aka inter-onset intervals). For example `necklace 12 [4,2]` is
-- the same as "t f f f t f t f f f t f". That is, 12 steps per cycle,
-- with true values alternating between every 4 and every 2 steps.
necklace :: Rational -> [Int] -> Pattern Bool
necklace perCycle xs = _slow ((toRational $ sum xs) / perCycle) $ listToPat $ list xs
  where list :: [Int] -> [Bool]
        list [] = []
        list (x:xs') = (True:(replicate (x-1) False)) ++ list xs'
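
-- Editor's sketch (not part of the original module): `necklace` driving
-- `sew`. `necklaceSew` is a hypothetical name; the boolean necklace
-- decides, step by step, whether the first or second pattern is heard,
-- while the structure still comes from the source patterns themselves.
necklaceSew :: Rational -> [Int] -> Pattern a -> Pattern a -> Pattern a
necklaceSew perCycle is = sew (necklace perCycle is)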
bgold-cosmos/Tidal
src/Sound/Tidal/UI.hs
gpl-3.0
79,026
0
26
18,836
20,336
10,351
9,985
-1
-1