code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Books.Onboarding.ListCategories
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List categories for onboarding experience.
--
-- /See:/ <https://developers.google.com/books/docs/v1/getting_started Books API Reference> for @books.onboarding.listCategories@.
module Network.Google.Resource.Books.Onboarding.ListCategories
(
-- * REST Resource
OnboardingListCategoriesResource
-- * Creating a Request
, onboardingListCategories
, OnboardingListCategories
-- * Request Lenses
, olcLocale
) where
import Network.Google.Books.Types
import Network.Google.Prelude
-- | A resource alias for @books.onboarding.listCategories@ method which the
-- 'OnboardingListCategories' request conforms to.
type OnboardingListCategoriesResource =
"books" :>
"v1" :>
"onboarding" :>
"listCategories" :>
QueryParam "locale" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Category
-- | List categories for onboarding experience.
--
-- /See:/ 'onboardingListCategories' smart constructor.
newtype OnboardingListCategories = OnboardingListCategories'
{ _olcLocale :: Maybe Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OnboardingListCategories' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'olcLocale'
onboardingListCategories
:: OnboardingListCategories
onboardingListCategories =
OnboardingListCategories'
{ _olcLocale = Nothing
}
-- | ISO-639-1 language and ISO-3166-1 country code. Default is en-US if
-- unset.
olcLocale :: Lens' OnboardingListCategories (Maybe Text)
olcLocale
= lens _olcLocale (\ s a -> s{_olcLocale = a})
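-- A usage sketch (added note, not part of the generated documentation):
-- assuming the lens operators '&' and '?~' are in scope (importable from
-- Control.Lens, and re-exported by 'Network.Google.Prelude' in many gogol
-- versions), a request for a particular locale can be built from the smart
-- constructor:
--
-- > onboardingListCategories & olcLocale ?~ "en-GB"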
instance GoogleRequest OnboardingListCategories where
type Rs OnboardingListCategories = Category
type Scopes OnboardingListCategories =
'["https://www.googleapis.com/auth/books"]
requestClient OnboardingListCategories'{..}
= go _olcLocale (Just AltJSON) booksService
where go
= buildClient
(Proxy :: Proxy OnboardingListCategoriesResource)
mempty
| rueshyna/gogol | gogol-books/gen/Network/Google/Resource/Books/Onboarding/ListCategories.hs | mpl-2.0 | 2,937 | 0 | 13 | 625 | 305 | 187 | 118 | 48 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
#include "overlap.h"
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Opaleye.X.Optional
( Optional
, MatchOptional, matchOptional
, Defaults, defaults
, Override, override
, Optionalize, optionalize
, Deoptionalize
)
where
-- base ----------------------------------------------------------------------
import Control.Applicative (Alternative)
#if !MIN_VERSION_base(4, 8, 0)
import Control.Applicative (Applicative)
#endif
import Control.Monad (MonadPlus)
#if !MIN_VERSION_base(4, 8, 0)
import Data.Foldable (Foldable)
#endif
#if !MIN_VERSION_base(4, 8, 0)
import Data.Traversable (Traversable)
#endif
#if !MIN_VERSION_base(4, 8, 0)
import Data.Monoid (Monoid)
#endif
import Data.Semigroup (Semigroup)
import Data.Typeable (Typeable)
import GHC.Generics (Generic, Generic1)
-- opaleye-x -----------------------------------------------------------------
import Opaleye.X.Internal
-- profunctors ---------------------------------------------------------------
import Data.Profunctor (Profunctor, dimap)
-- product-profunctors -------------------------------------------------------
import Data.Profunctor.Product (ProductProfunctor, (***!), empty)
import Data.Profunctor.Product.Default (Default, def)
------------------------------------------------------------------------------
--newtype Option a = Option (Maybe a)
deriving instance Functor Option
deriving instance Foldable Option
deriving instance Traversable Option
deriving instance Applicative Option
deriving instance Alternative Option
deriving instance Monad Option
deriving instance MonadPlus Option
deriving instance Eq a => Eq (Option a)
deriving instance Ord a => Ord (Option a)
deriving instance Read a => Read (Option a)
deriving instance Show a => Show (Option a)
deriving instance Monoid a => Monoid (Option a)
deriving instance Semigroup a => Semigroup (Option a)
deriving instance Generic (Option a)
deriving instance Generic1 Option
deriving instance Typeable Option
------------------------------------------------------------------------------
instance __OVERLAPPABLE__ (Default p (Maybe a) (Maybe b), Profunctor p) =>
Default p (Option a) (Option b)
where
def = dimap (\(Option a) -> a) Option def
------------------------------------------------------------------------------
instance Default (R Maybe (->)) (Option a) a where
def = R (\(Option a) -> a)
------------------------------------------------------------------------------
--newtype Optional a = Optional (DistributeOption a)
deriving instance Generic (Optional a)
deriving instance Typeable Optional
------------------------------------------------------------------------------
instance (Options a oa, Options b ob, Default p oa ob, Profunctor p) =>
Default p (Optional a) (Optional b)
where
def = dimap (\(Optional a) -> a) Optional def
------------------------------------------------------------------------------
type MatchOptional a =
( Options a (DistributeOption a)
, Default (R Maybe (->)) (DistributeOption a) a
)
------------------------------------------------------------------------------
matchOptional :: MatchOptional a => b -> (a -> b) -> Optional a -> b
matchOptional b f (Optional a) = let R p = def in maybe b f (p a)
------------------------------------------------------------------------------
newtype DefaultsPP a b = DefaultsPP b
------------------------------------------------------------------------------
instance Profunctor DefaultsPP where
dimap _ r (DefaultsPP p) = DefaultsPP (r p)
------------------------------------------------------------------------------
instance ProductProfunctor DefaultsPP where
empty = DefaultsPP ()
DefaultsPP a ***! DefaultsPP b = DefaultsPP (a, b)
------------------------------------------------------------------------------
instance Default DefaultsPP (Option a) (Option a) where
def = DefaultsPP (Option Nothing)
------------------------------------------------------------------------------
type Defaults a =
( Options a (DistributeOption a)
, Default DefaultsPP (DistributeOption a) (DistributeOption a)
)
------------------------------------------------------------------------------
defaults :: forall a. Defaults a => Optional a
defaults = let DefaultsPP n = p in Optional n
where
p = def :: DefaultsPP (DistributeOption a) (DistributeOption a)
------------------------------------------------------------------------------
newtype OverridePP a b = OverridePP (a -> b)
deriving (Profunctor, ProductProfunctor)
------------------------------------------------------------------------------
instance Default OverridePP a (Option a) where
def = OverridePP (Option . Just)
------------------------------------------------------------------------------
type Override a =
( Options a (DistributeOption a)
, Default OverridePP a (DistributeOption a)
)
------------------------------------------------------------------------------
override :: Override a => a -> Optional a
override = let OverridePP f = def in Optional . f
------------------------------------------------------------------------------
newtype OptionalizePP a b = OptionalizePP (a -> b)
deriving (Profunctor, ProductProfunctor)
------------------------------------------------------------------------------
instance __INCOHERENT__ Default OptionalizePP a a where
def = OptionalizePP id
------------------------------------------------------------------------------
instance Default OptionalizePP a (Option a) where
def = OptionalizePP (Option . Just)
------------------------------------------------------------------------------
instance __OVERLAPS__
(Options a oa, Options b ob, Default OptionalizePP oa ob)
=>
Default OptionalizePP (Optional a) (Optional b)
where
def = dimap (\(Optional a) -> a) Optional def
------------------------------------------------------------------------------
instance __OVERLAPPABLE__ (Options b o, Default OptionalizePP a o) =>
Default OptionalizePP a (Optional b)
where
def = let OptionalizePP p = def in OptionalizePP $ Optional . p
------------------------------------------------------------------------------
type Optionalize a b = (Optionals a b, Default OptionalizePP a b)
------------------------------------------------------------------------------
optionalize :: Optionalize a b => a -> b
optionalize = let OptionalizePP p = def in p
------------------------------------------------------------------------------
type Deoptionalize a = CollectOptional a
| duairc/opaleye-x | src/Opaleye/X/Optional.hs | mpl-2.0 | 7,153 | 0 | 10 | 1,042 | 1,514 | 818 | 696 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigQuery.Jobs.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all jobs that you started in the specified project. Job
-- information is available for a six month period after creation. The job
-- list is sorted in reverse chronological order, by job creation time.
-- Requires the Can View project role, or the Is Owner project role if you
-- set the allUsers property.
--
-- /See:/ <https://cloud.google.com/bigquery/ BigQuery API Reference> for @bigquery.jobs.list@.
module Network.Google.Resource.BigQuery.Jobs.List
(
-- * REST Resource
JobsListResource
-- * Creating a Request
, jobsList
, JobsList
-- * Request Lenses
, jlMaxCreationTime
, jlMinCreationTime
, jlStateFilter
, jlProjection
, jlPageToken
, jlProjectId
, jlAllUsers
, jlParentJobId
, jlMaxResults
) where
import Network.Google.BigQuery.Types
import Network.Google.Prelude
-- | A resource alias for @bigquery.jobs.list@ method which the
-- 'JobsList' request conforms to.
type JobsListResource =
"bigquery" :>
"v2" :>
"projects" :>
Capture "projectId" Text :>
"jobs" :>
QueryParam "maxCreationTime" (Textual Word64) :>
QueryParam "minCreationTime" (Textual Word64) :>
QueryParams "stateFilter" JobsListStateFilter :>
QueryParam "projection" JobsListProjection :>
QueryParam "pageToken" Text :>
QueryParam "allUsers" Bool :>
QueryParam "parentJobId" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :> Get '[JSON] JobList
-- | Lists all jobs that you started in the specified project. Job
-- information is available for a six month period after creation. The job
-- list is sorted in reverse chronological order, by job creation time.
-- Requires the Can View project role, or the Is Owner project role if you
-- set the allUsers property.
--
-- /See:/ 'jobsList' smart constructor.
data JobsList =
JobsList'
{ _jlMaxCreationTime :: !(Maybe (Textual Word64))
, _jlMinCreationTime :: !(Maybe (Textual Word64))
, _jlStateFilter :: !(Maybe [JobsListStateFilter])
, _jlProjection :: !(Maybe JobsListProjection)
, _jlPageToken :: !(Maybe Text)
, _jlProjectId :: !Text
, _jlAllUsers :: !(Maybe Bool)
, _jlParentJobId :: !(Maybe Text)
, _jlMaxResults :: !(Maybe (Textual Word32))
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'JobsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'jlMaxCreationTime'
--
-- * 'jlMinCreationTime'
--
-- * 'jlStateFilter'
--
-- * 'jlProjection'
--
-- * 'jlPageToken'
--
-- * 'jlProjectId'
--
-- * 'jlAllUsers'
--
-- * 'jlParentJobId'
--
-- * 'jlMaxResults'
jobsList
:: Text -- ^ 'jlProjectId'
-> JobsList
jobsList pJlProjectId_ =
JobsList'
{ _jlMaxCreationTime = Nothing
, _jlMinCreationTime = Nothing
, _jlStateFilter = Nothing
, _jlProjection = Nothing
, _jlPageToken = Nothing
, _jlProjectId = pJlProjectId_
, _jlAllUsers = Nothing
, _jlParentJobId = Nothing
, _jlMaxResults = Nothing
}
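-- A usage sketch (added note, not generated documentation): assuming the lens
-- operators '&' and '?~' are in scope (e.g. from Control.Lens), and with
-- "my-project" standing in for a real project id, a request for up to ten
-- jobs from all users could be built as:
--
-- > jobsList "my-project" & jlMaxResults ?~ 10 & jlAllUsers ?~ True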
-- | Max value for job creation time, in milliseconds since the POSIX epoch.
-- If set, only jobs created before or at this timestamp are returned
jlMaxCreationTime :: Lens' JobsList (Maybe Word64)
jlMaxCreationTime
= lens _jlMaxCreationTime
(\ s a -> s{_jlMaxCreationTime = a})
. mapping _Coerce
-- | Min value for job creation time, in milliseconds since the POSIX epoch.
-- If set, only jobs created after or at this timestamp are returned
jlMinCreationTime :: Lens' JobsList (Maybe Word64)
jlMinCreationTime
= lens _jlMinCreationTime
(\ s a -> s{_jlMinCreationTime = a})
. mapping _Coerce
-- | Filter for job state
jlStateFilter :: Lens' JobsList [JobsListStateFilter]
jlStateFilter
= lens _jlStateFilter
(\ s a -> s{_jlStateFilter = a})
. _Default
. _Coerce
-- | Restrict information returned to a set of selected fields
jlProjection :: Lens' JobsList (Maybe JobsListProjection)
jlProjection
= lens _jlProjection (\ s a -> s{_jlProjection = a})
-- | Page token, returned by a previous call, to request the next page of
-- results
jlPageToken :: Lens' JobsList (Maybe Text)
jlPageToken
= lens _jlPageToken (\ s a -> s{_jlPageToken = a})
-- | Project ID of the jobs to list
jlProjectId :: Lens' JobsList Text
jlProjectId
= lens _jlProjectId (\ s a -> s{_jlProjectId = a})
-- | Whether to display jobs owned by all users in the project. Default false
jlAllUsers :: Lens' JobsList (Maybe Bool)
jlAllUsers
= lens _jlAllUsers (\ s a -> s{_jlAllUsers = a})
-- | If set, retrieves only jobs whose parent is this job. Otherwise,
-- retrieves only jobs which have no parent
jlParentJobId :: Lens' JobsList (Maybe Text)
jlParentJobId
= lens _jlParentJobId
(\ s a -> s{_jlParentJobId = a})
-- | Maximum number of results to return
jlMaxResults :: Lens' JobsList (Maybe Word32)
jlMaxResults
= lens _jlMaxResults (\ s a -> s{_jlMaxResults = a})
. mapping _Coerce
instance GoogleRequest JobsList where
type Rs JobsList = JobList
type Scopes JobsList =
'["https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only"]
requestClient JobsList'{..}
= go _jlProjectId _jlMaxCreationTime
_jlMinCreationTime
(_jlStateFilter ^. _Default)
_jlProjection
_jlPageToken
_jlAllUsers
_jlParentJobId
_jlMaxResults
(Just AltJSON)
bigQueryService
where go
= buildClient (Proxy :: Proxy JobsListResource)
mempty
| brendanhay/gogol | gogol-bigquery/gen/Network/Google/Resource/BigQuery/Jobs/List.hs | mpl-2.0 | 6,768 | 0 | 21 | 1,668 | 1,036 | 598 | 438 | 143 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Betty.Model where
import ClassyPrelude.Yesod
import Data.Time.LocalTime (TimeZone)
------------------------------------------------------------------------
-- Blood glucose units.
data BGUnit = MgDL | Mmol
deriving (Show, Read, Eq, Enum, Bounded, Generic)
derivePersistField "BGUnit"
instance FromJSON BGUnit
instance ToJSON BGUnit
------------------------------------------------------------------------
-- Weight units.
data WtUnit = Kg | Lb
deriving (Show, Read, Eq, Enum, Bounded, Generic)
derivePersistField "WtUnit"
instance FromJSON WtUnit
instance ToJSON WtUnit
------------------------------------------------------------------------
-- A custom timezone field to replace ZonedTime, which has been
-- deprecated as of Persistent 2.0. We'll use UTCTime + TZ instead of
-- ZonedTime.
newtype TZ = TZ TimeZone
deriving (Show, Read, Eq, Generic)
derivePersistField "TZ"
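-- A sketch of the intended use (an assumption on my part, not from the
-- original module): store a UTCTime column alongside a TZ column, and
-- recover a zoned value for display with 'Data.Time.LocalTime.utcToZonedTime':
--
-- > toZoned :: UTCTime -> TZ -> ZonedTime
-- > toZoned utc (TZ tz) = utcToZonedTime tz utc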
------------------------------------------------------------------------
| sajith/betty-web | Betty/Model.hs | agpl-3.0 | 1,040 | 0 | 6 | 156 | 179 | 97 | 82 | -1 | -1 |
--------------------------------------------------------------------------------
{-| Module : MultiSet
Copyright : (c) Daan Leijen 2002
License : BSD-style
Maintainer : [email protected]
Stability : provisional
Portability : portable
An implementation of multi sets on top of the "Map" module. A multi set
differs from a /bag/ in the sense that it is represented as a map from elements
to occurrence counts instead of retaining all elements. This means that equality
on elements should be defined as a /structural/ equality instead of an
equivalence relation. If this is not the case, operations that observe the
elements, like 'filter' and 'fold', should be used with care.
-}
---------------------------------------------------------------------------------}
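-- A small illustration of the occurrence-count representation described
-- above (an added example, not part of the original documentation):
--
-- > MultiSet\> toOccurList (fromList [1,1,2])
-- > [(1,2),(2,1)]
-- > MultiSet\> occur 1 (fromList [1,1,2])
-- > 2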
module MultiSet (
-- * MultiSet type
MultiSet -- instance Eq,Show
-- * Operators
, (\\)
-- *Query
, isEmpty
, size
, distinctSize
, member
, occur
, subset
, properSubset
-- * Construction
, empty
, single
, insert
, insertMany
, delete
, deleteAll
-- * Combine
, union
, difference
, intersection
, unions
-- * Filter
, filter
, partition
-- * Fold
, fold
, foldOccur
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteMinAll
, deleteMaxAll
-- * Conversion
, elems
-- ** List
, toList
, fromList
-- ** Ordered list
, toAscList
, fromAscList
, fromDistinctAscList
-- ** Occurrence lists
, toOccurList
, toAscOccurList
, fromOccurList
, fromAscOccurList
-- ** Map
, toMap
, fromMap
, fromOccurMap
-- * Debugging
, showTree
, showTreeWith
, valid
) where
import Prelude hiding (map,filter)
import qualified Prelude (map,filter)
import qualified Map as M
{--------------------------------------------------------------------
Operators
--------------------------------------------------------------------}
infixl 9 \\
-- | /O(n+m)/. See 'difference'.
(\\) :: Ord a => MultiSet a -> MultiSet a -> MultiSet a
b1 \\ b2 = difference b1 b2
{--------------------------------------------------------------------
MultiSets are a simple wrapper around Maps, 'Map.Map'
--------------------------------------------------------------------}
-- | A multi set of values @a@.
newtype MultiSet a = MultiSet (M.Map a Int)
{--------------------------------------------------------------------
Query
--------------------------------------------------------------------}
-- | /O(1)/. Is the multi set empty?
isEmpty :: MultiSet a -> Bool
isEmpty (MultiSet m)
= M.isEmpty m
-- | /O(1)/. Returns the number of distinct elements in the multi set, ie. (@distinctSize mset == Set.size ('toSet' mset)@).
distinctSize :: MultiSet a -> Int
distinctSize (MultiSet m)
= M.size m
-- | /O(n)/. The number of elements in the multi set.
size :: MultiSet a -> Int
size b
= foldOccur (\x n m -> n+m) 0 b
-- | /O(log n)/. Is the element in the multi set?
member :: Ord a => a -> MultiSet a -> Bool
member x m
= (occur x m > 0)
-- | /O(log n)/. The number of occurrences of an element in the multi set.
occur :: Ord a => a -> MultiSet a -> Int
occur x (MultiSet m)
= case M.lookup x m of
Nothing -> 0
Just n -> n
-- | /O(n+m)/. Is this a subset of the multi set?
subset :: Ord a => MultiSet a -> MultiSet a -> Bool
subset (MultiSet m1) (MultiSet m2)
= M.subsetBy (<=) m1 m2
-- | /O(n+m)/. Is this a proper subset? (ie. a subset and not equal)
properSubset :: Ord a => MultiSet a -> MultiSet a -> Bool
properSubset b1 b2
| distinctSize b1 == distinctSize b2 = (subset b1 b2) && (b1 /= b2)
| distinctSize b1 < distinctSize b2 = (subset b1 b2)
| otherwise = False
{--------------------------------------------------------------------
Construction
--------------------------------------------------------------------}
-- | /O(1)/. Create an empty multi set.
empty :: MultiSet a
empty
= MultiSet (M.empty)
-- | /O(1)/. Create a singleton multi set.
single :: a -> MultiSet a
single x
  = MultiSet (M.single x 1)
{--------------------------------------------------------------------
Insertion, Deletion
--------------------------------------------------------------------}
-- | /O(log n)/. Insert an element in the multi set.
insert :: Ord a => a -> MultiSet a -> MultiSet a
insert x (MultiSet m)
= MultiSet (M.insertWith (+) x 1 m)
-- | /O(min(n,W))/. The expression (@insertMany x count mset@)
-- inserts @count@ instances of @x@ in the multi set @mset@.
insertMany :: Ord a => a -> Int -> MultiSet a -> MultiSet a
insertMany x count (MultiSet m)
= MultiSet (M.insertWith (+) x count m)
-- | /O(log n)/. Delete a single element.
delete :: Ord a => a -> MultiSet a -> MultiSet a
delete x (MultiSet m)
= MultiSet (M.updateWithKey f x m)
where
f x n | n > 0 = Just (n-1)
| otherwise = Nothing
-- | /O(log n)/. Delete all occurrences of an element.
deleteAll :: Ord a => a -> MultiSet a -> MultiSet a
deleteAll x (MultiSet m)
= MultiSet (M.delete x m)
{--------------------------------------------------------------------
Combine
--------------------------------------------------------------------}
-- | /O(n+m)/. Union of two multisets. The union adds the elements together.
--
-- > MultiSet\> union (fromList [1,1,2]) (fromList [1,2,2,3])
-- > {1,1,1,2,2,2,3}
union :: Ord a => MultiSet a -> MultiSet a -> MultiSet a
union (MultiSet t1) (MultiSet t2)
= MultiSet (M.unionWith (+) t1 t2)
-- | /O(n+m)/. Intersection of two multisets.
--
-- > MultiSet\> intersection (fromList [1,1,2]) (fromList [1,2,2,3])
-- > {1,2}
intersection :: Ord a => MultiSet a -> MultiSet a -> MultiSet a
intersection (MultiSet t1) (MultiSet t2)
= MultiSet (M.intersectionWith min t1 t2)
-- | /O(n+m)/. Difference between two multisets.
--
-- > MultiSet\> difference (fromList [1,1,2]) (fromList [1,2,2,3])
-- > {1}
difference :: Ord a => MultiSet a -> MultiSet a -> MultiSet a
difference (MultiSet t1) (MultiSet t2)
= MultiSet (M.differenceWithKey f t1 t2)
where
f x n m | n-m > 0 = Just (n-m)
| otherwise = Nothing
-- | The union of a list of multisets.
unions :: Ord a => [MultiSet a] -> MultiSet a
unions multisets
= MultiSet (M.unions [m | MultiSet m <- multisets])
{--------------------------------------------------------------------
Filter and partition
--------------------------------------------------------------------}
-- | /O(n)/. Filter all elements that satisfy some predicate.
filter :: Ord a => (a -> Bool) -> MultiSet a -> MultiSet a
filter p (MultiSet m)
= MultiSet (M.filterWithKey (\x n -> p x) m)
-- | /O(n)/. Partition the multi set according to some predicate.
partition :: Ord a => (a -> Bool) -> MultiSet a -> (MultiSet a,MultiSet a)
partition p (MultiSet m)
= (MultiSet l,MultiSet r)
where
(l,r) = M.partitionWithKey (\x n -> p x) m
{--------------------------------------------------------------------
Fold
--------------------------------------------------------------------}
-- | /O(n)/. Fold over each element in the multi set.
fold :: (a -> b -> b) -> b -> MultiSet a -> b
fold f z (MultiSet m)
= M.foldWithKey apply z m
where
apply x n z | n > 0 = apply x (n-1) (f x z)
| otherwise = z
-- | /O(n)/. Fold over all occurrences of an element at once.
foldOccur :: (a -> Int -> b -> b) -> b -> MultiSet a -> b
foldOccur f z (MultiSet m)
= M.foldWithKey f z m
{--------------------------------------------------------------------
Minimal, Maximal
--------------------------------------------------------------------}
-- | /O(log n)/. The minimal element of a multi set.
findMin :: MultiSet a -> a
findMin (MultiSet m)
= fst (M.findMin m)
-- | /O(log n)/. The maximal element of a multi set.
findMax :: MultiSet a -> a
findMax (MultiSet m)
= fst (M.findMax m)
-- | /O(log n)/. Delete the minimal element.
deleteMin :: MultiSet a -> MultiSet a
deleteMin (MultiSet m)
= MultiSet (M.updateMin f m)
where
f n | n > 0 = Just (n-1)
| otherwise = Nothing
-- | /O(log n)/. Delete the maximal element.
deleteMax :: MultiSet a -> MultiSet a
deleteMax (MultiSet m)
= MultiSet (M.updateMax f m)
where
f n | n > 0 = Just (n-1)
| otherwise = Nothing
-- | /O(log n)/. Delete all occurrences of the minimal element.
deleteMinAll :: MultiSet a -> MultiSet a
deleteMinAll (MultiSet m)
= MultiSet (M.deleteMin m)
-- | /O(log n)/. Delete all occurrences of the maximal element.
deleteMaxAll :: MultiSet a -> MultiSet a
deleteMaxAll (MultiSet m)
= MultiSet (M.deleteMax m)
{--------------------------------------------------------------------
List variations
--------------------------------------------------------------------}
-- | /O(n)/. The list of elements.
elems :: MultiSet a -> [a]
elems s
= toList s
{--------------------------------------------------------------------
Lists
--------------------------------------------------------------------}
-- | /O(n)/. Create a list with all elements.
toList :: MultiSet a -> [a]
toList s
= toAscList s
-- | /O(n)/. Create an ascending list of all elements.
toAscList :: MultiSet a -> [a]
toAscList (MultiSet m)
= [y | (x,n) <- M.toAscList m, y <- replicate n x]
-- | /O(n*log n)/. Create a multi set from a list of elements.
fromList :: Ord a => [a] -> MultiSet a
fromList xs
= MultiSet (M.fromListWith (+) [(x,1) | x <- xs])
-- | /O(n)/. Create a multi set from an ascending list in linear time.
fromAscList :: Eq a => [a] -> MultiSet a
fromAscList xs
= MultiSet (M.fromAscListWith (+) [(x,1) | x <- xs])
-- | /O(n)/. Create a multi set from an ascending list of distinct elements in linear time.
fromDistinctAscList :: [a] -> MultiSet a
fromDistinctAscList xs
= MultiSet (M.fromDistinctAscList [(x,1) | x <- xs])
-- | /O(n)/. Create a list of element\/occurrence pairs.
toOccurList :: MultiSet a -> [(a,Int)]
toOccurList b
= toAscOccurList b
-- | /O(n)/. Create an ascending list of element\/occurrence pairs.
toAscOccurList :: MultiSet a -> [(a,Int)]
toAscOccurList (MultiSet m)
= M.toAscList m
-- | /O(n*log n)/. Create a multi set from a list of element\/occurrence pairs.
fromOccurList :: Ord a => [(a,Int)] -> MultiSet a
fromOccurList xs
= MultiSet (M.fromListWith (+) (Prelude.filter (\(x,i) -> i > 0) xs))
-- | /O(n)/. Create a multi set from an ascending list of element\/occurrence pairs.
fromAscOccurList :: Ord a => [(a,Int)] -> MultiSet a
fromAscOccurList xs
= MultiSet (M.fromAscListWith (+) (Prelude.filter (\(x,i) -> i > 0) xs))
{--------------------------------------------------------------------
Maps
--------------------------------------------------------------------}
-- | /O(1)/. Convert to a 'Map.Map' from elements to number of occurrences.
toMap :: MultiSet a -> M.Map a Int
toMap (MultiSet m)
= m
-- | /O(n)/. Convert a 'Map.Map' from elements to occurrences into a multi set.
fromMap :: Ord a => M.Map a Int -> MultiSet a
fromMap m
= MultiSet (M.filter (>0) m)
-- | /O(1)/. Convert a 'Map.Map' from elements to occurrences into a multi set.
-- Assumes that the 'Map.Map' contains only elements that occur at least once.
fromOccurMap :: M.Map a Int -> MultiSet a
fromOccurMap m
= MultiSet m
{--------------------------------------------------------------------
Eq, Ord
--------------------------------------------------------------------}
instance Eq a => Eq (MultiSet a) where
(MultiSet m1) == (MultiSet m2) = (m1==m2)
{--------------------------------------------------------------------
Show
--------------------------------------------------------------------}
instance Show a => Show (MultiSet a) where
showsPrec d b = showSet (toAscList b)
showSet :: Show a => [a] -> ShowS
showSet []
= showString "{}"
showSet (x:xs)
= showChar '{' . shows x . showTail xs
where
showTail [] = showChar '}'
showTail (x:xs) = showChar ',' . shows x . showTail xs
{--------------------------------------------------------------------
Debugging
--------------------------------------------------------------------}
-- | /O(n)/. Show the tree structure that implements the 'MultiSet'. The tree
-- is shown in a compressed, /hanging/ format.
showTree :: (Show a) => MultiSet a -> String
showTree mset
= showTreeWith True False mset
-- | /O(n)/. The expression (@showTreeWith hang wide map@) shows
-- the tree that implements the multi set. The tree is shown /hanging/ when @hang@ is @True@
-- and otherwise as a /rotated/ tree. When @wide@ is @True@ an extra wide version
-- is shown.
showTreeWith :: Show a => Bool -> Bool -> MultiSet a -> String
showTreeWith hang wide (MultiSet m)
= M.showTreeWith (\x n -> show x ++ " (" ++ show n ++ ")") hang wide m
-- | /O(n)/. Is this a valid multi set?
valid :: Ord a => MultiSet a -> Bool
valid (MultiSet m)
= M.valid m && (M.isEmpty (M.filter (<=0) m))
| alanz/Blobs | lib/DData/MultiSet.hs | lgpl-2.1 | 13,651 | 0 | 12 | 3,211 | 3,049 | 1,581 | 1,468 | 208 | 2 |
-- import Demo.StateT (demos)
-- import Demo.ReaderT1 (demos)
-- import Demo.ExceptT (demos)
-- import Demo.ContT (demos)
-- import Demo.Array (demos)
-- import Demo.Surpassing
-- import Demo.Search
-- import Demo.Foldable
import Demo.Monad
runDemo demos = print demos
main = runDemo demos
| seckcoder/lang-learn | haskell/lambda-calculus/src/Demo/Main.hs | unlicense | 297 | 0 | 5 | 46 | 33 | 20 | 13 | 3 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1.ImageSource where
import GHC.Generics
import Openshift.V1.ImageSourcePath
import Openshift.V1.LocalObjectReference
import Openshift.V1.ObjectReference
import qualified Data.Aeson
-- |
data ImageSource = ImageSource
{ from :: ObjectReference -- ^ reference to ImageStreamTag, ImageStreamImage, or DockerImage
, paths :: [ImageSourcePath] -- ^ paths to copy from image
, pullSecret :: Maybe LocalObjectReference -- ^ overrides the default pull secret for the source image
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON ImageSource
instance Data.Aeson.ToJSON ImageSource
| minhdoboi/deprecated-openshift-haskell-api | openshift/lib/Openshift/V1/ImageSource.hs | apache-2.0 | 793 | 0 | 9 | 114 | 113 | 71 | 42 | 18 | 0 |
-------------------------------------------------------------------------------
-- |
-- Module : CCO.Diag.Lexer
-- Copyright : (c) 2008 Utrecht University
-- License : All rights reserved
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- A 'Lexer' for 'Diag's.
--
-------------------------------------------------------------------------------
module CCO.Diag.Lexer (
-- * Tokens
Token -- abstract, instance: Symbol
-- * Lexer
, lexer -- :: Lexer Token
-- * Token parsers
, keyword -- :: String -> Parser Token String
, ident -- :: Parser Token Ident
) where
import CCO.Diag.Base (Ident)
import CCO.Lexing hiding (satisfy)
import CCO.Parsing (Symbol (describe), Parser, satisfy, (<!>))
import Control.Applicative (Alternative ((<|>)), (<$>), some)
-------------------------------------------------------------------------------
-- Tokens
-------------------------------------------------------------------------------
-- | Type of 'Diag' tokens.
data Token
= Keyword { fromKeyword :: String } -- ^ Keyword.
| Ident { fromIdent :: Ident } -- ^ Identifier.
instance Symbol Token where
describe (Keyword _) lexeme = "keyword " ++ lexeme
describe (Ident _) lexeme = "identifier " ++ lexeme
-- | Retrieves whether a 'Token' is a 'Keyword'.
isKeyword :: Token -> Bool
isKeyword (Keyword _) = True
isKeyword _ = False
-- | Retrieves whether a 'Token' is an 'Ident'.
isIdent :: Token -> Bool
isIdent (Ident _) = True
isIdent _ = False
-------------------------------------------------------------------------------
-- Lexer
-------------------------------------------------------------------------------
-- | A 'Lexer' that recognises (and ignores) whitespace.
layout_ :: Lexer Token
layout_ = ignore (some (anyCharFrom " \n\t"))
-- | A 'Lexer' that recognises 'Keyword' tokens.
keyword_ :: Lexer Token
keyword_ = fmap Keyword $ string "compiler" <|> string "compile" <|>
string "end" <|> string "execute" <|>
string "for" <|> string "from" <|> string "in" <|>
string "interpreter" <|> string "on" <|>
string "platform" <|> string "program" <|>
string "to" <|> string "with"
-- | A 'Lexer' that recognises 'Ident' tokens.
ident_ :: Lexer Token
ident_ = Ident <$> some (anyCharBut " \n\t")
-- | A 'Lexer' for 'Diag's.
lexer :: Lexer Token
lexer = layout_ <|> keyword_ <|> ident_
-------------------------------------------------------------------------------
-- Token parsers
-------------------------------------------------------------------------------
-- | A 'Parser' that recognises a specified keyword.
keyword :: String -> Parser Token String
keyword key = fromKeyword <$>
satisfy (\tok -> isKeyword tok && fromKeyword tok == key) <!>
"keyword " ++ key
-- | A 'Parser' that recognises an identifier.
ident :: Parser Token Ident
ident = fromIdent <$> satisfy isIdent <!> "identifier"
| aochagavia/CompilerConstruction | tdiagrams/lib/CCO/Diag/Lexer.hs | apache-2.0 | 3,144 | 0 | 18 | 700 | 535 | 301 | 234 | 40 | 1 |
-- Module that makes Expr an instance of the Arbitrary type-class allowing
-- random expressions to be generated for use in QuickTests
-- This module need only export the function 'arbitrary' since it is the existence of that function (defined for 'Arbitrary (Expr a)') that makes Expr an instance of the Arbitrary class
module QuickTests.Arbitrary (arbitrary)
where
import Test.QuickCheck
import CAS
-- Since Expr is a custom type we MUST make it an instance of the Arbitrary type-class before we can use it inside QuickCheck properties. The instantiation will let QuickCheck know how to generate random objects of type Expr
-- 'arbitrary' is a definition (it is a function that takes no arguments so it is in effect a constant) which in this context must be of type 'Gen (Expr a)' i.e. a Gen action which corresponds to a random expression.
-- We define it using the 'sized' function which takes as its single argument a function taking an integer and returning a Gen (Expr a)
-- When we use 'sized' we get access to the size integer that QuickCheck uses to create arbitrary instances. We can use this size value to more intelligently construct the expressions (which is the purpose of arbitrary')
instance (Show a, Integral a) => Arbitrary (Expr a) where
arbitrary = sized arbitrary'
arbitrary' :: (Show a, Integral a) => Int -> Gen (Expr a)
arbitrary' 0 = arbitrary_const -- Base case which we define to be an arbitrary constant
arbitrary' 1 = oneof [arbitrary_atom, arbitrary_neg_atom] -- When the required size is 1 we simply return an atomic expression (which can be negative)
arbitrary' n = do
ns <- split n
assemble ns
where
assemble (b:bs) = assemble' bs $! arbitrary' b -- The result of split is never [] (at least a singleton)
assemble' [] e = e
assemble' (c:cs) e = assemble' cs $! apply op (arbitrary' c) e
op = oneof [pure (+), pure (*)]
apply o a b = o <*> a <*> b
split 0 = pure []
split a = do
p <- pick a
fmap (p:) (split (a - p))
pick a = choose (1, a)
-- The non-base case uses recursion, monad theory, and applicative functor techniques.
-- Since the result of arbitrary' is the Gen monad (which is analogous to Random) all our calculations must be monadic and so we use 'do' notation.
-- The first task in the 'do' is to use 'split' to construct a random separation of 'n' elements in to parts. 'split' takes an integer 'n' and returns a randomly generated list of integers which all add up to 'n'.
-- It does so recursively. The base case is 'split 0' where we return an empty list.
-- For non-zero 'n' we use 'pick' to get a random integer inside a Gen context. We use '<-' to extract the integer from the Gen context.
-- The next statement inside the 'do' conses the integer onto the list inside 'split (a - p)'. Since the recursive call 'split (a - p)' returns a list inside a Gen we use fmap to prepend 'p' to the list inside the Gen to get a larger list inside the Gen.
-- Since pick returns a monad and split is called from inside a monadic do sequence we are forced to respect the context throughout the calculation.
-- The definition of assemble basically sets it up to use assemble' which performs a "strict" accumulation of the expression as it goes along
-- Note how we take the first integer 'b' and use arbitrary' to create an expression from it. This serves as the initial state of the accumulator. We use $! to force strict evaluation of the result of arbitrary' b to avoid a Stack Overflow
-- The base case of assemble' occurs when the list of integers is empty in which case we simply return the accumulated expression 'e'
-- For the recursive case we apply an operation between arbitrary' c and the expression e which is the accumulator to create the new accumulator. The result of this operation is evaluated strictly and becomes the new accumulator for the recursive call to assemble' using cs
-- 'apply' is a function which takes three arguments. The first is an operator (* or +) placed inside the Gen context. It is chosen randomly by the definition of 'op'.
-- The definition of 'apply' takes the operator and two arguments and uses applicative functor technique to apply the operation between the two expressions, all of them inside the Gen context (since the whole calculation is Gen monadic).
-- Using currying this means that 'apply op' is a function that takes two Gen monads and returns a Gen monad i.e. its signature is "Gen (Expr a) -> Gen (Expr a) -> Gen (Expr a)".
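-- A worked illustration of the scheme above (an added example; the actual numbers are random): arbitrary' 5 might split 5 into [2,1,2]; assemble then generates a size-2 expression for the first part (say (x + 2)) and folds the remaining parts onto it with randomly chosen (+) or (*), giving something like (z * 3) + (y * (x + 2)).
-- Assuming the CAS module supplies a Show instance for Expr (the Show constraint on the instance above suggests it does), the generator can be inspected in ghci with:
-- > sample (arbitrary :: Gen (Expr Int))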
-- Constants and Symbols are the atomic expressions. Everything else is constructed from these (or by encapsulating them in some fashion).
-- We collect the Const and Symbol expressions into a single arbitrary definition which produces them with equal likelihood
-- This definition will be used to create negative atomic expressions as well.
arbitrary_atom :: (Integral a, Show a) => Gen (Expr a)
arbitrary_atom = oneof [arbitrary_const, arbitrary_symbol]
-- arbitrary_const returns a random Const object by taking a random integer from 1 to 9 and wrapping it inside Const.
-- We don't include 0 because it leaves sums unchanged and more importantly it reduces products to zero which is counter-productive for testing.
-- Negative constants are handled by 'arbitrary_neg_atom' which negates positive atomic expressions.
arbitrary_const :: (Integral a, Show a) => Gen (Expr a)
arbitrary_const = frequency $
map (\(f, n) -> (f, return n)) $
[(1000, 1), (100, 2), (10, 3), (1, 4), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)]
-- The constraint 'Integral a' in the signature is crucial since it allows us to use the const' smart constructor to create Const objects from randomly selected Int.
-- We use 'frequency' to change the, well, frequency with which the constants are generated when arbitrary_const is called. Our aim is to have lower integers be more frequently produced than higher ones since it will keep the expressions manageable (verboseCheck lets us know how the distribution is coming out).
-- 'frequency' takes a list of (Int, Gen) tuples where the integer is the weight with which the Gen is produced. So the higher the integer the more likely that Gen will be generated.
-- We first create a list of (Int, Int) tuples where we list the integers 1 to 9 and attach the required weights to them. Highest for 1, then 2, then 3 and the rest are equally weighted at the bottom.
-- The whole list of tuples is written out explicitly: 1, 2 and 3 get weights 1000, 100 and 10, and the remaining integers 4 to 9 each get a weight of 1.
-- We then use map and a lambda function to create Gen (Const Int) objects out of the second element of each tuple using "return n".
-- Note the use of pattern-matching within the lambda function definition to gain access to the second element.
-- Finally we present the constructed list to frequency for the generation of these objects.
-- Analogous to 'arbitrary_const' this definition, 'arbitrary_symbol', returns a Symbol object corresponding (randomly) to x, y or z.
-- Note that the signature reveals this to be a definition (and not a function). It corresponds to a randomly selected Expr.
arbitrary_symbol :: Gen (Expr a)
arbitrary_symbol = fmap Symbol $ elements ["x", "y", "z"]
-- This definition creates randomly generated negative atomic expressions
arbitrary_neg_atom :: (Show a, Integral a) => Gen (Expr a)
arbitrary_neg_atom = fmap negate arbitrary_atom -- We map the negate function on the expression inside the Gen returned by arbitrary_atom
| abid-mujtaba/haskell-cas | test/QuickTests/Arbitrary.hs | apache-2.0 | 7,981 | 0 | 13 | 1,786 | 642 | 364 | 278 | 31 | 3 |
-- https://rafal.io/posts/haskell-lenses-notes.html --
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveFunctor #-}
import Control.Lens.TH
--import qualified Data.Map as Map
import Data.Char -- toLower
data Person = P { _name :: String
, _addr :: Address
, _salary :: Int } deriving Show
data Address = A { _road :: String
, _city :: String
, _postcode :: String } deriving Show
$(makeLenses ''Person)
$(makeLenses ''Address)
type Lens' s a = forall f. Functor f => (a -> f a) -> s -> f s
newtype Identity a = Identity a deriving (Show, Functor)
-- https://hackage.haskell.org/package/transformers-0.2.2.0/docs/src/Data-Functor-Identity.html
instance Applicative (Identity) where
pure a = Identity a
Identity f <*> Identity x = Identity (f x)
runIdentity :: Identity s -> s
runIdentity (Identity x) = x
over :: Lens' s a -> (a -> a) -> s -> s
over ln f = runIdentity . ln (Identity . f)
set :: Lens' s a -> a -> s -> s
set ln = (over ln) . const
newtype Const v ignore_type = Const v deriving (Show,Functor)
--instance Functor (Const v) where
-- fmap f (Const x) = Const x
--instance Applicative (Const v) where
-- pure x = Const x
-- Const f <*> Const v = Const (f `mappend` v)
view :: Lens' s a -> s -> Const a s
view ln = ln Const
--(.~) = set
--(%~) = view
setPostcode :: String -> Person -> Person
setPostcode pc p = set (addr . postcode) pc p
-- setPostcode pc p = addr.postcode .~ pc $ p
-- |___ (.~) = set
-- |___ f $ x = f x
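-- A quick check of the pieces above (added example; 'fred' is a made-up value,
-- not from the original notes):
-- > fred = P "Fred" (A "72 Humberstone Rd" "Cambridge" "CB4 1JD") 100
-- > view (addr.city) fred
-- Const "Cambridge"
-- > set (addr.city) "Oxford" fred
-- P {_name = "Fred", _addr = A {_road = "72 Humberstone Rd", _city = "Oxford", _postcode = "CB4 1JD"}, _salary = 100}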
-- ~= Applications =~
data Temp = Temp { _fahrenheit :: Float } deriving Show
$(makeLenses ''Temp)
--fahrenheit :: Lens Temp Float
centigrade :: Lens' Temp Float
centigrade centi_fn (Temp faren) =
(\centi' -> Temp (cToF centi')) <$> (centi_fn (fToC faren))
cToF :: Float -> Float -- Centigrade to Fahrenheit
cToF c = c*9 / 5 + 32
fToC :: Float -> Float -- Fahrenheit to Centigrade
fToC f = (f-32)*5 / 9
temp100 :: Temp
temp100 = Temp 100
-- > view centigrade temp100
-- Const 37.77778
-- > set centigrade 100 temp100
-- Temp {_fahrenheit = 212.0}
--
data Time = Time { _hours :: Int, _mins :: Int } deriving Show
$(makeLenses ''Time)
now = Time { _hours = 3, _mins = 58 }
mins' :: Lens' Time Int
mins' min_fn (Time h m) = wrap <$> (min_fn m)
where
wrap :: Int -> Time
wrap m' | m' >= 60 = Time (h+(m' `div` 60)) (m' `mod` 60)
| m' < 0 = Time (h-1) (m'+60)
| otherwise = Time h m'
-- > over mins' (+4) now
-- Time {_hours = 4, _mins = 2}
-- ~= Non-record structures =~
{- Control.Lens.At
at :: Ord k => k -> Lens' (Map.Map k v) (Maybe v)
at k mb_fn m = wrap <$> (mb_fn mv)
where
mv = Map.lookup k m
wrap :: Maybe v -> Map.Map k v
wrap (Just v') = Map.insert k v' m
wrap Nothing = case mv of
                 Nothing -> m
Just _ -> Map.delete k m
-}
-- ~= Bit fields =~
{- Data.Bits.Lens
bitAt :: Int -> Lens' Int Bool
> view (bitAt 1) 3
True
> view (bitAt 1) 2
True
> view (bitAt 1) 5
False
bitAt :: Bits b => Int -> Lens' b Bool
-}
-- ~= Web-scraper =~
{- package <hexpat-lens>
p ^ .. _HTML' . to allNodes
. traverse . named "a"
. traverse . ix "href"
. filtered isLocal
. to trimSpaces
-}
--------------------------------
-- ~= Edwards' second insight =~
--------------------------------
-- Multi-focus lens
type Traversal' s a =
forall f. Applicative f => (a -> f a) -> (s -> f s)
-- 's' -> type of the container
-- 'a' -> type of the (multiple) foci
-- Applicative
{-
class Functor f => Applicative f where
pure :: a -> f a
(<*>) :: f (a -> b) -> f a -> f b
a bit like Monad but weaker | class Monad m where
| return :: a -> m a
| (>>=) :: m a -> (a -> m b) -> m b
every Monad is Applicative
| pure = return
| mf <*> mx = do { f <- mf; x <- mx; return (f x) }
but not vice verse
-}
-- road :: Lens' Address String -- defined by $(makeLenses ''Address)
-- road elt_fn (A r c p) = (\r' -> A r' c p) <$> (elt_fn r)
-- |__ box with | hole in it
-- |_____ thing to put in the hole
-- addr_strs :: Traversal' Address String
-- addr_strs elt_fn (A r c p) =
-- ...(\r' c' -> A r' c' p)...(elt_fn r)...(elt_fn c)
-- |___ box with | two holes :: | Stirng -> String -> Address
-- |______________|___ :: f String
-- |___ :: f String
addr_strs :: Traversal' Address String
--addr_strs elt_fn (A r c p) =
-- pure (\r' c' -> A r' c' p) <*> (elt_fn r) <*> (elt_fn c)
addr_strs elt_fn (A r c p) =
(\r' c' -> A r' c' p) <$> (elt_fn r)
<*> (elt_fn c)
-- ~= Using Traversals =~
over' :: Traversal' s a -> (a -> a) -> s -> s
over' ln f = runIdentity . ln (Identity . f)
-- | class Functor f => Applicative f where
-- | pure :: a -> f a
-- | (<*>) :: f (a -> b) -> f a -> f b
getConst :: Const v a -> v
getConst (Const x) = x
instance Monoid a => Applicative (Const a) where
pure x = Const mempty
(Const vf) <*> (Const va) = Const (vf `mappend` va)
-- | class Monoid m where
-- | mempty :: a
-- mappend :: a -> a -> a
-- definedin GHC.Base
-- | instance Monoid [a] where
-- | mempty = []
-- | mappend = (++)
--view' :: Traversal' s a -> (s -> a)
view' ln s = getConst (ln Const s)
fredA = A "72 Humberstone Rd" "Cambridge" "CB4 1JD"
-- > over' addr_strs (map toLower) fredA
-- A {_road = "72 humberstone rd", _city = "cambridge", _postcode = "CB4 1JD"}
-- > view' addr_strs fredA
-- "72 Humberstone RdCambridge"
-- ~= Non-uniform traversals =~
{-
The foci of a traversal can be highly selective
* Every alternate element of list
* All the even elements of a tree
* The 'name' fields of all records in a table whose
'salary' fields is > $20,000
-}
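-- A sketch of the first bullet above (my own example, not from the original
-- notes): a traversal whose foci are every alternate element of a list,
-- written with nothing more than the Applicative interface.
alternates :: Traversal' [a] a
alternates _ [] = pure []
alternates f [x] = (: []) <$> f x
alternates f (x:y:rest) = (\x' rest' -> x' : y : rest') <$> f x <*> alternates f rest
-- > over' alternates (*10) [1,2,3,4,5,6]
-- [10,2,30,4,50,6]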
-- ~= Composing traversals =`
{-
ln1 :: Lens' s1 s2
tr1 :: Traversal' s1 s2
ln2 :: Lens' s2 a
tr2 :: Traversal' s2 a
ln1 . ln2 :: Lens' s1 a
tr1 . tr2 :: Traversal' s1 a
tr1 . ln2 :: Traversal' s1 a
ln1 . tr2 :: Lens' s1 a
-}
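-- Putting the composition table above to work (added sketch): composing the
-- generated lens 'addr' with the traversal 'addr_strs' focuses every textual
-- field of a person's address.
personAddrStrs :: Traversal' Person String
personAddrStrs = addr . addr_strs
-- > view' personAddrStrs (P "Fred" fredA 100)
-- "72 Humberstone RdCambridge"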
-- =======> lens-3.9.1
{- It all rests on Haskell's abstraction facilities:
* Type classes
* Higher rank types
* Higher kinded type variables
QA: Records in Haskell, same field name in multiple records
  type class Has f, which checks if a record has a field f
-}
| egaburov/funstuff | Haskell/lenses/lens2.hs | apache-2.0 | 6,773 | 0 | 12 | 2,128 | 1,145 | 633 | 512 | 64 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Mangekyo.Conduit.Csv where
import Data.Conduit
import Data.Csv (HasHeader(..), defaultEncodeOptions, defaultDecodeOptions)
import Data.Csv.Conduit (fromCsvLiftError, toCsv)
import qualified Data.ByteString.Char8 as B
import qualified Data.Conduit.List as CL
import qualified Data.Text as T
import qualified Data.Vector as V
import Mangekyo.Conduit
import Mangekyo.Type as Type
format :: Format
format = Format { name = "csv"
, input = Just toValue
, output = Just fromValue
}
toValue :: Conduit B.ByteString MangekyoIO Type.Value
toValue = fromCsvLiftError errorHandler defaultDecodeOptions NoHeader =$= CL.map (Array . V.fromList . map String)
where
errorHandler e = userError $ show e
fromValue :: Monad m => Conduit Type.Value m B.ByteString
fromValue = CL.map toRecord =$= toCsv defaultEncodeOptions
toRecord :: Value -> [T.Text]
toRecord (Tuple vs) = map toText vs
toRecord (Array v) = map toText $ V.toList v
toRecord (String t) = [t]
toRecord v = [toText v]
toText :: Value -> T.Text
toText (String t) = t
toText v = string' v
| nakamuray/mangekyo | src/Mangekyo/Conduit/Csv.hs | bsd-2-clause | 1,138 | 0 | 10 | 218 | 365 | 202 | 163 | 28 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Category -- base
import Control.Scrobbler -- scrobblers
import Control.Scrobbler.Algorithm.MPD -- scrobblers
import Prelude hiding ((.), id) -- base
-- Simple scrobbler without fancy stuff
--
-- Each scrobbling step is accompanied by an announcement on stdout
main :: IO ()
main = scrobbler $
-- Tell lastfm about desire to scrobble previous track
announce . scrobble credentials .
-- Check if previous track is worth scrobbling
announce . contest .
-- Tell lastfm about started tracks
announce . updateNowPlaying credentials .
-- Get scrobble candidate from player of choice (MPD here)
candidate
-- Lastfm credentials. Easy to get with "liblastfm"
credentials :: Credentials
credentials = Credentials
{ apiKey = "__YOUR_API_KEY__"
, sessionKey = "__YOUR_SESSION_KEY__"
, secret = "__YOUR_SECRET__"
}
| supki/scrobblers | examples/Casual.hs | bsd-2-clause | 923 | 0 | 11 | 196 | 124 | 77 | 47 | 16 | 1 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
module Models.Post (
BlogSeries(..)
, BlogPost(..)
, PostSeries(..)
, PostOverview(..)
, postOverviewAllQuery
, seriesPostsQuery
, createSeriesTable
, createPostTable
) where
import Control.Applicative ((<$>), (<*>))
import Control.Lens
import Data.Aeson hiding (Series)
import qualified Data.ByteString.Char8 as B
import Data.Data
import Data.Int
import Data.Maybe
import Data.Proxy
import qualified Data.Text as T
import Data.Time (UTCTime)
import Database.PostgreSQL.Simple.FromRow (FromRow, field, fromRow)
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField (toField)
import Database.PostgreSQL.Simple.ToRow (ToRow, toRow)
import Database.PostgreSQL.Simple.Types (Query (..))
import GHC.Generics
import Prelude (Eq, Int, Show, ($), (.))
import Servant.Elm
import Web.FormUrlEncoded (FromForm)
--
-- Api Helpers for Frontend --
--
-- PostOverview
data PostOverview = PostOverview {
pid :: !Int
, ptitle :: !T.Text
, psynopsis :: Maybe T.Text
, ppubdate :: Maybe UTCTime
, pordinal :: Maybe Int
, pseriesid :: Maybe Int
, pseriesname :: Maybe T.Text
, pseriesdescription :: Maybe T.Text
} deriving (Eq, Show, Generic)
instance ElmType PostOverview
instance ToJSON PostOverview
instance FromJSON PostOverview
instance FromRow PostOverview where
fromRow = PostOverview <$> field <*> field <*> field
<*> field <*> field <*> field
<*> field <*> field
postOverviewAllQuery :: Query
postOverviewAllQuery = Query $ B.unwords [
"select p.id, p.title, p.synopsis, p.pubdate, p.ordinal, "
, "s.id, s.name, s.description from post p left join series s "
, "on p.seriesid = s.id where p.pubdate is NOT NULL "
, "order by p.pubdate DESC"
]
seriesPostsQuery :: Query
seriesPostsQuery = Query $ B.unwords [
"select id, authorid, seriesid, title, body, synopsis, "
, "created, modified, pubdate, ordinal "
, "from post where seriesid = (select seriesid from post p where p.id = ?) "
, "and pubdate is NOT NULL "
, "order by ordinal"
]
data PostSeries = PostSeries {
previous :: [BlogPost]
, current :: BlogPost
, next :: [BlogPost]
, series :: BlogSeries
} deriving (Eq, Show, Generic)
instance ElmType PostSeries
instance ToJSON PostSeries
instance FromJSON PostSeries
--
-- Table Definitions --
--
-- Table Definition Series
-- Post
data BlogPost = BlogPost {
bid :: !Int
, authorId :: !Int
, seriesId :: Maybe Int
, title :: !T.Text
, body :: Maybe T.Text
  , synopsis :: Maybe T.Text
, created :: !UTCTime
, modified :: Maybe UTCTime
, pubdate :: Maybe UTCTime
, ordinal :: Maybe Int
} deriving (Eq, Show, Generic, Data)
instance FromForm BlogPost
instance ElmType BlogPost
instance FromJSON BlogPost
instance ToJSON BlogPost
instance FromRow BlogPost where
fromRow = BlogPost <$> field <*> field <*> field
<*> field <*> field <*> field
<*> field <*> field <*> field
<*> field
instance ToRow BlogPost where
toRow p = [toField $ bid p
, toField $ authorId p
, toField $ seriesId p
, toField $ title p
, toField $ body p
, toField $ synopsis p
, toField $ created p
, toField $ modified p
, toField $ pubdate p
, toField $ ordinal p
]
createPostTable :: Query
createPostTable =
[sql|
CREATE TABLE IF NOT EXISTS post (
id SERIAL UNIQUE,
authorid INTEGER NOT NULL,
seriesid integer,
title character varying(255) NOT NULL,
body text,
synopsis text,
created timestamp with time zone NOT NULL,
modified timestamp with time zone,
pubdate timestamp with time zone,
ordinal integer,
CONSTRAINT post_pkey PRIMARY KEY (id),
CONSTRAINT author_id_post_fk FOREIGN KEY (authorid)
REFERENCES public.author (id) MATCH SIMPLE
ON UPDATE NO ACTION ON DELETE CASCADE,
CONSTRAINT series_id_post_fk FOREIGN KEY (seriesid)
REFERENCES public.series (id) MATCH SIMPLE
ON UPDATE NO ACTION ON DELETE NO ACTION
);
|]
data BlogSeries = BlogSeries {
sid :: !Int
, name :: !T.Text
, description :: !T.Text
, parentid :: Maybe Int
} deriving (Eq, Show, Generic, Data)
instance ElmType BlogSeries
instance FromJSON BlogSeries
instance ToJSON BlogSeries
instance FromRow BlogSeries where
fromRow = BlogSeries <$> field <*> field <*> field <*> field
instance ToRow BlogSeries where
toRow s = [toField $ sid s
, toField $ name s
, toField $ description s
, toField $ parentid s]
createSeriesTable :: Query
createSeriesTable =
[sql|
CREATE TABLE series (
id serial primary key,
name text NOT NULL,
description text NOT NULL,
parentid integer,
CONSTRAINT parent_series_fkey FOREIGN KEY (parentid)
REFERENCES public.series (id) MATCH SIMPLE
ON UPDATE NO ACTION ON DELETE NO ACTION
);
|]
| pellagic-puffbomb/simpleservantblog | src/Models/Post.hs | bsd-3-clause | 6,410 | 0 | 15 | 2,278 | 1,076 | 616 | 460 | 151 | 1 |
{-# LANGUAGE DeriveFoldable, DeriveTraversable, TupleSections #-}
-- | AI strategies to direct actors not controlled directly by human players.
-- No operation in this module involves the 'State' or 'Action' type.
module Game.LambdaHack.Client.AI.Strategy
( Strategy, nullStrategy, liftFrequency
, (.|), reject, (.=>), only, bestVariant, renameStrategy, returN, mapStrategyM
) where
import Prelude ()
import Prelude.Compat
import Control.Applicative
import Control.Monad (MonadPlus(..), ap)
import Data.Maybe
import Data.Text (Text)
import Game.LambdaHack.Common.Frequency as Frequency
import Game.LambdaHack.Common.Msg
-- | A strategy is a choice of (non-empty) frequency tables
-- of possible actions.
newtype Strategy a = Strategy { runStrategy :: [Frequency a] }
deriving (Show, Foldable, Traversable)
-- | Strategy is a monad. TODO: Can we write this as a monad transformer?
instance Monad Strategy where
{-# INLINE return #-}
return x = Strategy $ return $! uniformFreq "Strategy_return" [x]
m >>= f = normalizeStrategy $ Strategy
[ toFreq name [ (p * q, b)
| (p, a) <- runFrequency x
, y <- runStrategy (f a)
, (q, b) <- runFrequency y
]
| x <- runStrategy m
, let name = "Strategy_bind (" <> nameFrequency x <> ")"]
instance Functor Strategy where
fmap f (Strategy fs) = Strategy (map (fmap f) fs)
instance Applicative Strategy where
pure = return
(<*>) = ap
instance MonadPlus Strategy where
mzero = Strategy []
{-# INLINE mplus #-}
mplus (Strategy xs) (Strategy ys) = Strategy (xs ++ ys)
instance Alternative Strategy where
(<|>) = mplus
empty = mzero
normalizeStrategy :: Strategy a -> Strategy a
normalizeStrategy (Strategy fs) = Strategy $ filter (not . nullFreq) fs
nullStrategy :: Strategy a -> Bool
nullStrategy strat = null $ runStrategy strat
-- | Strategy where only the actions from the given single frequency table
-- can be picked.
liftFrequency :: Frequency a -> Strategy a
liftFrequency f = normalizeStrategy $ Strategy $ return f
infixr 2 .|
-- | Strategy with the actions from both argument strategies,
-- with original frequencies.
(.|) :: Strategy a -> Strategy a -> Strategy a
(.|) = mplus
-- | Strategy with no actions at all.
reject :: Strategy a
reject = mzero
infix 3 .=>
-- | Conditionally accepted strategy.
(.=>) :: Bool -> Strategy a -> Strategy a
p .=> m | p = m
| otherwise = mzero
-- | Strategy with all actions not satisfying the predicate removed.
-- The remaining actions keep their original relative frequency values.
only :: (a -> Bool) -> Strategy a -> Strategy a
only p s = normalizeStrategy $ do
x <- s
p x .=> return x
-- | When better choices are towards the start of the list,
-- this is the best frequency of the strategy.
bestVariant :: Strategy a -> Frequency a
bestVariant (Strategy []) = mzero
bestVariant (Strategy (f : _)) = f
-- | Overwrite the description of all frequencies within the strategy.
renameStrategy :: Text -> Strategy a -> Strategy a
renameStrategy newName (Strategy fs) = Strategy $ map (renameFreq newName) fs
-- | Like 'return', but pick a name of the single frequency.
returN :: Text -> a -> Strategy a
returN name x = Strategy $ return $! uniformFreq name [x]
-- TODO: express with traverse?
mapStrategyM :: Monad m => (a -> m (Maybe b)) -> Strategy a -> m (Strategy b)
mapStrategyM f s = do
let mapFreq freq = do
let g (k, a) = do
mb <- f a
return $! (k,) <$> mb
lbm <- mapM g $ runFrequency freq
return $! toFreq "mapStrategyM" $ catMaybes lbm
ls = runStrategy s
lt <- mapM mapFreq ls
return $! normalizeStrategy $ Strategy lt
| beni55/LambdaHack | Game/LambdaHack/Client/AI/Strategy.hs | bsd-3-clause | 3,729 | 0 | 19 | 823 | 1,046 | 549 | 497 | 73 | 1 |
module Unfolds where
myIterate :: (a -> a) -> a -> [a]
myIterate f x = go f x []
where go f x xs = x : go f (f x) xs
myUnfoldr :: (b -> Maybe (a, b)) -> b -> [a]
myUnfoldr f x = go f x []
where go f b as = case f b of
Nothing -> as
Just (a, nb) -> a : go f nb as
betterIterate :: (a -> a) -> a -> [a]
betterIterate f = myUnfoldr (\x -> Just (x, f x))
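-- A quick check in GHCi:
--
-- > take 3 (myIterate (+1) 0)                     == [0,1,2]
-- > take 3 (myUnfoldr (\b -> Just (b, b + 1)) 0)  == [0,1,2]
-- > take 3 (betterIterate (+1) 0)                 == [0,1,2]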
| abhean/phffp-examples | src/Unfolds.hs | bsd-3-clause | 403 | 0 | 11 | 146 | 235 | 122 | 113 | 11 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TupleSections #-}
import Plots
import Plots.Axis
import Plots.Types hiding (B)
import Plots.Themes
import Plots.Utils
import Data.List
import Diagrams.Prelude
import Diagrams.Backend.Rasterific
import Diagrams.Backend.CmdLine
import Diagrams.Coordinates.Polar
import Data.Array
import Data.Monoid.Recommend
import Dataset
fillOpacity = barStyle . mapped . _opacity
data1 = [(2.7,(1/7 @@ turn)),(3.0,(2/7 @@ turn)),(9.0,(3/7 @@ turn)),(6.2,(4/7 @@ turn)),(7.1,(5/7 @@ turn)),(8.5,(6/7 @@ turn)),(5.3,(7/7 @@ turn))]
data2 = [(5.7,(1/7 @@ turn)),(1.1,(2/7 @@ turn)),(9.0,(3/7 @@ turn)),(4.2,(4/7 @@ turn)),(7.1,(5/7 @@ turn)),(2.5,(6/7 @@ turn)),(6.3,(7/7 @@ turn))]
myaxis :: Axis B Polar Double
myaxis = polarAxis &~ do
pointsPlot' data1 $ do
doFill .= True
fillOpacity .= 0.5
pointsPlot' data2 $ do
plotColor .= blue
fillOpacity .= 0.5
doFill .= True
make :: Diagram B -> IO ()
make = renderRasterific "test.png" (mkWidth 1000) . frame 20
main :: IO ()
main = make $ renderAxis myaxis
| bergey/plots | examples/pointarea.hs | bsd-3-clause | 1,107 | 3 | 11 | 211 | 529 | 293 | 236 | 31 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -Wno-missing-signatures #-}
{-# OPTIONS_GHC -fwarn-incomplete-uni-patterns #-}
module AsmBunny where
-- Shared items between days 12, 23 and 25
import Data.Functor (($>))
import qualified Text.Megaparsec.String as P
import qualified Text.Megaparsec as P
import qualified Data.Map as Map
import Data.Map (Map)
-- AST
data RegisterOrInt = RegisterRI Register | IntRI Int deriving (Show)
data Register = Register Char deriving (Show, Ord, Eq)
data Asm = Copy RegisterOrInt Register
| Inc Register
| Dec Register
| Jump RegisterOrInt RegisterOrInt
deriving (Show)
-- PARSING
copy, inc, dec, jump :: P.Parser Asm
-- All Days
copy = (P.string "cpy" $> Copy) <*> parseRegisterOrInt <*> parseRegister
inc = (P.string "inc" $> Inc) <*> parseRegister
dec = (P.string "dec" $> Dec) <*> parseRegister
jump = (P.string "jnz" $> Jump) <*> parseRegisterOrInt <*> parseRegisterOrInt
-- All Days
parseRegisterOrInt :: P.Parser RegisterOrInt
parseRegisterOrInt = P.choice [P.try (RegisterRI <$> parseRegister),
IntRI <$> parseInt]
parseInt :: P.Parser Int
parseInt = do
_ <- P.string " "
minus <- P.optional (P.string "-")
v <- read <$> P.many (P.oneOf "0123456789")
return $ case minus of
Just _ -> -v
Nothing -> v
parseRegister :: P.Parser Register
parseRegister = P.string " " *> (Register <$> P.oneOf ['a' .. 'z'])
-- UTILS
increment r m = Map.insert r (getRegister r m + 1) m
decrement r m = Map.insert r (getRegister r m - 1) m
cp v r m = Map.insert r (getROI v m) m
getROI v m = case v of
RegisterRI r -> getRegister r m
IntRI i -> i
getRegister r m = Map.findWithDefault 0 r m
get c m = registers m Map.! (Register c)
evalAsm (Inc r) computer = incPc . (modifyRegisters (increment r)) $ computer
evalAsm (Dec r) computer = incPc . (modifyRegisters (decrement r)) $ computer
evalAsm (Copy a b) computer = incPc . (modifyRegisters (cp a b)) $ computer
evalAsm (Jump v doffset) computer = if (getROI v m) /= 0
then modifyPc (+(getROI doffset m)) computer
else incPc computer
where m = registers computer
-- Computer state
incPc = modifyPc (+1)
modifyPc :: (Int -> Int) -> Computer -> Computer
modifyPc f computer = computer {pc = f (pc computer)}
modifyRegisters f computer = computer {registers = f (registers computer)}
data Computer = Computer { pc :: Int
, registers :: (Map Register Int)
}
deriving (Show)
emptyComputer = Computer 0 Map.empty
computerWithRegisters r = Computer 0 (Map.fromList r)
| guibou/AdventOfCode2016 | src/AsmBunny.hs | bsd-3-clause | 2,684 | 0 | 12 | 654 | 939 | 496 | 443 | 58 | 2 |
module Problem174 where
import Data.Array
m :: Int
m = 10 ^ 6
main :: IO ()
-- t = a^2 - b^2 = (a-b)(a+b); for a symmetric lamina both factors are even
-- a-b < a+b  =>  (a-b)^2 < (a-b)(a+b) = t  =>  a-b < sqrt(t)
main = print $ length $ filter (\x -> x > 0 && x <= 10) $ elems $ accumArray
(+)
0
(1, m)
[ (diff * sm, 1)
| diff <- takeWhile (\d -> d * d < m) [2, 4 .. m]
, sm <- takeWhile (\s -> diff * s <= m) [diff + 2, diff + 4 .. m]
]
| adityagupta1089/Project-Euler-Haskell | src/problems/Problem174.hs | bsd-3-clause | 404 | 0 | 14 | 129 | 193 | 108 | 85 | 12 | 1 |
{-# LANGUAGE QuasiQuotes #-}
{-# OPTIONS -fno-warn-name-shadowing #-}
module Atomo.Kernel.Block (load) where
import qualified Data.Vector as V
import Atomo
import Atomo.Method
import Atomo.Pattern (bindings')
load :: VM ()
load = do
[p|Block new: (es: List) in: t|] =:::
[e|Block new: es arguments: [] in: t|]
[p|Block new: (es: List) arguments: (as: List) in: t|] =: do
t <- here "t"
es <- getList [e|es|]
as <- getList [e|as|]
return (Block t (map fromPattern as) (map fromExpression es))
[p|(b: Block) call|] =: do
b <- here "b" >>= findBlock
callBlock b []
[p|(b: Block) repeat|] =: do
Block c as es <- here "b" >>= findBlock
when (length as > 0) (throwError (BlockArity 0 (length as)))
withTop c (forever (evalAll es))
[p|(b: Block) repeat: (n: Integer)|] =: do
Block c as cs <- here "b" >>= findBlock
when (length as > 0) (throwError (BlockArity 0 (length as)))
Integer n <- here "n" >>= findInteger
vs <- V.replicateM (fromIntegral n) (withTop c (evalAll cs))
return $ List vs
[p|(b: Block) call: (... args)|] =: do
b <- here "b" >>= findBlock
vs <- getList [e|args|]
callBlock b vs
[p|(b: Block) call-in: c|] =: do
Block _ _ es <- here "b" >>= findBlock
c <- here "c"
withTop c (evalAll es)
[p|(b: Block) context|] =: do
Block s _ _ <- here "b" >>= findBlock
return s
[p|(b: Block) arguments|] =: do
Block _ as _ <- here "b" >>= findBlock
return $ list (map Pattern as)
[p|(b: Block) contents|] =: do
Block _ _ es <- here "b" >>= findBlock
return $ list (map Expression es)
[p|v do: (b: Block)|] =: do
v <- here "v"
b <- here "b" >>= findBlock
joinWith v b []
return v
[p|v do: (b: Block) with: (... args)|] =: do
v <- here "v"
b <- here "b" >>= findBlock
as <- getList [e|args|]
joinWith v b as
return v
[p|v join: (b: Block)|] =: do
v <- here "v"
b <- here "b" >>= findBlock
joinWith v b []
[p|v join: (b: Block) with: (... args)|] =: do
v <- here "v"
b <- here "b" >>= findBlock
as <- getList [e|args|]
joinWith v b as
joinWith :: Value -> Value -> [Value] -> VM Value
joinWith t (Block s ps bes) as
| length ps > length as =
throwError (BlockArity (length ps) (length as))
| null as || null ps =
case t of
Object { oDelegates = ds } ->
withTop (t { oDelegates = s:ds }) (evalAll bes)
_ -> do
blockScope <- newObject [s, t] noMethods
withTop blockScope (evalAll bes)
| otherwise = do
-- argument bindings
args <- newObject []
( toMethods . concat $ zipWith bindings' ps as
, emptyMap
)
case t of
Object { oDelegates = ds } ->
withTop (t { oDelegates = args : s : ds })
(evalAll bes)
_ -> do
blockScope <- newObject [args, s, t] noMethods
withTop blockScope (evalAll bes)
joinWith _ v _ = error $ "impossible: joinWith on " ++ show v
| vito/atomo | src/Atomo/Kernel/Block.hs | bsd-3-clause | 3,323 | 0 | 16 | 1,185 | 1,219 | 601 | 618 | 89 | 3 |
{-# LANGUAGE Trustworthy #-}
-- | Labeled expressions.
module MAC.Labeled
(
Labeled ()
, Id (MkId)
, label
, unlabel
)
where
import MAC.Lattice
import MAC.Core (MAC(), Res())
import MAC.Effects
-- | Type denoting values of type @a@
data Id a = MkId { unId :: a }
-- | Labeled expressions
type Labeled l a = Res l (Id a)
-- | Creation of labeled expressions
label :: Less l l' => a -> MAC l (Labeled l' a)
label = create . return . MkId
-- | Observing labeled expressions
unlabel :: Less l' l => Labeled l' a -> MAC l a
unlabel = readdown (return . unId)
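-- Assuming a two-point lattice with labels @L@ and @H@ and an instance
-- @Less L H@ (the names are illustrative; see "MAC.Lattice" for the labels
-- this package actually provides), public code can box a secret at @H@,
-- while only a computation running at @H@ (or above) may 'unlabel' it:
--
-- > secret :: MAC L (Labeled H Int)
-- > secret = label 42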
| alejandrorusso/mac-privacy | MAC/Labeled.hs | bsd-3-clause | 591 | 0 | 9 | 148 | 184 | 106 | 78 | 21 | 1 |
import Test.DocTest
main :: IO ()
main = doctest ["Data/Patch/Internal.hs", "-i", "test", "-i."]
| liamoc/patches-vector | doctest.hs | bsd-3-clause | 98 | 1 | 6 | 14 | 39 | 20 | 19 | 3 | 1 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Transformations.Optimising.ArityRaisingSpec where
import Transformations.Optimising.ArityRaising
import Transformations.Names (ExpChanges(..))
import Test.Hspec
import Grin.Grin
import Grin.TH
import Test.Test hiding (newVar)
import Test.Assertions
import Grin.TypeEnv
import Grin.TypeCheck
import Data.Monoid
import Control.Arrow
import qualified Data.Map.Strict as Map
import qualified Data.Vector as Vector
runTests :: IO ()
runTests = hspec spec
spec :: Spec
spec = do
it "split_undefined" $ do
let tyEnv = inferTypeEnv testProgBefore
arityRaising 0 tyEnv testProgBefore `sameAs` (testProgAfter, NewNames)
testProgBefore :: Exp
testProgBefore = [prog|
grinMain =
v.0 <- pure (CInt 0)
p1 <- store v.0
v.1 <- pure (CInt 1)
p2 <- store v.1
v.2 <- pure (CInt 1000)
p3 <- store v.2
v.3 <- pure (Fupto p2 p3)
p4 <- store v.3
v.4 <- pure (Fsum p1 p4)
p5 <- store v.4
v.5 <- fetch p5
(Fsum p15 p16) <- pure v.5
n13' <- sum $ p15 p16
_prim_int_print $ n13'
sum p10 p11 =
v.6 <- fetch p11
(Fupto p17 p18) <- pure v.6
v.7 <- fetch p17
(CInt n2') <- pure v.7
v.8 <- fetch p18
(CInt n3') <- pure v.8
b1' <- _prim_int_gt $ n2' n3'
case b1' of
#True ->
v.9 <- pure (CNil)
case v.9 of
(CNil) ->
v.10 <- fetch p10
(CInt n14') <- pure v.10
pure n14'
(CCons.0) ->
sum $ (#undefined :: T_Dead) (#undefined :: T_Dead)
#False ->
n4' <- _prim_int_add $ n2' 1
v.14 <- pure (CInt n4')
p8 <- store v.14
v.15 <- pure (Fupto p8 p18)
p9 <- store v.15
v.16 <- pure (CCons p17 p9)
case v.16 of
(CNil) ->
pure (#undefined :: T_Dead)
(CCons p12_2 p13_2) ->
v.18 <- fetch p10
(CInt n5'_2) <- pure v.18
v.19 <- fetch p12_2
(CInt n6'_2) <- pure v.19
n7'_2 <- _prim_int_add $ n5'_2 n6'_2
v.20 <- pure (CInt n7'_2)
p14_2 <- store v.20
sum $ p14_2 p13_2
|]
testProgAfter :: Exp
testProgAfter = [prog|
grinMain =
v.0 <- pure (CInt 0)
p1 <- store v.0
v.1 <- pure (CInt 1)
p2 <- store v.1
v.2 <- pure (CInt 1000)
p3 <- store v.2
v.3 <- pure (Fupto p2 p3)
p4 <- store v.3
v.4 <- pure (Fsum p1 p4)
p5 <- store v.4
v.5 <- fetch p5
(Fsum p15 p16) <- pure v.5
n13' <- do
(CInt p15.0.0.arity.1) <- fetch p15
(Fupto p16.0.0.arity.1 p16.0.0.arity.2) <- fetch p16
sum $ p15.0.0.arity.1 p16.0.0.arity.1 p16.0.0.arity.2
_prim_int_print $ n13'
sum p10.0.arity.1 p11.0.arity.1 p11.0.arity.2 =
v.6 <- pure (Fupto p11.0.arity.1 p11.0.arity.2)
(Fupto p17 p18) <- pure v.6
v.7 <- fetch p17
(CInt n2') <- pure v.7
v.8 <- fetch p18
(CInt n3') <- pure v.8
b1' <- _prim_int_gt $ n2' n3'
case b1' of
#True ->
v.9 <- pure (CNil)
case v.9 of
(CNil) ->
v.10 <- pure (CInt p10.0.arity.1)
(CInt n14') <- pure v.10
pure n14'
(CCons.0) ->
sum $ (#undefined :: T_Dead) (#undefined :: T_Dead) (#undefined :: T_Dead)
#False ->
n4' <- _prim_int_add $ n2' 1
v.14 <- pure (CInt n4')
p8 <- store v.14
v.15 <- pure (Fupto p8 p18)
p9 <- store v.15
v.16 <- pure (CCons p17 p9)
case v.16 of
(CNil) ->
pure (#undefined :: T_Dead)
(CCons p12_2 p13_2) ->
v.18 <- pure (CInt p10.0.arity.1)
(CInt n5'_2) <- pure v.18
v.19 <- fetch p12_2
(CInt n6'_2) <- pure v.19
n7'_2 <- _prim_int_add $ n5'_2 n6'_2
v.20 <- pure (CInt n7'_2)
p14_2 <- store v.20
do
(CInt p14_2.0.2.arity.1) <- fetch p14_2
(Fupto p13_2.0.2.arity.1 p13_2.0.2.arity.2) <- fetch p13_2
sum $ p14_2.0.2.arity.1 p13_2.0.2.arity.1 p13_2.0.2.arity.2
|]
| andorp/grin | grin/test/Transformations/Optimising/ArityRaisingSpec.hs | bsd-3-clause | 3,886 | 0 | 13 | 1,172 | 196 | 119 | 77 | 26 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
module Parse (
parseExpr
) where
import Control.Applicative ((<|>))
import Data.Bifunctor (first)
import qualified Data.List as L
import qualified Data.Text as T
import qualified Text.Parsec as P
import qualified Text.Parsec.Language as P
import qualified Text.Parsec.Expr as P
import qualified Text.Parsec.String as P
import qualified Text.Parsec.Token as P
import Model
parseExpr :: T.Text -> Either T.Text (Expr ())
parseExpr t = first (T.pack . show) $ P.parse expr "expression" (T.unpack t)
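-- For example, @parseExpr "\\x -> x + 1"@ produces an 'ELambda' whose body is
-- an 'EPrim2' 'P2Sum' of an 'EVar' and a 'VDouble' literal.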
expr :: P.Parser (Expr ())
expr = lamb <|> P.try app <|> P.buildExpressionParser opTable term
app :: P.Parser (Expr ())
app = do
t1 <- term
P.spaces
tN <- P.sepBy1 term P.spaces
return $ L.foldl' (EApp ()) t1 tN
tokP = P.makeTokenParser P.emptyDef
opTable = [ [prefix "-" (EPrim1 () P1Negate)
-- ,
-- prefix "log10" (UPrim1 PLog10),
-- prefix "log" (UPrim1 PLogE),
-- prefix "exp10" (UPrim1 PExp10),
-- prefix "exp" (UPrim1 PExpE),
-- postfix "dB" (UPrim1 PToDb)]
-- , [binary "^" (UPrim2 PPow) AssocLeft
]
, [binary "*" (EPrim2 () P2Prod) P.AssocLeft
-- , binary "/" (UPrim2 PDiv) AssocLeft
]
, [binary "+" (EPrim2 () P2Sum) P.AssocLeft
-- , binary "-" (UPrim2 PDiff) AssocLeft
-- , binary "->" (UPrim2 PRange) AssocLeft
]
]
binary name fun assoc = P.Infix (do{ P.reservedOp tokP name; return fun}) assoc
prefix name fun = P.Prefix (do{P.reservedOp tokP name; return fun})
postfix name fun = P.Postfix (do{P.reservedOp tokP name; return fun})
term = P.try (P.between (P.char '(') (P.char ')') expr
<|> lamb
<|> lit
<|> (EVar () . T.pack <$> varName)) <* P.spaces
varName :: P.Parser String
varName = P.identifier tokP -- alphanumeric name
<|> ((('#':) . show) -- #n widget index style name
<$> (P.char '#' >> P.natural tokP))
lamb :: P.Parser (Expr ())
lamb = do
P.char '\\'
n <- P.identifier tokP
P.spaces
P.string "->"
P.spaces
body <- expr
return $ ELambda () (T.pack n) body
lit :: P.Parser (Expr ())
lit = do
n <- P.optionMaybe (P.char '-')
v <- either fromIntegral id <$>
P.naturalOrFloat (P.makeTokenParser P.emptyDef)
case n of
Nothing -> return $ ELit () $ VDouble v
Just _ -> return (ELit () $ VDouble $ negate v)
| CBMM/CBaaS | cbaas-lib/src/Parse.hs | bsd-3-clause | 2,493 | 0 | 15 | 660 | 855 | 444 | 411 | 56 | 2 |
import Network.Socket
import Network.BSD
import System.IO (Handle, IOMode(ReadWriteMode), hPutStrLn, hGetLine, hClose)
import Control.Monad (liftM)
import Control.Concurrent (forkIO)
import Control.Exception (finally)
import System
connectTo :: String -> Int -> IO Handle
connectTo host port_ = do
let port = toEnum port_
sock <- socket AF_INET Stream 0
addrs <- liftM hostAddresses $ getHostByName host
if null addrs then error $ "no such host : " ++ host else return ()
connect sock $ SockAddrInet port (head addrs)
handle <- socketToHandle sock ReadWriteMode
return handle
listenAt :: Int -> (Handle -> IO ()) -> IO ()
listenAt port_ f = do
let port = toEnum port_
lsock <- socket AF_INET Stream 0
bindSocket lsock $ SockAddrInet port iNADDR_ANY
listen lsock sOMAXCONN
loop lsock `finally` sClose lsock
where
loop lsock = do
(sock,SockAddrInet _ _) <- accept lsock
handle <- socketToHandle sock ReadWriteMode
f handle
loop lsock
server = withSocketsDo $ do
print "server listening ..."
listenAt 12345 (\h -> forkIO (do
putStrLn "connection."
hPutStrLn h "Hello, and Goodbye!"
`finally` hClose h) >> return ())
client = withSocketsDo $ do
print "connecting as client..."
h <- connectTo "localhost" 12345
hGetLine h >>= putStrLn
hClose h
main = do
[a] <- getArgs
if a == "1"
then server
else client
| marcmo/hsDiagnosis | other/sockettests.hs | bsd-3-clause | 1,449 | 5 | 17 | 355 | 514 | 242 | 272 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}
module Paths_LockingService (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
#if defined(VERSION_base)
#if MIN_VERSION_base(4,0,0)
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#else
catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a
#endif
#else
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#endif
catchIO = Exception.catch
version :: Version
version = Version [0,1,0,0] []
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/home/ggunn/DFS/LockingService/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/bin"
libdir = "/home/ggunn/DFS/LockingService/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/lib/x86_64-linux-ghc-8.0.1/LockingService-0.1.0.0-7k0JqlEeWD7F2VFBzyPr5l"
datadir = "/home/ggunn/DFS/LockingService/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/share/x86_64-linux-ghc-8.0.1/LockingService-0.1.0.0"
libexecdir = "/home/ggunn/DFS/LockingService/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/libexec"
sysconfdir = "/home/ggunn/DFS/LockingService/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "LockingService_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "LockingService_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "LockingService_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "LockingService_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "LockingService_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| Garygunn94/DFS | LockingService/.stack-work/dist/x86_64-linux/Cabal-1.24.0.0/build/autogen/Paths_LockingService.hs | bsd-3-clause | 2,001 | 0 | 10 | 223 | 371 | 215 | 156 | 31 | 1 |
-- |
-- Module : Data.Hourglass.Compat
-- License : BSD-style
-- Maintainer : Nicolas DI PRIMA <[email protected]>
--
-- Basic Time conversion compatibility.
--
-- This module aims to help conversion between the types from the package
-- time to the package hourglass.
--
-- Example of use (extracted from file Example/Time/Compat.hs):
--
-- > import Data.Hourglass as H
-- > import Data.Hourglass.Compat as C
-- > import Data.Time as T
-- >
-- > transpose :: T.ZonedTime
-- > -> H.LocalTime H.DateTime
-- > transpose oldTime =
-- > H.localTime
-- > offsetTime
-- > (H.DateTime newDate timeofday)
-- > where
-- > newDate :: H.Date
-- > newDate = C.dateFromTAIEpoch $ T.toModifiedJulianDay $ T.localDay $ T.zonedTimeToLocalTime oldTime
-- >
-- > timeofday :: H.TimeOfDay
-- > timeofday = C.diffTimeToTimeOfDay $ T.timeOfDayToTime $ T.localTimeOfDay $ T.zonedTimeToLocalTime oldTime
-- >
-- > offsetTime = H.TimezoneOffset $ fromIntegral $ T.timeZoneMinutes $ T.zonedTimeZone oldTime
--
module Data.Hourglass.Compat
( dateFromPOSIXEpoch
, dateFromTAIEpoch
, diffTimeToTimeOfDay
) where
import Data.Hourglass
-- | Convert an integer representing the number of days relative to the
-- POSIX Epoch (1970-01-01) to a Date.
dateFromPOSIXEpoch :: Integer -- ^ number of days since POSIX Epoch
-> Date
dateFromPOSIXEpoch day = do
let sec = Elapsed $ fromIntegral $ day * 86400
timeConvert sec
-- | Number of days between POSIX Epoch and TAI Epoch
-- (between 1858-11-17 and 1970-01-01)
daysTAItoPOSIX :: Integer
daysTAItoPOSIX = 40587
-- | Convert an integer representing the number of days relative to the
-- TAI Epoch (1858-11-17) to a Date. TAI day numbering is the same as the
-- time package's Modified Julian Day, which makes it easy to convert a
-- Day from the time package into the Hourglass Date representation.
--
-- For example, to convert a Data.Time.Calendar.Day into a Date:
--
-- > import qualified Data.Time.Calendar as T
-- >
-- > timeDay :: T.Day
-- >
-- > dateFromTAIEpoch $ T.toModifiedJulianDay timeDay
dateFromTAIEpoch :: Integer -- ^ number of days since TAI Epoch
-> Date
dateFromTAIEpoch dtai =
dateFromPOSIXEpoch (dtai - daysTAItoPOSIX)
-- | Convert a duration measured from the start of a day into a TimeOfDay.
-- (It converts, for instance, a Data.Time.Clock.DiffTime into a TimeOfDay.)
--
-- Example with DiffTime type from time:
--
-- > import qualified Data.Time.Clock as T
-- >
-- > difftime :: T.DiffTime
-- >
-- > diffTimeToTimeOfDay difftime
--
-- Example with the TimeOfDay type from time:
--
-- > import qualified Data.Time.Clock as T
-- >
-- > timeofday :: T.TimeOfDay
-- >
-- > diffTimeToTimeOfDay $ T.timeOfDayToTime timeofday
diffTimeToTimeOfDay :: Real t
=> t -- ^ number of seconds of the time of the day
-> TimeOfDay
diffTimeToTimeOfDay dt = do
TimeOfDay
{ todHour = fromIntegral hours
, todMin = fromIntegral minutes
, todSec = fromIntegral seconds
, todNSec = fromIntegral nsecs
}
where
r :: Rational
r = toRational dt
(secs, nR) = properFraction r :: (Integer, Rational)
nsecs :: Integer
nsecs = round (nR * 1000000000)
(minsofday, seconds) = secs `divMod` 60 :: (Integer, Integer)
(hours, minutes) = minsofday `divMod` 60 :: (Integer, Integer)
| ppelleti/hs-hourglass | Data/Hourglass/Compat.hs | bsd-3-clause | 3,381 | 0 | 12 | 774 | 349 | 224 | 125 | 32 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Models.Exception
(module Models.Internal.Exception)
where
import Models.Internal.Exception
| HaskellCNOrg/snap-web | src/Models/Exception.hs | bsd-3-clause | 161 | 0 | 5 | 38 | 22 | 15 | 7 | 4 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Graphics.HGL.Internals.Draw
-- Copyright : (c) Alastair Reid, 1999-2003
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (requires concurrency)
--
-- Drawing in a simple graphics library.
--
-----------------------------------------------------------------------------
#include "HsHGLConfig.h"
-- #hide
module Graphics.HGL.Internals.Draw
( Graphic -- = Draw ()
, Draw
, ioToDraw -- :: IO a -> Draw a
, bracket -- :: Draw a -> (a -> Draw b) -> (a -> Draw c) -> Draw c
, bracket_ -- :: Draw a -> (a -> Draw b) -> Draw c -> Draw c
, unDraw -- :: Draw a -> (DC -> IO a)
, mkDraw -- :: (DC -> IO a) -> Draw a
) where
#if !X_DISPLAY_MISSING
import Graphics.HGL.X11.Types(DC)
#else
import Graphics.HGL.Win32.Types(DC)
#endif
import qualified Graphics.HGL.Internals.Utilities as Utils (bracket, bracket_)
import Control.Monad (liftM)
----------------------------------------------------------------
-- Graphics
----------------------------------------------------------------
-- | An abstract representation of an image.
type Graphic = Draw ()
-- | Monad for sequential construction of images.
newtype Draw a = MkDraw (DC -> IO a)
unDraw :: Draw a -> (DC -> IO a)
unDraw (MkDraw m) = m
-- | Embed an 'IO' action in a drawing action.
ioToDraw :: IO a -> Draw a
ioToDraw m = MkDraw (\ _ -> m)
mkDraw :: (DC -> IO a) -> Draw a
mkDraw = MkDraw
-- a standard reader monad
instance Monad Draw where
return a = MkDraw (\ hdc -> return a)
m >>= k = MkDraw (\ hdc -> do { a <- unDraw m hdc; unDraw (k a) hdc })
m >> k = MkDraw (\ dc -> do { unDraw m dc; unDraw k dc })
instance Functor Draw where fmap = liftM
-- | Wrap a drawing action in initialization and finalization actions.
bracket
:: Draw a -- ^ a pre-operation, whose value is passed to the
-- other two components.
-> (a -> Draw b) -- ^ a post-operation, to be performed on exit from
-- the bracket, whether normal or by an exception.
-> (a -> Draw c) -- ^ the drawing action inside the bracket.
-> Draw c
bracket left right m = MkDraw (\ hdc ->
Utils.bracket (unDraw left hdc)
(\ a -> unDraw (right a) hdc)
(\ a -> unDraw (m a) hdc))
-- | A variant of 'bracket' in which the inner drawing action does not
-- use the result of the pre-operation.
bracket_
:: Draw a -- ^ a pre-operation, whose value is passed to the
-- other two components.
-> (a -> Draw b) -- ^ a post-operation, to be performed on exit from
-- the bracket, whether normal or by an exception.
-> Draw c -- ^ the drawing action inside the bracket.
-> Draw c
bracket_ left right m = MkDraw (\ hdc ->
Utils.bracket_ (unDraw left hdc)
(\ a -> unDraw (right a) hdc)
(unDraw m hdc))
----------------------------------------------------------------
| FranklinChen/hugs98-plus-Sep2006 | packages/HGL/Graphics/HGL/Internals/Draw.hs | bsd-3-clause | 3,012 | 4 | 13 | 649 | 606 | 340 | 266 | 42 | 1 |
module Yawn.HTTP.Request where
import Data.Maybe (fromMaybe)
import Network.URL (URL, importParams, url_params, url_path)
import qualified Data.ByteString as BS (ByteString)
import qualified Data.ByteString.Char8 as BS8 (unpack)
import qualified Data.Map as M (Map, lookup)
data Request = Request {
method :: RequestMethod,
url :: URL,
version :: HttpVersion,
headers :: M.Map String String,
body :: BS.ByteString
} deriving (Show, Eq)
data RequestMethod = GET |
PUT |
POST |
DELETE |
HEAD |
OPTIONS |
CONNECT |
TRACE deriving (Show, Eq)
data HttpVersion = HTTP_1_0 | HTTP_1_1 deriving Eq
instance Show HttpVersion where
show HTTP_1_0 = "HTTP/1.0"
show HTTP_1_1 = "HTTP/1.1"
getParams :: Request -> [(String, String)]
getParams = url_params . url
postParams :: Request -> Maybe [(String, String)]
postParams = importParams . BS8.unpack . body
requestPath :: Request -> String
requestPath = url_path . url
findHeader :: String -> Request -> Maybe String
findHeader name = M.lookup name . headers
hasHeader :: String -> Request -> Bool
hasHeader s r = findHeader s r /= Nothing
-- catch read exceptions
contentLength :: Request -> Int
contentLength = read . fromMaybe "0" . findHeader "Content-Length"
-- Under HTTP/1.0 all connections are closed unless Connection: Keep-Alive is supplied
-- Under HTTP/1.1 all connections are open unless Connection: close is supplied
isKeepAlive :: Request -> Bool
isKeepAlive r = case findHeader "Connection" r of
Nothing -> version r == HTTP_1_1
Just con -> if version r == HTTP_1_0 then "Keep-Alive" == con
else "close" /= con
| ameingast/yawn | src/Yawn/HTTP/Request.hs | bsd-3-clause | 1,765 | 0 | 10 | 443 | 462 | 262 | 200 | 42 | 3 |
-- | ISO 8601 Week Date format
module Data.Time.Calendar.WeekDate where
import Data.Int
import Data.Time.Calendar.OrdinalDate
import Data.Time.Calendar.Days
import Data.Time.Calendar.Private
-- | convert to ISO 8601 Week Date format. First element of result is year, second week number (1-53), third day of week (1 for Monday to 7 for Sunday).
-- Note that \"Week\" years are not quite the same as Gregorian years, as the first day of the year is always a Monday.
-- The first week of a year is the first week to contain at least four days in the corresponding Gregorian year.
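--
-- For example, Saturday 1 January 2005 belongs to week 53 of week-year 2004:
--
-- > toWeekDate (fromOrdinalDate 2005 1) == (2004,53,6)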
toWeekDate :: Day -> (Int64,Int,Int)
toWeekDate date@(ModifiedJulianDay mjd) = (y1,fromIntegral (w1 + 1),fromIntegral (mod d 7) + 1) where
(y0,yd) = toOrdinalDate date
d = mjd + 2
foo :: Int64 -> Int64
foo y = bar (toModifiedJulianDay (fromOrdinalDate y 6))
bar k = (div d 7) - (div k 7)
w0 = bar (d - (fromIntegral yd) + 4)
(y1,w1) = if w0 == -1
then (y0 - 1,foo (y0 - 1))
else if w0 == 52
then if (foo (y0 + 1)) == 0
then (y0 + 1,0)
else (y0,w0)
else (y0,w0)
-- | convert from ISO 8601 Week Date format. First argument is year, second week number (1-52 or 53), third day of week (1 for Monday to 7 for Sunday).
-- Invalid week and day values will be clipped to the correct range.
fromWeekDate :: Int64 -> Int -> Int -> Day
fromWeekDate y w d = ModifiedJulianDay (k - (mod k 7) + (fromIntegral (((clip 1 (if longYear then 53 else 52) w) * 7) + (clip 1 7 d))) - 10) where
k = toModifiedJulianDay (fromOrdinalDate y 6)
longYear = case toWeekDate (fromOrdinalDate y 365) of
(_,53,_) -> True
_ -> False
-- | convert from ISO 8601 Week Date format. First argument is year, second week number (1-52 or 53), third day of week (1 for Monday to 7 for Sunday).
-- Invalid week and day values will return Nothing.
fromWeekDateValid :: Int64 -> Int -> Int -> Maybe Day
fromWeekDateValid y w d = do
d' <- clipValid 1 7 d
let
longYear = case toWeekDate (fromOrdinalDate y 365) of
(_,53,_) -> True
_ -> False
w' <- clipValid 1 (if longYear then 53 else 52) w
let
k = toModifiedJulianDay (fromOrdinalDate y 6)
return (ModifiedJulianDay (k - (mod k 7) + (fromIntegral ((w' * 7) + d')) - 10))
-- | show in ISO 8601 Week Date format as yyyy-Www-d (e.g. \"2006-W46-3\").
showWeekDate :: Day -> String
showWeekDate date = (show4 (Just '0') y) ++ "-W" ++ (show2 (Just '0') w) ++ "-" ++ (show d) where
(y,w,d) = toWeekDate date
| takano-akio/time | Data/Time/Calendar/WeekDate.hs | bsd-3-clause | 2,435 | 12 | 18 | 496 | 785 | 423 | 362 | 40 | 4 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Data.ROS.Formattable ( Formattable(..) ) where
class Show a => Formattable a where
format :: a -> String
format = show
instance Formattable String where
format = id
| ROSVendor/msg_dbg | lib/Data/ROS/Formattable.hs | bsd-3-clause | 255 | 0 | 7 | 46 | 61 | 35 | 26 | 8 | 0 |
{-# LANGUAGE OverloadedStrings, TypeFamilies, QuasiQuotes #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Text.XML.Papillon(Xmlns, XEQName, parseXmlEvent, XmlEvent(..)) where
import Control.Arrow
import Data.List
import Data.Char
import Data.ByteString.Char8 (ByteString, pack)
import Text.Papillon
import Numeric
import qualified Data.ByteString.Char8 as BSC
type Xmlns = (BSC.ByteString, BSC.ByteString)
type XEQName = (BSC.ByteString, BSC.ByteString)
data XmlEvent
= XEXmlDecl (Int, Int)
| XESTag XEQName [Xmlns] [(XEQName, BSC.ByteString)]
| XEETag XEQName
| XEEmptyElemTag XEQName [Xmlns] [(XEQName, BSC.ByteString)]
| XECharData BSC.ByteString
deriving Show
data Attribute
= NSAttribute BSC.ByteString BSC.ByteString
| Attribute (BSC.ByteString, BSC.ByteString) BSC.ByteString
deriving Show
procAtts :: [Attribute] -> (
[(BSC.ByteString, BSC.ByteString)],
[((BSC.ByteString, BSC.ByteString), BSC.ByteString)])
procAtts = (map fromNSAttribute *** map fromAttribute) . partition isNSAtt
fromNSAttribute :: Attribute -> (BSC.ByteString, BSC.ByteString)
fromNSAttribute (NSAttribute k v) = (k, v)
fromNSAttribute _ = error "bad"
fromAttribute :: Attribute -> ((BSC.ByteString, BSC.ByteString), BSC.ByteString)
fromAttribute (Attribute k v) = (k, v)
fromAttribute _ = error "bad"
isNSAtt :: Attribute -> Bool
isNSAtt (NSAttribute _ _) = True
isNSAtt _ = False
parseXmlEvent :: ByteString -> Maybe XmlEvent
parseXmlEvent = either (const Nothing) (Just . fst) . runError . xmlEvent . parse
fromHex :: String -> Char
fromHex = chr . fst . head . readHex
[papillon|
source: ByteString
xmlEvent :: XmlEvent
= et:emptyElemTag { et }
/ st:sTag { st }
/ et:eTag { et }
/ cd:charData { cd }
/ xd:xmlDecl { xd }
spaces = _:(' ' / '\t' / '\r' / '\n')+
nameStartChar :: Char = <(`elem` (":_" ++ ['a' .. 'z'] ++ ['A' .. 'Z']))>
nameChar :: Char
= s:nameStartChar { s }
/ <(`elem` ("-." ++ ['0' .. '9']))>
ncNameStartChar :: Char = !':' s:nameStartChar { s }
ncNameChar :: Char = !':' c:nameChar { c }
-- name :: ByteString
-- = sc:nameStartChar cs:(c:nameChar { c })* { pack $ sc : cs }
ncName :: ByteString
= sc:ncNameStartChar cs:(c:ncNameChar { c })* { pack $ sc : cs }
qName :: (ByteString, ByteString)
= pn:prefixedName { pn }
/ un:unprefixedName { ("", un) }
prefixedName :: (ByteString, ByteString) = p:prefix ':' l:localPart
{ (p, l) }
unprefixedName :: ByteString = l:localPart { l }
prefix :: ByteString = n:ncName { n }
localPart :: ByteString = n:ncName { n }
attValue :: ByteString
= '"' v:(c:<(`notElem` "<&\"")> { c } / c:charEntRef { c })* '"'
{ pack v }
/ '\'' v:(c:<(`notElem` "<&'")> { c } / c:charEntRef { c })* '\''
{ pack v }
charData :: XmlEvent
= '>' cds:(c:<(`notElem` "<&")> { c } / c:charEntRef { c })*
{ XECharData $ pack cds }
charEntRef :: Char = c:charRef { c } / c:entityRef { c }
charRef :: Char = '&' '#' 'x'
ds:(<(`elem` "0123456789abcdefABCDEF")>)+ ';' { fromHex ds }
entityRef :: Char
= '&' 'a' 'm' 'p' ';' { '&' }
/ '&' 'l' 't' ';' { '<' }
/ '&' 'g' 't' ';' { '>' }
/ '&' 'q' 'u' 'o' 't' ';' { '"' }
/ '&' 'a' 'p' 'o' 's' ';' { '\'' }
xmlDecl :: XmlEvent
= '<' '?' 'x' 'm' 'l' vi:versionInfo _:spaces? '?' _:eof
{ XEXmlDecl vi }
versionInfo :: (Int, Int)
= _:spaces 'v' 'e' 'r' 's' 'i' 'o' 'n' _:eq
vn:('"' v:versionNum '"' { v } / '\'' v:versionNum '\'' { v })
{ vn }
eq :: () = _:spaces? '=' _:spaces?
versionNum :: (Int, Int)
= '1' '.' d:<isDigit>+ { (1, read d) }
sTag :: XmlEvent
= '<' n:qName as:(_:spaces a:attribute { a })* _:spaces? _:eof
{ uncurry (XESTag n) $ procAtts as }
emptyElemTag :: XmlEvent
= '<' n:qName as:(_:spaces a:attribute { a })* _:spaces? '/' _:eof
{ uncurry (XEEmptyElemTag n) $ procAtts as }
prefixedAttName :: ByteString
= 'x' 'm' 'l' 'n' 's' ':' n:ncName { n }
defaultAttName = 'x' 'm' 'l' 'n' 's'
nsAttName :: ByteString
= n:prefixedAttName { n }
/ _:defaultAttName { "" }
attribute :: Attribute
= n:nsAttName _:eq v:attValue { NSAttribute n v }
/ n:qName _:eq v:attValue { Attribute n v }
eTag :: XmlEvent
= '<' '/' n:qName _:spaces? _:eof { XEETag n }
eof = !_
|]
| YoshikuniJujo/xml-pipe | src/Text/XML/Papillon.hs | bsd-3-clause | 4,192 | 12 | 10 | 844 | 532 | 307 | 225 | 41 | 1 |
{-# LANGUAGE RebindableSyntax #-}
-- Copyright : (C) 2009 Corey O'Connor
-- License : BSD-style (see the file LICENSE)
import Bind.Marshal.Prelude
import Bind.Marshal.Verify
import Bind.Marshal.Action
import Bind.Marshal.DataModel
main = run_test $ do
returnM () :: Test ()
| coreyoconnor/bind-marshal | test/verify_action.hs | bsd-3-clause | 290 | 0 | 9 | 53 | 53 | 31 | 22 | 7 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-binds -fno-warn-unused-matches -fno-warn-name-shadowing -fno-warn-missing-signatures #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, UndecidableInstances, FlexibleContexts, TypeSynonymInstances #-}
---------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------
-- |
-- | Module : Codensity tutorial
-- | Creator: Xiao Ling
-- | Created: 12/8/2015
-- | see    : Asymptotic Improvement of Computations over Free Monads by Janis Voigtländer
-- | http://www.janis-voigtlaender.eu/papers/AsymptoticImprovementOfComputationsOverFreeMonads.pdf
-- |
---------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------
import Control.Monad
{-----------------------------------------------------------------------------
Data Type and Type Class
------------------------------------------------------------------------------}
data Tree a = Leaf a | Tree a `Node` Tree a
deriving (Eq,Show)
instance Functor Tree where
fmap g (Leaf a) = Leaf $ g a
fmap g (Node t1 t2) = Node (fmap g t1) (fmap g t2)
instance Applicative Tree where
pure = return
(<*>) = ap
instance Monad Tree where
return = Leaf
(Leaf a) >>= g = g a
(Node t1 t2) >>= g = Node (t1 >>= g) (t2 >>= g)
{-----------------------------------------------------------------------------
Functions over Tree
------------------------------------------------------------------------------}
-- * in order traversal by fiat
toList :: Tree a -> [a]
toList (Leaf a) = [a]
toList (Node t1 t2) = toList t1 ++ toList t2
-- * not quite the same ...
fullTree :: Int -> Tree Int
fullTree 1 = Leaf 1
fullTree n = do
i <- fullTree $ n - 1
Node (Leaf $ n - 1) (Leaf $ n - 2)
{-----------------------------------------------------------------------------
Tree Examples
------------------------------------------------------------------------------}
ls :: [Tree Int]
ls@[l1,l2,l3,l4,l5,l6] = Leaf <$> [1..6]
t1 :: Tree Int
t1 = Node (Node l1 l2) (Node l3 l4)
{-----------------------------------------------------------------------------
Datatype - abstract over leaves of a tree
------------------------------------------------------------------------------}
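-- A minimal sketch of where this section is heading, adapted from the paper
-- cited above (the names below are illustrative, and the continuation's
-- result type is fixed here instead of using the paper's rank-2 type, so no
-- extra language extensions are needed). Instead of building a Tree directly
-- we build a function that is told what to do with each leaf, so substitution
-- at the leaves becomes function composition rather than a walk over the tree.
newtype CTree r a = CTree { runCTree :: (a -> Tree r) -> Tree r }
instance Functor (CTree r) where
  fmap f (CTree p) = CTree $ \k -> p (k . f)
instance Applicative (CTree r) where
  pure a = CTree ($ a)
  (<*>)  = ap
instance Monad (CTree r) where
  return = pure
  CTree p >>= f = CTree $ \k -> p (\a -> runCTree (f a) k)
-- * embed an ordinary tree
rep :: Tree a -> CTree r a
rep t = CTree (t >>=)
-- * recover an ordinary tree by feeding 'Leaf' in as the continuation
abs' :: CTree a a -> Tree a
abs' (CTree p) = p Leaf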
| lingxiao/CIS700 | depricated/CodensityTutorial.hs | bsd-3-clause | 2,535 | 4 | 10 | 390 | 447 | 234 | 213 | 27 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.EN.PH.Rules
( rules
) where
import Data.Maybe
import Prelude
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Regex.Types
import Duckling.Time.Helpers
import Duckling.Time.Types (TimeData (..))
import Duckling.Types
ruleMMDD :: Rule
ruleMMDD = Rule
{ name = "mm/dd"
, pattern =
[ regex "(1[0-2]|0?[1-9])\\s?[/-]\\s?(3[01]|[12]\\d|0?[1-9])"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (mm:dd:_)):_) -> do
m <- parseInt mm
d <- parseInt dd
tt $ monthDay m d
_ -> Nothing
}
ruleMMDDYYYY :: Rule
ruleMMDDYYYY = Rule
{ name = "mm/dd/yyyy"
, pattern =
[ regex "(1[0-2]|0?[1-9])[-/\\s](3[01]|[12]\\d|0?[1-9])[-/\\s](\\d{2,4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (mm:dd:yy:_)):_) -> do
y <- parseInt yy
m <- parseInt mm
d <- parseInt dd
tt $ yearMonthDay y m d
_ -> Nothing
}
-- Clashes with HHMMSS, hence only 4-digit years
ruleMMDDYYYYDot :: Rule
ruleMMDDYYYYDot = Rule
{ name = "mm.dd.yyyy"
, pattern =
[ regex "(1[0-2]|0?[1-9])\\.(3[01]|[12]\\d|0?[1-9])\\.(\\d{4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (mm:dd:yy:_)):_) -> do
y <- parseInt yy
m <- parseInt mm
d <- parseInt dd
tt $ yearMonthDay y m d
_ -> Nothing
}
rulePeriodicHolidays :: [Rule]
rulePeriodicHolidays = mkRuleHolidays
-- Fixed dates, year over year
[ ( "Arbor Day", "arbor day", monthDay 6 25 )
, ( "Bonifacio Day", "bonifacio day", monthDay 11 30 )
, ( "Independence Day", "independence day", monthDay 6 12 )
, ( "Labour Day", "labour day", monthDay 5 1 )
, ( "Ninoy Aquino Day", "ninoy aquino day", monthDay 8 21 )
, ( "People Power Anniversary", "(edsa revolution|people power) anniversary"
, monthDay 2 25 )
, ( "Rizal Day", "rizal day", monthDay 12 30 )
, ( "The Day of Valor", "the day of valor", monthDay 4 9 )
-- Fixed day/week/month, year over year
, ( "Father's Day", "father'?s?'? day", nthDOWOfMonth 3 7 6 )
, ( "Mother's Day", "mother'?s?'? day", nthDOWOfMonth 2 7 5 )
, ( "National Elections Day", "national elections day", nthDOWOfMonth 2 1 5 )
, ( "National Heroes' Day", "(national )?heroes' day"
, predLastOf (dayOfWeek 1) (month 8) )
, ( "Parents' Day", "parents' day", nthDOWOfMonth 1 1 12 )
, ( "Thanksgiving Day", "thanks?giving( day)?", nthDOWOfMonth 4 4 11 )
]
rules :: [Rule]
rules =
[ ruleMMDD
, ruleMMDDYYYY
, ruleMMDDYYYYDot
]
++ rulePeriodicHolidays
| facebookincubator/duckling | Duckling/Time/EN/PH/Rules.hs | bsd-3-clause | 2,929 | 0 | 19 | 668 | 770 | 431 | 339 | 72 | 2 |
module Main where
import Math.ProxN.VecN as Vec
import Math.ProxN.Tree2N as Tree
import Math.ProxN.Peano
import Math.ProxN.Pretty
import Math.ProxN.Proximity
import System.Random
import Control.Applicative
import Control.Monad.Random(runRandT, RandT, getRandomR)
import Control.Monad.IO.Class
import Control.Monad
import Data.Maybe
-- number of points
_NUM :: Int
_NUM = 2000
-- range of one coordinate
_RANGE :: (Double, Double)
_RANGE = (0, 100)
-- r^2
_TOLERANCE2 :: Double
_TOLERANCE2 = 500.0
-- dimension
type Dim = Five
------------
randomList :: Int -> IO [Double]
randomList n = withStdGen (replicateM n (getRandomR _RANGE))
withStdGen :: (MonadIO m) => RandT StdGen m a -> m a
withStdGen r = do
gen <- liftIO getStdGen
(a, nextGen) <- runRandT r gen
liftIO $ setStdGen nextGen
return a
-- generates a random vector, then 20 random vector-trees of _NUM vectors and calculates the proximity sets of the vector
main :: IO ()
main = do
let peano = undefined :: Dim
ranVec <- fromJust . Vec.fromList <$> randomList (fromPeano peano)
mapM_ putStrLn
[ "Dimension: " ++ show (fromPeano peano)
, "Number of points: " ++ show _NUM
, "Random Vector: " ++ prettySimp (ranVec :: VecN Dim Double)
, "Tolerance^2: " ++ show _TOLERANCE2
, "Depth lower bound: " ++ show (logarithm peano (fromIntegral _NUM))
]
replicateM_ 20 $ do
let vecM = Vec.fromList <$> randomList (fromPeano peano)
tr <- Tree.fromList <$> replicateM _NUM (fromJust <$> vecM)
putStrLn $ "Depth: " ++ show (depth tr)
putStrLn $ "Vecs in proximity: "
++ show ((prox _TOLERANCE2 ranVec tr))
| exFalso/ProxN | example/Example.hs | bsd-3-clause | 1,625 | 0 | 16 | 318 | 504 | 267 | 237 | 43 | 1 |
{-# LANGUAGE Rank2Types #-}
module Main where
import Test.Framework (defaultMain)
import qualified Examples as Examples
main :: IO ()
main = defaultMain
[ Examples.tests
]
| jaspervdj/dcpu16-hs | tests/TestSuite.hs | bsd-3-clause | 183 | 0 | 7 | 36 | 44 | 27 | 17 | 7 | 1 |
module Main where
import Control.Monad (unless)
import System.Environment (getArgs)
import System.Exit (exitFailure)
import Data.ConfigParser
import Web.Server
main :: IO ()
main = do
args <- getArgs
unless (length args == 1) $ do
putStrLn "Usage: tinfoilsub <config>"
exitFailure
feeds <- parseConfig <$> readFile (head args)
runServer feeds
| sulami/tinfoilsub | app/Main.hs | bsd-3-clause | 413 | 0 | 11 | 117 | 121 | 61 | 60 | 14 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{- |
Category rendering.
The main function this module provides is 'renderCategory'.
-}
module Guide.Views.Category
(
-- * Main functions
renderCategory,
-- * Helpers
renderCategoryInfo,
renderCategoryStatus,
renderCategoryNotes,
)
where
import Imports
-- Text
import qualified Data.Text.All as T
-- HTML
import Lucid hiding (for_)
import Guide.Types.Core
import qualified Guide.JS as JS
import Guide.Cache
import Guide.Markdown
import Guide.Utils
import Guide.Views.Utils
import Guide.Views.Item
----------------------------------------------------------------------------
-- Main functions
----------------------------------------------------------------------------
-- | Render the whole category.
renderCategory :: MonadIO m => Category -> HtmlT m ()
renderCategory category = cached (CacheCategory (category^.uid)) $ do
div_ [class_ "category", id_ (categoryNodeId category)] $ do
renderCategoryInfo category
renderCategoryNotes category
itemsNode <- div_ [class_ "items"] $ do
mapM_ (renderItem category) (category^.items)
thisNode
textInput [
class_ " add-item ",
placeholder_ "add an item",
autocomplete_ "off",
onEnter $ JS.addItem (itemsNode, category^.uid, inputValue) <>
clearInput ]
----------------------------------------------------------------------------
-- Helpers
----------------------------------------------------------------------------
-- | Render info about the category (the header with category name + the edit
-- form + possibly status banner).
renderCategoryInfo :: MonadIO m => Category -> HtmlT m ()
renderCategoryInfo category = cached (CacheCategoryInfo (category^.uid)) $ do
let thisId = "category-info-" <> uidToText (category^.uid)
this = JS.selectId thisId
div_ [id_ thisId, class_ "category-info"] $ do
section "normal" [shown, noScriptShown] $ do
h2_ $ do
-- TODO: this link shouldn't be absolute [absolute-links]
span_ [class_ "controls"] $
a_ [class_ "category-feed",
href_ ("/haskell/feed/category/" <> uidToText (category^.uid))] $
img_ [src_ "/rss-alt.svg",
alt_ "category feed", title_ "category feed"]
a_ [href_ (categoryLink category), class_ "category-title"] $
toHtml (category^.title)
emptySpan "1em"
span_ [class_ "group"] $
toHtml (category^.group_)
emptySpan "1em"
textButton "edit" $
JS.switchSection (this, "editing" :: Text)
emptySpan "1em"
textButton "delete" $
JS.deleteCategoryAndRedirect [category^.uid]
renderCategoryStatus category
section "editing" [] $ do
let formSubmitHandler formNode =
JS.submitCategoryInfo (this, category^.uid, formNode)
form_ [onFormSubmit formSubmitHandler] $ do
-- All inputs have "autocomplete = off" thanks to
-- <http://stackoverflow.com/q/8311455>
label_ $ do
"Title" >> br_ []
input_ [type_ "text", name_ "title",
autocomplete_ "off",
value_ (category^.title)]
br_ []
label_ $ do
"Group" >> br_ []
input_ [type_ "text", name_ "group",
autocomplete_ "off",
value_ (category^.group_)]
br_ []
label_ $ do
"Status" >> br_ []
select_ [name_ "status", autocomplete_ "off"] $ do
option_ [value_ "finished"] "Complete"
& selectedIf (category^.status == CategoryFinished)
option_ [value_ "wip"] "Work in progress"
& selectedIf (category^.status == CategoryWIP)
option_ [value_ "stub"] "Stub"
& selectedIf (category^.status == CategoryStub)
br_ []
label_ $ do
input_ [type_ "checkbox", name_ "pros-cons-enabled",
autocomplete_ "off"]
& checkedIf (ItemProsConsSection `elem` category^.enabledSections)
"Pros/cons enabled"
br_ []
label_ $ do
input_ [type_ "checkbox", name_ "ecosystem-enabled",
autocomplete_ "off"]
& checkedIf (ItemEcosystemSection `elem` category^.enabledSections)
"“Ecosystem” field enabled"
br_ []
label_ $ do
input_ [type_ "checkbox", name_ "notes-enabled",
autocomplete_ "off"]
& checkedIf (ItemNotesSection `elem` category^.enabledSections)
"“Notes” field enabled"
br_ []
input_ [type_ "submit", value_ "Save", class_ "save"]
button "Cancel" [class_ "cancel"] $
JS.switchSection (this, "normal" :: Text)
-- | Render the category status banner that is shown on the page of each
-- unfinished category.
renderCategoryStatus :: MonadIO m => Category -> HtmlT m ()
renderCategoryStatus category = do
case category^.status of
CategoryFinished -> return ()
CategoryWIP -> catBanner $ do
"This category is a work in progress"
CategoryStub -> catBanner $ do
"This category is a stub, contributions are welcome!"
where
catBanner :: MonadIO m => HtmlT m () -> HtmlT m ()
catBanner divContent = do
div_ [class_ "category-status-banner"] $
strong_ divContent
-- | Render category notes (or “description”).
renderCategoryNotes :: MonadIO m => Category -> HtmlT m ()
renderCategoryNotes category = cached (CacheCategoryNotes (category^.uid)) $ do
let thisId = "category-notes-" <> uidToText (category^.uid)
this = JS.selectId thisId
div_ [id_ thisId, class_ "category-notes"] $ do
section "normal" [shown, noScriptShown] $ do
div_ [class_ "notes-like"] $ do
if markdownNull (category^.notes)
then p_ "write something here!"
else toHtml (category^.notes)
textButton "edit description" $
JS.switchSection (this, "editing" :: Text) <>
JS.focusOn [(this `JS.selectSection` "editing")
`JS.selectChildren`
JS.selectClass "editor"]
section "editing" [] $ do
contents <- if markdownNull (category^.notes)
then liftIO $ toMarkdownBlock <$>
T.readFile "static/category-notes-template.md"
else return (category^.notes)
markdownEditor
[rows_ "10", class_ " editor "]
contents
(\val -> JS.submitCategoryNotes
(this, category^.uid, category^.notes.mdSource, val))
(JS.switchSection (this, "normal" :: Text))
"or press Ctrl+Enter to save"
| aelve/hslibs | src/Guide/Views/Category.hs | bsd-3-clause | 6,687 | 0 | 27 | 1,768 | 1,720 | 844 | 876 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module Step where
import Prelude hiding (map)
import qualified Prelude
import Data.Foldable
import Data.Bifunctor
import Control.Monad.Writer.Lazy
-- | 'Step' is used to calculate something incrementally;
-- intermediate values are written out.
type Step w a = Writer [w] a
runStep :: Step w a -> (a,[w])
runStep = runWriter
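-- For example:
--
-- > runStep (yield 1 >> yield 2 >> return "done") == ("done", [1, 2])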
last :: Step w a -> Step w (Maybe w,a)
last steps = each list >> return (lastMay list ,a)
where
(a,list) = runStep steps
lastMay [] = Nothing
lastMay [lastA] = Just lastA
lastMay (_:rest) = lastMay rest
yield :: a -> Step a ()
yield a = tell [a]
each :: Foldable f => f w -> Step w ()
each = tell.toList
map :: (w1 -> w2) -> Step w1 a -> Step w2 a
map f = mapWriterT (fmap $ second (Prelude.map f))
censors :: MonadWriter [w] m => (w->w) -> m a -> m a
censors f = censor (Prelude.map f)
| kwibus/myLang | src/Step.hs | bsd-3-clause | 870 | 0 | 11 | 187 | 374 | 197 | 177 | 24 | 3 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.AMD.VertexShaderTessellator
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/AMD/vertex_shader_tessellator.txt AMD_vertex_shader_tessellator> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.AMD.VertexShaderTessellator (
-- * Enums
gl_CONTINUOUS_AMD,
gl_DISCRETE_AMD,
gl_INT_SAMPLER_BUFFER_AMD,
gl_SAMPLER_BUFFER_AMD,
gl_TESSELLATION_FACTOR_AMD,
gl_TESSELLATION_MODE_AMD,
gl_UNSIGNED_INT_SAMPLER_BUFFER_AMD,
-- * Functions
glTessellationFactorAMD,
glTessellationModeAMD
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/AMD/VertexShaderTessellator.hs | bsd-3-clause | 977 | 0 | 4 | 109 | 70 | 54 | 16 | 12 | 0 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
-- | A wrapping of createProcess to provide a more flexible interface.
module General.Process(
Buffer, newBuffer, readBuffer,
process, ProcessOpts(..), Source(..), Destination(..)
) where
import Control.Concurrent.Extra
import Control.DeepSeq
import Control.Exception.Extra as C
import Control.Monad.Extra
import Data.List.Extra
import Data.Maybe
import Foreign.C.Error
import System.Exit
import System.IO.Extra
import System.Info.Extra
import System.Process
import System.Time.Extra
import Data.Unique
import Data.IORef.Extra
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy as LBS
import General.Extra
import Development.Shake.Internal.Errors
import GHC.IO.Exception (IOErrorType(..), IOException(..))
---------------------------------------------------------------------
-- BUFFER ABSTRACTION
data Buffer a = Buffer Unique (IORef [a])
instance Eq (Buffer a) where Buffer x _ == Buffer y _ = x == y
instance Ord (Buffer a) where compare (Buffer x _) (Buffer y _) = compare x y
newBuffer :: IO (Buffer a)
newBuffer = liftM2 Buffer newUnique (newIORef [])
addBuffer :: Buffer a -> a -> IO ()
addBuffer (Buffer _ ref) x = atomicModifyIORef_ ref (x:)
readBuffer :: Buffer a -> IO [a]
readBuffer (Buffer _ ref) = reverse <$> readIORef ref
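-- Elements come back in the order they were added: after
--   b <- newBuffer; addBuffer b "x"; addBuffer b "y"
-- readBuffer returns ["x","y"].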
---------------------------------------------------------------------
-- OPTIONS
data Source
= SrcFile FilePath
| SrcString String
| SrcBytes LBS.ByteString
| SrcInherit
data Destination
= DestEcho
| DestFile FilePath
| DestString (Buffer String)
| DestBytes (Buffer BS.ByteString)
deriving (Eq,Ord)
isDestString DestString{} = True; isDestString _ = False
isDestBytes DestBytes{} = True; isDestBytes _ = False
data ProcessOpts = ProcessOpts
{poCommand :: CmdSpec
,poCwd :: Maybe FilePath
,poEnv :: Maybe [(String, String)]
,poTimeout :: Maybe Double
,poStdin :: [Source]
,poStdout :: [Destination]
,poStderr :: [Destination]
,poAsync :: Bool
,poCloseFds :: Bool
,poGroup :: Bool
}
---------------------------------------------------------------------
-- IMPLEMENTATION
-- | If two buffers can be replaced by one and a copy, do that (only if they start empty)
optimiseBuffers :: ProcessOpts -> IO (ProcessOpts, IO ())
optimiseBuffers po@ProcessOpts{..} = pure (po{poStdout = nubOrd poStdout, poStderr = nubOrd poStderr}, pure ())
stdStream :: (FilePath -> Handle) -> [Destination] -> [Destination] -> StdStream
stdStream _ [DestEcho] _ = Inherit
stdStream file [DestFile x] other | other == [DestFile x] || DestFile x `notElem` other = UseHandle $ file x
stdStream _ _ _ = CreatePipe
stdIn :: (FilePath -> Handle) -> [Source] -> (StdStream, Handle -> IO ())
stdIn _ [SrcInherit] = (Inherit, const $ pure ())
stdIn file [SrcFile x] = (UseHandle $ file x, const $ pure ())
stdIn file src = (,) CreatePipe $ \h -> ignoreSigPipe $ do
forM_ src $ \case
SrcString x -> hPutStr h x
SrcBytes x -> LBS.hPutStr h x
SrcFile x -> LBS.hPutStr h =<< LBS.hGetContents (file x)
SrcInherit -> pure () -- Can't both inherit and set it
hClose h
ignoreSigPipe :: IO () -> IO ()
ignoreSigPipe = handleIO $ \e -> case e of
IOError {ioe_type=ResourceVanished, ioe_errno=Just ioe} | Errno ioe == ePIPE -> pure ()
_ -> throwIO e
withExceptions :: IO () -> IO a -> IO a
withExceptions stop go = do
bar <- newBarrier
v <- mask $ \unmask -> do
forkFinally (unmask go) $ signalBarrier bar
unmask (waitBarrier bar) `onException` do
forkIO stop
waitBarrier bar
either throwIO pure v
withTimeout :: Maybe Double -> IO () -> IO a -> IO a
withTimeout Nothing _ go = go
withTimeout (Just s) stop go = bracket (forkIO $ sleep s >> stop) killThread $ const go
cmdSpec :: CmdSpec -> CreateProcess
cmdSpec (ShellCommand x) = shell x
cmdSpec (RawCommand x xs) = proc x xs
forkWait :: IO a -> IO (IO a)
forkWait a = do
res <- newEmptyMVar
_ <- mask $ \restore -> forkIO $ try_ (restore a) >>= putMVar res
pure $ takeMVar res >>= either throwIO pure
abort :: Bool -> ProcessHandle -> IO ()
abort poGroup pid = do
when poGroup $ do
interruptProcessGroupOf pid
sleep 3 -- give the process a few seconds grace period to die nicely
terminateProcess pid
withFiles :: IOMode -> [FilePath] -> ((FilePath -> Handle) -> IO a) -> IO a
withFiles mode files act = withs (map (`withFile` mode) files) $ \handles ->
act $ \x -> fromJust $ lookup x $ zipExact files handles
-- General approach taken from readProcessWithExitCode
process :: ProcessOpts -> IO (ProcessHandle, ExitCode)
process po = do
(ProcessOpts{..}, flushBuffers) <- optimiseBuffers po
let outFiles = nubOrd [x | DestFile x <- poStdout ++ poStderr]
let inFiles = nubOrd [x | SrcFile x <- poStdin]
withFiles WriteMode outFiles $ \outHandle -> withFiles ReadMode inFiles $ \inHandle -> do
let cp = (cmdSpec poCommand){cwd = poCwd, env = poEnv, create_group = poGroup, close_fds = poCloseFds
,std_in = fst $ stdIn inHandle poStdin
,std_out = stdStream outHandle poStdout poStderr, std_err = stdStream outHandle poStderr poStdout}
withCreateProcessCompat cp $ \inh outh errh pid ->
withTimeout poTimeout (abort poGroup pid) $ withExceptions (abort poGroup pid) $ do
let streams = [(outh, stdout, poStdout) | Just outh <- [outh], CreatePipe <- [std_out cp]] ++
[(errh, stderr, poStderr) | Just errh <- [errh], CreatePipe <- [std_err cp]]
wait <- forM streams $ \(h, hh, dest) -> do
-- no point tying the streams together if one is being streamed directly
let isTied = not (poStdout `disjoint` poStderr) && length streams == 2
let isBinary = any isDestBytes dest || not (any isDestString dest)
when isTied $ hSetBuffering h LineBuffering
when (DestEcho `elem` dest) $ do
buf <- hGetBuffering hh
case buf of
BlockBuffering{} -> pure ()
_ -> hSetBuffering h buf
if isBinary then do
hSetBinaryMode h True
dest<- pure $ flip map dest $ \case
DestEcho -> BS.hPut hh
DestFile x -> BS.hPut (outHandle x)
DestString x -> addBuffer x . (if isWindows then replace "\r\n" "\n" else id) . BS.unpack
DestBytes x -> addBuffer x
forkWait $ whileM $ do
src <- BS.hGetSome h 4096
mapM_ ($ src) dest
notM $ hIsEOF h
else if isTied then do
dest<- pure $ flip map dest $ \case
DestEcho -> hPutStrLn hh
DestFile x -> hPutStrLn (outHandle x)
DestString x -> addBuffer x . (++ "\n")
DestBytes{} -> throwImpure $ errorInternal "Not reachable due to isBinary condition"
forkWait $ whileM $
ifM (hIsEOF h) (pure False) $ do
src <- hGetLine h
mapM_ ($ src) dest
pure True
else do
src <- hGetContents h
wait1 <- forkWait $ C.evaluate $ rnf src
waits <- forM dest $ \case
DestEcho -> forkWait $ hPutStr hh src
DestFile x -> forkWait $ hPutStr (outHandle x) src
DestString x -> do addBuffer x src; pure $ pure ()
DestBytes{} -> throwImpure $ errorInternal "Not reachable due to isBinary condition"
pure $ sequence_ $ wait1 : waits
whenJust inh $ snd $ stdIn inHandle poStdin
if poAsync then
pure (pid, ExitSuccess)
else do
sequence_ wait
flushBuffers
res <- waitForProcess pid
whenJust outh hClose
whenJust errh hClose
pure (pid, res)
---------------------------------------------------------------------
-- COMPATIBILITY
-- available in process-1.4.3.0, GHC ??? (Nov 2015)
-- logic copied directly (apart from Ctrl-C handling magic using internal pieces)
withCreateProcessCompat :: CreateProcess -> (Maybe Handle -> Maybe Handle -> Maybe Handle -> ProcessHandle -> IO a) -> IO a
withCreateProcessCompat cp act = bracketOnError (createProcess cp) cleanup
(\(m_in, m_out, m_err, ph) -> act m_in m_out m_err ph)
where
cleanup (inh, outh, errh, pid) = do
terminateProcess pid
whenJust inh $ ignoreSigPipe . hClose
whenJust outh hClose
whenJust errh hClose
forkIO $ void $ waitForProcess pid
| ndmitchell/shake | src/General/Process.hs | bsd-3-clause | 9,305 | 1 | 39 | 2,911 | 2,881 | 1,457 | 1,424 | 178 | 15 |
-- ---
-- title: Exploring survival on the Titanic
-- subtitle: A port of a Kaggle's notebook
-- author: Nikita Tchayka
--
-- ---
--
-- 1. Introduction
-- ===============
--
-- This is a direct port of the [Exploring survival on the titanic notebook](https://www.kaggle.com/mrisdal/titanic/exploring-survival-on-the-titanic) by Megan Risdal.
-- The intention of this is to have an example of what can be achieved with the data science tools we have in Haskell as for **February 27th, 2017**.
{-# LANGUAGE OverloadedStrings #-}
module Lib where
-- Be sure to fire your repl with `stack repl` and loading the
-- `OverloadedStrings` extension by issuing `:set -XOverloadedStrings`
-- into it.
import Data.List
import Data.Maybe
import Data.Monoid
import qualified Control.Foldl as L
import qualified Data.Text as T
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Analyze.Csv as CSV
import qualified Analyze.RFrame as RF
import qualified Data.Vector as V
import Data.Text (Text)
import Analyze.RFrame (RFrame, RFrameUpdate)
import Data.Vector (Vector)
import Text.Regex
import Text.Regex.Base
import Graphics.Rendering.Chart.Easy hiding (Vector)
import Graphics.Rendering.Chart.Backend.Diagrams
-- 1.1 Load and check data
-- -----------------------
--
-- Let's begin by loading our data
trainingSet :: IO (RFrame Text Text)
trainingSet = do
train <- readFile "input/train.csv" >>= loadCSV
return train
where
loadCSV = CSV.decodeWithHeader . BL.pack
-- Lets check what's there, from our REPL:
--
-- ```
-- *Lib> ts <- trainingSet
-- *Lib> RF._rframeKeys ts
-- ["PassengerId","Pclass","Name","Sex","Age","SibSp","Parch","Ticket","Fare","Cabin","Embarked"]
-- *Lib> V.head $ RF._rframeData ts
-- ["1","3","Braund, Mr. Owen Harris","male","22","1","0","A/5 21171","7.25","","S"]
-- *Lib> V.length $ RF._rframeData ts
-- 902
-- ```
--
-- From here we see what we have to deal with:
--
-- | **Variable Name** | **Description** |
-- |-------------------|----------------------------------|
-- | Survived | True or False |
-- | Pclass | Passenger's class |
-- | name | Passenger's name |
-- | sex | Passenger's sex |
-- | age | Passenger's age |
-- | SibSp | Number of siblings/spouses aboard|
-- | Parch | Number of parents/children aboard|
-- | Ticket | Ticket number |
-- | Fare | Fare |
-- | Cabin | Cabin |
-- | Embarked | Port of embarkation |
--
-- 2. Feature Engineering
-- ======================
--
-- 2.1 What's in a name?
-- ---------------------
--
-- We can see that in the passenger name we have the *passenger title*, so we
-- can break it down into additional variables to have better predictions. Also,
-- we can break it into *surname* too to represent families.
getTitleFromName :: Text -> Text
getTitleFromName name = T.pack
$ subRegex titleRegex unpackedName ""
where
titleRegex = mkRegex "(.*, )|(\\..*)"
unpackedName = T.unpack name
-- Now we can use this function in our REPL to extract the title:
--
-- ```
-- *Lib> getTitleFromName "The best, Capt. Obvious"
-- "Capt"
-- *Lib> getTitleFromName "No title here"
-- ""
-- ```
--
-- What if we wanted to count how many people of each title are there?
-- Well, we can construct it very easily using Haskell!
--
-- ```
-- countPrefixes :: Text -> Vector Text -> Int
-- countPrefixes title names = length $ filter (isInfixOf title) names
-- ```
--
-- But we can do better and make a synonym of the function by omitting the
-- last argument.
countPrefix :: Text -> Vector Text -> Int
countPrefix p = V.length . V.filter (T.isInfixOf p)
-- Also, let's make a function that counts how many times a title appears
-- in the names:
countedTitles :: Vector Text -> Vector (Text, Int)
countedTitles names = V.zip titles counts
where
counts = flip countPrefix names <$> titles
titles = removeDupes $ getTitleFromName <$> names
removeDupes = V.fromList . nub . V.toList
-- Just in case you are wondering, we can read `<$>` as _over_ ,
-- if you are familiar with the `map` function, it is just an alias
-- for it.
--
-- From our REPL, now we can run:
-- ```
-- *Lib> ts <- trainingSet
-- *Lib> countedTitles <$> RF.col "Name" ts
-- [("Mr",657),("Mrs",132),("Miss",183),("Master",40),("Don",2),("Rev",6)
-- ,("Dr",11),("Mme",1),("Ms",1),("Major",2),("Lady",1),("Sir",3),("Mlle",2)
-- ,("Col",10),("Capt",1),("the Countess",1),("Jonkheer",1)
-- ]
-- ```
--
-- Let's take the rare titles out by replacing them with "Rare Title",
-- "Mlle" and "Ms" by "Miss" and "Mme" by "Mrs"
rareTitles :: [Text]
rareTitles = [ "Dona"
, "Lady"
, "the Countess"
, "Capt"
, "Col"
, "Don"
, "Dr"
, "Major"
, "Rev"
, "Sir"
, "Jonkheer"
]
addColumn :: RFrame Text Text -> Text -> Vector Text -> IO (RFrame Text Text)
addColumn rf name v = do
c <- newRFrameColumn name $ V.singleton <$> v
RF.extendCols rf c
where
newRFrameColumn rfName = RF.fromUpdate . RF.RFrameUpdate (V.singleton rfName)
extractTitle :: Text -> Text
extractTitle n
| getTitleFromName n `elem` rareTitles = "Rare Title"
| getTitleFromName n `elem` ["Mlle", "Ms"] = "Miss"
| getTitleFromName n == "Mme" = "Mrs"
| otherwise = getTitleFromName n
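-- As a quick sanity check (the second name below is made up purely to hit
-- the rare-title branch), we would expect:
--
-- ```
-- *Lib> extractTitle "Braund, Mr. Owen Harris"
-- "Mr"
-- *Lib> extractTitle "Doe, Capt. John"
-- "Rare Title"
-- ```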
addTitleColumn :: RFrame Text Text -> IO (RFrame Text Text)
addTitleColumn frame = do
nameColumn <- RF.col "Name" frame
let titles = extractTitle <$> nameColumn
addColumn frame "Title" titles
-- What we are doing here is basically creating another title column.
--
-- - We are extracting the "Name" column from our `namesFrame`
-- - We create a new column **after** being sure that each row contains
-- a single element like ["Mrs"], **after** extracting the titles **over**
-- the `nameColumn` we extracted.
-- - We extend the RFrame we got passed and return it.
--
-- As these functions can fail by different reasons, for example if the "Name"
-- column cannot be found, or if there is a mismatch on row number when
-- extending the RFrame, we make sure that it is under the `IO` type.
--
-- Let's see how many unique surnames we have in our dataset:
extractSurname :: Text -> Text
extractSurname = head . T.split dotOrComma
where
dotOrComma ',' = True
dotOrComma '.' = True
dotOrComma _ = False
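-- A quick check of the surname extraction, using the sample row shown
-- earlier, would give:
--
-- ```
-- *Lib> extractSurname "Braund, Mr. Owen Harris"
-- "Braund"
-- ```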
addSurnameColumn :: RFrame Text Text -> IO (RFrame Text Text)
addSurnameColumn frame = do
nameColumn <- RF.col "Name" frame
let surnames = extractSurname <$> nameColumn
addColumn frame "Surname" surnames
differents :: (Eq a) => Vector a -> Int
differents = length . nub . V.toList
-- We can now use this in our REPL:
-- ```
-- *Lib> ts <- trainingSet >>= addTitleColumn >>= addSurnameColumn
-- *Lib> differents <$> RF.col "Surname" ts
-- ```
--
-- 2.2 Do families sink or swim together?
-- --------------------------------------
--
-- Now that we know what families are there thanks to surname extraction, let's
-- make it a bit more interesting to know about them and how can we relate them.
-- Let's make a family variable, which tells us which size the family is and a
-- number of children/parents.
addFamilySizeColumn :: RFrame Text Text -> IO (RFrame Text Text)
addFamilySizeColumn frame = do
sibSpColumn <- fmap (read . T.unpack) <$> RF.col "SibSp" frame :: IO (Vector Int)
parchColumn <- fmap (read . T.unpack) <$> RF.col "Parch" frame :: IO (Vector Int)
let familySizes = T.pack <$> show <$> (+1) <$> V.zipWith (+) sibSpColumn parchColumn
addColumn frame "Fsize" familySizes
addFamilyColumn :: RFrame Text Text -> IO (RFrame Text Text)
addFamilyColumn frame = do
surnameColumn <- RF.col "Surname" frame
fsizeColumn <- RF.col "Fsize" frame
let families = V.zipWith (\fs sn -> fs <> "_" <> sn) fsizeColumn surnameColumn
addColumn frame "Family" families
-- To add the columns:
-- ```
-- *Lib> ts <- trainingSet >>= addTitleColumn >>= addSurnameColumn >>= addFamilySizeColumn >>= addFamilyColumn
-- *Lib> differents <$> RF.col "Surname" ts
-- ```
--
-- It is better to create an alias for this, so we don't have to type it
-- constantly.
extendedTrainingSet :: IO (RFrame Text Text)
extendedTrainingSet = trainingSet
>>= addTitleColumn
>>= addSurnameColumn
>>= addFamilySizeColumn
>>= addFamilyColumn
-- To use it:
-- ```
-- *Lib> ts <- extendedTrainingSet
-- ```
drawSurvivedPlot :: RFrame Text Text -> IO ()
drawSurvivedPlot frame = do

let chartTitles = ["True","False"]
survivedColumn <- RF.col "Survived" frame
familySizeColumn <- RF.col "Fsize" frame :: IO (Vector Text)
let familySizesInt = (read . T.unpack) <$> familySizeColumn :: Vector Int
  let survivedsBool = (== "1") <$> survivedColumn
let plotData = countSurvivedForSize familySizesInt survivedsBool
print plotData
toFile def "img/plot1.svg" $ do
layout_x_axis . laxis_generate .= autoIndexAxis (show <$> fst <$> plotData)
plot $ fmap plotBars $ bars chartTitles (addIndexes (snd <$> plotData))
countSurvivedForSize :: Vector Int -> Vector Bool -> [(Int, [Int])]
countSurvivedForSize familySizes surviveds =
constructInfo <$> [(V.minimum familySizes)..(V.maximum familySizes)]
where
constructInfo familySize = ( familySize
, [ aliveWithFamilySize familySize
, deadWithFamilySize familySize
]
)
aliveWithFamilySize fs = V.length
. V.filter ((== fs) . fst)
. V.filter snd
. V.zip familySizes
$ surviveds
deadWithFamilySize fs = V.length
. V.filter ((== fs) . fst)
. V.filter (not . snd)
. V.zip familySizes
$ surviveds
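-- As an illustration of the shape of the data it produces, on a toy input
-- we would expect:
--
-- ```
-- *Lib> countSurvivedForSize (V.fromList [1,2,2]) (V.fromList [True,False,True])
-- [(1,[1,0]),(2,[1,1])]
-- ```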
-- After executing the following commands, we can get the following chart:
-- ```
-- *Lib> extendedTrainingSet >>= drawSurvivedPlot
-- ```
--
-- 
-- # FIXME
| NickSeagull/ex01-exploring-titanic | src/Lib.hs | bsd-3-clause | 10,785 | 0 | 15 | 2,878 | 1,703 | 949 | 754 | -1 | -1 |
{-# LANGUAGE TypeApplications, UndecidableInstances #-}
module RenameTest where
import qualified Data.Map as Map
import qualified Data.Text as T
import Data.Time (getCurrentTime, Day, UTCTime(..))
import Init
-- persistent used to not allow types with an "Id" suffix
-- this verifies that the issue is fixed
type TextId = Text
-- Test lower case names
share [mkPersist sqlSettings { mpsGeneric = True }, mkMigrate "migration"] [persistLowerCase|
-- This just tests that a field can be named "key"
KeyTable
key Text
deriving Eq Show
IdTable
-- this used to have a default=CURRENT_DATE, but the test that uses it
-- specifies that there is no default on this column. the default is
-- failing MySQL and sqlite tests since they don't have shared overlap on
-- an appropriate default for a date.
Id Day
name Text
-- This was added to test the ability to break a cycle
-- getting rid of the Maybe should be a compilation failure
keyTableEmbed IdTable Maybe
deriving Eq Show
LowerCaseTable
Id sql=my_id
fullName Text
ExtraBlock
foo bar
baz
bin
ExtraBlock2
something
RefTable
someVal Int sql=something_else
lct LowerCaseTableId
text TextId
UniqueRefTable someVal
-- Test a reference to a non-int Id
ForeignIdTable
idId IdTableId
|]
cleanDB
:: forall backend.
( BaseBackend backend ~ backend
, PersistQueryWrite backend
)
=> ReaderT backend IO ()
cleanDB = do
deleteWhere ([] :: [Filter (IdTableGeneric backend)])
deleteWhere ([] :: [Filter (LowerCaseTableGeneric backend)])
deleteWhere ([] :: [Filter (RefTableGeneric backend)])
specsWith
::
( PersistStoreWrite backend, PersistQueryRead backend
, backend ~ BaseBackend backend
, MonadIO m, MonadFail m
, Eq (BackendKey backend)
)
=> RunDb backend m
-> Spec
specsWith runDb = describe "rename specs" $ do
describe "LowerCaseTable" $ do
it "LowerCaseTable has the right sql name" $ do
fmap fieldDB (getEntityIdField (entityDef (Proxy @LowerCaseTable)))
`shouldBe`
Just (FieldNameDB "my_id")
it "user specified id, insertKey, no default=" $ runDb $ do
let rec2 = IdTable "Foo2" Nothing
let rec1 = IdTable "Foo1" $ Just rec2
let rec = IdTable "Foo" $ Just rec1
now <- liftIO getCurrentTime
let key = IdTableKey $ utctDay now
insertKey key rec
Just rec' <- get key
rec' @== rec
(Entity key' _):_ <- selectList ([] :: [Filter (IdTableGeneric backend)]) []
key' @== key
it "extra blocks" $
getEntityExtra (entityDef (Nothing :: Maybe LowerCaseTable)) @?=
Map.fromList
[ ("ExtraBlock", map T.words ["foo bar", "baz", "bin"])
, ("ExtraBlock2", map T.words ["something"])
]
| paul-rouse/persistent | persistent-test/src/RenameTest.hs | mit | 2,925 | 0 | 22 | 805 | 603 | 305 | 298 | -1 | -1 |
module TupleMatch where
main :: Int
main = let (x, y) = (13, x) in x + y
-- let (Tup x y) = Tup 13 x in x+y
| roberth/uu-helium | test/simple/correct/TupleMatch.hs | gpl-3.0 | 118 | 0 | 9 | 39 | 43 | 25 | 18 | 3 | 1 |
x :: Eq a => a -> a -> Bool
y :: Eq a => a -> a -> Bool
z :: Eq a => a -> a -> Bool
(x, _) = undefined
(y) = undefined
z@_ = undefined
f :: Eq a => a -> a -> Bool
f a b = (a == b)
| roberth/uu-helium | test/staticerrors/SignatureForRestrPat.hs | gpl-3.0 | 183 | 1 | 9 | 60 | 146 | 70 | 76 | 8 | 1 |
{-
Copyright 2015-2017 Markus Ongyerth
This file is part of Monky.
Monky is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Monky is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with Monky. If not, see <http://www.gnu.org/licenses/>.
-}
{-|
Module : Monky.Examples.MPD
Description : An example module instance for the MPD module
Maintainer : ongy
Stability : testing
Portability : Linux
-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Monky.Examples.MPD
( MPDHandle
, getMPDHandle
, getMPDHandleF
)
where
import Data.Text (Text)
import qualified Data.Text as T
import Data.IORef
import System.IO (hPutStrLn, stderr)
import System.Posix.Types (Fd)
import Monky.MPD
import Monky.Modules
import Monky.Examples.Utility
#if MIN_VERSION_base(4,8,0)
#else
import Control.Applicative ((<$>), pure, (<*>))
#endif
type ConvertFun = (State, Maybe SongInfo) -> Text
getSongTitle :: MPDSocket -> ConvertFun -> IO Text
-- TODO: Clean this up a bit. Probably do notation?
getSongTitle sock fun = (fmap state <$> getMPDStatus sock) >>= getSong
where getSong (Left x) = return . T.pack $ x
getSong (Right Playing) = do
info <- getMPDSong sock
case info of
Right x -> pure $ fun (Playing, Just x)
Left x -> pure $ T.pack x
getSong (Right x) = pure $ fun (x, Nothing)
-- |The handle for this example
data MPDHandle = MPDHandle
{ _host :: String
, _port :: String
, _sock :: IORef (Maybe MPDSocket)
, _convert :: ConvertFun
}
-- TODO ignoring errors is never a good idea
getEvent :: MPDSocket -> ConvertFun -> IO Text
getEvent s fun = do
_ <- readOk s
t <- getSongTitle s fun
_ <- goIdle s " player"
return t
getFd :: MPDSocket -> IO [Fd]
getFd s = do
fd <- getMPDFd s
_ <- goIdle s " player"
return [fd]
instance PollModule MPDHandle where
getOutput (MPDHandle _ _ s f) = do
r <- readIORef s
case r of
Nothing -> return [MonkyPlain "Broken"]
(Just x) -> do
ret <- getSongTitle x f
return [MonkyPlain ret]
initialize (MPDHandle h p r _) = do
s <- getMPDSocket h p
case s of
(Right x) -> writeIORef r (Just x)
(Left _) -> return ()
instance EvtModule MPDHandle where
startEvtLoop h@(MPDHandle _ _ s f) fun = do
initialize h
fun =<< getOutput h
r <- readIORef s
case r of
Nothing -> hPutStrLn stderr "Could not initialize MPDHandle :("
(Just x) -> do
[fd] <- getFd x
loopFd x fd fun (fmap (\y -> [MonkyPlain y]) . flip getEvent f)
defaultConvert :: (State, Maybe SongInfo) -> Text
defaultConvert (Playing, Just x) = case tagTitle . songTags $ x of
Nothing -> "Can't extract song title"
Just y -> y
defaultConvert (Playing, Nothing) = "Can't extract song"
defaultConvert _ = "Not Playing"
-- |Get an 'MPDHandle' (server has to be running when this is executed)
getMPDHandle
:: String -- ^The host to connect to
-> String -- ^The port to connect to
-> IO MPDHandle
getMPDHandle h p =
MPDHandle h p <$> newIORef Nothing <*> pure defaultConvert
-- | Get the 'MPDHandle' with a custom conversion function. You will need to
-- import `Monky.MPD` to get the definitions into scope
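--
-- A minimal sketch of a custom conversion function (the host and port below
-- are placeholders, not defaults taken from this module):
--
-- > myConvert :: (State, Maybe SongInfo) -> Text
-- > myConvert (Playing, Just si) = maybe "Unknown title" id (tagTitle (songTags si))
-- > myConvert _ = "Not playing"
-- >
-- > myHandle :: IO MPDHandle
-- > myHandle = getMPDHandleF "127.0.0.1" "6600" myConvert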
getMPDHandleF
:: String -- ^The host to connect to
-> String -- ^The port to connect to
-> ConvertFun -- ^The function to extract the text
-> IO MPDHandle
getMPDHandleF h p f =
MPDHandle h p <$> newIORef Nothing <*> pure f
| Ongy/monky | Monky/Examples/MPD.hs | lgpl-3.0 | 4,048 | 0 | 20 | 983 | 950 | 481 | 469 | 85 | 4 |
{- |
Module : $Header$
Description : Gtk GUI for the selection of linktypes
Copyright : (c) Thiemo Wiedemeyer, Uni Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
This module provides a GUI for the selection of linktypes.
-}
module GUI.GtkLinkTypeChoice
(showLinkTypeChoice)
where
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Glade
import GUI.GtkUtils
import qualified GUI.Glade.LinkTypeChoice as LinkTypeChoice
import Static.DgUtils
import Control.Monad (filterM)
import Data.IORef
import qualified Data.Map as Map
mapEdgeTypesToNames :: Map.Map String (DGEdgeType, DGEdgeType)
mapEdgeTypesToNames = Map.fromList $ map
(\ e -> ("cb" ++ getDGEdgeTypeName e, (e, e { isInc = True })))
(filter (not . isInc) listDGEdgeTypes)
-- | Displays the linktype selection window
showLinkTypeChoice :: IORef [String] -> ([DGEdgeType] -> IO ()) -> IO ()
showLinkTypeChoice ioRefDeselect updateFunction = postGUIAsync $ do
xml <- getGladeXML LinkTypeChoice.get
window <- xmlGetWidget xml castToWindow "linktypechoice"
ok <- xmlGetWidget xml castToButton "btnOk"
cancel <- xmlGetWidget xml castToButton "btnCancel"
select <- xmlGetWidget xml castToButton "btnSelect"
deselect <- xmlGetWidget xml castToButton "btnDeselect"
invert <- xmlGetWidget xml castToButton "btnInvert"
deselectEdgeTypes <- readIORef ioRefDeselect
mapM_ (\ name -> do
cb <- xmlGetWidget xml castToCheckButton name
toggleButtonSetActive cb False
) deselectEdgeTypes
let
edgeMap = mapEdgeTypesToNames
keys = Map.keys edgeMap
setAllTo to = mapM_ (\ name -> do
cb <- xmlGetWidget xml castToCheckButton name
to' <- to cb
toggleButtonSetActive cb to'
) keys
onClicked select $ setAllTo (\ _ -> return True)
onClicked deselect $ setAllTo (\ _ -> return False)
onClicked invert $ setAllTo (\ cb -> do
selected <- toggleButtonGetActive cb
return $ not selected
)
onClicked cancel $ widgetDestroy window
onClicked ok $ do
edgeTypeNames <- filterM (\ name -> do
cb <- xmlGetWidget xml castToCheckButton name
selected <- toggleButtonGetActive cb
return $ not selected
) keys
writeIORef ioRefDeselect edgeTypeNames
let edgeTypes = foldl (\ eList (e, eI) -> e : eI : eList) []
$ map (flip (Map.findWithDefault
(error "GtkLinkTypeChoice: lookup error!"))
edgeMap
) edgeTypeNames
forkIO_ $ updateFunction edgeTypes
widgetDestroy window
widgetShow window
| keithodulaigh/Hets | GUI/GtkLinkTypeChoice.hs | gpl-2.0 | 3,003 | 0 | 21 | 936 | 688 | 335 | 353 | 57 | 1 |
module Main where
helloWorld :: IO ()
helloWorld = print "helloWorld"
data Hello = HelloWorld deriving ( Show )
main = do
helloWorld
print HelloWorld
| rahulmutt/ghcvm | tests/suite/similar-names/compile/PassFunctionDataConstructor.hs | bsd-3-clause | 159 | 0 | 7 | 34 | 50 | 26 | 24 | 7 | 1 |
module Yesod.FeedTypes
( Feed (..)
, FeedEntry (..)
, EntryEnclosure (..)
) where
import Text.Hamlet (Html)
import Data.Time.Clock (UTCTime)
import Data.Text (Text)
-- | The overall feed
data Feed url = Feed
{ feedTitle :: Text
, feedLinkSelf :: url
, feedLinkHome :: url
, feedAuthor :: Text
-- | note: currently only used for Rss
, feedDescription :: Html
-- | note: currently only used for Rss, possible values:
-- <http://www.rssboard.org/rss-language-codes>
, feedLanguage :: Text
, feedUpdated :: UTCTime
, feedLogo :: Maybe (url, Text)
, feedEntries :: [FeedEntry url]
}
-- | RSS and Atom allow for linked content to be enclosed in a feed entry.
-- This represents the enclosed content.
--
-- Atom feeds ignore 'enclosedSize' and 'enclosedMimeType'.
--
-- @since 1.6
data EntryEnclosure url = EntryEnclosure
{ enclosedUrl :: url
, enclosedSize :: Int -- ^ Specified in bytes
, enclosedMimeType :: Text
}
-- | Each feed entry
data FeedEntry url = FeedEntry
{ feedEntryLink :: url
, feedEntryUpdated :: UTCTime
, feedEntryTitle :: Text
, feedEntryContent :: Html
, feedEntryEnclosure :: Maybe (EntryEnclosure url)
-- ^ Allows enclosed data: RSS \<enclosure> or Atom \<link
-- rel=enclosure>
--
-- @since 1.5
}
| psibi/yesod | yesod-newsfeed/Yesod/FeedTypes.hs | mit | 1,411 | 0 | 11 | 394 | 232 | 153 | 79 | 27 | 0 |
{- Tests let-expressions in do-statements -}
module Main( main ) where
foo = do
putStr "a"
let x = "b" in putStr x
putStr "c"
main = do
putStr "a"
foo
let x = "b" in putStrLn x
| ezyang/ghc | testsuite/tests/deSugar/should_run/dsrun002.hs | bsd-3-clause | 232 | 0 | 10 | 93 | 75 | 34 | 41 | 9 | 1 |
-- Just used to test that we correctly handle non-existant plugins
module Main where
-- The contents of this file are actually irrelevant
main = return () | ghc-android/ghc | testsuite/tests/plugins/plugins03.hs | bsd-3-clause | 155 | 0 | 6 | 27 | 16 | 10 | 6 | 2 | 1 |
module Exercises.Test where
import Exercises
import Test.QuickCheck
import Test.Tasty
import Test.Tasty.QuickCheck as QC
import Test.Tasty.SmallCheck as SC
import Test.Tasty.HUnit
import Data.Set as DS
main :: IO ()
main = defaultMain tests
tests :: TestTree
tests = testGroup "Tests" [properties] --, unitTests, examples]
properties :: TestTree
properties = testGroup "Properties" [qcProps]
qcProps :: TestTree
qcProps = testGroup "(checked by QuickCheck)" []
| JeremyLWright/haskellbook | src-test/Exercises.Test.hs | isc | 451 | 1 | 6 | 68 | 113 | 68 | 45 | -1 | -1 |
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module ErrorF where
import FFIExample
import DOM
import Data.Text (fromString)
import qualified Data.Text as T
import Fay.Yesod
--import Fay.Text (toJSON)
import Prelude hiding (sort)
import FFI
import SharedTypes
import Widget
main :: Fay ()
main = do
addComboBox
let combos = [region, mark, model, age, generation]
mapM_ disable [ "mark-input","model-input","year-input"
, "generation-input","budget","form-submit"
]
mapM_ addCommonEvent combos
addBudgetEvent
addSubmitEvent
mapM_ clickInputCombo $ map (fst . selectId) combos
| swamp-agr/carbuyer-advisor | fay/ErrorF.hs | mit | 718 | 0 | 10 | 194 | 158 | 90 | 68 | 22 | 1 |
module HAD.Y2014.M02.D27.Exercise where
-- | Divide all the elements of the list (2nd parameter) by the first parameter
-- iff all the elements of the lists are exact multiple of it
-- returns nothing otherwise
--
-- Examples:
--
-- >>> divIfMultiple 3 [3, 6 .. 12]
-- Just [1,2,3,4]
-- >>> divIfMultiple 2 [3, 6 .. 12]
-- Nothing
--
divIf' :: Integral a => (a, a) -> Maybe a
divIf' (q, r)
  | r == 0 = Just q
  | otherwise = Nothing
divIfMultiple :: Integral a => a -> [a] -> Maybe [a]
divIfMultiple x = mapM (divIf' . (flip quotRem $ x))
| espencer/1HAD | exercises/HAD/Y2014/M02/D27/Exercise.hs | mit | 551 | 0 | 10 | 121 | 138 | 77 | 61 | 7 | 1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
import qualified Workflow.OSX.C.Example
main = Workflow.OSX.C.Example.main
| sboosali/workflow-osx | workflow-osx-ffi/executables/Main.hs | mit | 125 | 0 | 5 | 11 | 20 | 14 | 6 | 3 | 1 |
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module TextAdvise where
import FFIExample
import DOM
import Data.Text (fromString)
import qualified Data.Text as T
import Fay.Yesod
import Prelude
import SharedTypes
main :: Fay ()
main = do
input <- getElementById "del"
val <- getValById "textadvise"
let url = T.concat [ "/text-advise" , "/" , val ]
onClick input $
ajaxDelete url
taInt <- parseMaybeInt val
addBtn <- getElementById "lkadd"
onClick addBtn $ do
gen <- getOptionVal "genupd"
genInt <- parseMaybeInt gen
call (LkAdd (genInt, taInt)) $ addGen "lkaddtable"
delBtn <- getElementsByClass "lkdel"
mapM_ (\x -> onClick x $ do
cell <- parentNode x
row <- parentNode cell
ival <- getAttr x "data"
iival <- parseMaybeInt ival
call (LkDel (iival,taInt)) $ delRow row) delBtn
imgBtn <- getElementById "imgadd"
onClick imgBtn $ do
imgs <- getElementsByClass "generated"
imgData <- mapM (\x -> getAttr x "src") imgs
call (ImgAdd (taInt, imgData)) $ addImg "imgaddtable"
imgDelBtn <- getElementsByClass "imgdel"
mapM_ (\x -> onClick x $ do
cell <- parentNode x
row <- parentNode cell
ival <- getAttr x "data"
iival <- parseMaybeInt ival
call (ImgDel iival) $ delRow row) imgDelBtn
addGen :: T.Text -> Maybe (T.Text, T.Text, T.Text, T.Text) -> Fay ()
addGen tab g = case g of
Nothing -> return ()
Just gid -> createRow "link" tab gid
createRow :: T.Text -> T.Text -> (T.Text, T.Text, T.Text, T.Text) -> Fay ()
createRow "link" tab (gid, gname, gba, gta) = do
row <- createElement "tr"
table <- getElementById tab
appendChild table row
setAttr row "class" "row"
createButtonCell row gid "Удалить"
mapM_ (createCell row)
[ (gname, "col-md-5")
, (gba, "col-md-3")
, (gta, "col-md-3")
]
createRow "image" tab (gid, imgurl, _, _) = do
row <- createElement "tr"
table <- getElementById tab
appendChild table row
setAttr row "class" "row"
createButtonCell row gid "Удалить"
createImgCell row imgurl
createCell :: Element -> (T.Text,T.Text) -> Fay ()
createCell row (val,cval) = do
cell <- createElement "td"
appendChild row cell
setInnerHTML cell val
setAttr cell "class" cval
createImgCell :: Element -> T.Text -> Fay ()
createImgCell row url = do
cell <- createElement "td"
appendChild row cell
img <- createElement "img"
appendChild cell img
setAttr img "src" url
setAttr img "class" "thumb"
createButtonCell :: Element -> T.Text -> T.Text -> Fay ()
createButtonCell row gid val = do
cell <- createElement "td"
appendChild row cell
setAttr cell "class" "col-md-1"
btn <- createElement "button"
appendChild cell btn
setValue btn val
setInnerHTML btn val
setAttr btn "class" "btn lkdel"
setAttr btn "data" gid
delRow :: Element -> Bool -> Fay ()
delRow _ False = return ()
delRow row True = do
table <- parentNode row
removeChild table row
addImg :: T.Text -> [(T.Text, T.Text)] -> Fay ()
addImg tab g = do
g4 <- mapM t2t4 g
mapM_ (createRow "image" tab) g4
t2t4 :: (T.Text, T.Text) -> Fay (T.Text, T.Text, T.Text, T.Text)
t2t4 (a,b) = return (a,b,"","")
| swamp-agr/carbuyer-advisor | fay/TextAdvise.hs | mit | 3,323 | 0 | 17 | 816 | 1,273 | 603 | 670 | 100 | 2 |
import Data.Ratio
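-- Project Euler problem 57 (square root convergents): among the first 1000
-- continued-fraction expansions of sqrt 2 computed below, count how many
-- fractions have a numerator with more digits than the denominator.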
toIntegerList :: Integer -> [Integer]
toIntegerList n = map toInt (show n)
where toInt c = read (c:"") :: Integer
hasCondition r = let
numDigits = length $ toIntegerList $ numerator r
denomDigits = length $ toIntegerList $ denominator r
in numDigits > denomDigits
squareExpans :: Integer -> Ratio Integer
squareExpans n = 1 + 1/(x n)
where
x 1 = 2
x n = (2 + 1/(x (n-1)))
ans' = filter hasCondition [squareExpans n | n <- [1..1000]]
ans = length ans'
| stefan-j/ProjectEuler | q57.hs | mit | 591 | 0 | 10 | 207 | 224 | 113 | 111 | 14 | 2 |
-- author: Kiyoshi Ikehara <[email protected]>
module FailoverTests (failoverTests) where
import Test.Framework
import Test.Framework.Providers.Sandbox (sandboxTests, sandboxTest, sandboxTestGroup)
import Test.Sandbox (liftIO, signal)
import Test.Sandbox.HUnit (assertFailure)
import Main.Internals
import Control.Monad
import GHC.Conc
import System.FilePath
import System.Posix.Signals
failoverTests :: Maybe FilePath -> Test
failoverTests binDir = do
let
check = withTimeout 1000 $ do
resp <- sendTo "flarei" "stats nodes\r\n"
when (resp == "") $ assertFailure "flarei does not respond"
sandboxTests "failover" [
sandboxTest "1. flared setup" $ (setupWithPath binDir) >> setupFlareCluster
, sandboxTest "2. stop" $ signal "flarei" sigSTOP
, sandboxTest "3. kill" $ mapM_ (`signal` sigKILL) [ fdId (FlareDaemon 0 Master), fdId (FlareDaemon 0 $ Slave 0) ]
, sandboxTest "4. wait 100ms" $ liftIO $ threadDelay 100000
, sandboxTest "5. cont" $ signal "flarei" sigCONT
, sandboxTest "6. wait" $ liftIO $ threadDelay (6*1000000)
, sandboxTest "7. ping" $ assertSendTo "flarei" "ping\r\n" "OK\r\n"
, sandboxTest "8. check" check
, sandboxTest "9. stop" $ signal "flarei" sigSTOP
, sandboxTest "10. kill" $ mapM_ (`signal` sigKILL) [ fdId (FlareDaemon 1 Master), fdId (FlareDaemon 1 $ Slave 0) ]
, sandboxTest "11. wait 100ms" $ liftIO $ threadDelay 100000
, sandboxTest "12. cont" $ signal "flarei" sigCONT
, sandboxTest "13. wait" $ liftIO $ threadDelay (6*1000000)
, sandboxTest "14. ping" $ assertSendTo "flarei" "ping\r\n" "OK\r\n"
, sandboxTest "15. check" check
, sandboxTest "16. stop" $ signal "flarei" sigSTOP
, sandboxTest "17. kill" $ mapM_ (`signal` sigKILL) [ fdId (FlareDaemon 2 Master), fdId (FlareDaemon 2 $ Slave 0) ]
, sandboxTest "18. wait 100ms" $ liftIO $ threadDelay 100000
, sandboxTest "19. cont" $ signal "flarei" sigCONT
, sandboxTest "20. wait" $ liftIO $ threadDelay (8*1000000)
, sandboxTest "21. ping" $ assertSendTo "flarei" "ping\r\n" "OK\r\n"
, sandboxTest "22. check" check
]
| gree/flare-tests | src/FailoverTests.hs | mit | 2,131 | 0 | 16 | 405 | 629 | 319 | 310 | 39 | 1 |
{-# LANGUAGE MonadComprehensions #-}
{-# LANGUAGE RebindableSyntax #-}
module Set4 where
import MCPrelude
import Set2
import Set3 (Card(..))
import Control.Arrow ((&&&))
-- 1. Generalizing State and Maybe
-- generalA :: (a -> b) -> Gen a -> Gen b
-- transMaybe :: (a -> b) -> Maybe a -> Maybe b
-- f :: (a -> b) -> m a -> m b
-- generalB :: (a -> b -> c) -> Gen a -> Gen b -> Gen c
-- yLink :: (a -> b -> c) -> Maybe a -> Maybe b -> Maybe c
-- f :: (a -> b -> c) -> m a -> m b -> m c
-- genTwo :: Gen a -> (a -> Gen b) -> Gen b
-- link :: Maybe a -> (a -> Maybe b) -> Maybe b
-- f :: m a -> (a -> m b) -> m b
-- mkGen :: a -> Gen a
-- mkMaybe :: a -> Maybe a
-- f :: a -> m a
-- 2. A missed generalization
-- generalB2 :: (a -> b -> c) -> Gen a -> Gen b -> Gen c
-- generalB2 f ga gb = genTwo ga (\a -> genTwo gb (mkGen . f a))
-- repRandom' :: [Gen a] -> Gen [a]
-- repRandom' [] = mkGen []
-- repRandom' (x:xs) = genTwo x (\x' -> genTwo (repRandom' xs) (\xs' -> mkGen $ x':xs'))
-- 3. Formalizing the pattern
class Monad m where
bind :: m a -> (a -> m b) -> m b
return :: a -> m a
-- generalB2/yLink
g :: Monad m => (a -> b -> c) -> m a -> m b -> m c
g f ma mb = bind ma (\a -> bind mb (return . f a))
-- 4. Creating instances
newtype Gen a = Gen { runGen :: Seed -> (a, Seed) }
evalGen :: Gen a -> Seed -> a
evalGen g = fst . runGen g
instance Monad Maybe where
bind = link
return = mkMaybe
instance Monad [] where
bind = flip concatMap
return = (:[])
instance Monad Gen where
bind gen f = Gen(uncurry (runGen . f) . runGen gen)
return a = Gen (\s -> (a, s))
-- 5. Revisiting other generic functions
sequence :: Monad m => [m a] -> m [a]
sequence [] = return []
sequence (x:xs) = bind x (\x' -> bind (sequence xs) (\xs' -> return $ x':xs'))
liftM :: Monad m => (a -> b) -> m a -> m b
liftM f ma = bind ma (return . f)
liftM2 :: Monad m => (a -> b -> c) -> m a -> m b -> m c
liftM2 = g
(=<<) :: Monad m => (a -> m b) -> m a -> m b
(=<<) = flip bind
join :: Monad m => m (m a) -> m a
join = (id =<<)
liftM3 :: Monad m => (a -> b -> c -> d) -> m a -> m b -> m c -> m d
liftM3 f ma mb mc = bind ma (\a -> bind mb (\b -> bind mc (return . f a b)))
ap :: Monad m => m (a -> b) -> m a -> m b
ap = liftM2 id
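-- Note that `liftM2 id` works because `id` is used here at type
-- (a -> b) -> (a -> b), so ap mf ma = bind mf (\f -> bind ma (return . f)).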
-- 6. Using the abstraction
fiveRands :: [Integer]
fiveRands = evalGen (sequence (replicate 5 (Gen rand))) (mkSeed 1)
generalA :: (a -> b) -> Gen a -> Gen b
generalA = liftM
randEven = generalA (*2) (Gen rand)
randOdd = generalA succ randEven
randTen = generalA (*5) randEven
randLetter :: Gen Char
randLetter = generalA toLetter (Gen rand)
randString3 :: String
randString3 = evalGen (sequence (replicate 3 randLetter)) (mkSeed 1)
generalB :: (a -> b -> c) -> Gen a -> Gen b -> Gen c
generalB = liftM2
generalPair :: Gen a -> Gen b -> Gen (a,b)
generalPair = generalB (,)
randPair :: Gen (Char, Integer)
randPair = generalPair randLetter (Gen rand)
repRandom :: [Gen a] -> Gen [a]
repRandom = sequence
mkGen :: a -> Gen a
mkGen = return
genTwo :: Gen a -> (a -> Gen b) -> Gen b
genTwo = bind
--
queryGreek' :: GreekData -> String -> Maybe Double
queryGreek' d e = join . uncurry (liftM2 divMay) . ((liftM fromIntegral . (headMay =<<)) &&& (liftM fromIntegral . (maximumMay =<<) . (tailMay =<<))) . lookupMay e $ d
chain' :: (a -> Maybe b) -> Maybe a -> Maybe b
chain' = (=<<)
link' :: Maybe a -> (a -> Maybe b) -> Maybe b
link' = bind
mkMaybe' :: a -> Maybe a
mkMaybe' = return
addSalaries' :: [(String, Integer)] -> String -> String -> Maybe Integer
addSalaries' ss k1 k2 = liftM2 (+) (lookupMay k1 ss) (lookupMay k2 ss)
transMaybe' :: (a -> b) -> Maybe a -> Maybe b
transMaybe' = liftM
combine' :: Maybe (Maybe a) -> Maybe a
combine' = join
--
allPairs :: [a] -> [b] -> [(a,b)]
allPairs = allCombs (,)
allCards :: [Int] -> [String] -> [Card]
allCards = allCombs Card
allCombs :: (a -> b -> c) -> [a] -> [b] -> [c]
allCombs = liftM2
allCombs3 :: (a -> b -> c -> d) -> [a] -> [b] -> [c] -> [d]
allCombs3 = liftM3
combStep :: [a -> b] -> [a] -> [b]
combStep = ap
| gafiatulin/monad-challenges | src/Set4.hs | mit | 4,053 | 0 | 14 | 992 | 1,675 | 890 | 785 | 86 | 1 |
module Main where
import Control.Monad (forM_)
import Data.IORef
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT hiding (exit)
import System.Exit (exitSuccess)
import Hogldev.Pipeline (
Pipeline(..), getTrans,
PersProj(..)
)
import Hogldev.Camera ( Camera(..), cameraOnKeyboard,
initCamera, cameraOnMouse, cameraOnRender
)
import Hogldev.LightingTechnique
import Mesh
import SimpleColorTechnique
import PickingTexture
import PickingTechnique
windowWidth = 1680
windowHeight = 1050
persProjection = PersProj
{ persFOV = 60
, persWidth = fromIntegral windowWidth
, persHeigh = fromIntegral windowHeight
, persZNear = 1
, persZFar = 100
}
dirLight :: DirectionLight
dirLight = DirectionLight
{ ambientColor = Vertex3 1 1 1
, ambientIntensity = 1.0
, diffuseIntensity = 0.01
, diffuseDirection = Vertex3 1.0 (-1.0) 0.0
}
worldPos :: [(GLuint, Vector3 GLfloat)]
worldPos = zip [0..] [Vector3 (-10.0) 0.0 5.0, Vector3 10.0 0.0 5.0]
main :: IO ()
main = do
getArgsAndInitialize
initialDisplayMode $= [DoubleBuffered, RGBAMode, WithDepthBuffer]
initialWindowSize $= Size windowWidth windowHeight
initialWindowPosition $= Position 100 100
createWindow "Tutorial 29"
-- frontFace $= CW
-- cullFace $= Just Back
depthFunc $= Just Lequal
gScale <- newIORef 0.0
cameraRef <- newIORef newCamera
lightingEffect <- initLightingTechnique
enableLightingTechnique lightingEffect
setLightingTextureUnit lightingEffect 0
setDirectionalLight lightingEffect dirLight
pointerPosition $= mousePos
pickingTexture <- initPickingTexture windowWidth windowHeight
pickingEffect <- initPickingTechnique
simpleColorEffect <- initSimpleColorTechnique
mesh <- loadMesh "assets/spider.obj"
initializeGlutCallbacks mesh lightingEffect pickingEffect simpleColorEffect
gScale cameraRef pickingTexture
clearColor $= Color4 0 0 0 0
mainLoop
where
newCamera = initCamera (Just
(Vector3 0 5 (-22.0), Vector3 0 (-0.2) 1, Vector3 0 1 0)
) windowWidth windowHeight
mousePos = Position (windowWidth `div` 2) (windowHeight `div` 2)
initializeGlutCallbacks :: Mesh
-> LightingTechnique
-> PickingTechnique
-> SimpleColorTechnique
-> IORef GLfloat
-> IORef Camera
-> PickingTexture
-> IO ()
initializeGlutCallbacks mesh lightingEffect pickingEffect simpleColorEffect gScale cameraRef pickingTexture = do
displayCallback $=
renderSceneCB mesh lightingEffect pickingEffect simpleColorEffect gScale cameraRef pickingTexture
idleCallback $= Just (idleCB gScale cameraRef)
specialCallback $= Just (specialKeyboardCB cameraRef)
keyboardCallback $= Just keyboardCB
passiveMotionCallback $= Just (passiveMotionCB cameraRef)
keyboardCB :: KeyboardCallback
keyboardCB 'q' _ = exitSuccess
keyboardCB _ _ = return ()
specialKeyboardCB :: IORef Camera -> SpecialCallback
specialKeyboardCB cameraRef key _ = cameraRef $~! cameraOnKeyboard key
passiveMotionCB :: IORef Camera -> MotionCallback
passiveMotionCB cameraRef position = cameraRef $~! cameraOnMouse position
idleCB :: IORef GLfloat -> IORef Camera -> IdleCallback
idleCB gScale cameraRef = do
gScale $~! (+ 0.01)
cameraRef $~! cameraOnRender
postRedisplay Nothing
renderSceneCB :: Mesh
-> LightingTechnique
-> PickingTechnique
-> SimpleColorTechnique
-> IORef GLfloat
-> IORef Camera
-> PickingTexture
-> DisplayCallback
renderSceneCB mesh lightingEffect pickingEffect simpleColorEffect gScale cameraRef pickingTexture = do
cameraRef $~! cameraOnRender
gScaleVal <- readIORef gScale
camera <- readIORef cameraRef
let pickingPhase = do
enableWriting pickingTexture
clear [ColorBuffer, DepthBuffer]
enablePickingTechnique pickingEffect
forM_ worldPos $ \ (i, pos) -> do
setPickingTechniqueWVP pickingEffect $ getTrans
WVPPipeline {
worldInfo = pos,
scaleInfo = Vector3 0.1 0.1 0.1,
rotateInfo = Vector3 (-90) 90 0,
persProj = persProjection,
pipeCamera = camera
}
setPickingObjectIndex pickingEffect i
renderMesh mesh -- pickingEffect
disableWriting pickingTexture
renderPhase = do
clear [ColorBuffer, DepthBuffer]
enableLightingTechnique lightingEffect
setEyeWorldPos lightingEffect (cameraPos camera)
forM_ worldPos $ \ (_, pos) -> do
setLightingWVP lightingEffect $ getTrans
WVPPipeline {
worldInfo = pos,
scaleInfo = Vector3 0.1 0.1 0.1,
rotateInfo = Vector3 (-90) 90 0,
persProj = persProjection,
pipeCamera = camera
}
setLightingWorldMatrix lightingEffect $ getTrans
WPipeline {
worldInfo = pos,
scaleInfo = Vector3 0.1 0.1 0.1,
rotateInfo = Vector3 (-90) 90 0
}
renderMesh mesh
pickingPhase
renderPhase
swapBuffers
where
vpTrans camera = getTrans
VPPipeline {
persProj = persProjection,
pipeCamera = camera
}
| triplepointfive/hogldev | tutorial29/Tutorial29.hs | mit | 6,059 | 0 | 23 | 2,093 | 1,301 | 659 | 642 | 138 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.DatabaseCallback
(newDatabaseCallback, newDatabaseCallbackSync,
newDatabaseCallbackAsync, DatabaseCallback)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DatabaseCallback Mozilla DatabaseCallback documentation>
newDatabaseCallback ::
(MonadIO m) => (Maybe Database -> IO ()) -> m DatabaseCallback
newDatabaseCallback callback
= liftIO
(DatabaseCallback <$>
syncCallback1 ThrowWouldBlock
(\ database ->
fromJSValUnchecked database >>= \ database' -> callback database'))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DatabaseCallback Mozilla DatabaseCallback documentation>
newDatabaseCallbackSync ::
(MonadIO m) => (Maybe Database -> IO ()) -> m DatabaseCallback
newDatabaseCallbackSync callback
= liftIO
(DatabaseCallback <$>
syncCallback1 ContinueAsync
(\ database ->
fromJSValUnchecked database >>= \ database' -> callback database'))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DatabaseCallback Mozilla DatabaseCallback documentation>
newDatabaseCallbackAsync ::
(MonadIO m) => (Maybe Database -> IO ()) -> m DatabaseCallback
newDatabaseCallbackAsync callback
= liftIO
(DatabaseCallback <$>
asyncCallback1
(\ database ->
fromJSValUnchecked database >>= \ database' -> callback database')) | manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/DatabaseCallback.hs | mit | 2,305 | 0 | 13 | 416 | 532 | 316 | 216 | 42 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.ImageData
(newImageData, newImageData', getWidth, getHeight, getData,
ImageData(..), gTypeImageData)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ImageData Mozilla ImageData documentation>
newImageData ::
(MonadDOM m, IsUint8ClampedArray data') =>
data' -> Word -> Maybe Word -> m ImageData
newImageData data' sw sh
= liftDOM
(ImageData <$>
new (jsg "ImageData") [toJSVal data', toJSVal sw, toJSVal sh])
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ImageData Mozilla ImageData documentation>
newImageData' :: (MonadDOM m) => Word -> Word -> m ImageData
newImageData' sw sh
= liftDOM
(ImageData <$> new (jsg "ImageData") [toJSVal sw, toJSVal sh])
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ImageData.width Mozilla ImageData.width documentation>
getWidth :: (MonadDOM m) => ImageData -> m Word
getWidth self
= liftDOM (round <$> ((self ^. js "width") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ImageData.height Mozilla ImageData.height documentation>
getHeight :: (MonadDOM m) => ImageData -> m Word
getHeight self
= liftDOM (round <$> ((self ^. js "height") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/ImageData.data Mozilla ImageData.data documentation>
getData :: (MonadDOM m) => ImageData -> m Uint8ClampedArray
getData self = liftDOM ((self ^. js "data") >>= fromJSValUnchecked)
| ghcjs/jsaddle-dom | src/JSDOM/Generated/ImageData.hs | mit | 2,405 | 0 | 12 | 344 | 630 | 372 | 258 | 38 | 1 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell, QuasiQuotes, TypeFamilies, ViewPatterns, FlexibleInstances, MultiParamTypeClasses #-}
module HelloSub where
{-
A simple subsite
-}
import Snap.Routes
import Data.Text (Text)
import qualified Data.Text as T
-- The Subsite argument
data HelloSubRoute = HelloSubRoute {getHello :: Text}
-- The contract with the master site
-- The master site should -
-- 1. Have renderable routes (RenderRoute constraint)
-- 2. Allow access to a parent route to go back to (parentRoute)
-- 3. Allow access to the current user name (currentUserName)
class RenderRoute master => HelloMaster master where
parentRoute :: master -> Route master
currentUserName :: master -> Text
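-- As an illustration, a master site would implement this contract roughly as
-- follows (`MyMasterApp` and `MasterHomeR` are made-up names for the master
-- site's foundation type and home route):
--
-- instance HelloMaster MyMasterApp where
--   parentRoute _ = MasterHomeR
--   currentUserName _ = "anonymous"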
-- Generate routing code using mkRouteSub
-- Note that for subsites, you also need to provide the constraint class
-- (in this case `HelloMaster`), which provides the contract with the master site
mkRouteSub "HelloSubRoute" "HelloMaster" [parseRoutes|
/ HomeR GET
/foo FooR GET
|]
-- Subsite Handlers
-- For subsites use SubrouteHandler instead of RouteHandler
-- Note that you specify the contract with the master site explicitly using typeclass constraints
-- Hello
getHomeR :: HelloMaster master => SubrouteHandler HelloSubRoute master
getHomeR env reqdata = do
-- let subroute = showRoute $ currentRoute reqdata
-- let masterroute = showRoute $ envToMaster env subroute
let m = envMaster env
let s = envSub env
writeText $ T.concat
[ "<h1>"
, getHello s
, currentUserName m
, "</h1>"
, "<a href=\""
, showRoute $ envToMaster env FooR
, "\">Go to an internal subsite route - Foo</a>"
, "<br />"
, "<a href=\""
, showRoute $ parentRoute m
, "\">Go back to the Master site /</a>"
]
-- Foo
getFooR :: HelloMaster master => SubrouteHandler HelloSubRoute master
getFooR env reqdata = do
writeText $ T.concat
["<h1>FOOO</h1>"
, "<a href=\""
, showRoute HomeR
, "\">Go back</a>"
]
| ajnsit/snap-routes | examples/subsites/HelloSub.hs | mit | 1,985 | 0 | 11 | 395 | 283 | 157 | 126 | 33 | 1 |
module Main where
import Parser
import While
import Assembly
process file = do
r <- parseFile file
case r of
Left x -> print x
Right (w,[]) -> putStrLn (s . compile $ w)
| orchid-hybrid/WHILE | EllaBased/Main.hs | gpl-2.0 | 178 | 0 | 13 | 42 | 81 | 41 | 40 | 9 | 2 |
import Parser
{-|
The grammar would be:
expr ::= expr - nat | nat
nat ::= 0 | 1 | 2 ...
-}
expr :: Parser Int
expr = do e <- Main.expr
do symbol "-"
n <- nat
return (e - n)
<|> return e
--The parser loops forever because of its left recursivity.
expr' :: Parser Int
expr' = do n <- nat
ns <- many (do symbol "-"
nat)
return (foldl (-) n ns)
| mdipirro/functional-languages-homeworks | Christmas/Exercise3.hs | gpl-3.0 | 453 | 2 | 12 | 189 | 130 | 58 | 72 | 12 | 1 |
module AbsCLike where
-- Haskell module generated by the BNF converter
newtype Ident = Ident String
deriving (Eq, Ord, Show)
data Type
= T_Int
| T_Float
| T_Char
| T_Void
| Boolean
| ArrDef Type Int
| Pointer Type
deriving (Eq, Ord, Show)
data Boolean = Boolean_True | Boolean_False
deriving (Eq, Ord, Show)
data Program = Prog [Decl]
deriving (Eq, Ord, Show)
data Decl
= Dvar Type [VarDeclInit]
{- dichiarata, ma non inizializzata -}
| UndVar Type [Ident]
| Dfun Type Ident [Parameter] [StmtDecl]
{- le funzioni possono ritornare anche tipi complessi-}
deriving (Eq, Ord, Show)
data StmtDecl = Decls Decl | Stmts Stmt
deriving (Eq, Ord, Show)
data Stmt
= ProcCall FunCall
| BlockDecl [StmtDecl]
| Jmp JumpStmt
| Iter IterStmt
| Sel SelectionStmt
| Assgn LExpr Assignment_op RExpr
| LExprStmt LExpr
| Comment String
deriving (Eq, Ord, Show )
data RExpr
= InfixOp InfixOp RExpr RExpr
| UnaryOp UnaryOp RExpr
| Ref LExpr
| FCall Ident [RExpr]
| Lexpr LExpr
| Int Integer
| Char Char
| Double Double
| Bool Boolean
deriving (Eq, Ord, Show )
data InfixOp = ArithOp ArithOp | RelOp RelOp | BoolOp BoolOp
deriving (Eq,Ord,Show)
data ArithOp = Add | Sub | Mul | Div | Mod | Pow
deriving (Eq,Ord,Show)
data BoolOp = And | Or | Xor
deriving (Eq,Ord,Show)
data RelOp = Eq | Neq | Lt | LtE | Gt | GtE
deriving (Eq,Ord,Show)
data UnaryOp = Not | Neg
deriving (Eq,Ord,Show)
data FunCall = Call Ident [RExpr]
deriving (Eq, Ord, Show )
data LExpr
= Deref RExpr
| PrePostIncDecr PrePost IncDecr LExpr
| BasLExpr BLExpr
deriving (Eq, Ord, Show )
data PrePost = Post | Pre
deriving (Eq,Ord,Show)
data IncDecr = Inc | Decr
deriving (Eq,Ord,Show)
data BLExpr = ArrayEl BLExpr RExpr | Id Ident
deriving (Eq, Ord, Show )
data VarDeclInit = VarDeclIn Ident ComplexRExpr
deriving (Eq, Ord, Show )
data ComplexRExpr = Simple RExpr | Array [ComplexRExpr]
deriving (Eq, Ord, Show )
data Parameter = Param Modality Type Ident
deriving (Eq, Ord, Show )
data Modality
= M_Void
| M_Val
| M_Ref
| M_Const
| M_Res
| M_Valres
| M_Name
deriving (Eq, Ord, Show )
data Assignment_op
= Assign
| AssgnMul
| AssgnAdd
| AssgnDiv
| AssgnSub
| AssgnPow
| AssgnAnd
| AssgnOr
deriving (Eq, Ord, Show )
data JumpStmt = Break | Continue | RetExpVoid | RetExp RExpr
deriving (Eq, Ord, Show )
data SelectionStmt
= IfNoElse RExpr [StmtDecl] | IfElse RExpr [StmtDecl] [StmtDecl]
deriving (Eq, Ord, Show )
data IterStmt
= While RExpr [StmtDecl]
| DoWhile [StmtDecl] RExpr
| For Stmt RExpr Stmt [StmtDecl]
deriving (Eq, Ord, Show )
| AleVq/CompilerBuilding | AbsCLike.hs | gpl-3.0 | 2,771 | 0 | 7 | 726 | 962 | 550 | 412 | 102 | 0 |
--------------------------------------------------------------------------------
{- |
Module : Math.Line
Description : A Haskell implementation of Lines in Vector form
License : GPL-3
Maintainer : [email protected]
Stability : Stable
Portability : Portable
A Haskell implementation of Numeric Vectors, Matrices and Lines.
This class defines functions to operate on Lines in vector form.
-}
--------------------------------------------------------------------------------
module Math.Line where
import Data.List
import Math.Point
import Math.Util
import Math.Vector
import qualified Data.Vector as V
{- | Line in Vector form. A line is a geometrical object in Rⁿ that contains a
point P and all points that lie in one direction and its inverse direction
from point P.
Every line l in Rⁿ can be parametrized by a vector equation of the form:
>l: v➝(t) = v₀➝ + t.d➝ for t ∈ R.
where v₀➝ corresponds to some point on the line and d➝ gives the direction
of the line.
-}
data Line a = Line { point :: Point a -- ^ Any point on the line
, vector :: Vector a -- ^ Direction vector
} deriving (Eq)
-- | Converts a line to a String its Vector equation form:
-- (Point) + t(vector).
-- Example:
-- @(0,0,0) + t(1,2,3)@
instance Show a => Show (Line a) where
show l = concat ["Line (", intercalate "," . map show . V.toList $ point l
, ") + t(", intercalate "," . map show . V.toList $ vector l, ")"]
-- | Creates a line from a point and a Vector
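--
-- A minimal usage sketch (this assumes 'Point' and 'Vector' here are the
-- "Data.Vector" vectors this module already works with):
--
-- > l :: Line Double
-- > l = line (V.fromList [0, 0, 0]) (V.fromList [1, 2, 3])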
line :: Point a -> Vector a -> Line a
line = Line
--Needs Testing
-- | Gets the perpendicular distance from a point to a line.
distPointToLine :: (Num a, Floating a) => Point a -> Line a -> a
distPointToLine p l = sqrt (magnitude p0p ^ 2 - (p0p `dot` d / magnitude d) ^ 2)
  where p0p = newVector p (point l)
        d   = vector l
-- | True if two lines are parallel.
parallelLine :: (Eq a, Floating a) => Line a -> Line a -> Bool
parallelLine a b = vector a -|| vector b
-- | True if two lines are perpendicular
perpendicularLine :: (Eq a, Num a) => Line a -> Line a -> Bool
perpendicularLine a b = vector a -| vector b
| Jiggins/Matrix | Math/Line.hs | gpl-3.0 | 2,152 | 0 | 11 | 477 | 401 | 212 | 189 | 22 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Chat.Dms.Messages
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Legacy path for creating message. Calling these will result in a
-- BadRequest response.
--
-- /See:/ <https://developers.google.com/hangouts/chat Google Chat API Reference> for @chat.dms.messages@.
module Network.Google.Resource.Chat.Dms.Messages
(
-- * REST Resource
DmsMessagesResource
-- * Creating a Request
, dmsMessages
, DmsMessages
-- * Request Lenses
, dmParent
, dmXgafv
, dmUploadProtocol
, dmAccessToken
, dmUploadType
, dmPayload
, dmCallback
, dmThreadKey
) where
import Network.Google.Chat.Types
import Network.Google.Prelude
-- | A resource alias for @chat.dms.messages@ method which the
-- 'DmsMessages' request conforms to.
type DmsMessagesResource =
"v1" :>
Capture "parent" Text :>
"messages" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "threadKey" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Message :> Post '[JSON] Message
-- | Legacy path for creating message. Calling these will result in a
-- BadRequest response.
--
-- /See:/ 'dmsMessages' smart constructor.
data DmsMessages =
DmsMessages'
{ _dmParent :: !Text
, _dmXgafv :: !(Maybe Xgafv)
, _dmUploadProtocol :: !(Maybe Text)
, _dmAccessToken :: !(Maybe Text)
, _dmUploadType :: !(Maybe Text)
, _dmPayload :: !Message
, _dmCallback :: !(Maybe Text)
, _dmThreadKey :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DmsMessages' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dmParent'
--
-- * 'dmXgafv'
--
-- * 'dmUploadProtocol'
--
-- * 'dmAccessToken'
--
-- * 'dmUploadType'
--
-- * 'dmPayload'
--
-- * 'dmCallback'
--
-- * 'dmThreadKey'
dmsMessages
:: Text -- ^ 'dmParent'
-> Message -- ^ 'dmPayload'
-> DmsMessages
dmsMessages pDmParent_ pDmPayload_ =
DmsMessages'
{ _dmParent = pDmParent_
, _dmXgafv = Nothing
, _dmUploadProtocol = Nothing
, _dmAccessToken = Nothing
, _dmUploadType = Nothing
, _dmPayload = pDmPayload_
, _dmCallback = Nothing
, _dmThreadKey = Nothing
}
-- | Required. Space resource name, in the form \"spaces\/*\". Example:
-- spaces\/AAAAMpdlehY
dmParent :: Lens' DmsMessages Text
dmParent = lens _dmParent (\ s a -> s{_dmParent = a})
-- | V1 error format.
dmXgafv :: Lens' DmsMessages (Maybe Xgafv)
dmXgafv = lens _dmXgafv (\ s a -> s{_dmXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
dmUploadProtocol :: Lens' DmsMessages (Maybe Text)
dmUploadProtocol
= lens _dmUploadProtocol
(\ s a -> s{_dmUploadProtocol = a})
-- | OAuth access token.
dmAccessToken :: Lens' DmsMessages (Maybe Text)
dmAccessToken
= lens _dmAccessToken
(\ s a -> s{_dmAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
dmUploadType :: Lens' DmsMessages (Maybe Text)
dmUploadType
= lens _dmUploadType (\ s a -> s{_dmUploadType = a})
-- | Multipart request metadata.
dmPayload :: Lens' DmsMessages Message
dmPayload
= lens _dmPayload (\ s a -> s{_dmPayload = a})
-- | JSONP
dmCallback :: Lens' DmsMessages (Maybe Text)
dmCallback
= lens _dmCallback (\ s a -> s{_dmCallback = a})
-- | Opaque thread identifier string that can be specified to group messages
-- into a single thread. If this is the first message with a given thread
-- identifier, a new thread is created. Subsequent messages with the same
-- thread identifier will be posted into the same thread. This relieves
-- bots and webhooks from having to store the Hangouts Chat thread ID of a
-- thread (created earlier by them) to post further updates to it. Has no
-- effect if thread field, corresponding to an existing thread, is set in
-- message.
dmThreadKey :: Lens' DmsMessages (Maybe Text)
dmThreadKey
= lens _dmThreadKey (\ s a -> s{_dmThreadKey = a})
instance GoogleRequest DmsMessages where
type Rs DmsMessages = Message
type Scopes DmsMessages = '[]
requestClient DmsMessages'{..}
= go _dmParent _dmXgafv _dmUploadProtocol
_dmAccessToken
_dmUploadType
_dmCallback
_dmThreadKey
(Just AltJSON)
_dmPayload
chatService
where go
= buildClient (Proxy :: Proxy DmsMessagesResource)
mempty
| brendanhay/gogol | gogol-chat/gen/Network/Google/Resource/Chat/Dms/Messages.hs | mpl-2.0 | 5,467 | 0 | 18 | 1,316 | 867 | 507 | 360 | 119 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.CloudIOT.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.CloudIOT.Types.Sum where
import Network.Google.Prelude hiding (Bytes)
-- | If \`GATEWAY\` is specified, only gateways are returned. If
-- \`NON_GATEWAY\` is specified, only non-gateway devices are returned. If
-- \`GATEWAY_TYPE_UNSPECIFIED\` is specified, all devices are returned.
data ProjectsLocationsRegistriesGroupsDevicesListGatewayListOptionsGatewayType
= GatewayTypeUnspecified
-- ^ @GATEWAY_TYPE_UNSPECIFIED@
-- If unspecified, the device is considered a non-gateway device.
| Gateway
-- ^ @GATEWAY@
-- The device is a gateway.
| NonGateway
-- ^ @NON_GATEWAY@
-- The device is not a gateway.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ProjectsLocationsRegistriesGroupsDevicesListGatewayListOptionsGatewayType
instance FromHttpApiData ProjectsLocationsRegistriesGroupsDevicesListGatewayListOptionsGatewayType where
parseQueryParam = \case
"GATEWAY_TYPE_UNSPECIFIED" -> Right GatewayTypeUnspecified
"GATEWAY" -> Right Gateway
"NON_GATEWAY" -> Right NonGateway
x -> Left ("Unable to parse ProjectsLocationsRegistriesGroupsDevicesListGatewayListOptionsGatewayType from: " <> x)
instance ToHttpApiData ProjectsLocationsRegistriesGroupsDevicesListGatewayListOptionsGatewayType where
toQueryParam = \case
GatewayTypeUnspecified -> "GATEWAY_TYPE_UNSPECIFIED"
Gateway -> "GATEWAY"
NonGateway -> "NON_GATEWAY"
instance FromJSON ProjectsLocationsRegistriesGroupsDevicesListGatewayListOptionsGatewayType where
parseJSON = parseJSONText "ProjectsLocationsRegistriesGroupsDevicesListGatewayListOptionsGatewayType"
instance ToJSON ProjectsLocationsRegistriesGroupsDevicesListGatewayListOptionsGatewayType where
toJSON = toJSONText
-- | **Beta Feature** The logging verbosity for device activity. If
-- unspecified, DeviceRegistry.log_level will be used.
data DeviceLogLevel
= LogLevelUnspecified
-- ^ @LOG_LEVEL_UNSPECIFIED@
-- No logging specified. If not specified, logging will be disabled.
| None
-- ^ @NONE@
-- Disables logging.
| Error'
-- ^ @ERROR@
-- Error events will be logged.
| Info
-- ^ @INFO@
-- Informational events will be logged, such as connections and
-- disconnections.
| Debug
-- ^ @DEBUG@
-- All events will be logged.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DeviceLogLevel
instance FromHttpApiData DeviceLogLevel where
parseQueryParam = \case
"LOG_LEVEL_UNSPECIFIED" -> Right LogLevelUnspecified
"NONE" -> Right None
"ERROR" -> Right Error'
"INFO" -> Right Info
"DEBUG" -> Right Debug
x -> Left ("Unable to parse DeviceLogLevel from: " <> x)
instance ToHttpApiData DeviceLogLevel where
toQueryParam = \case
LogLevelUnspecified -> "LOG_LEVEL_UNSPECIFIED"
None -> "NONE"
Error' -> "ERROR"
Info -> "INFO"
Debug -> "DEBUG"
instance FromJSON DeviceLogLevel where
parseJSON = parseJSONText "DeviceLogLevel"
instance ToJSON DeviceLogLevel where
toJSON = toJSONText
-- | If \`GATEWAY\` is specified, only gateways are returned. If
-- \`NON_GATEWAY\` is specified, only non-gateway devices are returned. If
-- \`GATEWAY_TYPE_UNSPECIFIED\` is specified, all devices are returned.
data ProjectsLocationsRegistriesDevicesListGatewayListOptionsGatewayType
= PLRDLGLOGTGatewayTypeUnspecified
-- ^ @GATEWAY_TYPE_UNSPECIFIED@
-- If unspecified, the device is considered a non-gateway device.
| PLRDLGLOGTGateway
-- ^ @GATEWAY@
-- The device is a gateway.
| PLRDLGLOGTNonGateway
-- ^ @NON_GATEWAY@
-- The device is not a gateway.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ProjectsLocationsRegistriesDevicesListGatewayListOptionsGatewayType
instance FromHttpApiData ProjectsLocationsRegistriesDevicesListGatewayListOptionsGatewayType where
parseQueryParam = \case
"GATEWAY_TYPE_UNSPECIFIED" -> Right PLRDLGLOGTGatewayTypeUnspecified
"GATEWAY" -> Right PLRDLGLOGTGateway
"NON_GATEWAY" -> Right PLRDLGLOGTNonGateway
x -> Left ("Unable to parse ProjectsLocationsRegistriesDevicesListGatewayListOptionsGatewayType from: " <> x)
instance ToHttpApiData ProjectsLocationsRegistriesDevicesListGatewayListOptionsGatewayType where
toQueryParam = \case
PLRDLGLOGTGatewayTypeUnspecified -> "GATEWAY_TYPE_UNSPECIFIED"
PLRDLGLOGTGateway -> "GATEWAY"
PLRDLGLOGTNonGateway -> "NON_GATEWAY"
instance FromJSON ProjectsLocationsRegistriesDevicesListGatewayListOptionsGatewayType where
parseJSON = parseJSONText "ProjectsLocationsRegistriesDevicesListGatewayListOptionsGatewayType"
instance ToJSON ProjectsLocationsRegistriesDevicesListGatewayListOptionsGatewayType where
toJSON = toJSONText
-- | The certificate format.
data PublicKeyCertificateFormat
= UnspecifiedPublicKeyCertificateFormat
-- ^ @UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT@
-- The format has not been specified. This is an invalid default value and
-- must not be used.
| X509CertificatePem
-- ^ @X509_CERTIFICATE_PEM@
-- An X.509v3 certificate
-- ([RFC5280](https:\/\/www.ietf.org\/rfc\/rfc5280.txt)), encoded in
-- base64, and wrapped by \`-----BEGIN CERTIFICATE-----\` and \`-----END
-- CERTIFICATE-----\`.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable PublicKeyCertificateFormat
instance FromHttpApiData PublicKeyCertificateFormat where
parseQueryParam = \case
"UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT" -> Right UnspecifiedPublicKeyCertificateFormat
"X509_CERTIFICATE_PEM" -> Right X509CertificatePem
x -> Left ("Unable to parse PublicKeyCertificateFormat from: " <> x)
instance ToHttpApiData PublicKeyCertificateFormat where
toQueryParam = \case
UnspecifiedPublicKeyCertificateFormat -> "UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT"
X509CertificatePem -> "X509_CERTIFICATE_PEM"
instance FromJSON PublicKeyCertificateFormat where
parseJSON = parseJSONText "PublicKeyCertificateFormat"
instance ToJSON PublicKeyCertificateFormat where
toJSON = toJSONText
-- | The format of the key.
data PublicKeyCredentialFormat
= UnspecifiedPublicKeyFormat
-- ^ @UNSPECIFIED_PUBLIC_KEY_FORMAT@
-- The format has not been specified. This is an invalid default value and
-- must not be used.
| RsaPem
-- ^ @RSA_PEM@
-- An RSA public key encoded in base64, and wrapped by \`-----BEGIN PUBLIC
-- KEY-----\` and \`-----END PUBLIC KEY-----\`. This can be used to verify
-- \`RS256\` signatures in JWT tokens ([RFC7518](
-- https:\/\/www.ietf.org\/rfc\/rfc7518.txt)).
| RsaX509Pem
-- ^ @RSA_X509_PEM@
-- As RSA_PEM, but wrapped in an X.509v3 certificate ([RFC5280](
-- https:\/\/www.ietf.org\/rfc\/rfc5280.txt)), encoded in base64, and
-- wrapped by \`-----BEGIN CERTIFICATE-----\` and \`-----END
-- CERTIFICATE-----\`.
| ES256Pem
-- ^ @ES256_PEM@
-- Public key for the ECDSA algorithm using P-256 and SHA-256, encoded in
-- base64, and wrapped by \`-----BEGIN PUBLIC KEY-----\` and \`-----END
-- PUBLIC KEY-----\`. This can be used to verify JWT tokens with the
-- \`ES256\` algorithm
-- ([RFC7518](https:\/\/www.ietf.org\/rfc\/rfc7518.txt)). This curve is
-- defined in [OpenSSL](https:\/\/www.openssl.org\/) as the \`prime256v1\`
-- curve.
| ES256X509Pem
-- ^ @ES256_X509_PEM@
-- As ES256_PEM, but wrapped in an X.509v3 certificate ([RFC5280](
-- https:\/\/www.ietf.org\/rfc\/rfc5280.txt)), encoded in base64, and
-- wrapped by \`-----BEGIN CERTIFICATE-----\` and \`-----END
-- CERTIFICATE-----\`.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable PublicKeyCredentialFormat
instance FromHttpApiData PublicKeyCredentialFormat where
parseQueryParam = \case
"UNSPECIFIED_PUBLIC_KEY_FORMAT" -> Right UnspecifiedPublicKeyFormat
"RSA_PEM" -> Right RsaPem
"RSA_X509_PEM" -> Right RsaX509Pem
"ES256_PEM" -> Right ES256Pem
"ES256_X509_PEM" -> Right ES256X509Pem
x -> Left ("Unable to parse PublicKeyCredentialFormat from: " <> x)
instance ToHttpApiData PublicKeyCredentialFormat where
toQueryParam = \case
UnspecifiedPublicKeyFormat -> "UNSPECIFIED_PUBLIC_KEY_FORMAT"
RsaPem -> "RSA_PEM"
RsaX509Pem -> "RSA_X509_PEM"
ES256Pem -> "ES256_PEM"
ES256X509Pem -> "ES256_X509_PEM"
instance FromJSON PublicKeyCredentialFormat where
parseJSON = parseJSONText "PublicKeyCredentialFormat"
instance ToJSON PublicKeyCredentialFormat where
toJSON = toJSONText
-- | **Beta Feature** The default logging verbosity for activity from devices
-- in this registry. The verbosity level can be overridden by
-- Device.log_level.
data DeviceRegistryLogLevel
= DRLLLogLevelUnspecified
-- ^ @LOG_LEVEL_UNSPECIFIED@
-- No logging specified. If not specified, logging will be disabled.
| DRLLNone
-- ^ @NONE@
-- Disables logging.
| DRLLError'
-- ^ @ERROR@
-- Error events will be logged.
| DRLLInfo
-- ^ @INFO@
-- Informational events will be logged, such as connections and
-- disconnections.
| DRLLDebug
-- ^ @DEBUG@
-- All events will be logged.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DeviceRegistryLogLevel
instance FromHttpApiData DeviceRegistryLogLevel where
parseQueryParam = \case
"LOG_LEVEL_UNSPECIFIED" -> Right DRLLLogLevelUnspecified
"NONE" -> Right DRLLNone
"ERROR" -> Right DRLLError'
"INFO" -> Right DRLLInfo
"DEBUG" -> Right DRLLDebug
x -> Left ("Unable to parse DeviceRegistryLogLevel from: " <> x)
instance ToHttpApiData DeviceRegistryLogLevel where
toQueryParam = \case
DRLLLogLevelUnspecified -> "LOG_LEVEL_UNSPECIFIED"
DRLLNone -> "NONE"
DRLLError' -> "ERROR"
DRLLInfo -> "INFO"
DRLLDebug -> "DEBUG"
instance FromJSON DeviceRegistryLogLevel where
parseJSON = parseJSONText "DeviceRegistryLogLevel"
instance ToJSON DeviceRegistryLogLevel where
toJSON = toJSONText
-- | V1 error format.
data Xgafv
= X1
-- ^ @1@
-- v1 error format
| X2
-- ^ @2@
-- v2 error format
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable Xgafv
instance FromHttpApiData Xgafv where
parseQueryParam = \case
"1" -> Right X1
"2" -> Right X2
x -> Left ("Unable to parse Xgafv from: " <> x)
instance ToHttpApiData Xgafv where
toQueryParam = \case
X1 -> "1"
X2 -> "2"
instance FromJSON Xgafv where
parseJSON = parseJSONText "Xgafv"
instance ToJSON Xgafv where
toJSON = toJSONText
-- | Indicates whether the device is a gateway.
data GatewayConfigGatewayType
= GCGTGatewayTypeUnspecified
-- ^ @GATEWAY_TYPE_UNSPECIFIED@
-- If unspecified, the device is considered a non-gateway device.
| GCGTGateway
-- ^ @GATEWAY@
-- The device is a gateway.
| GCGTNonGateway
-- ^ @NON_GATEWAY@
-- The device is not a gateway.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable GatewayConfigGatewayType
instance FromHttpApiData GatewayConfigGatewayType where
parseQueryParam = \case
"GATEWAY_TYPE_UNSPECIFIED" -> Right GCGTGatewayTypeUnspecified
"GATEWAY" -> Right GCGTGateway
"NON_GATEWAY" -> Right GCGTNonGateway
x -> Left ("Unable to parse GatewayConfigGatewayType from: " <> x)
instance ToHttpApiData GatewayConfigGatewayType where
toQueryParam = \case
GCGTGatewayTypeUnspecified -> "GATEWAY_TYPE_UNSPECIFIED"
GCGTGateway -> "GATEWAY"
GCGTNonGateway -> "NON_GATEWAY"
instance FromJSON GatewayConfigGatewayType where
parseJSON = parseJSONText "GatewayConfigGatewayType"
instance ToJSON GatewayConfigGatewayType where
toJSON = toJSONText
-- | Indicates how to authorize and\/or authenticate devices to access the
-- gateway.
data GatewayConfigGatewayAuthMethod
= GatewayAuthMethodUnspecified
-- ^ @GATEWAY_AUTH_METHOD_UNSPECIFIED@
-- No authentication\/authorization method specified. No devices are
-- allowed to access the gateway.
| AssociationOnly
-- ^ @ASSOCIATION_ONLY@
-- The device is authenticated through the gateway association only. Device
-- credentials are ignored even if provided.
| DeviceAuthTokenOnly
-- ^ @DEVICE_AUTH_TOKEN_ONLY@
-- The device is authenticated through its own credentials. Gateway
-- association is not checked.
| AssociationAndDeviceAuthToken
-- ^ @ASSOCIATION_AND_DEVICE_AUTH_TOKEN@
-- The device is authenticated through both device credentials and gateway
-- association. The device must be bound to the gateway and must provide
-- its own credentials.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable GatewayConfigGatewayAuthMethod
instance FromHttpApiData GatewayConfigGatewayAuthMethod where
parseQueryParam = \case
"GATEWAY_AUTH_METHOD_UNSPECIFIED" -> Right GatewayAuthMethodUnspecified
"ASSOCIATION_ONLY" -> Right AssociationOnly
"DEVICE_AUTH_TOKEN_ONLY" -> Right DeviceAuthTokenOnly
"ASSOCIATION_AND_DEVICE_AUTH_TOKEN" -> Right AssociationAndDeviceAuthToken
x -> Left ("Unable to parse GatewayConfigGatewayAuthMethod from: " <> x)
instance ToHttpApiData GatewayConfigGatewayAuthMethod where
toQueryParam = \case
GatewayAuthMethodUnspecified -> "GATEWAY_AUTH_METHOD_UNSPECIFIED"
AssociationOnly -> "ASSOCIATION_ONLY"
DeviceAuthTokenOnly -> "DEVICE_AUTH_TOKEN_ONLY"
AssociationAndDeviceAuthToken -> "ASSOCIATION_AND_DEVICE_AUTH_TOKEN"
instance FromJSON GatewayConfigGatewayAuthMethod where
parseJSON = parseJSONText "GatewayConfigGatewayAuthMethod"
instance ToJSON GatewayConfigGatewayAuthMethod where
toJSON = toJSONText
-- | If enabled, allows connections using the MQTT protocol. Otherwise, MQTT
-- connections to this registry will fail.
data MqttConfigMqttEnabledState
= MqttStateUnspecified
-- ^ @MQTT_STATE_UNSPECIFIED@
-- No MQTT state specified. If not specified, MQTT will be enabled by
-- default.
| MqttEnabled
-- ^ @MQTT_ENABLED@
-- Enables a MQTT connection.
| MqttDisabled
-- ^ @MQTT_DISABLED@
-- Disables a MQTT connection.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MqttConfigMqttEnabledState
instance FromHttpApiData MqttConfigMqttEnabledState where
parseQueryParam = \case
"MQTT_STATE_UNSPECIFIED" -> Right MqttStateUnspecified
"MQTT_ENABLED" -> Right MqttEnabled
"MQTT_DISABLED" -> Right MqttDisabled
x -> Left ("Unable to parse MqttConfigMqttEnabledState from: " <> x)
instance ToHttpApiData MqttConfigMqttEnabledState where
toQueryParam = \case
MqttStateUnspecified -> "MQTT_STATE_UNSPECIFIED"
MqttEnabled -> "MQTT_ENABLED"
MqttDisabled -> "MQTT_DISABLED"
instance FromJSON MqttConfigMqttEnabledState where
parseJSON = parseJSONText "MqttConfigMqttEnabledState"
instance ToJSON MqttConfigMqttEnabledState where
toJSON = toJSONText
-- | If enabled, allows devices to use DeviceService via the HTTP protocol.
-- Otherwise, any requests to DeviceService will fail for this registry.
data HTTPConfigHTTPEnabledState
= HTTPStateUnspecified
-- ^ @HTTP_STATE_UNSPECIFIED@
-- No HTTP state specified. If not specified, DeviceService will be enabled
-- by default.
| HTTPEnabled
-- ^ @HTTP_ENABLED@
-- Enables DeviceService (HTTP) service for the registry.
| HTTPDisabled
-- ^ @HTTP_DISABLED@
-- Disables DeviceService (HTTP) service for the registry.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable HTTPConfigHTTPEnabledState
instance FromHttpApiData HTTPConfigHTTPEnabledState where
parseQueryParam = \case
"HTTP_STATE_UNSPECIFIED" -> Right HTTPStateUnspecified
"HTTP_ENABLED" -> Right HTTPEnabled
"HTTP_DISABLED" -> Right HTTPDisabled
x -> Left ("Unable to parse HTTPConfigHTTPEnabledState from: " <> x)
instance ToHttpApiData HTTPConfigHTTPEnabledState where
toQueryParam = \case
HTTPStateUnspecified -> "HTTP_STATE_UNSPECIFIED"
HTTPEnabled -> "HTTP_ENABLED"
HTTPDisabled -> "HTTP_DISABLED"
instance FromJSON HTTPConfigHTTPEnabledState where
parseJSON = parseJSONText "HTTPConfigHTTPEnabledState"
instance ToJSON HTTPConfigHTTPEnabledState where
toJSON = toJSONText
| brendanhay/gogol | gogol-cloudiot/gen/Network/Google/CloudIOT/Types/Sum.hs | mpl-2.0 | 17,596 | 0 | 11 | 3,598 | 2,176 | 1,179 | 997 | 254 | 0 |
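{-# LANGUAGE OverloadedStrings #-}
-- A small round-trip sketch for the enums above; not part of the generated
-- module. It only exercises the To/FromHttpApiData instances defined in the
-- excerpt; importing them from Web.HttpApiData (package http-api-data) is an
-- assumption about packaging -- gogol's prelude re-exports the same methods.
import Network.Google.CloudIOT.Types.Sum
import Web.HttpApiData (parseQueryParam, toQueryParam)

enumRoundTrips :: Bool
enumRoundTrips =
     parseQueryParam (toQueryParam Gateway) == Right Gateway  -- "GATEWAY"
  && parseQueryParam "DEBUG" == Right Debug                   -- DeviceLogLevel
  && toQueryParam X2 == "2"                                   -- Xgafv v2 error format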
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.FirebaseDynamicLinks.ManagedShortLinks.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a managed short Dynamic Link given either a valid long Dynamic
-- Link or details such as Dynamic Link domain, Android and iOS app
-- information. The created short Dynamic Link will not expire. This
-- differs from CreateShortDynamicLink in the following ways: - The request
-- will also contain a name for the link (non unique name for the front
-- end). - The response must be authenticated with an auth token (generated
-- with the admin service account). - The link will appear in the FDL list
-- of links in the console front end. The Dynamic Link domain in the
-- request must be owned by requester\'s Firebase project.
--
-- /See:/ <https://firebase.google.com/docs/dynamic-links/ Firebase Dynamic Links API Reference> for @firebasedynamiclinks.managedShortLinks.create@.
module Network.Google.Resource.FirebaseDynamicLinks.ManagedShortLinks.Create
(
-- * REST Resource
ManagedShortLinksCreateResource
-- * Creating a Request
, managedShortLinksCreate
, ManagedShortLinksCreate
-- * Request Lenses
, mslcXgafv
, mslcUploadProtocol
, mslcAccessToken
, mslcUploadType
, mslcPayload
, mslcCallback
) where
import Network.Google.FirebaseDynamicLinks.Types
import Network.Google.Prelude
-- | A resource alias for @firebasedynamiclinks.managedShortLinks.create@ method which the
-- 'ManagedShortLinksCreate' request conforms to.
type ManagedShortLinksCreateResource =
"v1" :>
"managedShortLinks:create" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] CreateManagedShortLinkRequest :>
Post '[JSON] CreateManagedShortLinkResponse
-- | Creates a managed short Dynamic Link given either a valid long Dynamic
-- Link or details such as Dynamic Link domain, Android and iOS app
-- information. The created short Dynamic Link will not expire. This
-- differs from CreateShortDynamicLink in the following ways: - The request
-- will also contain a name for the link (non unique name for the front
-- end). - The response must be authenticated with an auth token (generated
-- with the admin service account). - The link will appear in the FDL list
-- of links in the console front end. The Dynamic Link domain in the
-- request must be owned by requester\'s Firebase project.
--
-- /See:/ 'managedShortLinksCreate' smart constructor.
data ManagedShortLinksCreate =
ManagedShortLinksCreate'
{ _mslcXgafv :: !(Maybe Xgafv)
, _mslcUploadProtocol :: !(Maybe Text)
, _mslcAccessToken :: !(Maybe Text)
, _mslcUploadType :: !(Maybe Text)
, _mslcPayload :: !CreateManagedShortLinkRequest
, _mslcCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ManagedShortLinksCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mslcXgafv'
--
-- * 'mslcUploadProtocol'
--
-- * 'mslcAccessToken'
--
-- * 'mslcUploadType'
--
-- * 'mslcPayload'
--
-- * 'mslcCallback'
managedShortLinksCreate
:: CreateManagedShortLinkRequest -- ^ 'mslcPayload'
-> ManagedShortLinksCreate
managedShortLinksCreate pMslcPayload_ =
ManagedShortLinksCreate'
{ _mslcXgafv = Nothing
, _mslcUploadProtocol = Nothing
, _mslcAccessToken = Nothing
, _mslcUploadType = Nothing
, _mslcPayload = pMslcPayload_
, _mslcCallback = Nothing
}
-- | V1 error format.
mslcXgafv :: Lens' ManagedShortLinksCreate (Maybe Xgafv)
mslcXgafv
= lens _mslcXgafv (\ s a -> s{_mslcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
mslcUploadProtocol :: Lens' ManagedShortLinksCreate (Maybe Text)
mslcUploadProtocol
= lens _mslcUploadProtocol
(\ s a -> s{_mslcUploadProtocol = a})
-- | OAuth access token.
mslcAccessToken :: Lens' ManagedShortLinksCreate (Maybe Text)
mslcAccessToken
= lens _mslcAccessToken
(\ s a -> s{_mslcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
mslcUploadType :: Lens' ManagedShortLinksCreate (Maybe Text)
mslcUploadType
= lens _mslcUploadType
(\ s a -> s{_mslcUploadType = a})
-- | Multipart request metadata.
mslcPayload :: Lens' ManagedShortLinksCreate CreateManagedShortLinkRequest
mslcPayload
= lens _mslcPayload (\ s a -> s{_mslcPayload = a})
-- | JSONP
mslcCallback :: Lens' ManagedShortLinksCreate (Maybe Text)
mslcCallback
= lens _mslcCallback (\ s a -> s{_mslcCallback = a})
instance GoogleRequest ManagedShortLinksCreate where
type Rs ManagedShortLinksCreate =
CreateManagedShortLinkResponse
type Scopes ManagedShortLinksCreate =
'["https://www.googleapis.com/auth/firebase"]
requestClient ManagedShortLinksCreate'{..}
= go _mslcXgafv _mslcUploadProtocol _mslcAccessToken
_mslcUploadType
_mslcCallback
(Just AltJSON)
_mslcPayload
firebaseDynamicLinksService
where go
= buildClient
(Proxy :: Proxy ManagedShortLinksCreateResource)
mempty
| brendanhay/gogol | gogol-firebase-dynamiclinks/gen/Network/Google/Resource/FirebaseDynamicLinks/ManagedShortLinks/Create.hs | mpl-2.0 | 6,175 | 0 | 16 | 1,299 | 719 | 426 | 293 | 105 | 1 |
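{-# LANGUAGE OverloadedStrings #-}
-- A minimal request-construction sketch; not part of the generated module.
-- It uses the smart constructor and lenses defined above and assumes the
-- payload type is exported from the Types module; the callback value is a
-- placeholder.
import Control.Lens ((&), (?~))
import Network.Google.FirebaseDynamicLinks.Types (CreateManagedShortLinkRequest)
import Network.Google.Resource.FirebaseDynamicLinks.ManagedShortLinks.Create

mkCreateRequest :: CreateManagedShortLinkRequest -> ManagedShortLinksCreate
mkCreateRequest payload =
  managedShortLinksCreate payload
    & mslcCallback ?~ "handleResponse"  -- optional JSONP callback, illustrative only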
{-# LANGUAGE CPP #-}
module StoryMode.Menus (
newStoryModeAvailability,
storyModeMenuItem,
storyMode,
) where
import Data.Map (lookup, findWithDefault)
import Data.Maybe
import Data.Initial
import Control.Concurrent
import System.FilePath
import Network.Download
import Graphics.Qt
#if MIN_VERSION_base(4,7,0)
import Utils hiding (tryReadMVar)
#else
import Utils
#endif
import Base
import Editor.Pickle.LevelFile
import StoryMode.Types
import StoryMode.Configuration
import StoryMode.Episode
import StoryMode.Purchasing
-- * item in main menu
newStoryModeAvailability :: Ptr MainWindow -> Configuration -> IO (MVar StoryModeAvailability)
newStoryModeAvailability window config = do
ref <- newEmptyMVar
_ <- forkIO $ do
lookForStoryModeSite config >>= putMVar ref
updateMainWindow window
return ref
lookForStoryModeSite :: Configuration -> IO StoryModeAvailability
lookForStoryModeSite config = do
isInstalled <- isJust <$> loadEpisodes
if isInstalled then
return Installed
else
either (const NotAvailable) (const Buyable) <$>
downloadLazy (fromMaybe defaultPurchasingUrl (story_mode_purchasing_url config))
storyModeMenuItem :: StoryModeMenuItem
storyModeMenuItem = StoryModeMenuItem False
data StoryModeMenuItem = StoryModeMenuItem {selected :: Bool}
instance Renderable StoryModeMenuItem where
render ptr app config size (StoryModeMenuItem selected) = do
available <- tryReadMVar $ storyModeAvailability app
let prose = case available of
Nothing -> selMod $ p "Story Episodes"
Just Installed -> selMod $ p "Story Episodes"
Just NotAvailable -> selMod $ p "Story Episodes (coming soon!)"
Just Buyable -> colorizeProse yellow $ selMod $
p "Story Episodes (buy now!)"
selMod = if selected then select else deselect
render ptr app config size prose
label = const "StoryModeMenuItem"
select = const $ StoryModeMenuItem True
deselect = const $ StoryModeMenuItem False
-- * story mode menu itself
-- | Menu for the story mode
storyMode :: Application -> Play -> Parent -> AppState
storyMode app play parent = NoGUIAppState $ do
mEpisodes <- io $ loadEpisodes
case mEpisodes of
Nothing -> return $ suggestPurchase app this parent 0
Just episodes -> return $ mkEpisodesMenu app play parent episodes 0
where
this :: AppState
this = storyMode app play parent
-- | a menu showing all available episodes
mkEpisodesMenu :: Application -> Play -> Parent -> [Episode LevelFile] -> Int -> AppState
mkEpisodesMenu app play parent episodes =
menuAppState app
(NormalMenu (p "Story Episodes") (Just $ p "choose an episode"))
(Just parent)
(map (mkMenuItem app play this) episodes)
where
this = mkEpisodesMenu app play parent episodes
mkMenuItem :: Application -> Play -> (Int -> Parent)
-> Episode LevelFile -> MenuItem
mkMenuItem app play parent e =
MenuItem (pv $ epTitle $ euid e) (\ i -> mkEpisodeMenu app play (parent i) e 0)
-- | a menu for one episode.
mkEpisodeMenu :: Application -> Play -> Parent
-> Episode LevelFile -> Int -> AppState
mkEpisodeMenu app play parent ep ps = NoGUIAppState $ do
scores <- io $ getScores
let passedIntro = hasPassedIntro (highScores scores) ep
introItem = mkItem scores False (intro ep)
restItems = if not passedIntro then [] else
let bodyItems = map (mkItem scores False) (body ep)
outroItem = mkItem scores True (outro ep)
in (bodyItems +: outroItem)
happyEndItem = if episodeCompleted scores
then pure $ mkItem scores False (happyEnd ep)
else []
creditsItem = MenuItem (renderable $ p "credits") (credits app . this)
return $ menuAppState app
(NormalMenu (p "Story Episodes") (Just $ p "choose a level"))
(Just parent)
(introItem :
restItems ++
happyEndItem ++
creditsItem :
[])
ps
where
episodeCompleted :: HighScoreFile -> Bool
episodeCompleted hsf =
let outroScore = Data.Map.lookup (levelUID $ outro ep) (highScores hsf)
in maybe False isPassedScore outroScore
mkItem :: HighScoreFile -> Bool -> LevelFile -> MenuItem
mkItem scores isOutro level = MenuItem
(showLevel scores isOutro level)
(\ i -> play (this i) level)
showLevel :: HighScoreFile -> Bool -> LevelFile -> Prose
showLevel scores isOutro = if isOutro
then showOutroLevelForMenu (highScores scores) ep
(findWithDefault initial (euid ep) (episodeScores scores))
else showLevelForMenu (highScores scores)
this = mkEpisodeMenu app play parent ep
hasPassedIntro :: Scores -> Episode LevelFile -> Bool
hasPassedIntro scores e =
maybe False isPassedScore $
Data.Map.lookup (levelUID $ intro e) scores
credits :: Application -> Parent -> AppState
credits app parent = NoGUIAppState $ io $ do
mFile <- getStoryModeDataFileName ("manual" </> "credits" <.> "txt")
prose <- maybe
(return $ pure $ p "storymode not found")
pFile
mFile
return $ scrollingAppState app prose parent
| changlinli/nikki | src/StoryMode/Menus.hs | lgpl-3.0 | 5,321 | 0 | 18 | 1,332 | 1,469 | 738 | 731 | 118 | 4 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Network.Haskoin.Wallet.Arbitrary where
import Test.QuickCheck (Arbitrary, arbitrary, oneof)
import Network.Haskoin.Test
import Network.Haskoin.Wallet
instance Arbitrary AccountType where
arbitrary = oneof
[ AccountRegular <$> arbitrary
, do
ArbitraryMSParam m n <- arbitrary
r <- arbitrary
return $ AccountMultisig r m n
]
instance Arbitrary NodeAction where
arbitrary = oneof [ NodeActionRescan <$> arbitrary
, return NodeActionStatus
]
instance Arbitrary TxAction where
arbitrary = oneof
[ do
as' <- arbitrary
let as = map (\(ArbitraryAddress a, x) -> (a, x)) as'
fee <- arbitrary
rcptFee <- arbitrary
minConf <- arbitrary
sign <- arbitrary
return $ CreateTx as fee rcptFee minConf sign
, do
ArbitraryTx tx <- arbitrary
return (ImportTx tx)
, SignTx <$> arbitrary
]
| tphyahoo/haskoin-wallet | tests/Network/Haskoin/Wallet/Arbitrary.hs | unlicense | 1,075 | 0 | 17 | 378 | 266 | 136 | 130 | 29 | 0 |
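-- A quick way to exercise the instances above; not part of the module. It
-- assumes 'AccountType' has the 'Show' instance that 'sample' requires.
import Test.QuickCheck (Gen, arbitrary, sample)
import Network.Haskoin.Wallet (AccountType)
import Network.Haskoin.Wallet.Arbitrary ()

main :: IO ()
main = sample (arbitrary :: Gen AccountType)  -- print a few randomly generated account types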
-- Read two integers from a single input line and print their floored average.
ans :: [Int] -> Int
ans (a:b:_) = (a + b) `div` 2

main :: IO ()
main = do
  l <- getLine
  let i = map read $ words l :: [Int]
      o = ans i
  print o
| a143753/AOJ | 0357.hs | apache-2.0 | 119 | 0 | 11 | 42 | 84 | 42 | 42 | 6 | 1 |
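-- A quick sanity check for the 'ans' helper above (assumes it is in scope):
-- the input line "3 8" yields (3 + 8) `div` 2 = 5.
checkAns :: Bool
checkAns = ans [3, 8] == 5 && ans [2, 3] == 2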
import Abstract.Testing.Queue
import System.Environment
main :: IO ()
main = do
print "queue"
{-
argv <- getArgs
case argv of
(a1:a2:[]) -> runQueueTests (read a1 :: Int) (read a2 :: Int)
_ -> error "usage: ./run-queue-tests <threads> <maxN>"
-}
| adarqui/Abstract | examples/queue.hs | bsd-3-clause | 254 | 0 | 7 | 47 | 34 | 18 | 16 | 5 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Rel.User
( Config(Config)
, User(runUser)
, createSystemUser
, defaultConfig
, exists
, getUID
, lookupUser
, lookupGroup
) where
import Prelude hiding (fail)
import qualified System.Posix.User as PUser
import qualified System.Posix.Types as PTypes
import qualified Rel.Cmd as Cmd
import qualified Rel.Log as Log
import Monad.Result
data Config = Config {}
data User a = User { runUser :: Config -> IO (Result a) }
instance Functor User where
fmap f ma = User $ \c -> fmap f `fmap` runUser ma c
instance Applicative User where
pure x = User $ \_ -> return $ pure x
f <*> g = User $ \c -> runUser f c `mapAp` runUser g c
instance Monad User where
ma >>= f = User $ \c -> runUser ma c >>= flatten . fmap (flip runUser c . f)
return x = User $ \_ -> return $ pure x
instance ResultantMonad User where
point x = User $ \_ -> x
mapResult f ma = User $ \c -> f `fmap` runUser ma c
type Rel m = (ResultR User m, ResultR Cmd.Cmd m, ResultR Log.Log m)
defaultConfig :: Config
defaultConfig = Config {}
exists :: Rel m => String -> m Bool
exists x = recover (const False) $
safe (PUser.getUserEntryForName x >> return True)
createSystemUser :: Rel m => String -> FilePath -> m ()
createSystemUser x dir =
Cmd.run "useradd" ["--system", "--home-dir", dir, x] >> return ()
-- | Get the effective UID of this process.
getUID :: Rel m => m Int
getUID = safe $ fromIntegral `fmap` PUser.getEffectiveUserID
lookupUser :: Rel m => String -> m PTypes.UserID
lookupUser x = safe $ PUser.userID `fmap` PUser.getUserEntryForName x
lookupGroup :: Rel m => String -> m PTypes.GroupID
lookupGroup x = safe $ PUser.groupID `fmap` PUser.getGroupEntryForName x
| shmookey/pure | src/Rel/User.hs | bsd-3-clause | 1,811 | 2 | 12 | 374 | 666 | 359 | 307 | 51 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module FreeDSL.VWalk (
VWalkInstructions(..)
, VWalkDSL
, stepWith
, getNeighbors
, walkTo
, history
, whereAmI
) where
import Control.Monad
import Control.Monad.Free
import PolyGraph.Common.DslSupport.Coproduct ((:<:), liftDSL)
import Control.Monad.State.Strict
import PolyGraph.Common.DslSupport (MInterpreterWithCtx (..))
import PolyGraph.ReadOnly.Graph (AdjacencyIndex(..), neighborsOf)
--import Data.Functor.Identity
--import Control.Monad.Trans.Free
data VWalkInstructions v r = GetNeighbors ([v] -> r) |
WalkTo v (v -> r) |
History ([v] -> r) deriving (Functor)
type VWalkDSL v = Free (VWalkInstructions v)
-- need different pairing to do FreeT
--type VWalkT v m a = FreeT (VWalkInstructions v) m a
--type VWalkDSL v a = VWalkDSL v Identity a
--stepWith :: Monad m => ([v] -> v) -> VWalkDSL v m v
--getRating :: forall a polyglot.(Functor polyglot, (RatingInstructions a) :<: polyglot)
-- => a -> Free polyglot Int
--
-- using polymorphic signatures to allow for composability a la carte
--
stepWith :: forall v polyglot.(Functor polyglot, (VWalkInstructions v) :<: polyglot)
=> ([v] -> v) -> Free polyglot v
stepWith f = do
n <- getNeighbors
walkTo $ f n
getNeighbors :: forall v polyglot.(Functor polyglot, (VWalkInstructions v) :<: polyglot)
=> Free polyglot [v]
getNeighbors = liftDSL $ liftF (GetNeighbors id)
walkTo :: forall v polyglot.(Functor polyglot, (VWalkInstructions v) :<: polyglot)
=> v -> Free polyglot v
walkTo v = liftDSL $ liftF (WalkTo v id)
history :: forall v polyglot.(Functor polyglot, (VWalkInstructions v) :<: polyglot)
=> Free polyglot [v]
history = liftDSL $ liftF (History id)
whereAmI :: forall v polyglot.(Functor polyglot, (VWalkInstructions v) :<: polyglot)
=> Free polyglot v
whereAmI = (liftM head) history
instance forall g v e t m. (Eq v, AdjacencyIndex g v e t, MonadState ([v]) m) =>
MInterpreterWithCtx g m (VWalkInstructions v) where
interpretStepM g (GetNeighbors nF) = do
(vx:_) <- get
let choiceVs = neighborsOf g vx
if null choiceVs
then fail "out of cheese error"
else nF choiceVs
interpretStepM _ (WalkTo choiceV nF) =
do
modify $ (:) choiceV
nF choiceV
interpretStepM _ (History nF) =
do
path <- get
nF path
| rpeszek/GraphPlay | src/FreeDSL/VWalk.hs | bsd-3-clause | 2,512 | 0 | 11 | 677 | 693 | 383 | 310 | -1 | -1 |
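-- A tiny walk program written against the DSL above; not part of the module.
-- Using the concrete 'VWalkDSL' alias assumes the coproduct module provides
-- the usual reflexive 'f :<: f' instance; 'head' is acceptable here only as a
-- sketch of a greedy policy.
import FreeDSL.VWalk

walkGreedy :: VWalkDSL v [v]
walkGreedy = do
  ns <- getNeighbors     -- neighbours of the current vertex
  _  <- walkTo (head ns) -- commit to the first one
  _  <- stepWith head    -- the same policy, via the stepWith helper
  history                -- visited vertices, newest first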
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Data.Dfa.Equivalence
Description : Various equivalence tests for Dfas.
The algorithms here are derived from various algorithms presented by
Hopcroft. Outside of his textbook, a few were taken from this paper:
https://arxiv.org/pdf/0907.5058.pdf
-}
module Data.Dfa.Equivalence
( checkDfa
, equivalent
, equivalentText
, isomorphic
, isomorphicText
) where
import Data.Dfa
import Data.Foldable (forM_)
import qualified Data.Map as M
import Data.Maybe (fromJust)
import qualified Data.Set as S
import qualified Data.Text as T
import Control.Applicative ((<$>))
import Control.Monad.Except (MonadError, runExcept,
throwError)
import Control.Monad.Identity hiding (forM_)
import Control.Monad.State.Strict hiding (forM_)
import Parser.Dfa (doParseDfa)
data TagState
= DfaA Int
| DfaB Int
deriving (Eq, Ord, Show)
isDfaA :: TagState -> Bool
isDfaA (DfaA _)
= True
isDfaA _
= False
isDfaB :: TagState -> Bool
isDfaB = not . isDfaA
untagState :: TagState -> Int
untagState (DfaA x)
= x
untagState (DfaB x)
= x
type SetOfSets a = S.Set (S.Set a)
isRight :: Either a b -> Bool
isRight (Right _) = True
isRight _ = False
isLeft :: Either a b -> Bool
isLeft (Left _) = True
isLeft _ = False
checkDfa :: T.Text -> Bool
checkDfa dfaText
= isRight $ doParseDfa dfaText
compareText :: ( MonadError DfaError m
, Functor m) => (Dfa -> Dfa -> m Bool)
-> T.Text -> T.Text -> m Bool
compareText f dfaT1 dfaT2
= let dfaE1 = doParseDfa dfaT1
dfaE2 = doParseDfa dfaT2
in
        case dfaE1 of
          Left e -> throwError (DfaParseError e)
          Right dfa1 ->
              case dfaE2 of
Left e -> throwError (DfaParseError e)
Right dfa2 ->
f dfa1 dfa2
equivalentText :: (MonadError DfaError m, Functor m) => T.Text -> T.Text -> m Bool
equivalentText
= compareText equivalent
isomorphicText :: (MonadError DfaError m, Functor m) => T.Text -> T.Text -> m Bool
isomorphicText
= compareText isomorphic
equivalent :: (MonadError DfaError m, Functor m) => Dfa -> Dfa -> m Bool
equivalent
= hopcroftKarp
isomorphic :: (MonadError DfaError m, Functor m) => Dfa -> Dfa -> m Bool
isomorphic dfa1 dfa2
= do
equiv <- equivalent dfa1 dfa2
return $ _Q dfa1 == _Q dfa2 && equiv
-- | Destructive union within a set of sets
-- TODO: StateStack is static, so this might be refactorable to RWS
union :: (Show a, Ord a, MonadState (StateStack, SetOfSets a) m, Functor m)
=> S.Set a -> S.Set a -> m Bool
union set1 set2
| set1 == set2
= return False
| otherwise
= do
(x, curSets) <- get
let removed = S.filter (\x -> (x /= set1) && (x /= set2)) curSets
newSets = S.insert (set1 `S.union` set2) removed
if curSets == removed
then return False
else do
put (x, newSets)
return True
-- TODO: StateStack is static, so this might be refactorable to RWS
find :: (Show a, Ord a, MonadState (StateStack, SetOfSets a) m, Functor m)
=> a -> m (StateStack, S.Set a)
find element
= do
(x, sets) <- get
let containingSets = S.filter (S.member element) sets
if S.size containingSets > 1
then error "BUG: Error in find --- too many sets"
else
return $ (x, setHead containingSets)
hasLefts :: [Either a b] -> Bool
hasLefts = or . map isLeft
lefts :: [Either a b] -> [a]
lefts = map unsafeLeft . filter isLeft
unsafeLeft :: Either a b -> a
unsafeLeft (Left x) = x
type StateStack = [(TagState, TagState)]
hopcroftKarp :: (MonadError DfaError m, Functor m)
=> Dfa -> Dfa -> m Bool
hopcroftKarp dfaA dfaB
= do
let σ = _Σ dfaA
statesA = map (S.singleton . DfaA) [0.._Q dfaA - 1]
statesB = map (S.singleton . DfaB) [0.._Q dfaB - 1]
states' = S.fromList statesA `S.union` S.fromList statesB
starts = [(DfaA 0, DfaB 0)]
states = execState
(S.singleton (DfaA 0) `union`
S.singleton (DfaB 0))
(starts, states')
(_, partition) <- execStateT (forStack σ) states
return $ (_Σ dfaA == _Σ dfaB) && checkPartition partition
where
forStack :: ( MonadError DfaError m
, MonadState (StateStack, SetOfSets TagState) m
, Functor m)
=> S.Set Char
-> m ()
forStack σ
= do
(stack, tagged) <- get
let preStack = stack
unless (null stack) $ do
forM_ (S.toList σ) forSymbol
(postStack, postTagged) <- get
when (preStack /= postStack) $ forStack σ
forSymbol :: ( MonadError DfaError m
, MonadState (StateStack, SetOfSets TagState) m
, Functor m)
=> Char
-> m ()
forSymbol symb
= do
((p, q):stack, tagged) <- get
let pNum = untagState p
qNum = untagState q
qOnSymbNum = M.lookup (qNum, symb) (_δ dfaB)
case M.lookup (pNum, symb) (_δ dfaA) of
Nothing -> throwError (TransitionError pNum symb)
(Just pOnSymbNum) ->
case M.lookup (qNum, symb) (_δ dfaB) of
Nothing -> throwError (TransitionError qNum symb)
(Just qOnSymbNum) -> do
let pOnSymb = DfaA pOnSymbNum
qOnSymb = DfaB qOnSymbNum
p' <- snd <$> find pOnSymb
q' <- snd <$> find qOnSymb
when (p' /= q') $ do
_ <- union p' q'
(stack', set) <- get
put $ ((pOnSymb, qOnSymb):stack', set)
checkPartition :: SetOfSets TagState -> Bool
checkPartition
= S.foldr (&&) True . S.map sameFinality
sameFinality :: S.Set TagState -> Bool
sameFinality states
= let aStates' = S.filter isDfaA states
bStates' = S.filter isDfaB states
unTag = S.map untagState
aStates = unTag aStates'
bStates = unTag bStates'
aFinal = allFinal dfaA aStates
aNonFinal = allNonFinal dfaA aStates
bFinal = allFinal dfaB bStates
bNonFinal = allNonFinal dfaB bStates
in (aFinal && bFinal) || (aNonFinal && bNonFinal)
allFinal :: Dfa -> S.Set Int -> Bool
allFinal dfa states
= states `S.isSubsetOf` final
where final = _F dfa
allNonFinal :: Dfa -> S.Set Int -> Bool
allNonFinal dfa states
= S.null $ S.intersection final states
where final = _F dfa
setHead :: S.Set a -> a
setHead
= head . S.toAscList
| qfjp/csce_dfa_project_test | src/Data/Dfa/Equivalence.hs | bsd-3-clause | 7,294 | 0 | 23 | 2,638 | 2,257 | 1,159 | 1,098 | 183 | 3 |
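-- A minimal driver for the checks above; not part of the module. The two
-- 'Dfa' values are assumed to be built elsewhere (constructors live in
-- Data.Dfa, outside this excerpt); 'runExcept' pins the MonadError instance.
import Control.Monad.Except (runExcept)
import Data.Dfa
import Data.Dfa.Equivalence (equivalent, isomorphic)

compareDfas :: Dfa -> Dfa -> (Either DfaError Bool, Either DfaError Bool)
compareDfas d1 d2 =
  ( runExcept (equivalent d1 d2)  -- language equivalence (Hopcroft-Karp)
  , runExcept (isomorphic d1 d2)  -- equivalence plus equal state count
  )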
module Chapter3.ExercisesSpec where
import Test.Hspec
rvrs :: String -> String
rvrs sentence = undefined
thirdLetter :: String -> Char
thirdLetter = undefined
letterIndex :: Int -> Char
letterIndex x = undefined -- "Curry is awesome!"
spec :: Spec
spec =
describe "Chapter 3 Exercises" $ do
-- Intermediate Exercises currently still missing
it "should compile" $ "" `shouldBe` ""
-- describe "Reading syntax" $ do
  {- Before uncommenting the following tests, read the syntax and make sure
     that the tests are correct. Otherwise, correct them. -}
-- it "1a your expected outcome" $ (concat [[1, 2, 3], [4, 5, 6]]) `shouldBe` ???
-- it "1b your expected outcome" $ (++ [1, 2, 3] [4, 5, 6]) `shouldBe` ???
-- it "1c your expected outcome" $ ((++) "hello" " world") `shouldBe` ???
-- it "1d your expected outcome" $ (["hello" ++ " world]) `shouldBe` ???
-- it "1e your expected outcome" $ (4 !! "hello") `shouldBe` ???
-- it "1f your expected outcome" $ ((!!) "hello" 4) `shouldBe` ???
-- it "1g your expected outcome" $ (take "4 lovely") `shouldBe` ???
-- it "1h your expected outcome" $ (take 3 "awesome") `shouldBe` ???
-- it "2a your expected outcome" $ (concat [[1 * 6], [2 * 6], [3 * 6]]) `shouldBe` ???
-- it "2b your expected outcome" $ ("rain" ++ drop 2 "elbow") `shouldBe` ???
-- it "2c your expected outcome" $ (10 * head [1, 2, 3]) `shouldBe` ???
-- it "2d your expected outcome" $ ((take 3 "Julie") ++ (tail "yes")) `shouldBe` ???
-- it "2e your expected outcome" $ (concat [tail [1, 2, 3], tail [4, 5, 6], tail [7, 8, 9]]) `shouldBe` ???
-- describe "Building functions" $ do
-- it "Match the 2 Strings" $ ("Curry is awesome") `shouldBe` "Curry is awesome!"
-- it "Use `take` and `drop` to match the 2 Strings" $ ("Curry is awesome!") `shouldBe` "y"
-- it "Use `drop` to match the 2 Strings" $ ("Curry is awesome!") `shouldBe` "awesome!"
-- it "Should return the third letter of a string" $ thirdLetter "Curry on" `shouldBe` 'r'
-- it "Should the nth letter of a string" $ letterIndex 2 `shouldBe` 'r'
-- it "should reverse the sentence `Curry is awesome` using only `drop` and `take`" $ rvrs "Curry is awesome" `shouldBe` "awesome is Curry"
| yannick-cw/haskell_katas | test/Chapter3/ExercisesSpec.hs | bsd-3-clause | 2,298 | 0 | 10 | 533 | 116 | 74 | 42 | 12 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
-- | Interpretation of binding constructs
module Feldspar.Core.Constructs.Binding
( module Language.Syntactic.Constructs.Binding
, optimizeLambda
, optimizeFunction
-- , optimizeFunctionFix
, betaReduce
, prjLambda
, cLambda
, reuseCLambda
, collectLetBinders
) where
import Control.Monad.Reader
import Data.Maybe
import Data.Map
import Data.Typeable (Typeable, gcast)
--import Data.Lens.Common
import Language.Syntactic
import Language.Syntactic.Constructs.Binding hiding (subst,betaReduce)
import Language.Syntactic.Constructs.Binding.HigherOrder (CLambda)
import Feldspar.Lattice
import Feldspar.Core.Types
import Feldspar.Core.Interpretation
instance Sharable Variable -- `codeMotion` will not share variables anyway
instance Sharable Lambda -- Will not be shared anyway because we disallow variables of `->` type
instance Sharable Let
subst :: forall constr dom a b
. ( Constrained dom
, CLambda Type :<: dom
, (Variable :|| Type) :<: dom
)
=> VarId -- ^ Variable to be substituted
-> ASTF (dom :|| Typeable) a -- ^ Expression to substitute for
-> ASTF (dom :|| Typeable) b -- ^ Expression to substitute in
-> ASTF (dom :|| Typeable) b
subst v new a = go a
where
go :: AST (dom :|| Typeable) c -> AST (dom :|| Typeable) c
go a@((prjLambda -> Just (SubConstr2 (Lambda w))) :$ _)
| v==w = a -- Capture
go (f :$ a) = go f :$ go a
go var
| Just (C' (Variable w)) <- prjF var
, v==w
, Dict <- exprDictSub pTypeable new
, Dict <- exprDictSub pTypeable var
, Just new' <- gcast new
= new'
go a = a
-- TODO Should be possible to use the one in Syntactic instead
betaReduce
:: ( Constrained dom
, CLambda Type :<: dom
, (Variable :|| Type) :<: dom
)
=> ASTF (dom :|| Typeable) a -- ^ Argument
-> ASTF (dom :|| Typeable) (a -> b) -- ^ Function to be reduced
-> ASTF (dom :|| Typeable) b
betaReduce new (lam :$ body)
| Just (SubConstr2 (Lambda v)) <- prjLambda lam = subst v new body
-- TODO Should be possible to use the one in Syntactic instead
optimizeLambda :: ( CLambda Type :<: dom
, OptimizeSuper dom)
=> FeldOpts
-> (ASTF (dom :|| Typeable) b -> Opt (ASTF (Decor Info (dom :|| Typeable)) b)) -- ^ Optimization of the body
-> Info a
-> CLambda Type (b :-> Full (a -> b))
-> Args (AST (dom :|| Typeable)) (b :-> Full (a -> b))
-> Opt (ASTF (Decor Info (dom :|| Typeable)) (a -> b))
optimizeLambda opts opt info lam@(SubConstr2 (Lambda v)) (body :* Nil)
| Dict <- exprDict body
= do
body' <- localVar v info $ opt body
constructFeatUnOpt opts lam (body' :* Nil)
-- | Assumes that the expression is a 'Lambda'
optimizeFunction :: ( (Variable :|| Type) :<: dom
, CLambda Type :<: dom
, Let :<: dom
, OptimizeSuper dom
)
=> FeldOpts
-> (ASTF (dom :|| Typeable) b -> Opt (ASTF (Decor Info (dom :|| Typeable)) b)) -- ^ Optimization of the body
-> Info a
-> (ASTF (dom :|| Typeable) (a -> b) -> Opt (ASTF (Decor Info (dom :|| Typeable)) (a -> b)))
optimizeFunction opts opt info e
| e'@(bs, _) <- collectLetBinders e
, not (Prelude.null bs)
= optimizeLet opts opt info e'
optimizeFunction opts opt info a@(sym :$ body)
| Dict <- exprDict a
, Dict <- exprDict body
, Just (lam@(SubConstr2 (Lambda v))) <- prjLambda sym
= optimizeLambda opts opt info lam (body :* Nil)
optimizeFunction opts opt info a
= error $ "optimizeFunction: AST is not a function: " ++ show a ++ "\n" ++ show (infoType info)
optimizeLet
:: ( (Variable :|| Type) :<: dom
, CLambda Type :<: dom
, Let :<: dom
, OptimizeSuper dom
)
=> FeldOpts
-> (ASTF (dom :|| Typeable) b -> Opt (ASTF (Decor Info (dom :|| Typeable)) b)) -- ^ Optimization of the body
-> Info a
-> ([(VarId, ASTB (dom :|| Typeable) Type)], ASTF (dom :|| Typeable) (a -> b))
-> Opt (ASTF (Decor Info (dom :|| Typeable)) (a -> b))
optimizeLet opts opt info ((v, ASTB e):t, bd)
| Dict <- exprDict bd
, Dict <- exprDict e
= do
e' <- optimizeM opts e
bd' <- localVar v (getInfo e') $ optimizeLet opts opt info (t, bd)
bd'' <- constructFeatUnOpt opts (cLambda v) (bd' :* Nil)
constructFeatUnOpt opts Let (e' :* bd'' :* Nil)
optimizeLet opts opt info ([], e) = optimizeFunction opts opt info e
{-
optimizeFunBody :: (Lambda TypeCtx :<: dom, Optimize dom dom, Typeable a)
=> (ASTF dom a -> Opt (ASTF (Decor Info dom) a)) -- ^ Optimization of the body
-> Env -- ^ Environment (instead of using 'Opt')
-> VarId -- ^ Bound variable
-> ASTF dom a -- ^ Body
-> Info a -- ^ 'Info' of bound variable
-> ASTF (Decor Info dom) a
optimizeFunBody opt env v body info =
flip runReader env $ localVar v info $ opt body
-- | Assumes that the expression is a 'Lambda'
optimizeFunctionFix
:: forall dom a
. (Lambda TypeCtx :<: dom, Optimize dom dom, Type a)
=> (ASTF dom a -> Opt (ASTF (Decor Info dom) a)) -- ^ Optimization of the body
-> Info a
-> (ASTF dom (a -> a) -> Opt (ASTF (Decor Info dom) (a -> a)))
optimizeFunctionFix opt info (lam :$ body)
| Just (Lambda v) <- prjCtx typeCtx lam
= do
env <- ask
let aLens :: Lens (Info a) (Size a)
aLens = lens infoSize (\sz inf -> inf {infoSize = sz})
let bLens :: Lens (ASTF (Decor Info dom) a) (Size a)
bLens = lens (infoSize . getInfo)
(\sz a -> updateDecor (\inf -> inf {infoSize = sz}) a)
let body' = fst $ boundedLensedFixedPoint 1 aLens bLens
(optimizeFunBody opt env v body)
info
-- Using 1 as bound is motivated by the fact that a higher number
-- leads to exponential blowup when there are many nested
-- iterations. Since it is probably uncommon to have very deeply
-- nested loops, it might be fine to increase the bound. However
-- it is not clear that we gain anything by doing so, other than
-- in very special cases.
constructFeatUnOpt (Lambda v `withContext` typeCtx) (body' :* Nil)
-}
instance ( (Variable :|| Type) :<: dom
, OptimizeSuper dom)
=> Optimize (Variable :|| Type) dom
where
constructFeatUnOpt _ var@(C' (Variable v)) Nil
= reader $ \env -> case Prelude.lookup v (varEnv env) of
Nothing -> error $
"optimizeFeat: can't get size of free variable: v" ++ show v
Just (SomeInfo info) ->
let info' = (fromJust $ gcast info) {infoVars = singleton v (SomeType $ infoType info) }
in Sym $ Decor info' $ C' $ inj $ c' (Variable v)
instance ( CLambda Type :<: dom
, OptimizeSuper dom)
=> Optimize (CLambda Type) dom
where
-- | Assigns a 'universal' size to the bound variable. This only makes sense
-- for top-level lambdas. For other uses, use 'optimizeLambda' instead.
optimizeFeat opts lam@(SubConstr2 (Lambda v))
| Dict <- exprDict lam
= optimizeLambda opts (optimizeM opts) (mkInfo universal) lam
constructFeatUnOpt _ lam@(SubConstr2 (Lambda v)) (body :* Nil)
| Dict <- exprDict lam
, Info t sz vars _ <- getInfo body
= do
src <- asks sourceEnv
let info = Info (FunType typeRep t) (universal, sz) (delete v vars) src
return $ (Sym $ Decor info $ C' $ inj lam) :$ body
instance SizeProp Let
where
sizeProp Let (_ :* WrapFull f :* Nil) = snd $ infoSize f
instance
( Let :<: dom
, (Variable :|| Type) :<: dom
, CLambda Type :<: dom
, OptimizeSuper dom
) =>
Optimize Let dom
where
optimizeFeat opts lt@Let (a :* f :* Nil) = do
a' <- optimizeM opts a
f' <- optimizeFunction opts (optimizeM opts) (getInfo a') f
constructFeat opts lt (a' :* f' :* Nil)
constructFeatOpt _ Let (a :* (lam :$ var) :* Nil)
| Just (C' (Variable v2)) <- prjF var
, Just (SubConstr2 (Lambda v1)) <- prjLambda lam
, v1 == v2
= return $ fromJust $ gcast a
constructFeatOpt opts Let (var :* f :* Nil)
| Just (C' (Variable v)) <- prjF var
= optimizeM opts $ betaReduce (stripDecor var) (stripDecor f)
-- (letBind (letBind e1 (\x -> e2)) (\y -> e3) ==>
-- letBind e1 (\x -> letBind e2 (\y-> e3))
--
-- Test case:
--
-- stestL2 :: Data Index -> Data Length -> Data [[Index]]
-- stestL2 m x = parallel x (\x1 -> let z = let y = x `mod` m in (y, y) in parallel 2 (\x -> fst z))
constructFeatOpt opts lt1@Let ((lt2 :$ x :$ (lam :$ bd)) :* y :* Nil)
| Just Let <- prj lt2
, Just lam'@(SubConstr2 (Lambda v1)) <- prjLambda lam
, SICS `inTarget` opts
= do
bb <- constructFeat opts lt1 (bd :* y :* Nil)
bd' <- constructFeat opts (reuseCLambda lam') (bb :* Nil)
constructFeatUnOpt opts Let (x :* bd' :* Nil)
-- Hoist let-bound constants upwards.
--
-- (letBind e1 (\x -> letBind v (\y -> e2)) ==>
-- letBind v (\y -> letBind e1 (\x-> e2))
constructFeatOpt opts lt1@Let (e :* (lam1 :$ (lt2 :$ v :$ (lam2 :$ bd))) :* Nil)
| Just Let <- prj lt2
, Nothing <- viewLiteral e
, Just _ <- viewLiteral v
, Just lam1'@(SubConstr2 (Lambda{})) <- prjLambda lam1
, Just lam2'@(SubConstr2 (Lambda{})) <- prjLambda lam2
, SICS `inTarget` opts
= do
bb <- constructFeat opts (reuseCLambda lam1') (bd :* Nil)
bb' <- constructFeat opts lt1 (e :* bb :* Nil)
bd' <- constructFeat opts (reuseCLambda lam2') (bb' :* Nil)
constructFeatUnOpt opts Let (v :* bd' :* Nil)
constructFeatOpt opts a args = constructFeatUnOpt opts a args
constructFeatUnOpt opts Let args@(_ :* (lam :$ body) :* Nil)
| Just (SubConstr2 (Lambda _)) <- prjLambda lam
, Info {infoType = t} <- getInfo body
= constructFeatUnOptDefaultTyp opts t Let args
prjLambda :: (Project (CLambda Type) dom)
=> dom sig -> Maybe (CLambda Type sig)
prjLambda = prj
cLambda :: Type a => VarId -> CLambda Type (b :-> Full (a -> b))
cLambda = SubConstr2 . Lambda
-- | Allow an existing binding to be used with a body of a different type
reuseCLambda :: CLambda Type (b :-> Full (a -> b)) -> CLambda Type (c :-> Full (a -> c))
reuseCLambda (SubConstr2 (Lambda v)) = SubConstr2 (Lambda v)
-- | Collects the immediate let bindings in a list and returns the first non-let expression
--
-- This function can be useful when let bindings get in the way of pattern matching on a
-- sub-expressions.
collectLetBinders :: forall dom a .
( Project Let dom
, Project (CLambda Type) dom
, ConstrainedBy dom Typeable
) => ASTF dom a ->
( [(VarId, ASTB dom Type)]
, ASTF dom a
)
collectLetBinders e = go [] e
where
go
:: [(VarId, ASTB dom Type)]
-> ASTF dom a
-> ( [(VarId, ASTB dom Type)]
, ASTF dom a
)
go bs (lt :$ e :$ (lam :$ body))
| Just (SubConstr2 (Lambda v)) <- prjLambda lam
, Just Let <- prj lt
, Dict <- exprDict e
= go ((v, ASTB e):bs) body
go bs e = (reverse bs, e)
| rCEx/feldspar-lang-small | src/Feldspar/Core/Constructs/Binding.hs | bsd-3-clause | 13,735 | 0 | 20 | 4,026 | 3,472 | 1,778 | 1,694 | 208 | 4 |
module Language.Haskell.GhcMod.Lang where
import DynFlags (supportedLanguagesAndExtensions)
import Language.Haskell.GhcMod.Types
-- | Listing language extensions.
listLanguages :: Options -> IO String
listLanguages opt = return $ convert opt supportedLanguagesAndExtensions
| carlohamalainen/ghc-mod | Language/Haskell/GhcMod/Lang.hs | bsd-3-clause | 277 | 0 | 6 | 30 | 53 | 31 | 22 | 5 | 1 |
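-- A one-line usage sketch for the function above; not part of the module.
-- 'defaultOptions' is assumed to be the Options value exported from
-- Language.Haskell.GhcMod.Types.
import Language.Haskell.GhcMod.Lang (listLanguages)
import Language.Haskell.GhcMod.Types (defaultOptions)

main :: IO ()
main = listLanguages defaultOptions >>= putStr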
{-# LANGUAGE DeriveDataTypeable, FlexibleContexts, GeneralizedNewtypeDeriving, RankNTypes, RecordWildCards, OverloadedStrings #-}
module Main where
import Control.Concurrent (killThread)
import Control.Concurrent.Chan (Chan)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as C
import Data.Set (Set, insert)
import Network (HostName, PortID(PortNumber), connectTo)
import Network.IRC (Message)
import Network.IRC.Bot.BotMonad (BotMonad(..))
import Network.IRC.Bot.Core (BotConf(..), User(..), nullBotConf, simpleBot)
import Network.IRC.Bot.Log (LogLevel(..), nullLogger, stdoutLogger)
import Network.IRC.Bot.Part.Dice (dicePart)
import Network.IRC.Bot.Part.Hello (helloPart)
import Network.IRC.Bot.Part.Ping (pingPart)
import Network.IRC.Bot.Part.NickUser (nickUserPart)
import Network.IRC.Bot.Part.Channels (initChannelsPart)
import System.Console.GetOpt
import System.Environment (getArgs, getProgName)
import System.Exit (exitFailure)
import System.IO (stdout)
data Flag
= BotConfOpt { unBotConfOpt :: (BotConf -> BotConf) }
botOpts :: [OptDescr Flag]
botOpts =
[ Option [] ["irc-server"] (ReqArg setIrcServer "hostname or IP") "irc server to connect to"
, Option [] ["port"] (ReqArg setPort "port") "port to connect to on server"
, Option [] ["nick"] (ReqArg setNick "name") "irc nick"
, Option [] ["username"] (ReqArg setUsername "username") "ident username"
, Option [] ["hostname"] (ReqArg setHostname "hostname") "hostname of machine bot is connecting from"
, Option [] ["realname"] (ReqArg setRealname "name") "bot's real name"
, Option [] ["cmd-prefix"] (ReqArg setCmdPrefix "prefix") "prefix to bot commands (e.g., ?, @, bot: )"
, Option [] ["channel"] (ReqArg addChannel "channel name") "channel to join after connecting. (can be specified more than once to join multiple channels)"
, Option [] ["log-level"] (ReqArg setLogLevel "debug, normal, important, quiet") "set the logging level"
, Option [] ["limit"] (ReqArg setLimit "int,int") "enable rate limiter. burst length, delay in microseconds"
]
where
setIrcServer n = BotConfOpt $ \c -> c { host = n, user = (user c) { servername = n } }
setPort str = BotConfOpt $ \c -> c { port = PortNumber (fromIntegral $ read str) }
setNick n = BotConfOpt $ \c -> c { nick = C.pack n }
setUsername n = BotConfOpt $ \c -> c { user = (user c) { username = C.pack n } }
setHostname n = BotConfOpt $ \c -> c { user = (user c) { hostname = n } }
setRealname n = BotConfOpt $ \c -> c { user = (user c) { realname = (C.pack n) } }
setCmdPrefix p = BotConfOpt $ \c -> c { commandPrefix = p }
addChannel ch = BotConfOpt $ \c -> c { channels = insert (C.pack ch) (channels c) }
setLogLevel l = BotConfOpt $ \c ->
case l of
"debug" -> c { logger = stdoutLogger Debug }
"normal" -> c { logger = stdoutLogger Normal }
"important" -> c { logger = stdoutLogger Important }
"quiet" -> c { logger = nullLogger }
_ -> error $ "unknown log-level: " ++ l
setLimit s = BotConfOpt $ \c ->
case break (== ',') s of
(burstStr, delayStr) ->
case reads burstStr of
[(burstLen,[])] ->
case reads (drop 1 $ delayStr) of
[(delay,[])] ->
c { limits = Just (burstLen, delay) }
_ -> error $ "unabled to parse delay: " ++ delayStr
_ -> error $ "unabled to parse burst length: " ++ burstStr
getBotConf :: Maybe (Chan Message -> IO ()) -> IO BotConf
getBotConf mLogger =
do args <- getArgs
case getOpt Permute botOpts args of
(f,_,[]) ->
do let conf = (foldr ($) nullBotConf (map unBotConfOpt f)) { channelLogger = mLogger }
checkConf conf
return conf
(_,_,errs) ->
               do progName <- getProgName
                  putStr (concat errs ++ helpMessage progName)
                  exitFailure
exitHelp msg =
do progName <- getProgName
putStrLn msg
putStr (helpMessage progName)
exitFailure
checkConf :: BotConf -> IO ()
checkConf BotConf{..}
| null host = exitHelp "must specify --irc-server"
| C.null nick = exitHelp "must specify --nick"
| C.null (username user) = exitHelp "must specify --username"
| null (hostname user) = exitHelp "must specify --hostname"
| C.null (realname user) = exitHelp "must specify --realname"
| otherwise = return ()
helpMessage progName = usageInfo header botOpts
where
header = "Usage: "++progName++" [OPTION...]\n" ++ "e.g.\n" ++
progName ++ " --irc-server irc.freenode.net --nick stepbot --username stepbot --hostname happstack.com --realname \"happstack bot\" --channel \"#stepbot\""
main :: IO ()
main =
do botConf <- getBotConf Nothing
ircParts <- initParts (channels botConf)
(tids, reconnect) <- simpleBot botConf ircParts
(logger botConf) Important "Press enter to force reconnect."
getLine
reconnect
(logger botConf) Important "Press enter to quit."
getLine
mapM_ killThread tids
initParts :: (BotMonad m) =>
Set ByteString -- ^ set of channels to join
-> IO [m ()]
initParts chans =
do (_, channelsPart) <- initChannelsPart chans
return [ pingPart
, nickUserPart
, channelsPart
, dicePart
, helloPart
]
| eigengrau/haskell-ircbot | demo.hs | bsd-3-clause | 5,831 | 0 | 21 | 1,727 | 1,662 | 887 | 775 | 109 | 7 |
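-- A configuration sketch that skips the CLI parsing above and fills in
-- 'nullBotConf' directly; the record fields are the ones the option setters
-- touch, and the server, nick and channel values are placeholders.
import qualified Data.ByteString.Char8 as C
import Data.Set (insert)
import Network (PortID(PortNumber))
import Network.IRC.Bot.Core (BotConf(..), User(..), nullBotConf)
import Network.IRC.Bot.Log (LogLevel(..), stdoutLogger)

staticConf :: BotConf
staticConf = nullBotConf
    { host     = "irc.example.org"
    , port     = PortNumber 6667
    , nick     = C.pack "demobot"
    , user     = (user nullBotConf) { username   = C.pack "demobot"
                                    , hostname   = "client.example.org"
                                    , realname   = C.pack "demo bot"
                                    , servername = "irc.example.org"
                                    }
    , channels = insert (C.pack "#demobot") (channels nullBotConf)
    , logger   = stdoutLogger Normal
    }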
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Dwindle
-- Description : Various spirally layouts.
-- Copyright : (c) Norbert Zeh <[email protected]>
-- License : BSD3
--
-- Maintainer : Norbert Zeh <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- Three layouts: The first, 'Spiral', is a reimplementation of
-- 'XMonad.Layout.Spiral.spiral' with, at least to me, more intuitive semantics.
-- The second, 'Dwindle', is inspired by a similar layout in awesome and
-- produces the same sequence of decreasing window sizes as Spiral but pushes
-- the smallest windows into a screen corner rather than the centre. The third,
-- 'Squeeze' arranges all windows in one row or in one column, with
-- geometrically decreasing sizes.
--
-----------------------------------------------------------------------------
module XMonad.Layout.Dwindle ( -- * Usage
-- $usage
Dwindle(..)
, Direction2D(..)
, Chirality(..)
) where
import XMonad.Prelude ( unfoldr )
import XMonad
import XMonad.StackSet ( integrate, Stack )
import XMonad.Util.Types ( Direction2D(..) )
-- $usage
-- This module can be used as follows:
--
-- > import XMonad.Layout.Dwindle
--
-- Then add something like this to your layouts:
--
-- > Dwindle R CW 1.5 1.1
--
-- or
--
-- > Spiral L CW 1.5 1.1
--
-- or
--
-- > Squeeze D 1.5 1.1
--
-- The first produces a layout that places the second window to the right of
-- the first, the third below the second, the fourth to the right of the third,
-- and so on. The first window is 1.5 times as wide as the second one, the
-- second is 1.5 times as tall as the third one, and so on. Thus, the further
-- down the window stack a window is, the smaller it is and the more it is
-- pushed into the bottom-right corner.
--
-- The second produces a layout with the same window sizes but places the second
-- window to the left of the first one, the third above the second one, the
-- fourth to the right of the third one, and so on.
--
-- The third produces a layout that stacks windows vertically top-down with each
-- window being 1.5 times as tall as the next.
--
-- In all three cases, the fourth (third, in the case of 'Squeeze') parameter,
-- 1.1, is the factor by which the third parameter increases or decreases in
-- response to Expand or Shrink messages.
--
-- For more detailed instructions on editing the layoutHook see:
--
-- "XMonad.Doc.Extending#Editing_the_layout_hook"
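--
-- A minimal sketch of a complete configuration using this layout (assuming
-- an otherwise default setup; adjust to taste) might look like:
--
-- > import XMonad
-- > import XMonad.Layout.Dwindle
-- >
-- > main = xmonad def { layoutHook = Dwindle R CW 1.5 1.1 ||| Full }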
-- | Layouts with geometrically decreasing window sizes. 'Spiral' and 'Dwindle'
-- split the screen into a rectangle for the first window and a rectangle for
-- the remaining windows, which is split recursively to lay out these windows.
-- Both layouts alternate between horizontal and vertical splits.
--
-- In each recursive step, the split 'Direction2D' determines the placement of the
-- remaining windows relative to the current window: to the left, to the right,
-- above or below. The split direction of the first split is determined by the
-- first layout parameter. The split direction of the second step is rotated 90
-- degrees relative to the first split direction according to the second layout
-- parameter of type 'Chirality'. So, if the first split is 'R' and the second
-- layout parameter is 'CW', then the second split is 'D'.
--
-- For the 'Spiral' layout, the same 'Chirality' is used for computing the split
-- direction of each step from the split direction of the previous step. For
-- example, parameters 'R' and 'CW' produces the direction sequence 'R', 'D',
-- 'L', 'U', 'R', 'D', 'L', 'U', ...
--
-- For the 'Dwindle' layout, the 'Chirality' alternates between 'CW' and 'CCW' in
-- each step. For example, parameters 'U' and 'CCW' produce the direction
-- sequence 'U', 'L', 'U', 'L', ... because 'L' is the 'CCW' rotation of 'U' and
-- 'U' is the 'CW' rotation of 'L'.
--
-- In each split, the current rectangle is split so that the ratio between the
-- size of the rectangle allocated to the current window and the size of the
-- rectangle allocated to the remaining windows is the third layout parameter.
-- This ratio can be altered using 'Expand' and 'Shrink' messages. The former
-- multiplies the ratio by the fourth layout parameter. The latter divides the
-- ratio by this parameter.
--
-- 'Squeeze' does not alternate between horizontal and vertical splits and
-- simply splits in the direction given as its first argument.
--
-- Parameters for both 'Dwindle' and 'Spiral':
--
-- * First split direction
--
-- * First split chirality
--
-- * Size ratio between rectangle allocated to current window and rectangle
-- allocated to remaining windows
--
-- * Factor by which the size ratio is changed in response to 'Expand' or 'Shrink'
-- messages
--
-- The parameters for 'Squeeze' are the same, except that there is no 'Chirality'
-- parameter.
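--
-- As a worked example, with a size ratio of 2 the current window receives
-- two thirds of the current rectangle at each step and the remaining windows
-- share the other third (cf. the internal @splitRect@ helper).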
data Dwindle a = Dwindle !Direction2D !Chirality !Rational !Rational
| Spiral !Direction2D !Chirality !Rational !Rational
| Squeeze !Direction2D !Rational !Rational
deriving (Read, Show)
-- | Rotation between consecutive split directions
data Chirality = CW | CCW
deriving (Read, Show)
instance LayoutClass Dwindle a where
pureLayout (Dwindle dir rot ratio _) = dwindle alternate dir rot ratio
pureLayout (Spiral dir rot ratio _) = dwindle rotate dir rot ratio
pureLayout (Squeeze dir ratio _) = squeeze dir ratio
pureMessage (Dwindle dir rot ratio delta) =
fmap (\ratio' -> Dwindle dir rot ratio' delta) . changeRatio ratio delta
pureMessage (Spiral dir rot ratio delta) =
fmap (\ratio' -> Spiral dir rot ratio' delta) . changeRatio ratio delta
pureMessage (Squeeze dir ratio delta) =
fmap (\ratio' -> Squeeze dir ratio' delta) . changeRatio ratio delta
changeRatio :: Rational -> Rational -> SomeMessage -> Maybe Rational
changeRatio ratio delta = fmap f . fromMessage
where f Expand = ratio * delta
f Shrink = ratio / delta
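-- Core layout worker for 'Dwindle' and 'Spiral': peel off one window at a
-- time, splitting the remaining rectangle along the current axes; the
-- 'AxesGenerator' decides how the split direction changes between steps
-- (alternating chirality for 'Dwindle', a fixed chirality for 'Spiral').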
dwindle :: AxesGenerator -> Direction2D -> Chirality -> Rational -> Rectangle -> Stack a ->
[(a, Rectangle)]
dwindle trans dir rot ratio rect st = unfoldr genRects (integrate st, rect, dirAxes dir, rot)
where genRects ([], _, _, _ ) = Nothing
genRects ([w], r, a, rt) = Just ((w, r), ([], r, a, rt))
genRects (w:ws, r, a, rt) = Just ((w, r'), (ws, r'', a', rt'))
where (r', r'') = splitRect r ratio a
(a', rt') = trans a rt
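-- Layout worker for 'Squeeze': all windows in a single row or column, each
-- window 'ratio' times as large (along the split direction) as the next.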
squeeze :: Direction2D -> Rational -> Rectangle -> Stack a -> [(a, Rectangle)]
squeeze dir ratio rect st = zip wins rects
where wins = integrate st
nwins = length wins
sizes = take nwins $ unfoldr (\r -> Just (r * ratio, r * ratio)) 1
totals' = 0 : zipWith (+) sizes totals'
totals = tail totals'
splits = zip (tail sizes) totals
ratios = reverse $ map (uncurry (/)) splits
rects = genRects rect ratios
genRects r [] = [r]
genRects r (x:xs) = r' : genRects r'' xs
where (r', r'') = splitRect r x (dirAxes dir)
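-- Split a rectangle into a piece for the current window and a piece for the
-- remaining windows: the current window's piece is 'ratio' times as large as
-- the remainder's, and the axes select the side on which the remainder goes.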
splitRect :: Rectangle -> Rational -> Axes -> (Rectangle, Rectangle)
splitRect (Rectangle x y w h) ratio (ax, ay) = (Rectangle x' y' w' h', Rectangle x'' y'' w'' h'')
where portion = ratio / (ratio + 1)
w1 = (round $ fi w * portion) :: Int
w2 = fi w - w1
h1 = (round $ fi h * portion) :: Int
h2 = fi h - h1
x' = x + fi (negate ax * (1 - ax) * w2 `div` 2)
y' = y + fi (negate ay * (1 - ay) * h2 `div` 2)
w' = fi $ w1 + (1 - abs ax) * w2
h' = fi $ h1 + (1 - abs ay) * h2
x'' = x + fi (ax * (1 + ax) * w1 `div` 2)
y'' = y + fi (ay * (1 + ay) * h1 `div` 2)
w'' = fi $ w2 + (1 - abs ax) * w1
h'' = fi $ h2 + (1 - abs ay) * h1
fi :: (Num b, Integral a) => a -> b
fi = fromIntegral
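-- An axes pair encodes a split direction as a vector: the non-zero component
-- picks the split axis, and its sign the side on which the remaining windows
-- are placed (see 'dirAxes').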
type Axes = (Int, Int)
type AxesGenerator = Axes -> Chirality -> (Axes, Chirality)
dirAxes :: Direction2D -> Axes
dirAxes L = (-1, 0)
dirAxes R = ( 1, 0)
dirAxes U = ( 0, -1)
dirAxes D = ( 0, 1)
alternate :: AxesGenerator
alternate = chDir alt
rotate :: AxesGenerator
rotate = chDir id
chDir :: (Chirality -> Chirality) -> AxesGenerator
chDir f (x, y) r = (a' r, r')
where a' CW = (-y, x)
a' CCW = ( y, -x)
r' = f r
alt :: Chirality -> Chirality
alt CW = CCW
alt CCW = CW
| xmonad/xmonad-contrib | XMonad/Layout/Dwindle.hs | bsd-3-clause | 8,679 | 0 | 14 | 2,119 | 1,744 | 985 | 759 | 108 | 3 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnSource]{Main pass of renamer}
-}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module RnSource (
rnSrcDecls, addTcgDUs, findSplice
) where
#include "HsVersions.h"
import {-# SOURCE #-} RnExpr( rnLExpr )
import {-# SOURCE #-} RnSplice ( rnSpliceDecl, rnTopSpliceDecls )
import HsSyn
import FieldLabel
import RdrName
import RnTypes
import RnBinds
import RnEnv
import RnNames
import RnHsDoc ( rnHsDoc, rnMbLHsDoc )
import TcAnnotations ( annCtxt )
import TcRnMonad
import ForeignCall ( CCallTarget(..) )
import Module
import HscTypes ( Warnings(..), plusWarns )
import Class ( FunDep )
import PrelNames ( applicativeClassName, pureAName, thenAName
, monadClassName, returnMName, thenMName
, monadFailClassName, failMName, failMName_preMFP
, semigroupClassName, sappendName
, monoidClassName, mappendName
)
import Name
import NameSet
import NameEnv
import Avail
import Outputable
import Bag
import BasicTypes ( DerivStrategy, RuleName, pprRuleName )
import FastString
import SrcLoc
import DynFlags
import Util ( debugIsOn, lengthExceeds, partitionWith )
import HscTypes ( HscEnv, hsc_dflags )
import ListSetOps ( findDupsEq, removeDups, equivClasses )
import Digraph ( SCC, flattenSCC, flattenSCCs
, stronglyConnCompFromEdgedVerticesUniq )
import UniqFM
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
import Control.Arrow ( first )
import Data.List ( sortBy, mapAccumL )
import Data.Maybe ( isJust )
import qualified Data.Set as Set ( difference, fromList, toList, null )
{-
@rnSourceDecl@ `renames' declarations.
It simultaneously performs dependency analysis and precedence parsing.
It also does the following error checks:
\begin{enumerate}
\item
Checks that tyvars are used properly. This includes checking
for undefined tyvars, and tyvars in contexts that are ambiguous.
(Some of this checking has now been moved to module @TcMonoType@,
since we don't have functional dependency information at this point.)
\item
Checks that all variable occurrences are defined.
\item
Checks the @(..)@ etc constraints in the export list.
\end{enumerate}
-}
-- Brings the binders of the group into scope in the appropriate places;
-- does NOT assume that anything is in scope already
rnSrcDecls :: HsGroup RdrName -> RnM (TcGblEnv, HsGroup Name)
-- Rename a top-level HsGroup; used for normal source files *and* hs-boot files
rnSrcDecls group@(HsGroup { hs_valds = val_decls,
hs_splcds = splice_decls,
hs_tyclds = tycl_decls,
hs_derivds = deriv_decls,
hs_fixds = fix_decls,
hs_warnds = warn_decls,
hs_annds = ann_decls,
hs_fords = foreign_decls,
hs_defds = default_decls,
hs_ruleds = rule_decls,
hs_vects = vect_decls,
hs_docs = docs })
= do {
-- (A) Process the fixity declarations, creating a mapping from
-- FastStrings to FixItems.
-- Also checks for duplicates.
local_fix_env <- makeMiniFixityEnv fix_decls ;
-- (B) Bring top level binders (and their fixities) into scope,
-- *except* for the value bindings, which get done in step (D)
-- with collectHsIdBinders. However *do* include
--
-- * Class ops, data constructors, and record fields,
-- because they do not have value declarations.
        --          Also step (C) depends on datacons and record fields
--
-- * For hs-boot files, include the value signatures
-- Again, they have no value declarations
--
(tc_envs, tc_bndrs) <- getLocalNonValBinders local_fix_env group ;
setEnvs tc_envs $ do {
failIfErrsM ; -- No point in continuing if (say) we have duplicate declarations
-- (D1) Bring pattern synonyms into scope.
-- Need to do this before (D2) because rnTopBindsLHS
-- looks up those pattern synonyms (Trac #9889)
extendPatSynEnv val_decls local_fix_env $ \pat_syn_bndrs -> do {
-- (D2) Rename the left-hand sides of the value bindings.
-- This depends on everything from (B) being in scope,
-- and on (C) for resolving record wild cards.
-- It uses the fixity env from (A) to bind fixities for view patterns.
new_lhs <- rnTopBindsLHS local_fix_env val_decls ;
-- Bind the LHSes (and their fixities) in the global rdr environment
let { id_bndrs = collectHsIdBinders new_lhs } ; -- Excludes pattern-synonym binders
-- They are already in scope
traceRn (text "rnSrcDecls" <+> ppr id_bndrs) ;
tc_envs <- extendGlobalRdrEnvRn (map avail id_bndrs) local_fix_env ;
traceRn (text "D2" <+> ppr (tcg_rdr_env (fst tc_envs)));
setEnvs tc_envs $ do {
-- Now everything is in scope, as the remaining renaming assumes.
-- (E) Rename type and class decls
-- (note that value LHSes need to be in scope for default methods)
--
-- You might think that we could build proper def/use information
-- for type and class declarations, but they can be involved
-- in mutual recursion across modules, and we only do the SCC
-- analysis for them in the type checker.
-- So we content ourselves with gathering uses only; that
-- means we'll only report a declaration as unused if it isn't
-- mentioned at all. Ah well.
traceRn (text "Start rnTyClDecls" <+> ppr tycl_decls) ;
(rn_tycl_decls, src_fvs1) <- rnTyClDecls tycl_decls ;
-- (F) Rename Value declarations right-hand sides
traceRn (text "Start rnmono") ;
let { val_bndr_set = mkNameSet id_bndrs `unionNameSet` mkNameSet pat_syn_bndrs } ;
is_boot <- tcIsHsBootOrSig ;
(rn_val_decls, bind_dus) <- if is_boot
                -- For an hs-boot file, use tc_bndrs (which collects the binders
                -- from the renamed signatures), since val_bndr_set is empty (there are no x = ...
-- bindings in an hs-boot.)
then rnTopBindsBoot tc_bndrs new_lhs
else rnValBindsRHS (TopSigCtxt val_bndr_set) new_lhs ;
traceRn (text "finish rnmono" <+> ppr rn_val_decls) ;
-- (G) Rename Fixity and deprecations
-- Rename fixity declarations and error if we try to
-- fix something from another module (duplicates were checked in (A))
let { all_bndrs = tc_bndrs `unionNameSet` val_bndr_set } ;
rn_fix_decls <- rnSrcFixityDecls all_bndrs fix_decls ;
-- Rename deprec decls;
-- check for duplicates and ensure that deprecated things are defined locally
-- at the moment, we don't keep these around past renaming
rn_warns <- rnSrcWarnDecls all_bndrs warn_decls ;
-- (H) Rename Everything else
(rn_rule_decls, src_fvs2) <- setXOptM LangExt.ScopedTypeVariables $
rnList rnHsRuleDecls rule_decls ;
-- Inside RULES, scoped type variables are on
(rn_vect_decls, src_fvs3) <- rnList rnHsVectDecl vect_decls ;
(rn_foreign_decls, src_fvs4) <- rnList rnHsForeignDecl foreign_decls ;
(rn_ann_decls, src_fvs5) <- rnList rnAnnDecl ann_decls ;
(rn_default_decls, src_fvs6) <- rnList rnDefaultDecl default_decls ;
(rn_deriv_decls, src_fvs7) <- rnList rnSrcDerivDecl deriv_decls ;
(rn_splice_decls, src_fvs8) <- rnList rnSpliceDecl splice_decls ;
-- Haddock docs; no free vars
rn_docs <- mapM (wrapLocM rnDocDecl) docs ;
last_tcg_env <- getGblEnv ;
-- (I) Compute the results and return
let {rn_group = HsGroup { hs_valds = rn_val_decls,
hs_splcds = rn_splice_decls,
hs_tyclds = rn_tycl_decls,
hs_derivds = rn_deriv_decls,
hs_fixds = rn_fix_decls,
hs_warnds = [], -- warns are returned in the tcg_env
-- (see below) not in the HsGroup
hs_fords = rn_foreign_decls,
hs_annds = rn_ann_decls,
hs_defds = rn_default_decls,
hs_ruleds = rn_rule_decls,
hs_vects = rn_vect_decls,
hs_docs = rn_docs } ;
tcf_bndrs = hsTyClForeignBinders rn_tycl_decls rn_foreign_decls ;
other_def = (Just (mkNameSet tcf_bndrs), emptyNameSet) ;
other_fvs = plusFVs [src_fvs1, src_fvs2, src_fvs3, src_fvs4, src_fvs5,
src_fvs6, src_fvs7, src_fvs8] ;
-- It is tiresome to gather the binders from type and class decls
src_dus = [other_def] `plusDU` bind_dus `plusDU` usesOnly other_fvs ;
-- Instance decls may have occurrences of things bound in bind_dus
-- so we must put other_fvs last
final_tcg_env = let tcg_env' = (last_tcg_env `addTcgDUs` src_dus)
in -- we return the deprecs in the env, not in the HsGroup above
tcg_env' { tcg_warns = tcg_warns tcg_env' `plusWarns` rn_warns };
} ;
traceRn (text "last" <+> ppr (tcg_rdr_env final_tcg_env)) ;
traceRn (text "finish rnSrc" <+> ppr rn_group) ;
traceRn (text "finish Dus" <+> ppr src_dus ) ;
return (final_tcg_env, rn_group)
}}}}
addTcgDUs :: TcGblEnv -> DefUses -> TcGblEnv
-- This function could be defined lower down in the module hierarchy,
-- but there doesn't seem to be anywhere very logical to put it.
addTcgDUs tcg_env dus = tcg_env { tcg_dus = tcg_dus tcg_env `plusDU` dus }
rnList :: (a -> RnM (b, FreeVars)) -> [Located a] -> RnM ([Located b], FreeVars)
rnList f xs = mapFvRn (wrapLocFstM f) xs
{-
*********************************************************
* *
HsDoc stuff
* *
*********************************************************
-}
rnDocDecl :: DocDecl -> RnM DocDecl
rnDocDecl (DocCommentNext doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentNext rn_doc)
rnDocDecl (DocCommentPrev doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentPrev rn_doc)
rnDocDecl (DocCommentNamed str doc) = do
rn_doc <- rnHsDoc doc
return (DocCommentNamed str rn_doc)
rnDocDecl (DocGroup lev doc) = do
rn_doc <- rnHsDoc doc
return (DocGroup lev rn_doc)
{-
*********************************************************
* *
Source-code fixity declarations
* *
*********************************************************
-}
rnSrcFixityDecls :: NameSet -> [LFixitySig RdrName] -> RnM [LFixitySig Name]
-- Rename the fixity decls, so we can put
-- the renamed decls in the renamed syntax tree
-- Errors if the thing being fixed is not defined locally.
--
-- The returned FixitySigs are not actually used for anything,
-- except perhaps the GHCi API
rnSrcFixityDecls bndr_set fix_decls
= do fix_decls <- mapM rn_decl fix_decls
return (concat fix_decls)
where
sig_ctxt = TopSigCtxt bndr_set
rn_decl :: LFixitySig RdrName -> RnM [LFixitySig Name]
-- GHC extension: look up both the tycon and data con
-- for con-like things; hence returning a list
-- If neither are in scope, report an error; otherwise
-- return a fixity sig for each (slightly odd)
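    -- e.g. (illustrative): for @infixr 5 :*:@, when both a type constructor
    -- and a data constructor named @:*:@ are in scope, we return one renamed
    -- fixity signature for each.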
rn_decl (L loc (FixitySig fnames fixity))
= do names <- mapM lookup_one fnames
return [ L loc (FixitySig name fixity)
| name <- names ]
lookup_one :: Located RdrName -> RnM [Located Name]
lookup_one (L name_loc rdr_name)
= setSrcSpan name_loc $
-- this lookup will fail if the definition isn't local
do names <- lookupLocalTcNames sig_ctxt what rdr_name
return [ L name_loc name | (_, name) <- names ]
what = text "fixity signature"
{-
*********************************************************
* *
Source-code deprecations declarations
* *
*********************************************************
Check that the deprecated names are defined, are defined locally, and
that there are no duplicate deprecations.
It's only for imported deprecations, dealt with in RnIfaces, that we
gather them together.
-}
-- checks that the deprecations are defined locally, and that there are no duplicates
rnSrcWarnDecls :: NameSet -> [LWarnDecls RdrName] -> RnM Warnings
rnSrcWarnDecls _ []
= return NoWarnings
rnSrcWarnDecls bndr_set decls'
= do { -- check for duplicates
; mapM_ (\ dups -> let (L loc rdr:lrdr':_) = dups
in addErrAt loc (dupWarnDecl lrdr' rdr))
warn_rdr_dups
; pairs_s <- mapM (addLocM rn_deprec) decls
; return (WarnSome ((concat pairs_s))) }
where
decls = concatMap (\(L _ d) -> wd_warnings d) decls'
sig_ctxt = TopSigCtxt bndr_set
rn_deprec (Warning rdr_names txt)
-- ensures that the names are defined locally
= do { names <- concatMapM (lookupLocalTcNames sig_ctxt what . unLoc)
rdr_names
; return [(rdrNameOcc rdr, txt) | (rdr, _) <- names] }
what = text "deprecation"
warn_rdr_dups = findDupRdrNames $ concatMap (\(L _ (Warning ns _)) -> ns)
decls
findDupRdrNames :: [Located RdrName] -> [[Located RdrName]]
findDupRdrNames = findDupsEq (\ x -> \ y -> rdrNameOcc (unLoc x) == rdrNameOcc (unLoc y))
-- look for duplicates among the OccNames;
-- we check that the names are defined above
-- invariant: the lists returned by findDupsEq always have at least two elements
dupWarnDecl :: Located RdrName -> RdrName -> SDoc
-- Located RdrName -> DeprecDecl RdrName -> SDoc
dupWarnDecl (L loc _) rdr_name
= vcat [text "Multiple warning declarations for" <+> quotes (ppr rdr_name),
text "also at " <+> ppr loc]
{-
*********************************************************
* *
\subsection{Annotation declarations}
* *
*********************************************************
-}
rnAnnDecl :: AnnDecl RdrName -> RnM (AnnDecl Name, FreeVars)
rnAnnDecl ann@(HsAnnotation s provenance expr)
= addErrCtxt (annCtxt ann) $
do { (provenance', provenance_fvs) <- rnAnnProvenance provenance
; (expr', expr_fvs) <- setStage (Splice Untyped) $
rnLExpr expr
; return (HsAnnotation s provenance' expr',
provenance_fvs `plusFV` expr_fvs) }
rnAnnProvenance :: AnnProvenance RdrName -> RnM (AnnProvenance Name, FreeVars)
rnAnnProvenance provenance = do
provenance' <- traverse lookupTopBndrRn provenance
return (provenance', maybe emptyFVs unitFV (annProvenanceName_maybe provenance'))
{-
*********************************************************
* *
\subsection{Default declarations}
* *
*********************************************************
-}
rnDefaultDecl :: DefaultDecl RdrName -> RnM (DefaultDecl Name, FreeVars)
rnDefaultDecl (DefaultDecl tys)
= do { (tys', fvs) <- rnLHsTypes doc_str tys
; return (DefaultDecl tys', fvs) }
where
doc_str = DefaultDeclCtx
{-
*********************************************************
* *
\subsection{Foreign declarations}
* *
*********************************************************
-}
rnHsForeignDecl :: ForeignDecl RdrName -> RnM (ForeignDecl Name, FreeVars)
rnHsForeignDecl (ForeignImport { fd_name = name, fd_sig_ty = ty, fd_fi = spec })
= do { topEnv :: HscEnv <- getTopEnv
; name' <- lookupLocatedTopBndrRn name
; (ty', fvs) <- rnHsSigType (ForeignDeclCtx name) ty
-- Mark any PackageTarget style imports as coming from the current package
; let unitId = thisPackage $ hsc_dflags topEnv
spec' = patchForeignImport unitId spec
; return (ForeignImport { fd_name = name', fd_sig_ty = ty'
, fd_co = noForeignImportCoercionYet
, fd_fi = spec' }, fvs) }
rnHsForeignDecl (ForeignExport { fd_name = name, fd_sig_ty = ty, fd_fe = spec })
= do { name' <- lookupLocatedOccRn name
; (ty', fvs) <- rnHsSigType (ForeignDeclCtx name) ty
; return (ForeignExport { fd_name = name', fd_sig_ty = ty'
, fd_co = noForeignExportCoercionYet
, fd_fe = spec }
, fvs `addOneFV` unLoc name') }
-- NB: a foreign export is an *occurrence site* for name, so
-- we add it to the free-variable list. It might, for example,
-- be imported from another module
-- | For Windows DLLs we need to know what packages imported symbols are from
-- to generate correct calls. Imported symbols are tagged with the current
-- package, so if they get inlined across a package boundary we'll still
-- know where they're from.
--
patchForeignImport :: UnitId -> ForeignImport -> ForeignImport
patchForeignImport unitId (CImport cconv safety fs spec src)
= CImport cconv safety fs (patchCImportSpec unitId spec) src
patchCImportSpec :: UnitId -> CImportSpec -> CImportSpec
patchCImportSpec unitId spec
= case spec of
CFunction callTarget -> CFunction $ patchCCallTarget unitId callTarget
_ -> spec
patchCCallTarget :: UnitId -> CCallTarget -> CCallTarget
patchCCallTarget unitId callTarget =
case callTarget of
StaticTarget src label Nothing isFun
-> StaticTarget src label (Just unitId) isFun
_ -> callTarget
{-
*********************************************************
* *
\subsection{Instance declarations}
* *
*********************************************************
-}
rnSrcInstDecl :: InstDecl RdrName -> RnM (InstDecl Name, FreeVars)
rnSrcInstDecl (TyFamInstD { tfid_inst = tfi })
= do { (tfi', fvs) <- rnTyFamInstDecl Nothing tfi
; return (TyFamInstD { tfid_inst = tfi' }, fvs) }
rnSrcInstDecl (DataFamInstD { dfid_inst = dfi })
= do { (dfi', fvs) <- rnDataFamInstDecl Nothing dfi
; return (DataFamInstD { dfid_inst = dfi' }, fvs) }
rnSrcInstDecl (ClsInstD { cid_inst = cid })
= do { (cid', fvs) <- rnClsInstDecl cid
; return (ClsInstD { cid_inst = cid' }, fvs) }
-- | Warn about non-canonical typeclass instance declarations
--
-- A "non-canonical" instance definition can occur for instances of a
-- class which redundantly defines an operation its superclass
-- provides as well (c.f. `return`/`pure`). In such cases, a canonical
-- instance is one where the subclass inherits its method
-- implementation from its superclass instance (usually the subclass
-- has a default method implementation to that effect). Consequently,
-- a non-canonical instance occurs when this is not the case.
--
-- See also descriptions of 'checkCanonicalMonadInstances' and
-- 'checkCanonicalMonoidInstances'
checkCanonicalInstances :: Name -> LHsSigType Name -> LHsBinds Name -> RnM ()
checkCanonicalInstances cls poly_ty mbinds = do
whenWOptM Opt_WarnNonCanonicalMonadInstances
checkCanonicalMonadInstances
whenWOptM Opt_WarnNonCanonicalMonadFailInstances
checkCanonicalMonadFailInstances
whenWOptM Opt_WarnNonCanonicalMonoidInstances
checkCanonicalMonoidInstances
where
-- | Warn about unsound/non-canonical 'Applicative'/'Monad' instance
-- declarations. Specifically, the following conditions are verified:
--
-- In 'Monad' instances declarations:
--
-- * If 'return' is overridden it must be canonical (i.e. @return = pure@)
-- * If '(>>)' is overridden it must be canonical (i.e. @(>>) = (*>)@)
--
-- In 'Applicative' instance declarations:
--
-- * Warn if 'pure' is defined backwards (i.e. @pure = return@).
-- * Warn if '(*>)' is defined backwards (i.e. @(*>) = (>>)@).
--
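    -- Illustration (hypothetical type @T@ with constructor @MkT@):
    --
    -- > instance Monad T where
    -- >   return x = MkT x   -- warned: expected the canonical @return = pure@
    --
    -- triggers @-Wnoncanonical-monad-instances@, whereas a definition that is
    -- literally @return = pure@ (or no 'return' definition at all) is accepted
    -- silently.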
checkCanonicalMonadInstances
| cls == applicativeClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == pureAName, isAliasMG mg == Just returnMName
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonadInstances "pure" "return"
| name == thenAName, isAliasMG mg == Just thenMName
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonadInstances "(*>)" "(>>)"
_ -> return ()
| cls == monadClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == returnMName, isAliasMG mg /= Just pureAName
-> addWarnNonCanonicalMethod2
Opt_WarnNonCanonicalMonadInstances "return" "pure"
| name == thenMName, isAliasMG mg /= Just thenAName
-> addWarnNonCanonicalMethod2
Opt_WarnNonCanonicalMonadInstances "(>>)" "(*>)"
_ -> return ()
| otherwise = return ()
-- | Warn about unsound/non-canonical 'Monad'/'MonadFail' instance
-- declarations. Specifically, the following conditions are verified:
--
-- In 'Monad' instances declarations:
--
-- * If 'fail' is overridden it must be canonical
-- (i.e. @fail = Control.Monad.Fail.fail@)
--
-- In 'MonadFail' instance declarations:
--
-- * Warn if 'fail' is defined backwards
-- (i.e. @fail = Control.Monad.fail@).
--
checkCanonicalMonadFailInstances
| cls == monadFailClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == failMName, isAliasMG mg == Just failMName_preMFP
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonadFailInstances "fail"
"Control.Monad.fail"
_ -> return ()
| cls == monadClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == failMName_preMFP, isAliasMG mg /= Just failMName
-> addWarnNonCanonicalMethod2
Opt_WarnNonCanonicalMonadFailInstances "fail"
"Control.Monad.Fail.fail"
_ -> return ()
| otherwise = return ()
-- | Check whether Monoid(mappend) is defined in terms of
-- Semigroup((<>)) (and not the other way round). Specifically,
-- the following conditions are verified:
--
-- In 'Monoid' instances declarations:
--
-- * If 'mappend' is overridden it must be canonical
-- (i.e. @mappend = (<>)@)
--
-- In 'Semigroup' instance declarations:
--
-- * Warn if '(<>)' is defined backwards (i.e. @(<>) = mappend@).
--
checkCanonicalMonoidInstances
| cls == semigroupClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == sappendName, isAliasMG mg == Just mappendName
-> addWarnNonCanonicalMethod1
Opt_WarnNonCanonicalMonoidInstances "(<>)" "mappend"
_ -> return ()
| cls == monoidClassName = do
forM_ (bagToList mbinds) $ \(L loc mbind) -> setSrcSpan loc $ do
case mbind of
FunBind { fun_id = L _ name, fun_matches = mg }
| name == mappendName, isAliasMG mg /= Just sappendName
-> addWarnNonCanonicalMethod2NoDefault
Opt_WarnNonCanonicalMonoidInstances "mappend" "(<>)"
_ -> return ()
| otherwise = return ()
-- | test whether MatchGroup represents a trivial \"lhsName = rhsName\"
-- binding, and return @Just rhsName@ if this is the case
isAliasMG :: MatchGroup Name (LHsExpr Name) -> Maybe Name
isAliasMG MG {mg_alts = L _ [L _ (Match { m_pats = [], m_grhss = grhss })]}
| GRHSs [L _ (GRHS [] body)] lbinds <- grhss
, L _ EmptyLocalBinds <- lbinds
, L _ (HsVar (L _ rhsName)) <- body = Just rhsName
isAliasMG _ = Nothing
-- got "lhs = rhs" but expected something different
addWarnNonCanonicalMethod1 flag lhs rhs = do
addWarn (Reason flag) $ vcat
[ text "Noncanonical" <+>
quotes (text (lhs ++ " = " ++ rhs)) <+>
text "definition detected"
, instDeclCtxt1 poly_ty
, text "Move definition from" <+>
quotes (text rhs) <+>
text "to" <+> quotes (text lhs)
]
-- expected "lhs = rhs" but got something else
addWarnNonCanonicalMethod2 flag lhs rhs = do
addWarn (Reason flag) $ vcat
[ text "Noncanonical" <+>
quotes (text lhs) <+>
text "definition detected"
, instDeclCtxt1 poly_ty
, text "Either remove definition for" <+>
quotes (text lhs) <+> text "or define as" <+>
quotes (text (lhs ++ " = " ++ rhs))
]
-- like above, but method has no default impl
addWarnNonCanonicalMethod2NoDefault flag lhs rhs = do
addWarn (Reason flag) $ vcat
[ text "Noncanonical" <+>
quotes (text lhs) <+>
text "definition detected"
, instDeclCtxt1 poly_ty
, text "Define as" <+>
quotes (text (lhs ++ " = " ++ rhs))
]
-- stolen from TcInstDcls
instDeclCtxt1 :: LHsSigType Name -> SDoc
instDeclCtxt1 hs_inst_ty
= inst_decl_ctxt (ppr (getLHsInstDeclHead hs_inst_ty))
inst_decl_ctxt :: SDoc -> SDoc
inst_decl_ctxt doc = hang (text "in the instance declaration for")
2 (quotes doc <> text ".")
rnClsInstDecl :: ClsInstDecl RdrName -> RnM (ClsInstDecl Name, FreeVars)
rnClsInstDecl (ClsInstDecl { cid_poly_ty = inst_ty, cid_binds = mbinds
, cid_sigs = uprags, cid_tyfam_insts = ats
, cid_overlap_mode = oflag
, cid_datafam_insts = adts })
= do { (inst_ty', inst_fvs) <- rnLHsInstType (text "an instance declaration") inst_ty
; let (ktv_names, _, head_ty') = splitLHsInstDeclTy inst_ty'
; let cls = case hsTyGetAppHead_maybe head_ty' of
Nothing -> mkUnboundName (mkTcOccFS (fsLit "<class>"))
Just (L _ cls, _) -> cls
-- rnLHsInstType has added an error message
-- if hsTyGetAppHead_maybe fails
-- Rename the bindings
-- The typechecker (not the renamer) checks that all
-- the bindings are for the right class
-- (Slightly strangely) when scoped type variables are on, the
-- forall-d tyvars scope over the method bindings too
; (mbinds', uprags', meth_fvs) <- rnMethodBinds False cls ktv_names mbinds uprags
; checkCanonicalInstances cls inst_ty' mbinds'
-- Rename the associated types, and type signatures
-- Both need to have the instance type variables in scope
; traceRn (text "rnSrcInstDecl" <+> ppr inst_ty' $$ ppr ktv_names)
; ((ats', adts'), more_fvs)
<- extendTyVarEnvFVRn ktv_names $
do { (ats', at_fvs) <- rnATInstDecls rnTyFamInstDecl cls ktv_names ats
; (adts', adt_fvs) <- rnATInstDecls rnDataFamInstDecl cls ktv_names adts
; return ( (ats', adts'), at_fvs `plusFV` adt_fvs) }
; let all_fvs = meth_fvs `plusFV` more_fvs
`plusFV` inst_fvs
; return (ClsInstDecl { cid_poly_ty = inst_ty', cid_binds = mbinds'
, cid_sigs = uprags', cid_tyfam_insts = ats'
, cid_overlap_mode = oflag
, cid_datafam_insts = adts' },
all_fvs) }
-- We return the renamed associated data type declarations so
-- that they can be entered into the list of type declarations
-- for the binding group, but we also keep a copy in the instance.
-- The latter is needed for well-formedness checks in the type
-- checker (eg, to ensure that all ATs of the instance actually
-- receive a declaration).
-- NB: Even the copies in the instance declaration carry copies of
-- the instance context after renaming. This is a bit
-- strange, but should not matter (and it would be more work
-- to remove the context).
rnFamInstDecl :: HsDocContext
-> Maybe (Name, [Name]) -- Nothing => not associated
-- Just (cls,tvs) => associated,
-- and gives class and tyvars of the
                                       -- parent instance decl
-> Located RdrName
-> HsTyPats RdrName
-> rhs
-> (HsDocContext -> rhs -> RnM (rhs', FreeVars))
-> RnM (Located Name, HsTyPats Name, rhs', FreeVars)
rnFamInstDecl doc mb_cls tycon (HsIB { hsib_body = pats }) payload rnPayload
= do { tycon' <- lookupFamInstName (fmap fst mb_cls) tycon
; let loc = case pats of
[] -> pprPanic "rnFamInstDecl" (ppr tycon)
(L loc _ : []) -> loc
(L loc _ : ps) -> combineSrcSpans loc (getLoc (last ps))
; pat_kity_vars_with_dups <- extractHsTysRdrTyVarsDups pats
-- Use the "...Dups" form because it's needed
             -- below to report unused binders on the LHS
; var_names <- mapM (newTyVarNameRn mb_cls . L loc . unLoc) $
freeKiTyVarsAllVars $
rmDupsInRdrTyVars pat_kity_vars_with_dups
-- All the free vars of the family patterns
-- with a sensible binding location
; ((pats', payload'), fvs)
<- bindLocalNamesFV var_names $
do { (pats', pat_fvs) <- rnLHsTypes (FamPatCtx tycon) pats
; (payload', rhs_fvs) <- rnPayload doc payload
-- Report unused binders on the LHS
-- See Note [Unused type variables in family instances]
; let groups :: [[Located RdrName]]
groups = equivClasses cmpLocated $
freeKiTyVarsAllVars pat_kity_vars_with_dups
; tv_nms_dups <- mapM (lookupOccRn . unLoc) $
[ tv | (tv:_:_) <- groups ]
-- Add to the used variables
-- a) any variables that appear *more than once* on the LHS
-- e.g. F a Int a = Bool
-- b) for associated instances, the variables
-- of the instance decl. See
-- Note [Unused type variables in family instances]
; let tv_nms_used = extendNameSetList rhs_fvs $
inst_tvs ++ tv_nms_dups
inst_tvs = case mb_cls of
Nothing -> []
Just (_, inst_tvs) -> inst_tvs
; warnUnusedTypePatterns var_names tv_nms_used
-- See Note [Renaming associated types]
; let bad_tvs = case mb_cls of
Nothing -> []
Just (_,cls_tkvs) -> filter is_bad cls_tkvs
var_name_set = mkNameSet var_names
is_bad cls_tkv = cls_tkv `elemNameSet` rhs_fvs
&& not (cls_tkv `elemNameSet` var_name_set)
; unless (null bad_tvs) (badAssocRhs bad_tvs)
; return ((pats', payload'), rhs_fvs `plusFV` pat_fvs) }
; let anon_wcs = concatMap collectAnonWildCards pats'
all_ibs = anon_wcs ++ var_names
-- all_ibs: include anonymous wildcards in the implicit
                 -- binders. In a type pattern they behave just like any
                 -- other type variable except for being anonymous. See
-- Note [Wildcards in family instances]
all_fvs = fvs `addOneFV` unLoc tycon'
; return (tycon',
HsIB { hsib_body = pats'
, hsib_vars = all_ibs },
payload',
all_fvs) }
-- type instance => use, hence addOneFV
rnTyFamInstDecl :: Maybe (Name, [Name])
-> TyFamInstDecl RdrName
-> RnM (TyFamInstDecl Name, FreeVars)
rnTyFamInstDecl mb_cls (TyFamInstDecl { tfid_eqn = L loc eqn })
= do { (eqn', fvs) <- rnTyFamInstEqn mb_cls eqn
; return (TyFamInstDecl { tfid_eqn = L loc eqn'
, tfid_fvs = fvs }, fvs) }
rnTyFamInstEqn :: Maybe (Name, [Name])
-> TyFamInstEqn RdrName
-> RnM (TyFamInstEqn Name, FreeVars)
rnTyFamInstEqn mb_cls (TyFamEqn { tfe_tycon = tycon
, tfe_pats = pats
, tfe_rhs = rhs })
= do { (tycon', pats', rhs', fvs) <-
rnFamInstDecl (TySynCtx tycon) mb_cls tycon pats rhs rnTySyn
; return (TyFamEqn { tfe_tycon = tycon'
, tfe_pats = pats'
, tfe_rhs = rhs' }, fvs) }
rnTyFamDefltEqn :: Name
-> TyFamDefltEqn RdrName
-> RnM (TyFamDefltEqn Name, FreeVars)
rnTyFamDefltEqn cls (TyFamEqn { tfe_tycon = tycon
, tfe_pats = tyvars
, tfe_rhs = rhs })
= bindHsQTyVars ctx Nothing (Just cls) [] tyvars $ \ tyvars' _ ->
do { tycon' <- lookupFamInstName (Just cls) tycon
; (rhs', fvs) <- rnLHsType ctx rhs
; return (TyFamEqn { tfe_tycon = tycon'
, tfe_pats = tyvars'
, tfe_rhs = rhs' }, fvs) }
where
ctx = TyFamilyCtx tycon
rnDataFamInstDecl :: Maybe (Name, [Name])
-> DataFamInstDecl RdrName
-> RnM (DataFamInstDecl Name, FreeVars)
rnDataFamInstDecl mb_cls (DataFamInstDecl { dfid_tycon = tycon
, dfid_pats = pats
, dfid_defn = defn })
= do { (tycon', pats', (defn', _), fvs) <-
rnFamInstDecl (TyDataCtx tycon) mb_cls tycon pats defn rnDataDefn
; return (DataFamInstDecl { dfid_tycon = tycon'
, dfid_pats = pats'
, dfid_defn = defn'
, dfid_fvs = fvs }, fvs) }
-- Renaming of the associated types in instances.
-- Rename associated type family decl in class
rnATDecls :: Name -- Class
-> [LFamilyDecl RdrName]
-> RnM ([LFamilyDecl Name], FreeVars)
rnATDecls cls at_decls
= rnList (rnFamDecl (Just cls)) at_decls
rnATInstDecls :: (Maybe (Name, [Name]) -> -- The function that renames
decl RdrName -> -- an instance. rnTyFamInstDecl
RnM (decl Name, FreeVars)) -- or rnDataFamInstDecl
-> Name -- Class
-> [Name]
-> [Located (decl RdrName)]
-> RnM ([Located (decl Name)], FreeVars)
-- Used for data and type family defaults in a class decl
-- and the family instance declarations in an instance
--
-- NB: We allow duplicate associated-type decls;
-- See Note [Associated type instances] in TcInstDcls
rnATInstDecls rnFun cls tv_ns at_insts
= rnList (rnFun (Just (cls, tv_ns))) at_insts
-- See Note [Renaming associated types]
{- Note [Wildcards in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Wild cards can be used in type/data family instance declarations to indicate
that the name of a type variable doesn't matter. Each wild card will be
replaced with a new unique type variable. For instance:
type family F a b :: *
type instance F Int _ = Int
is the same as
type family F a b :: *
type instance F Int b = Int
This is implemented as follows: during renaming anonymous wild cards
'_' are given freshly generated names. These names are collected after
renaming (rnFamInstDecl) and used to make new type variables during
type checking (tc_fam_ty_pats). One should not confuse these wild
cards with the ones from partial type signatures. The latter generate
fresh meta-variables whereas the former generate fresh skolems.
Note [Unused type variables in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the flag -fwarn-unused-type-patterns is on, the compiler reports
warnings about unused type variables in type-family instances. A
type variable is considered used (i.e. cannot be turned into a wildcard)
when
* it occurs on the RHS of the family instance
e.g. type instance F a b = a -- a is used on the RHS
* it occurs multiple times in the patterns on the LHS
e.g. type instance F a a = Int -- a appears more than once on LHS
* it is one of the instance-decl variables, for associated types
e.g. instance C (a,b) where
type T (a,b) = a
Here the type pattern in the type instance must be the same as that
for the class instance, so
type T (a,_) = a
would be rejected. So we should not complain about an unused variable b
As usual, the warnings are not reported for type variables with names
beginning with an underscore.
Extra-constraints wild cards are not supported in type/data family
instance declarations.
Relevant tickets: #3699, #10586, #10982 and #11451.
Note [Renaming associated types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Check that the RHS of the decl mentions only type variables
bound on the LHS. For example, this is not ok
class C a b where
type F a x :: *
instance C (p,q) r where
type F (p,q) x = (x, r) -- BAD: mentions 'r'
c.f. Trac #5515
The same thing applies to kind variables, of course (Trac #7938, #9574):
class Funct f where
type Codomain f :: *
instance Funct ('KProxy :: KProxy o) where
type Codomain 'KProxy = NatTr (Proxy :: o -> *)
Here 'o' is mentioned on the RHS of the Codomain function, but
not on the LHS.
All this applies only for *instance* declarations. In *class*
declarations there is no RHS to worry about, and the class variables
can all be in scope (Trac #5862):
class Category (x :: k -> k -> *) where
type Ob x :: k -> Constraint
id :: Ob x a => x a a
(.) :: (Ob x a, Ob x b, Ob x c) => x b c -> x a b -> x a c
Here 'k' is in scope in the kind signature, just like 'x'.
-}
{-
*********************************************************
* *
\subsection{Stand-alone deriving declarations}
* *
*********************************************************
-}
rnSrcDerivDecl :: DerivDecl RdrName -> RnM (DerivDecl Name, FreeVars)
rnSrcDerivDecl (DerivDecl ty deriv_strat overlap)
= do { standalone_deriv_ok <- xoptM LangExt.StandaloneDeriving
; deriv_strats_ok <- xoptM LangExt.DerivingStrategies
; unless standalone_deriv_ok (addErr standaloneDerivErr)
; failIfTc (isJust deriv_strat && not deriv_strats_ok) $
illegalDerivStrategyErr $ fmap unLoc deriv_strat
; (ty', fvs) <- rnLHsInstType (text "In a deriving declaration") ty
; return (DerivDecl ty' deriv_strat overlap, fvs) }
standaloneDerivErr :: SDoc
standaloneDerivErr
= hang (text "Illegal standalone deriving declaration")
2 (text "Use StandaloneDeriving to enable this extension")
{-
*********************************************************
* *
\subsection{Rules}
* *
*********************************************************
-}
rnHsRuleDecls :: RuleDecls RdrName -> RnM (RuleDecls Name, FreeVars)
rnHsRuleDecls (HsRules src rules)
= do { (rn_rules,fvs) <- rnList rnHsRuleDecl rules
; return (HsRules src rn_rules,fvs) }
rnHsRuleDecl :: RuleDecl RdrName -> RnM (RuleDecl Name, FreeVars)
rnHsRuleDecl (HsRule rule_name act vars lhs _fv_lhs rhs _fv_rhs)
= do { let rdr_names_w_loc = map get_var vars
; checkDupRdrNames rdr_names_w_loc
; checkShadowedRdrNames rdr_names_w_loc
; names <- newLocalBndrsRn rdr_names_w_loc
; bindHsRuleVars (snd $ unLoc rule_name) vars names $ \ vars' ->
do { (lhs', fv_lhs') <- rnLExpr lhs
; (rhs', fv_rhs') <- rnLExpr rhs
; checkValidRule (snd $ unLoc rule_name) names lhs' fv_lhs'
; return (HsRule rule_name act vars' lhs' fv_lhs' rhs' fv_rhs',
fv_lhs' `plusFV` fv_rhs') } }
where
get_var (L _ (RuleBndrSig v _)) = v
get_var (L _ (RuleBndr v)) = v
bindHsRuleVars :: RuleName -> [LRuleBndr RdrName] -> [Name]
-> ([LRuleBndr Name] -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
bindHsRuleVars rule_name vars names thing_inside
= go vars names $ \ vars' ->
bindLocalNamesFV names (thing_inside vars')
where
doc = RuleCtx rule_name
go (L l (RuleBndr (L loc _)) : vars) (n : ns) thing_inside
= go vars ns $ \ vars' ->
thing_inside (L l (RuleBndr (L loc n)) : vars')
go (L l (RuleBndrSig (L loc _) bsig) : vars) (n : ns) thing_inside
= rnHsSigWcTypeScoped doc bsig $ \ bsig' ->
go vars ns $ \ vars' ->
thing_inside (L l (RuleBndrSig (L loc n) bsig') : vars')
go [] [] thing_inside = thing_inside []
go vars names _ = pprPanic "bindRuleVars" (ppr vars $$ ppr names)
{-
Note [Rule LHS validity checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Check the shape of a transformation rule LHS. Currently we only allow
LHSs of the form @(f e1 .. en)@, where @f@ is not one of the
@forall@'d variables.
We used to restrict the form of the 'ei' to prevent you writing rules
with LHSs with a complicated desugaring (and hence unlikely to match);
(e.g. a case expression is not allowed: too elaborate.)
But there are legitimate non-trivial args ei, like sections and
lambdas. So it seems simpler not to check at all, and that is why
check_e is commented out.
-}
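-- For example (an illustrative rule, not one from this module), the classic
--
--     {-# RULES "map/map" forall f g xs. map f (map g xs) = map (f . g) xs #-}
--
-- has an LHS of the accepted shape: the head, 'map', is not one of the
-- forall'd variables.  An LHS whose head is itself forall'd, such as
-- @forall f x. f x@, is rejected by 'validRuleLhs'.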
checkValidRule :: FastString -> [Name] -> LHsExpr Name -> NameSet -> RnM ()
checkValidRule rule_name ids lhs' fv_lhs'
= do { -- Check for the form of the LHS
case (validRuleLhs ids lhs') of
Nothing -> return ()
Just bad -> failWithTc (badRuleLhsErr rule_name lhs' bad)
-- Check that LHS vars are all bound
; let bad_vars = [var | var <- ids, not (var `elemNameSet` fv_lhs')]
; mapM_ (addErr . badRuleVar rule_name) bad_vars }
validRuleLhs :: [Name] -> LHsExpr Name -> Maybe (HsExpr Name)
-- Nothing => OK
-- Just e => Not ok, and e is the offending sub-expression
validRuleLhs foralls lhs
= checkl lhs
where
checkl (L _ e) = check e
check (OpApp e1 op _ e2) = checkl op `mplus` checkl_e e1 `mplus` checkl_e e2
check (HsApp e1 e2) = checkl e1 `mplus` checkl_e e2
check (HsAppType e _) = checkl e
check (HsVar (L _ v)) | v `notElem` foralls = Nothing
check other = Just other -- Failure
-- Check an argument
checkl_e (L _ _e) = Nothing -- Was (check_e e); see Note [Rule LHS validity checking]
{- Commented out; see Note [Rule LHS validity checking] above
check_e (HsVar v) = Nothing
check_e (HsPar e) = checkl_e e
check_e (HsLit e) = Nothing
check_e (HsOverLit e) = Nothing
check_e (OpApp e1 op _ e2) = checkl_e e1 `mplus` checkl_e op `mplus` checkl_e e2
check_e (HsApp e1 e2) = checkl_e e1 `mplus` checkl_e e2
check_e (NegApp e _) = checkl_e e
check_e (ExplicitList _ es) = checkl_es es
check_e other = Just other -- Fails
checkl_es es = foldr (mplus . checkl_e) Nothing es
-}
badRuleVar :: FastString -> Name -> SDoc
badRuleVar name var
= sep [text "Rule" <+> doubleQuotes (ftext name) <> colon,
text "Forall'd variable" <+> quotes (ppr var) <+>
text "does not appear on left hand side"]
badRuleLhsErr :: FastString -> LHsExpr Name -> HsExpr Name -> SDoc
badRuleLhsErr name lhs bad_e
= sep [text "Rule" <+> pprRuleName name <> colon,
nest 4 (vcat [err,
text "in left-hand side:" <+> ppr lhs])]
$$
text "LHS must be of form (f e1 .. en) where f is not forall'd"
where
err = case bad_e of
HsUnboundVar uv -> text "Not in scope:" <+> ppr uv
_ -> text "Illegal expression:" <+> ppr bad_e
{-
*********************************************************
* *
\subsection{Vectorisation declarations}
* *
*********************************************************
-}
rnHsVectDecl :: VectDecl RdrName -> RnM (VectDecl Name, FreeVars)
-- FIXME: For the moment, the right-hand side is restricted to be a variable as we cannot properly
-- typecheck a complex right-hand side without invoking 'vectType' from the vectoriser.
rnHsVectDecl (HsVect s var rhs@(L _ (HsVar _)))
= do { var' <- lookupLocatedOccRn var
; (rhs', fv_rhs) <- rnLExpr rhs
; return (HsVect s var' rhs', fv_rhs `addOneFV` unLoc var')
}
rnHsVectDecl (HsVect _ _var _rhs)
= failWith $ vcat
[ text "IMPLEMENTATION RESTRICTION: right-hand side of a VECTORISE pragma"
, text "must be an identifier"
]
rnHsVectDecl (HsNoVect s var)
= do { var' <- lookupLocatedTopBndrRn var -- only applies to local (not imported) names
; return (HsNoVect s var', unitFV (unLoc var'))
}
rnHsVectDecl (HsVectTypeIn s isScalar tycon Nothing)
= do { tycon' <- lookupLocatedOccRn tycon
; return (HsVectTypeIn s isScalar tycon' Nothing, unitFV (unLoc tycon'))
}
rnHsVectDecl (HsVectTypeIn s isScalar tycon (Just rhs_tycon))
= do { tycon' <- lookupLocatedOccRn tycon
; rhs_tycon' <- lookupLocatedOccRn rhs_tycon
; return ( HsVectTypeIn s isScalar tycon' (Just rhs_tycon')
, mkFVs [unLoc tycon', unLoc rhs_tycon'])
}
rnHsVectDecl (HsVectTypeOut _ _ _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectTypeOut'"
rnHsVectDecl (HsVectClassIn s cls)
= do { cls' <- lookupLocatedOccRn cls
; return (HsVectClassIn s cls', unitFV (unLoc cls'))
}
rnHsVectDecl (HsVectClassOut _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectClassOut'"
rnHsVectDecl (HsVectInstIn instTy)
= do { (instTy', fvs) <- rnLHsInstType (text "a VECTORISE pragma") instTy
; return (HsVectInstIn instTy', fvs)
}
rnHsVectDecl (HsVectInstOut _)
= panic "RnSource.rnHsVectDecl: Unexpected 'HsVectInstOut'"
{- **************************************************************
* *
Renaming type, class, instance and role declarations
* *
*****************************************************************
@rnTyDecl@ uses the `global name function' to create a new type
declaration in which local names have been replaced by their original
names, reporting any unknown names.
Renaming type variables is a pain. Because they now contain uniques,
it is necessary to pass in an association list which maps a parsed
tyvar to its @Name@ representation.
In some cases (type signatures of values),
it is even necessary to go over the type first
in order to get the set of tyvars used by it, make an assoc list,
and then go over it again to rename the tyvars!
However, we can also do some scoping checks at the same time.
Note [Dependency analysis of type, class, and instance decls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A TyClGroup represents a strongly connected components of
type/class/instance decls, together with the role annotations for the
type/class declarations. The renamer uses strongly connected
component analysis to build these groups. We do this for a number of
reasons:
* Improve kind error messages. Consider
data T f a = MkT f a
data S f a = MkS f (T f a)
This has a kind error, but the error message is better if you
check T first, (fixing its kind) and *then* S. If you do kind
inference together, you might get an error reported in S, which
is jolly confusing. See Trac #4875
* Increase kind polymorphism. See TcTyClsDecls
Note [Grouping of type and class declarations]
Why do the instance declarations participate? At least two reasons
* Consider (Trac #11348)
type family F a
type instance F Int = Bool
data R = MkR (F Int)
type Foo = 'MkR 'True
For Foo to kind-check we need to know that (F Int) ~ Bool. But we won't
know that unless we've looked at the type instance declaration for F
before kind-checking Foo.
* Another example is this (Trac #3990).
data family Complex a
data instance Complex Double = CD {-# UNPACK #-} !Double
{-# UNPACK #-} !Double
data T = T {-# UNPACK #-} !(Complex Double)
Here, to generate the right kind of unpacked implementation for T,
we must have access to the 'data instance' declaration.
* Things become more complicated when we introduce transitive
dependencies through imported definitions, like in this scenario:
A.hs
type family Closed (t :: Type) :: Type where
Closed t = Open t
type family Open (t :: Type) :: Type
B.hs
data Q where
Q :: Closed Bool -> Q
type instance Open Int = Bool
type S = 'Q 'True
Somehow, we must ensure that the instance Open Int = Bool is checked before
the type synonym S. While we know that S depends upon 'Q, which depends upon Closed,
we have no idea that Closed depends upon Open!
To accommodate these situations, we ensure that an instance is checked
before every @TyClDecl@ on which it does not depend. That's to say, instances
are checked as early as possible in @tcTyAndClassDecls@.
------------------------------------
So much for WHY. What about HOW? It's pretty easy:
(1) Rename the type/class, instance, and role declarations
individually
(2) Do strongly-connected component analysis of the type/class decls,
We'll make a TyClGroup for each SCC
In this step we treat a reference to a (promoted) data constructor
K as a dependency on its parent type. Thus
data T = K1 | K2
data S = MkS (Proxy 'K1)
Here S depends on 'K1 and hence on its parent T.
In this step we ignore instances; see
Note [No dependencies on data instances]
(3) Attach roles to the appropriate SCC
(4) Attach instances to the appropriate SCC.
We add an instance decl to SCC when:
all its free types/classes are bound in this SCC or earlier ones
(5) We make an initial TyClGroup, with empty group_tyclds, for any
(orphan) instances that affect only imported types/classes
Steps (3) and (4) are done by the (mapAccumL mk_group) call.
Note [No dependencies on data instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
data family D a
data instance D Int = D1
data S = MkS (Proxy 'D1)
Here the declaration of S depends on the /data instance/ declaration
for 'D Int'. That makes things a lot more complicated, especially
if the data instance is an associated type of an enclosing class instance.
(And the class instance might have several associated type instances
with different dependency structure!)
Ugh. For now we simply don't allow promotion of data constructors for
data instances. See Note [AFamDataCon: not promoting data family
constructors] in TcEnv
-}
rnTyClDecls :: [TyClGroup RdrName]
-> RnM ([TyClGroup Name], FreeVars)
-- Rename the declarations and do dependency analysis on them
rnTyClDecls tycl_ds
  = do { -- Rename the type/class, instance, and role declarations
tycls_w_fvs <- mapM (wrapLocFstM rnTyClDecl)
(tyClGroupTyClDecls tycl_ds)
; let tc_names = mkNameSet (map (tcdName . unLoc . fst) tycls_w_fvs)
; instds_w_fvs <- mapM (wrapLocFstM rnSrcInstDecl) (tyClGroupInstDecls tycl_ds)
; role_annots <- rnRoleAnnots tc_names (tyClGroupRoleDecls tycl_ds)
; tycls_w_fvs <- addBootDeps tycls_w_fvs
-- TBD must add_boot_deps to instds_w_fvs?
-- Do SCC analysis on the type/class decls
; rdr_env <- getGlobalRdrEnv
; let tycl_sccs = depAnalTyClDecls rdr_env tycls_w_fvs
role_annot_env = mkRoleAnnotEnv role_annots
inst_ds_map = mkInstDeclFreeVarsMap rdr_env tc_names instds_w_fvs
(init_inst_ds, rest_inst_ds) = getInsts [] inst_ds_map
first_group
| null init_inst_ds = []
| otherwise = [TyClGroup { group_tyclds = []
, group_roles = []
, group_instds = init_inst_ds }]
((final_inst_ds, orphan_roles), groups)
= mapAccumL mk_group (rest_inst_ds, role_annot_env) tycl_sccs
all_fvs = plusFV (foldr (plusFV . snd) emptyFVs tycls_w_fvs)
(foldr (plusFV . snd) emptyFVs instds_w_fvs)
all_groups = first_group ++ groups
; ASSERT2( null final_inst_ds, ppr instds_w_fvs $$ ppr inst_ds_map
$$ ppr (flattenSCCs tycl_sccs) $$ ppr final_inst_ds )
mapM_ orphanRoleAnnotErr (nameEnvElts orphan_roles)
; traceRn (text "rnTycl dependency analysis made groups" $$ ppr all_groups)
; return (all_groups, all_fvs) }
where
mk_group :: (InstDeclFreeVarsMap, RoleAnnotEnv)
-> SCC (LTyClDecl Name)
-> ( (InstDeclFreeVarsMap, RoleAnnotEnv)
, TyClGroup Name )
mk_group (inst_map, role_env) scc
= ((inst_map', role_env'), group)
where
tycl_ds = flattenSCC scc
bndrs = map (tcdName . unLoc) tycl_ds
(inst_ds, inst_map') = getInsts bndrs inst_map
(roles, role_env') = getRoleAnnots bndrs role_env
group = TyClGroup { group_tyclds = tycl_ds
, group_roles = roles
, group_instds = inst_ds }
depAnalTyClDecls :: GlobalRdrEnv
-> [(LTyClDecl Name, FreeVars)]
-> [SCC (LTyClDecl Name)]
-- See Note [Dependency analysis of type, class, and instance decls]
depAnalTyClDecls rdr_env ds_w_fvs
= stronglyConnCompFromEdgedVerticesUniq edges
where
edges = [ (d, tcdName (unLoc d), map (getParent rdr_env) (nonDetEltsUFM fvs))
| (d, fvs) <- ds_w_fvs ]
-- It's OK to use nonDetEltsUFM here as
-- stronglyConnCompFromEdgedVertices is still deterministic
-- even if the edges are in nondeterministic order as explained
-- in Note [Deterministic SCC] in Digraph.
toParents :: GlobalRdrEnv -> NameSet -> NameSet
toParents rdr_env ns
= nonDetFoldUFM add emptyNameSet ns
-- It's OK to use nonDetFoldUFM because we immediately forget the
-- ordering by creating a set
where
add n s = extendNameSet s (getParent rdr_env n)
getParent :: GlobalRdrEnv -> Name -> Name
getParent rdr_env n
= case lookupGRE_Name rdr_env n of
Just gre -> case gre_par gre of
ParentIs { par_is = p } -> p
FldParent { par_is = p } -> p
_ -> n
Nothing -> n
{- Note [Extra dependencies from .hs-boot files]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a long story, so buckle in.
**Dependencies via hs-boot files are not obvious.** Consider the following case:
A.hs-boot
module A where
data A1
B.hs
module B where
import {-# SOURCE #-} A
type B1 = A1
A.hs
module A where
import B
data A2 = MkA2 B1
data A1 = MkA1 A2
Here A2 is really recursive (via B1), but we won't see that easily when
doing dependency analysis when compiling A.hs. When we look at A2,
we see that its free variables are simply B1, but without (recursively) digging
into the definition of B1 will we see that it actually refers to A1 via an
hs-boot file.
**Recursive declarations, even those broken by an hs-boot file, need to
be type-checked together.** Whenever we refer to a declaration via
an hs-boot file, we must be careful not to force the TyThing too early:
ala Note [Tying the knot] if we force the TyThing before we have
defined it ourselves in the local type environment, GHC will error.
Conservatively, then, it would make sense that we typecheck A1
and A2 from the previous example together, because the two types are
truly mutually recursive through B1.
If we are being clever, we might observe that while kind-checking
A2, we don't actually need to force the TyThing for A1: B1
independently records its kind, so there is no need to go "deeper".
But then we are in an uncomfortable situation where we have
constructed a TyThing for A2 before we have checked A1, and we
have to be absolutely certain we don't force it too deeply until
we get around to kind checking A1, which could be for a very long
time.
Indeed, with datatype promotion, we may very well need to look
at the type of MkA2 before we have kind-checked A1: consider,
data T = MkT (Proxy 'MkA2)
To promote MkA2, we need to lift its type to the kind level.
We never tested this, but it seems likely A1 would get poked
at this point.
**Here's what we do instead.** So it is expedient for us to
make sure A1 and A2 are kind checked together in a loop.
To ensure that our dependency analysis can catch this,
we add a dependency:
- from every local declaration
- to everything that comes from this module's .hs-boot file
(this is gotten from sb_tcs in the SelfBootInfo).
In this case, we'll add edges
- from A1 to A2 (but that edge is there already)
- from A2 to A1 (which is new)
Well, not quite *every* declaration. Imagine module A
above had another datatype declaration:
data A3 = A3 Int
Even though A3 has a dependency (on Int), all its dependencies are from things
that live on other packages. Since we don't have mutual dependencies across
packages, it is safe not to add the dependencies on the .hs-boot stuff to A3.
Hence function nameIsHomePackageImport.
Note that this is fairly conservative: it essentially implies that
EVERY type declaration in this module's hs-boot file will be kind-checked
together in one giant loop (and furthermore makes every other type
in the module depend on this loop). This is perhaps less than ideal, because
the larger a recursive group, the less polymorphism available (we
cannot infer a type to be polymorphically instantiated while we
are inferring its kind), but no one has hollered about this (yet!)
-}
addBootDeps :: [(LTyClDecl Name, FreeVars)] -> RnM [(LTyClDecl Name, FreeVars)]
-- See Note [Extra dependencies from .hs-boot files]
addBootDeps ds_w_fvs
= do { tcg_env <- getGblEnv
; let this_mod = tcg_mod tcg_env
boot_info = tcg_self_boot tcg_env
add_boot_deps :: [(LTyClDecl Name, FreeVars)] -> [(LTyClDecl Name, FreeVars)]
add_boot_deps ds_w_fvs
= case boot_info of
SelfBoot { sb_tcs = tcs } | not (isEmptyNameSet tcs)
-> map (add_one tcs) ds_w_fvs
_ -> ds_w_fvs
add_one :: NameSet -> (LTyClDecl Name, FreeVars) -> (LTyClDecl Name, FreeVars)
add_one tcs pr@(decl,fvs)
| has_local_imports fvs = (decl, fvs `plusFV` tcs)
| otherwise = pr
has_local_imports fvs
= nameSetAny (nameIsHomePackageImport this_mod) fvs
; return (add_boot_deps ds_w_fvs) }
{- ******************************************************
* *
Role annotations
* *
****************************************************** -}
-- | Renames role annotations, returning them as the values in a NameEnv
-- and checks for duplicate role annotations.
-- It is quite convenient to do both of these in the same place.
-- See also Note [Role annotations in the renamer]
rnRoleAnnots :: NameSet
-> [LRoleAnnotDecl RdrName]
-> RnM [LRoleAnnotDecl Name]
rnRoleAnnots tc_names role_annots
= do { -- Check for duplicates *before* renaming, to avoid
-- lumping together all the unboundNames
let (no_dups, dup_annots) = removeDups role_annots_cmp role_annots
role_annots_cmp (L _ annot1) (L _ annot2)
= roleAnnotDeclName annot1 `compare` roleAnnotDeclName annot2
; mapM_ dupRoleAnnotErr dup_annots
; mapM (wrapLocM rn_role_annot1) no_dups }
where
rn_role_annot1 (RoleAnnotDecl tycon roles)
= do { -- the name is an *occurrence*, but look it up only in the
-- decls defined in this group (see #10263)
tycon' <- lookupSigCtxtOccRn (RoleAnnotCtxt tc_names)
(text "role annotation")
tycon
; return $ RoleAnnotDecl tycon' roles }
dupRoleAnnotErr :: [LRoleAnnotDecl RdrName] -> RnM ()
dupRoleAnnotErr [] = panic "dupRoleAnnotErr"
dupRoleAnnotErr list
= addErrAt loc $
hang (text "Duplicate role annotations for" <+>
quotes (ppr $ roleAnnotDeclName first_decl) <> colon)
2 (vcat $ map pp_role_annot sorted_list)
where
sorted_list = sortBy cmp_annot list
(L loc first_decl : _) = sorted_list
pp_role_annot (L loc decl) = hang (ppr decl)
4 (text "-- written at" <+> ppr loc)
cmp_annot (L loc1 _) (L loc2 _) = loc1 `compare` loc2
orphanRoleAnnotErr :: LRoleAnnotDecl Name -> RnM ()
orphanRoleAnnotErr (L loc decl)
= addErrAt loc $
hang (text "Role annotation for a type previously declared:")
2 (ppr decl) $$
parens (text "The role annotation must be given where" <+>
quotes (ppr $ roleAnnotDeclName decl) <+>
text "is declared.")
{- Note [Role annotations in the renamer]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must ensure that a type's role annotation is put in the same group as the
proper type declaration. This is because role annotations are needed during
type-checking when creating the type's TyCon. So, rnRoleAnnots builds a
NameEnv (LRoleAnnotDecl Name) that maps a name to a role annotation for that
type, if any. Then, this map can be used to add the role annotations to the
groups after dependency analysis.
This process checks for duplicate role annotations, where we must be careful
to do the check *before* renaming to avoid calling all unbound names duplicates
of one another.
The renaming process, as usual, might identify and report errors for unbound
names. We exclude the annotations for unbound names in the annotation
environment to avoid spurious errors for orphaned annotations.
We then (in rnTyClDecls) do a check for orphan role annotations (role
annotations without an accompanying type decl). The check works by folding
over components (of type [[Either (TyClDecl Name) (InstDecl Name)]]), selecting
out the relevant role declarations for each group, as well as diminishing the
annotation environment. After the fold is complete, anything left over in the
name environment must be an orphan, and errors are generated.
An earlier version of this algorithm short-cut the orphan check by renaming
only with names declared in this module. But, this check is insufficient in
the case of staged module compilation (Template Haskell, GHCi).
See #8485. With the new lookup process (which includes types declared in other
modules), we get better error messages, too.
-}
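-- A small example for the Note above (the declarations are invented for
-- illustration):
--
--   type role T nominal
--   data T a = MkT a
--
-- The "type role" annotation must end up in the same TyClGroup as the
-- declaration of T, so it is at hand when the typechecker builds T's TyCon;
-- an annotation left over after all groups are processed is reported as an
-- orphan by orphanRoleAnnotErr.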
{- ******************************************************
* *
Dependency info for instances
* *
****************************************************** -}
----------------------------------------------------------
-- | 'InstDeclFreeVarsMap' is an association of an
-- @InstDecl@ with @FreeVars@. The @FreeVars@ are
-- the tycon names that are both
-- a) free in the instance declaration
-- b) bound by this group of type/class/instance decls
type InstDeclFreeVarsMap = [(LInstDecl Name, FreeVars)]
-- | Construct an @InstDeclFreeVarsMap@ by eliminating any @Name@s from the
-- @FreeVars@ which are *not* the binders of a @TyClDecl@.
mkInstDeclFreeVarsMap :: GlobalRdrEnv
-> NameSet
-> [(LInstDecl Name, FreeVars)]
-> InstDeclFreeVarsMap
mkInstDeclFreeVarsMap rdr_env tycl_bndrs inst_ds_fvs
= [ (inst_decl, toParents rdr_env fvs `intersectFVs` tycl_bndrs)
| (inst_decl, fvs) <- inst_ds_fvs ]
-- | Get the @LInstDecl@s which have empty @FreeVars@ sets, and the
-- @InstDeclFreeVarsMap@ with these entries removed.
-- We call (getInsts tcs instd_map) when we've completed the declarations
-- for 'tcs'. The call returns (inst_decls, instd_map'), where
-- inst_decls are the instance declarations all of
-- whose free vars are now defined
-- instd_map' is the inst-decl map with 'tcs' removed from
-- the free-var set
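-- A schematic example of this contract (tycon names invented): if an instance
-- declaration still depends on the locally-bound tycons {T, S}, then
-- (getInsts [T] instd_map) keeps it in instd_map' with remaining free vars {S};
-- once S has been completed as well, the next call returns the instance in
-- inst_decls.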
getInsts :: [Name] -> InstDeclFreeVarsMap -> ([LInstDecl Name], InstDeclFreeVarsMap)
getInsts bndrs inst_decl_map
= partitionWith pick_me inst_decl_map
where
pick_me :: (LInstDecl Name, FreeVars)
-> Either (LInstDecl Name) (LInstDecl Name, FreeVars)
pick_me (decl, fvs)
| isEmptyNameSet depleted_fvs = Left decl
| otherwise = Right (decl, depleted_fvs)
where
depleted_fvs = delFVs bndrs fvs
{- ******************************************************
* *
Renaming a type or class declaration
* *
****************************************************** -}
rnTyClDecl :: TyClDecl RdrName
-> RnM (TyClDecl Name, FreeVars)
-- All flavours of type family declarations ("type family", "newtype family",
-- and "data family"), both top level and (for an associated type)
-- in a class decl
rnTyClDecl (FamDecl { tcdFam = decl })
= do { (decl', fvs) <- rnFamDecl Nothing decl
; return (FamDecl decl', fvs) }
rnTyClDecl (SynDecl { tcdLName = tycon, tcdTyVars = tyvars, tcdRhs = rhs })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; kvs <- freeKiTyVarsKindVars <$> extractHsTyRdrTyVars rhs
; let doc = TySynCtx tycon
; traceRn (text "rntycl-ty" <+> ppr tycon <+> ppr kvs)
; ((tyvars', rhs'), fvs) <- bindHsQTyVars doc Nothing Nothing kvs tyvars $
\ tyvars' _ ->
do { (rhs', fvs) <- rnTySyn doc rhs
; return ((tyvars', rhs'), fvs) }
; return (SynDecl { tcdLName = tycon', tcdTyVars = tyvars'
, tcdRhs = rhs', tcdFVs = fvs }, fvs) }
-- "data", "newtype" declarations
-- both top level and (for an associated type) in an instance decl
rnTyClDecl (DataDecl { tcdLName = tycon, tcdTyVars = tyvars, tcdDataDefn = defn })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; kvs <- extractDataDefnKindVars defn
; let doc = TyDataCtx tycon
; traceRn (text "rntycl-data" <+> ppr tycon <+> ppr kvs)
; ((tyvars', defn', no_kvs), fvs)
<- bindHsQTyVars doc Nothing Nothing kvs tyvars $ \ tyvars' dep_vars ->
do { ((defn', kind_sig_fvs), fvs) <- rnDataDefn doc defn
; let sig_tvs = filterNameSet isTyVarName kind_sig_fvs
unbound_sig_tvs = sig_tvs `minusNameSet` dep_vars
; return ((tyvars', defn', isEmptyNameSet unbound_sig_tvs), fvs) }
-- See Note [Complete user-supplied kind signatures] in HsDecls
; typeintype <- xoptM LangExt.TypeInType
; let cusk = hsTvbAllKinded tyvars' &&
(not typeintype || no_kvs)
; return (DataDecl { tcdLName = tycon', tcdTyVars = tyvars'
, tcdDataDefn = defn', tcdDataCusk = cusk
, tcdFVs = fvs }, fvs) }
rnTyClDecl (ClassDecl { tcdCtxt = context, tcdLName = lcls,
tcdTyVars = tyvars, tcdFDs = fds, tcdSigs = sigs,
tcdMeths = mbinds, tcdATs = ats, tcdATDefs = at_defs,
tcdDocs = docs})
= do { lcls' <- lookupLocatedTopBndrRn lcls
; let cls' = unLoc lcls'
kvs = [] -- No scoped kind vars except those in
-- kind signatures on the tyvars
-- Tyvars scope over superclass context and method signatures
; ((tyvars', context', fds', ats'), stuff_fvs)
<- bindHsQTyVars cls_doc Nothing Nothing kvs tyvars $ \ tyvars' _ -> do
-- Checks for distinct tyvars
{ (context', cxt_fvs) <- rnContext cls_doc context
; fds' <- rnFds fds
-- The fundeps have no free variables
; (ats', fv_ats) <- rnATDecls cls' ats
; let fvs = cxt_fvs `plusFV`
fv_ats
; return ((tyvars', context', fds', ats'), fvs) }
; (at_defs', fv_at_defs) <- rnList (rnTyFamDefltEqn cls') at_defs
-- No need to check for duplicate associated type decls
-- since that is done by RnNames.extendGlobalRdrEnvRn
-- Check the signatures
-- First process the class op sigs (op_sigs), then the fixity sigs (non_op_sigs).
; let sig_rdr_names_w_locs = [op | L _ (ClassOpSig False ops _) <- sigs
, op <- ops]
; checkDupRdrNames sig_rdr_names_w_locs
-- Typechecker is responsible for checking that we only
-- give default-method bindings for things in this class.
-- The renamer *could* check this for class decls, but can't
-- for instance decls.
-- The newLocals call is tiresome: given a generic class decl
-- class C a where
-- op :: a -> a
-- op {| x+y |} (Inl a) = ...
-- op {| x+y |} (Inr b) = ...
-- op {| a*b |} (a*b) = ...
-- we want to name both "x" tyvars with the same unique, so that they are
-- easy to group together in the typechecker.
; (mbinds', sigs', meth_fvs)
<- rnMethodBinds True cls' (hsAllLTyVarNames tyvars') mbinds sigs
-- No need to check for duplicate method signatures
-- since that is done by RnNames.extendGlobalRdrEnvRn
-- and the methods are already in scope
-- Haddock docs
; docs' <- mapM (wrapLocM rnDocDecl) docs
; let all_fvs = meth_fvs `plusFV` stuff_fvs `plusFV` fv_at_defs
; return (ClassDecl { tcdCtxt = context', tcdLName = lcls',
tcdTyVars = tyvars', tcdFDs = fds', tcdSigs = sigs',
tcdMeths = mbinds', tcdATs = ats', tcdATDefs = at_defs',
tcdDocs = docs', tcdFVs = all_fvs },
all_fvs ) }
where
cls_doc = ClassDeclCtx lcls
-- "type" and "type instance" declarations
rnTySyn :: HsDocContext -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnTySyn doc rhs = rnLHsType doc rhs
rnDataDefn :: HsDocContext -> HsDataDefn RdrName
-> RnM ((HsDataDefn Name, NameSet), FreeVars)
-- the NameSet includes all Names free in the kind signature
-- See Note [Complete user-supplied kind signatures]
rnDataDefn doc (HsDataDefn { dd_ND = new_or_data, dd_cType = cType
, dd_ctxt = context, dd_cons = condecls
, dd_kindSig = m_sig, dd_derivs = derivs })
= do { checkTc (h98_style || null (unLoc context))
(badGadtStupidTheta doc)
; (m_sig', sig_fvs) <- case m_sig of
Just sig -> first Just <$> rnLHsKind doc sig
Nothing -> return (Nothing, emptyFVs)
; (context', fvs1) <- rnContext doc context
; (derivs', fvs3) <- rn_derivs derivs
-- For the constructor declarations, drop the LocalRdrEnv
-- in the GADT case, where the type variables in the declaration
-- do not scope over the constructor signatures
-- data T a where { T1 :: forall b. b-> b }
; let { zap_lcl_env | h98_style = \ thing -> thing
| otherwise = setLocalRdrEnv emptyLocalRdrEnv }
; (condecls', con_fvs) <- zap_lcl_env $ rnConDecls condecls
-- No need to check for duplicate constructor decls
-- since that is done by RnNames.extendGlobalRdrEnvRn
; let all_fvs = fvs1 `plusFV` fvs3 `plusFV`
con_fvs `plusFV` sig_fvs
; return (( HsDataDefn { dd_ND = new_or_data, dd_cType = cType
, dd_ctxt = context', dd_kindSig = m_sig'
, dd_cons = condecls'
, dd_derivs = derivs' }
, sig_fvs )
, all_fvs )
}
where
h98_style = case condecls of -- Note [Stupid theta]
L _ (ConDeclGADT {}) : _ -> False
_ -> True
rn_derivs (L loc ds)
= do { deriv_strats_ok <- xoptM LangExt.DerivingStrategies
; failIfTc (lengthExceeds ds 1 && not deriv_strats_ok)
multipleDerivClausesErr
; (ds', fvs) <- mapFvRn (rnLHsDerivingClause deriv_strats_ok doc) ds
; return (L loc ds', fvs) }
rnLHsDerivingClause :: Bool -> HsDocContext -> LHsDerivingClause RdrName
-> RnM (LHsDerivingClause Name, FreeVars)
rnLHsDerivingClause deriv_strats_ok doc
(L loc (HsDerivingClause { deriv_clause_strategy = dcs
, deriv_clause_tys = L loc' dct }))
= do { failIfTc (isJust dcs && not deriv_strats_ok) $
illegalDerivStrategyErr $ fmap unLoc dcs
; (dct', fvs) <- mapFvRn (rnHsSigType doc) dct
; return ( L loc (HsDerivingClause { deriv_clause_strategy = dcs
, deriv_clause_tys = L loc' dct' })
, fvs ) }
badGadtStupidTheta :: HsDocContext -> SDoc
badGadtStupidTheta _
= vcat [text "No context is allowed on a GADT-style data declaration",
text "(You can put a context on each constructor, though.)"]
illegalDerivStrategyErr :: Maybe DerivStrategy -> SDoc
illegalDerivStrategyErr ds
= vcat [ text "Illegal deriving strategy" <> colon <+> maybe empty ppr ds
, text "Use DerivingStrategies to enable this extension" ]
multipleDerivClausesErr :: SDoc
multipleDerivClausesErr
= vcat [ text "Illegal use of multiple, consecutive deriving clauses"
, text "Use DerivingStrategies to allow this" ]
rnFamDecl :: Maybe Name -- Just cls => this FamilyDecl is nested
-- inside an *class decl* for cls
-- used for associated types
-> FamilyDecl RdrName
-> RnM (FamilyDecl Name, FreeVars)
rnFamDecl mb_cls (FamilyDecl { fdLName = tycon, fdTyVars = tyvars
, fdInfo = info, fdResultSig = res_sig
, fdInjectivityAnn = injectivity })
= do { tycon' <- lookupLocatedTopBndrRn tycon
; kvs <- extractRdrKindSigVars res_sig
; ((tyvars', res_sig', injectivity'), fv1) <-
bindHsQTyVars doc Nothing mb_cls kvs tyvars $
\ tyvars'@(HsQTvs { hsq_implicit = rn_kvs }) _ ->
do { let rn_sig = rnFamResultSig doc rn_kvs
; (res_sig', fv_kind) <- wrapLocFstM rn_sig res_sig
; injectivity' <- traverse (rnInjectivityAnn tyvars' res_sig')
injectivity
; return ( (tyvars', res_sig', injectivity') , fv_kind ) }
; (info', fv2) <- rn_info info
; return (FamilyDecl { fdLName = tycon', fdTyVars = tyvars'
, fdInfo = info', fdResultSig = res_sig'
, fdInjectivityAnn = injectivity' }
, fv1 `plusFV` fv2) }
where
doc = TyFamilyCtx tycon
----------------------
rn_info (ClosedTypeFamily (Just eqns))
= do { (eqns', fvs) <- rnList (rnTyFamInstEqn Nothing) eqns
-- no class context,
; return (ClosedTypeFamily (Just eqns'), fvs) }
rn_info (ClosedTypeFamily Nothing)
= return (ClosedTypeFamily Nothing, emptyFVs)
rn_info OpenTypeFamily = return (OpenTypeFamily, emptyFVs)
rn_info DataFamily = return (DataFamily, emptyFVs)
rnFamResultSig :: HsDocContext
-> [Name] -- kind variables already in scope
-> FamilyResultSig RdrName
-> RnM (FamilyResultSig Name, FreeVars)
rnFamResultSig _ _ NoSig
= return (NoSig, emptyFVs)
rnFamResultSig doc _ (KindSig kind)
= do { (rndKind, ftvs) <- rnLHsKind doc kind
; return (KindSig rndKind, ftvs) }
rnFamResultSig doc kv_names (TyVarSig tvbndr)
= do { -- `TyVarSig` tells us that user named the result of a type family by
-- writing `= tyvar` or `= (tyvar :: kind)`. In such case we want to
-- be sure that the supplied result name is not identical to an
-- already in-scope type variable from an enclosing class.
--
-- Example of disallowed declaration:
-- class C a b where
-- type F b = a | a -> b
rdr_env <- getLocalRdrEnv
; let resName = hsLTyVarName tvbndr
; when (resName `elemLocalRdrEnv` rdr_env) $
addErrAt (getLoc tvbndr) $
(hsep [ text "Type variable", quotes (ppr resName) <> comma
, text "naming a type family result,"
] $$
text "shadows an already bound type variable")
; bindLHsTyVarBndr doc Nothing -- this might be a lie, but it's used for
-- scoping checks that are irrelevant here
(mkNameSet kv_names) emptyNameSet
-- use of emptyNameSet here avoids
-- redundant duplicate errors
tvbndr $ \ _ _ tvbndr' ->
return (TyVarSig tvbndr', unitFV (hsLTyVarName tvbndr')) }
-- Note [Renaming injectivity annotation]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- During renaming of injectivity annotation we have to make several checks to
-- make sure that it is well-formed. At the moment injectivity annotation
-- consists of a single injectivity condition, so the terms "injectivity
-- annotation" and "injectivity condition" might be used interchangeably. See
-- Note [Injectivity annotation] for a detailed discussion of currently allowed
-- injectivity annotations.
--
-- Checking LHS is simple because the only type variable allowed on the LHS of
-- injectivity condition is the variable naming the result in type family head.
-- Example of disallowed annotation:
--
-- type family Foo a b = r | b -> a
--
-- Verifying RHS of injectivity consists of checking that:
--
-- 1. only variables defined in type family head appear on the RHS (kind
-- variables are also allowed). Example of disallowed annotation:
--
-- type family Foo a = r | r -> b
--
-- 2. for associated types the result variable does not shadow any of type
-- class variables. Example of disallowed annotation:
--
-- class Foo a b where
-- type F a = b | b -> a
--
-- Breaking any of these assumptions results in an error.
-- | Rename injectivity annotation. Note that injectivity annotation is just the
-- part after the "|". Everything that appears before it is renamed in
-- rnFamDecl.
rnInjectivityAnn :: LHsQTyVars Name -- ^ Type variables declared in
-- type family head
-> LFamilyResultSig Name -- ^ Result signature
-> LInjectivityAnn RdrName -- ^ Injectivity annotation
-> RnM (LInjectivityAnn Name)
rnInjectivityAnn tvBndrs (L _ (TyVarSig resTv))
(L srcSpan (InjectivityAnn injFrom injTo))
= do
{ (injDecl'@(L _ (InjectivityAnn injFrom' injTo')), noRnErrors)
<- askNoErrs $
bindLocalNames [hsLTyVarName resTv] $
-- The return type variable scopes over the injectivity annotation
-- e.g. type family F a = (r::*) | r -> a
do { injFrom' <- rnLTyVar injFrom
; injTo' <- mapM rnLTyVar injTo
; return $ L srcSpan (InjectivityAnn injFrom' injTo') }
; let tvNames = Set.fromList $ hsAllLTyVarNames tvBndrs
resName = hsLTyVarName resTv
-- See Note [Renaming injectivity annotation]
lhsValid = EQ == (stableNameCmp resName (unLoc injFrom'))
rhsValid = Set.fromList (map unLoc injTo') `Set.difference` tvNames
-- if renaming of type variables ended with errors (eg. there were
-- not-in-scope variables) don't check the validity of injectivity
-- annotation. This gives better error messages.
; when (noRnErrors && not lhsValid) $
addErrAt (getLoc injFrom)
( vcat [ text $ "Incorrect type variable on the LHS of "
++ "injectivity condition"
, nest 5
( vcat [ text "Expected :" <+> ppr resName
, text "Actual :" <+> ppr injFrom ])])
; when (noRnErrors && not (Set.null rhsValid)) $
do { let errorVars = Set.toList rhsValid
; addErrAt srcSpan $ ( hsep
[ text "Unknown type variable" <> plural errorVars
, text "on the RHS of injectivity condition:"
, interpp'SP errorVars ] ) }
; return injDecl' }
-- We can only hit this case when the user writes injectivity annotation without
-- naming the result:
--
-- type family F a | result -> a
-- type family F a :: * | result -> a
--
-- So we rename injectivity annotation like we normally would except that
-- this time we expect "result" to be reported not in scope by rnLTyVar.
rnInjectivityAnn _ _ (L srcSpan (InjectivityAnn injFrom injTo)) =
setSrcSpan srcSpan $ do
(injDecl', _) <- askNoErrs $ do
injFrom' <- rnLTyVar injFrom
injTo' <- mapM rnLTyVar injTo
return $ L srcSpan (InjectivityAnn injFrom' injTo')
return $ injDecl'
{-
Note [Stupid theta]
~~~~~~~~~~~~~~~~~~~
Trac #3850 complains about a regression wrt 6.10 for
data Show a => T a
There is no reason not to allow the stupid theta if there are no data
constructors. It's still stupid, but does no harm, and I don't want
to cause programs to break unnecessarily (notably HList). So if there
are no data constructors we allow h98_style = True
-}
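-- Illustrative examples for the Note above (assumed, not taken from the testsuite):
--
--   data Show a => T a                             -- accepted: H98 style, no constructors
--   data Show a => T a = MkT a                     -- accepted: H98-style stupid theta
--   data Show a => T a where { MkT :: a -> T a }   -- rejected via badGadtStupidTheta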
{- *****************************************************
* *
Support code for type/data declarations
* *
***************************************************** -}
---------------
badAssocRhs :: [Name] -> RnM ()
badAssocRhs ns
= addErr (hang (text "The RHS of an associated type declaration mentions"
<+> pprWithCommas (quotes . ppr) ns)
2 (text "All such variables must be bound on the LHS"))
-----------------
rnConDecls :: [LConDecl RdrName] -> RnM ([LConDecl Name], FreeVars)
rnConDecls = mapFvRn (wrapLocFstM rnConDecl)
rnConDecl :: ConDecl RdrName -> RnM (ConDecl Name, FreeVars)
rnConDecl decl@(ConDeclH98 { con_name = name, con_qvars = qtvs
, con_cxt = mcxt, con_details = details
, con_doc = mb_doc })
= do { _ <- addLocM checkConName name
; new_name <- lookupLocatedTopBndrRn name
; let doc = ConDeclCtx [new_name]
; mb_doc' <- rnMbLHsDoc mb_doc
; (kvs, qtvs') <- get_con_qtvs (hsConDeclArgTys details)
; bindHsQTyVars doc (Just $ inHsDocContext doc) Nothing kvs qtvs' $
\new_tyvars _ -> do
{ (new_context, fvs1) <- case mcxt of
Nothing -> return (Nothing,emptyFVs)
Just lcxt -> do { (lctx',fvs) <- rnContext doc lcxt
; return (Just lctx',fvs) }
; (new_details, fvs2) <- rnConDeclDetails (unLoc new_name) doc details
; let (new_details',fvs3) = (new_details,emptyFVs)
; traceRn (text "rnConDecl" <+> ppr name <+> vcat
[ text "free_kvs:" <+> ppr kvs
, text "qtvs:" <+> ppr qtvs
, text "qtvs':" <+> ppr qtvs' ])
; let all_fvs = fvs1 `plusFV` fvs2 `plusFV` fvs3
new_tyvars' = case qtvs of
Nothing -> Nothing
Just _ -> Just new_tyvars
; return (decl { con_name = new_name, con_qvars = new_tyvars'
, con_cxt = new_context, con_details = new_details'
, con_doc = mb_doc' },
all_fvs) }}
where
cxt = maybe [] unLoc mcxt
get_rdr_tvs tys = extractHsTysRdrTyVars (cxt ++ tys)
get_con_qtvs :: [LHsType RdrName]
-> RnM ([Located RdrName], LHsQTyVars RdrName)
get_con_qtvs arg_tys
| Just tvs <- qtvs -- data T = forall a. MkT (a -> a)
= do { free_vars <- get_rdr_tvs arg_tys
; return (freeKiTyVarsKindVars free_vars, tvs) }
| otherwise -- data T = MkT (a -> a)
= return ([], mkHsQTvs [])
rnConDecl decl@(ConDeclGADT { con_names = names, con_type = ty
, con_doc = mb_doc })
= do { mapM_ (addLocM checkConName) names
; new_names <- mapM lookupLocatedTopBndrRn names
; let doc = ConDeclCtx new_names
; mb_doc' <- rnMbLHsDoc mb_doc
; (ty', fvs) <- rnHsSigType doc ty
; traceRn (text "rnConDecl" <+> ppr names <+> vcat
[ text "fvs:" <+> ppr fvs ])
; return (decl { con_names = new_names, con_type = ty'
, con_doc = mb_doc' },
fvs) }
rnConDeclDetails
:: Name
-> HsDocContext
-> HsConDetails (LHsType RdrName) (Located [LConDeclField RdrName])
-> RnM (HsConDetails (LHsType Name) (Located [LConDeclField Name]), FreeVars)
rnConDeclDetails _ doc (PrefixCon tys)
= do { (new_tys, fvs) <- rnLHsTypes doc tys
; return (PrefixCon new_tys, fvs) }
rnConDeclDetails _ doc (InfixCon ty1 ty2)
= do { (new_ty1, fvs1) <- rnLHsType doc ty1
; (new_ty2, fvs2) <- rnLHsType doc ty2
; return (InfixCon new_ty1 new_ty2, fvs1 `plusFV` fvs2) }
rnConDeclDetails con doc (RecCon (L l fields))
= do { fls <- lookupConstructorFields con
; (new_fields, fvs) <- rnConDeclFields doc fls fields
-- No need to check for duplicate fields
-- since that is done by RnNames.extendGlobalRdrEnvRn
; return (RecCon (L l new_fields), fvs) }
-------------------------------------------------
-- | Brings pattern synonym names and also pattern synonym selectors
-- from record pattern synonyms into scope.
extendPatSynEnv :: HsValBinds RdrName -> MiniFixityEnv
-> ([Name] -> TcRnIf TcGblEnv TcLclEnv a) -> TcM a
extendPatSynEnv val_decls local_fix_env thing = do {
names_with_fls <- new_ps val_decls
; let pat_syn_bndrs = concat [ name: map flSelector fields
| (name, fields) <- names_with_fls ]
; let avails = map avail pat_syn_bndrs
; (gbl_env, lcl_env) <- extendGlobalRdrEnvRn avails local_fix_env
; let field_env' = extendNameEnvList (tcg_field_env gbl_env) names_with_fls
final_gbl_env = gbl_env { tcg_field_env = field_env' }
; setEnvs (final_gbl_env, lcl_env) (thing pat_syn_bndrs) }
where
new_ps :: HsValBinds RdrName -> TcM [(Name, [FieldLabel])]
new_ps (ValBindsIn binds _) = foldrBagM new_ps' [] binds
new_ps _ = panic "new_ps"
new_ps' :: LHsBindLR RdrName RdrName
-> [(Name, [FieldLabel])]
-> TcM [(Name, [FieldLabel])]
new_ps' bind names
| L bind_loc (PatSynBind (PSB { psb_id = L _ n
, psb_args = RecordPatSyn as })) <- bind
= do
bnd_name <- newTopSrcBinder (L bind_loc n)
let rnames = map recordPatSynSelectorId as
mkFieldOcc :: Located RdrName -> LFieldOcc RdrName
mkFieldOcc (L l name) = L l (FieldOcc (L l name) PlaceHolder)
field_occs = map mkFieldOcc rnames
flds <- mapM (newRecordSelector False [bnd_name]) field_occs
return ((bnd_name, flds): names)
| L bind_loc (PatSynBind (PSB { psb_id = L _ n})) <- bind
= do
bnd_name <- newTopSrcBinder (L bind_loc n)
return ((bnd_name, []): names)
| otherwise
= return names
{-
*********************************************************
* *
\subsection{Support code to rename types}
* *
*********************************************************
-}
rnFds :: [Located (FunDep (Located RdrName))]
-> RnM [Located (FunDep (Located Name))]
rnFds fds
= mapM (wrapLocM rn_fds) fds
where
rn_fds (tys1, tys2)
= do { tys1' <- rnHsTyVars tys1
; tys2' <- rnHsTyVars tys2
; return (tys1', tys2') }
rnHsTyVars :: [Located RdrName] -> RnM [Located Name]
rnHsTyVars tvs = mapM rnHsTyVar tvs
rnHsTyVar :: Located RdrName -> RnM (Located Name)
rnHsTyVar (L l tyvar) = do
tyvar' <- lookupOccRn tyvar
return (L l tyvar')
{-
*********************************************************
* *
findSplice
* *
*********************************************************
This code marches down the declarations, looking for the first
Template Haskell splice. As it does so it
a) groups the declarations into a HsGroup
b) runs any top-level quasi-quotes
-}
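-- For example (schematic; the splice name is invented):
--
--   f x = x
--   $(mkDecls)
--   g y = y
--
-- Everything before the splice ends up in the returned HsGroup; the splice is
-- returned together with the remaining declarations (here just g), which are
-- processed later as a separate group.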
findSplice :: [LHsDecl RdrName] -> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
findSplice ds = addl emptyRdrGroup ds
addl :: HsGroup RdrName -> [LHsDecl RdrName]
-> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
-- This stuff reverses the declarations (again) but it doesn't matter
addl gp [] = return (gp, Nothing)
addl gp (L l d : ds) = add gp l d ds
add :: HsGroup RdrName -> SrcSpan -> HsDecl RdrName -> [LHsDecl RdrName]
-> RnM (HsGroup RdrName, Maybe (SpliceDecl RdrName, [LHsDecl RdrName]))
-- #10047: Declaration QuasiQuoters are expanded immediately, without
-- causing a group split
add gp _ (SpliceD (SpliceDecl (L _ qq@HsQuasiQuote{}) _)) ds
= do { (ds', _) <- rnTopSpliceDecls qq
; addl gp (ds' ++ ds)
}
add gp loc (SpliceD splice@(SpliceDecl _ flag)) ds
= do { -- We've found a top-level splice. If it is an *implicit* one
-- (i.e. a naked top level expression)
case flag of
ExplicitSplice -> return ()
ImplicitSplice -> do { th_on <- xoptM LangExt.TemplateHaskell
; unless th_on $ setSrcSpan loc $
failWith badImplicitSplice }
; return (gp, Just (splice, ds)) }
where
badImplicitSplice = text "Parse error: module header, import declaration"
$$ text "or top-level declaration expected."
-- Class declarations: pull out the fixity signatures to the top
add gp@(HsGroup {hs_tyclds = ts, hs_fixds = fs}) l (TyClD d) ds
| isClassDecl d
= let fsigs = [ L l f | L l (FixSig f) <- tcdSigs d ] in
addl (gp { hs_tyclds = add_tycld (L l d) ts, hs_fixds = fsigs ++ fs}) ds
| otherwise
= addl (gp { hs_tyclds = add_tycld (L l d) ts }) ds
-- Signatures: fixity sigs go a different place than all others
add gp@(HsGroup {hs_fixds = ts}) l (SigD (FixSig f)) ds
= addl (gp {hs_fixds = L l f : ts}) ds
add gp@(HsGroup {hs_valds = ts}) l (SigD d) ds
= addl (gp {hs_valds = add_sig (L l d) ts}) ds
-- Value declarations: use add_bind
add gp@(HsGroup {hs_valds = ts}) l (ValD d) ds
= addl (gp { hs_valds = add_bind (L l d) ts }) ds
-- Role annotations: added to the TyClGroup
add gp@(HsGroup {hs_tyclds = ts}) l (RoleAnnotD d) ds
= addl (gp { hs_tyclds = add_role_annot (L l d) ts }) ds
-- NB instance declarations go into TyClGroups. We throw them into the first
-- group, just as we do for the TyClD case. The renamer will go on to group
-- and order them later.
add gp@(HsGroup {hs_tyclds = ts}) l (InstD d) ds
= addl (gp { hs_tyclds = add_instd (L l d) ts }) ds
-- The rest are routine
add gp@(HsGroup {hs_derivds = ts}) l (DerivD d) ds
= addl (gp { hs_derivds = L l d : ts }) ds
add gp@(HsGroup {hs_defds = ts}) l (DefD d) ds
= addl (gp { hs_defds = L l d : ts }) ds
add gp@(HsGroup {hs_fords = ts}) l (ForD d) ds
= addl (gp { hs_fords = L l d : ts }) ds
add gp@(HsGroup {hs_warnds = ts}) l (WarningD d) ds
= addl (gp { hs_warnds = L l d : ts }) ds
add gp@(HsGroup {hs_annds = ts}) l (AnnD d) ds
= addl (gp { hs_annds = L l d : ts }) ds
add gp@(HsGroup {hs_ruleds = ts}) l (RuleD d) ds
= addl (gp { hs_ruleds = L l d : ts }) ds
add gp@(HsGroup {hs_vects = ts}) l (VectD d) ds
= addl (gp { hs_vects = L l d : ts }) ds
add gp l (DocD d) ds
= addl (gp { hs_docs = (L l d) : (hs_docs gp) }) ds
add_tycld :: LTyClDecl a -> [TyClGroup a] -> [TyClGroup a]
add_tycld d [] = [TyClGroup { group_tyclds = [d]
, group_roles = []
, group_instds = []
}
]
add_tycld d (ds@(TyClGroup { group_tyclds = tyclds }):dss)
= ds { group_tyclds = d : tyclds } : dss
add_instd :: LInstDecl a -> [TyClGroup a] -> [TyClGroup a]
add_instd d [] = [TyClGroup { group_tyclds = []
, group_roles = []
, group_instds = [d]
}
]
add_instd d (ds@(TyClGroup { group_instds = instds }):dss)
= ds { group_instds = d : instds } : dss
add_role_annot :: LRoleAnnotDecl a -> [TyClGroup a] -> [TyClGroup a]
add_role_annot d [] = [TyClGroup { group_tyclds = []
, group_roles = [d]
, group_instds = []
}
]
add_role_annot d (tycls@(TyClGroup { group_roles = roles }) : rest)
= tycls { group_roles = d : roles } : rest
add_bind :: LHsBind a -> HsValBinds a -> HsValBinds a
add_bind b (ValBindsIn bs sigs) = ValBindsIn (bs `snocBag` b) sigs
add_bind _ (ValBindsOut {}) = panic "RdrHsSyn:add_bind"
add_sig :: LSig a -> HsValBinds a -> HsValBinds a
add_sig s (ValBindsIn bs sigs) = ValBindsIn bs (s:sigs)
add_sig _ (ValBindsOut {}) = panic "RdrHsSyn:add_sig"
| snoyberg/ghc | compiler/rename/RnSource.hs | bsd-3-clause | 98,028 | 1 | 25 | 30,437 | 17,843 | 9,419 | 8,424 | 1,123 | 8 |
-- |
-- Module : Data.MailStorage
-- License : BSD-Style
--
-- Maintainer : Nicolas DI PRIMA <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
-- Mail is managed as follows:
--  -1- incoming emails are stored in the "incoming" directory
--  -2- when the mail has been fully received by the server, the server moves it
--      to "fordelivery". At this point, the file is not yet in the recipient's
--      mailbox. The email is waiting to be managed/filtered/delivered.
--
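-- An illustrative lifecycle (all concrete arguments below are invented):
--
-- > ms   <- initMailStorageDir "/var/maild"
-- > name <- generateUniqueFilename "client.example.org" "alice@example.org" "conn-1"
-- > -- createIncomingDataFile then starts the file in (incomingDir ms) with a
-- > -- "Received:" header; the rest of the message is presumably appended
-- > -- elsewhere during the DATA phase
-- > fromIncomingToFordelivery ms email        -- where mailData email == name
--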
{-# Language OverloadedStrings #-}
module Data.MailStorage
( -- * helpers
getIncomingDir
, getForDeliveryDir
, getDomainsDir
, generateUniqueFilename
, createIncomingDataFile
-- * General
, MailStorage(..)
, isMailStorageDir
, initMailStorageDir
, getMailStorage
, fromIncomingToFordelivery
, deleteDataFromDeliveryDir
-- * Domains
, listDomains
, isSupportedDomain
, isLocalPartOf
, isLocalAddress
, getMailStorageUser
, findMailStorageUsers
) where
import Network.SMTP.Types
import Network.SMTP.Connection (ConnectionID)
import Data.Maild.Email
import qualified Crypto.Hash as Hash
import Control.Monad (when)
import System.FilePath (FilePath, (</>), takeFileName)
import System.Directory
import System.Random (getStdRandom, randomR)
import System.Hourglass (timeCurrent)
import Data.Hourglass
import Data.Char (isSpace, toUpper, toLower)
import Data.Maybe (catMaybes)
import Data.Configurator
import Data.Configurator.Types
import qualified Data.ByteString.Char8 as BC (unpack, pack, ByteString, writeFile)
------------------------------------------------------------------------------
-- Mail Storages --
------------------------------------------------------------------------------
-- | the name of the directory used to store the incoming emails
getIncomingDir :: FilePath
getIncomingDir = "incoming"
-- | the name of the directory used to store the "Ready for delivery" emails
getForDeliveryDir :: FilePath
getForDeliveryDir = "for-delivery"
-- | the name of the directory used to store users' configuration files
getUsersDir :: FilePath
getUsersDir = "users"
-- | the name of the directory used to manage the domains and their local parts
-- a domain is a directory (the domain name is the directory name)
-- each domain may contain local parts. A local part is a subdirectory of this domain.
getDomainsDir :: FilePath
getDomainsDir = "domains"
-- | list the mandatory directory needed in a MailStorageDir
getMandatorySubDir :: [FilePath]
getMandatorySubDir =
[ getIncomingDir
, getForDeliveryDir
, getDomainsDir
, getUsersDir
]
-- | Configuration for MailStorage
data MailStorage = MailStorage
{ mainDir :: FilePath
, incomingDir :: FilePath
, forDeliveryDir :: FilePath
, usersDir :: FilePath
, domainsDir :: FilePath
} deriving (Show, Eq)
-- | get mailStorage:
getMailStorage :: FilePath -> IO (Maybe MailStorage)
getMailStorage dir = do
isDir <- isMailStorageDir dir
return $ case isDir of
False -> Nothing
True -> Just $ MailStorage
dir
(dir </> getIncomingDir)
(dir </> getForDeliveryDir)
(dir </> getUsersDir)
(dir </> getDomainsDir)
-- | init or create a MailStorageDir
-- if the directory already exists, then it attempts to create the sub-directories
initMailStorageDir :: FilePath -> IO MailStorage
initMailStorageDir dir = do
isDir <- doesDirectoryExist dir
when (not isDir) $ createDirectory dir
createMailStorageSubDir getMandatorySubDir
return $ MailStorage dir
(dir </> getIncomingDir)
(dir </> getForDeliveryDir)
(dir </> getUsersDir)
(dir </> getDomainsDir)
where
createMailStorageSubDir :: [FilePath] -> IO ()
createMailStorageSubDir [] = return ()
createMailStorageSubDir (d:ds) = do
isDir <- doesDirectoryExist $ dir </> d
when (not isDir) $ createDirectory $ dir </> d
createMailStorageSubDir ds
-- | check the directory exists and the mandatory subdirectories exist
isMailStorageDir :: FilePath -> IO Bool
isMailStorageDir dir = do
isDir <- doesDirectoryExist dir
case isDir of
True -> isMailStorageDir' getMandatorySubDir
_ -> return False
where
isMailStorageDir' :: [FilePath] -> IO Bool
isMailStorageDir' [] = return True
isMailStorageDir' (d:ds) = do
isPresent <- doesDirectoryExist $ dir </> d
if isPresent
then isMailStorageDir' ds
else return False
-- | This function aims to generate a unique filename.
-- the filename is given as follows:
-- <ISO8601-date>_<SHA3 of @client domain@ @from address@ @current time@ @random number@>_<connectionId>
generateUniqueFilename :: String -- ^ client domain
-> String -- ^ from address
-> ConnectionID -- ^ Connection ID
-> IO (FilePath)
generateUniqueFilename client from conId = do
time <- timeCurrent
random <- getStdRandom (randomR (1, 9999999)) :: IO Int
return $ (timePrint getISOTimeFormat time)
++ "_" ++ (BC.unpack $ getHash $ randomThing (show time) (show random))
++ "_" ++ conId
where
getISOTimeFormat = ISO8601_DateAndTime
getHash :: BC.ByteString -> BC.ByteString
getHash buff = Hash.digestToHexByteString $ (Hash.hash buff :: Hash.Digest Hash.SHA3_224)
randomThing :: String -> String -> BC.ByteString
randomThing t r = BC.pack $ t ++ client ++ from ++ r
-- | In the case the server is receiving a new MAIL, we are required to add
-- a MIME header "Received:".
createIncomingDataFile :: MailStorage
-> Domain
-> Domain
-> Maybe SMTPType
-> Email
-> IO ()
createIncomingDataFile ms by from mtype email = do
time <- timeCurrent >>= \t -> return $ timePrint myTimeFormat t
let receivedString = "Received: " ++ fromDomainString ++ withString
++ cwfs ++ byDomainString ++ "; " ++ time
++ "\r\n"
BC.writeFile inComingPath $ BC.pack receivedString
where
inComingPath = (incomingDir ms) </> (mailData email)
wsp :: Char
wsp = ' '
cwfs :: String
cwfs = "\r\n "
fromDomainString = "From " ++ from
withString = maybe "" (\t -> " With " ++ (show t)) mtype
byDomainString = "By " ++ by
myTimeFormat :: TimeFormatString
myTimeFormat =
TimeFormatString
[ Format_Day2
, Format_Text wsp
, Format_MonthName_Short
, Format_Text wsp
, Format_Year4
, Format_Text wsp
, Format_Hour
, Format_Text ':'
, Format_Minute
, Format_Text ':'
, Format_Second
, Format_Text wsp
, Format_TzHM
, Format_Text wsp
, Format_Text '('
, Format_TimezoneName
, Format_Text ')'
]
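-- For reference, the header written above looks roughly like this (domains,
-- SMTP type and timestamp are invented for the example):
--
-- > Received: From client.example.org With SMTP
-- >  By mail.example.net; 02 May 2015 10:20:30 +0200 (CEST)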
-- | move a file from the "bufferisation" area to the
-- "wait for filtering/delivering" area
--
-- This action also adds the "time-stamp-line" as specified in RFC5321 section
-- 4.1.1.4 (and described in section 4.4)
fromIncomingToFordelivery :: MailStorage -- ^ MailStorageDirectory
-> Email -- ^ the email to move for delivery
-> IO ()
fromIncomingToFordelivery ms email = renameFile inComingPath forDeliveryPath
where
inComingPath = (incomingDir ms) </> (mailData email)
forDeliveryPath = (forDeliveryDir ms) </> (mailData email)
-- | Delete an email file from the Delivery Directory (you may lose the data)
deleteDataFromDeliveryDir :: MailStorage
-> Email
-> IO ()
deleteDataFromDeliveryDir ms email = removeFile filepath
where
filepath = (forDeliveryDir ms) </> (mailData email)
------------------------------------------------------------------------------
-- User's mailbox --
------------------------------------------------------------------------------
-- It is time to define user properly:
-- what do we want?
-- The mailDir:
-- some/path:
-- |
-- |-- incoming <- a temporary area to store the incoming mail (every *DATA*)
-- |
-- |-- for-delivery <- incoming mail will be moved to this dir for delivery
-- |
-- |-- users <-- users
-- | |
-- | |-- nicolas <- file which list all the domains/localpart
-- | | |
-- | | ` - firstname, lastname, digest, list of <localpart@domains>
-- | |
-- | `-- .. <- others users
-- |
-- |
-- `-- domains
-- |
-- |-- di-prima.fr
-- | |
-- | `-- nicolas <-- [email protected]
-- | |
-- | `--.config <- user config
-- |
-- |-- mail.di-prima.fr
-- | |
-- | `-- nicolas <-- [email protected]
-- | |
-- | `.config
-- |
-- `-- di-prima.io
-- |
-- `-- git <-- [email protected]
-- |
-- `.config
defaultMailStorageUser :: MailStorageUser
defaultMailStorageUser = MailStorageUser [] "" "" ""
-- | list the domains supported
listDomains :: MailStorage -> IO [Domain]
listDomains ms =
(getDirectoryContents $ domainsDir ms) >>= \xs -> return $ filter (\x -> notElem x [".", ".."]) xs
-- | list the mailboxes in the given domain
listMailBoxs :: MailStorage -> Domain -> IO [LocalPart]
listMailBoxs ms d =
(getDirectoryContents $ (domainsDir ms) </> d) >>= \xs -> return $ filter (\x -> notElem x [".", ".."]) xs
-- | list users
listUsers :: MailStorage -> IO [FilePath]
listUsers ms =
(getDirectoryContents $ usersDir ms) >>= \xs -> return $ filter (\x -> notElem x [".", ".."]) xs
-- | check if a domain is in the list of managed domains
isSupportedDomain :: MailStorage -> Domain -> IO Bool
isSupportedDomain ms d =
let domains = domainsDir ms
domain = domains </> d
in doesDirectoryExist domain
-- | check if a localpart is a localpart of the given domain
isLocalPartOf :: MailStorage -> Domain -> LocalPart -> IO Bool
isLocalPartOf ms d l =
let domains = domainsDir ms
domain = domains </> d
mailBoxPath = domain </> l
in doesDirectoryExist mailBoxPath
-- | Check if an EmailAddress is a local address
--
-- > isLocalAddress ms == isLocalPartOf ms
isLocalAddress :: MailStorage -> EmailAddress -> IO Bool
isLocalAddress ms (EmailAddress l d) = isLocalPartOf ms d l
-- | return the list of users who are associated with the given string
-- (check the firstname, lastname and the email)
findMailStorageUsers :: MailStorage -> String -> IO [MailStorageUser]
findMailStorageUsers ms s = do
l <- listUsers ms
lmuser <- mapM (getMailStorageUser ms) l
return $ filter findMailStorageUser $ catMaybes lmuser
where
exp :: String
exp = map toUpper s
findMailStorageUser :: MailStorageUser -> Bool
findMailStorageUser msu
= (map toUpper $ firstName msu) == exp
|| (map toUpper $ lastName msu) == exp
|| checkLocalEmails (emails msu)
checkLocalEmails :: [EmailAddress] -> Bool
checkLocalEmails [] = False
checkLocalEmails ((EmailAddress l _):xs) =
if (map toUpper l) == exp
then True
else checkLocalEmails xs
-- | get a user configuration
getMailStorageUser :: MailStorage -> FilePath -> IO (Maybe MailStorageUser)
getMailStorageUser ms login =
let userFile = (usersDir ms) </> login
in do isFile <- doesFileExist userFile
if isFile
then parseMailStorageUserFile userFile >>= \u -> return $ Just u
else return Nothing
-- | Read a User configuration file
parseMailStorageUserFile :: FilePath -> IO MailStorageUser
parseMailStorageUserFile filepath = do
conf <- load [Required filepath]
firstname <- require conf "firstname"
lastname <- require conf "lastname"
password <- require conf "password"
address <- require conf "address" >>= \(List lvalues) -> return $ map parseUserAddress $ catMaybes $ map convert lvalues
return $ MailStorageUser
{ emails = address
, firstName = firstname
, lastName = lastname
, userDigest = password
}
where
parseUserAddress :: String -> EmailAddress
parseUserAddress s = addr
where
(local, pDom) = span (\c -> c /= '@') s
dom = drop 1 pDom
addr = EmailAddress local dom
| NicolasDP/maild | Data/MailStorage.hs | bsd-3-clause | 13,364 | 0 | 16 | 4,078 | 2,476 | 1,336 | 1,140 | 228 | 4 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.AmountOfMoney.HR.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.AmountOfMoney.HR.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "HR Tests"
[ makeCorpusTest [This AmountOfMoney] corpus
]
| rfranek/duckling | tests/Duckling/AmountOfMoney/HR/Tests.hs | bsd-3-clause | 617 | 0 | 9 | 95 | 80 | 51 | 29 | 11 | 1 |
{-# LANGUAGE RankNTypes #-}
module Area.Links where
import Servant
import Database.Persist.Postgresql
import Text.Blaze.Html5
import Common.Links
import Area.API
import Area.Types
toCreateAreaLink :: Key Area -> AttributeValue
toCreateAreaLink pid = stringValue $
"/" ++ show (linkTo (Proxy :: Proxy ToCreateArea) pid)
toCreateTopAreaLink :: AttributeValue
toCreateTopAreaLink = stringValue $
"/" ++ show (linkTo (Proxy :: Proxy ToCreateArea'))
viewAreasLink' :: String -> String
viewAreasLink' target =
"/" ++ show (linkTo (Proxy :: Proxy ViewAreas) target)
viewAreasLink :: String -> AttributeValue
viewAreasLink = stringValue . viewAreasLink'
viewAreaLink' :: Key Area -> String
viewAreaLink' aid =
"/" ++ show (linkTo (Proxy :: Proxy ViewArea) aid)
viewAreaLink :: Key Area -> AttributeValue
viewAreaLink = stringValue . viewAreaLink'
toEditAreaLink :: Key Area -> AttributeValue
toEditAreaLink aid = stringValue $
"/" ++ show (linkTo (Proxy :: Proxy ToEditArea) aid)
| hectorhon/autotrace2 | app/Area/Links.hs | bsd-3-clause | 997 | 0 | 10 | 155 | 293 | 155 | 138 | 27 | 1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE GADTs #-}
module Data.Type.Nat.Quote
( qN
) where
import Data.Type.Quote
import Data.Type.Nat
import Type.Family.Nat
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import Control.Monad ((>=>))
qN :: QuasiQuoter
qN = QuasiQuoter
{ quoteExp = parseAsNatTerm qq varE [|Z_|] $ \x -> [|S_ $x|]
, quotePat = parseAsNatTerm qq varP [p|Z_|] $ \x -> [p|S_ $x|]
, quoteType = parseAsNatTerm qq varT [t|Z|] $ \x -> [t|S $x|]
, quoteDec = stub qq "quoteDec"
}
where
qq = "qN"
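-- Presumed usage (illustrative only; the concrete term syntax accepted by
-- parseAsNatTerm is defined in Data.Type.Quote and is not shown here):
--
-- > [qN| ... |]   -- usable in expression, pattern and type position,
-- >               -- building S_/Z_ terms, S_/Z_ patterns, or S/Z types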
| kylcarte/type-combinators-quote | src/Data/Type/Nat/Quote.hs | bsd-3-clause | 997 | 0 | 8 | 163 | 190 | 129 | 61 | 31 | 1 |
module Math.ProxN.Show1 where
class Show1 v where
show1 :: (Show a) => v a -> String
| exFalso/ProxN | src/Math/ProxN/Show1.hs | bsd-3-clause | 88 | 0 | 9 | 19 | 37 | 20 | 17 | 3 | 0 |
{-
Problem 1
If we list all the natural numbers below 10 that are multiples of 3 or
5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
Find the sum of all the multiples of 3 or 5 below 1000.
-}
module Main where
import Euler.Problem1
main :: IO ()
main = putStrLn $ show answer
answer :: Int
answer = sumMultiple3or5Below 1000
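-- For reference, a direct definition that should agree with
-- sumMultiple3or5Below (whose implementation lives in Euler.Problem1 and is
-- not shown here):
--
-- > sum [n | n <- [1..999], n `mod` 3 == 0 || n `mod` 5 == 0]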
| FranklinChen/project-euler-haskell | Answer/answer1.hs | bsd-3-clause | 349 | 0 | 6 | 83 | 45 | 25 | 20 | 6 | 1 |
module Tile
( Tile (..)
, tileType
, tilePack
, CTile
, packDirection
) where
import Data.Maybe
import Tile.TileType
import qualified Tile.Coordinates as C
import qualified Electric.Direction as D
import qualified Electric.Pack as P
-- | Representation of a single tile on the screen.
-- It stores the type and maybe a pack.
data Tile = Tile TileType (Maybe P.Pack)
zipTile :: [TileType] -> [Maybe P.Pack] -> [Tile]
zipTile (tt : tts) (p : ps) = Tile tt p : zipTile tts ps
zipTile [] _ = []
zipTile _ [] = []
unzipTile :: [Tile] -> ([TileType], [Maybe P.Pack])
unzipTile tiles = unzip $ map (\t_ -> (tileType t_, tilePack t_)) tiles
tileType :: Tile -> TileType
tileType (Tile tt _) = tt
tilePack :: Tile -> Maybe P.Pack
tilePack (Tile _ p) = p
type CTile = (C.Coord, Tile)
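-- Informal round-trip property of the helpers above (stated as a comment
-- only; Tile has no Eq instance in this module):
--
-- > uncurry zipTile (unzipTile ts)   -- rebuilds ts element for element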
| Megaleo/Electras | src/Tile.hs | bsd-3-clause | 840 | 0 | 10 | 204 | 308 | 174 | 134 | 23 | 1 |
import Control.Monad
import Data.List (find)
import Data.Maybe
import Distribution.PackageDescription
import Distribution.Simple
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Setup
import System.Environment (getEnvironment)
import System.Exit
import System.FilePath.Posix ((</>))
import System.Posix.Env hiding (getEnvironment)
import System.Process
main = defaultMainWithHooks $ simpleUserHooks {
preBuild = writePrelude
}
-- * Build prelude.d.ts
--
preludeDir :: FilePath
preludeDir = "./include"
preludeFiles :: [FilePath]
preludeFiles = [
"./rsc/prims.d.ts",
"./rsc/mutability.d.ts",
"./rsc/aliases.d.ts",
"./rsc/qualifiers.d.ts",
"./rsc/measures.d.ts",
"./ambient/undefined.d.ts",
"./ambient/object.d.ts",
"./ambient/array.d.ts",
"./ambient/list.d.ts",
"./ambient/boolean.d.ts",
"./ambient/function.d.ts",
"./ambient/string.d.ts",
"./ambient/number.d.ts",
"./ambient/iarguments.d.ts",
"./ambient/regexp.d.ts",
"./ambient/error.d.ts",
"./ambient/console.d.ts",
"./ambient/math.d.ts"
]
preludeName = "prelude.d.ts"
writePrelude _ _ = do
txts <- forM preludeFiles (readFile . (preludeDir </>))
_ <- writeFile (preludeDir </> preludeName) (mconcat txts)
return (Nothing, [])
| UCSD-PL/RefScript | Setup.hs | bsd-3-clause | 1,507 | 0 | 11 | 410 | 261 | 156 | 105 | 41 | 1 |
module Tct.Jbc.Processor.ToIts (toIts) where
import qualified Data.Map.Strict as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
-- import qualified J2J.Compiler as JJ (compileClasses, processClasses)
-- import qualified J2J.Datatypes.MetaTypes as JJ (MapOfMapClasses)
-- import qualified J2J.Output.JinjaByteCode as JJ (prettyJBC)
-- import qualified J2J.Output.JinjaSourceCode as JJ (prettySourceCode)
import qualified Jat.CompGraph as J (JGraph)
import qualified Jat.Constraints as J (PAFun (..), PAFun (..), PAVar (..), bot, eq, isIFun, isIFun,
isRFun, not, pushNot, top)
import qualified Jat.Utils.TRS as J (PARule, prettyITS, simplifyTRS)
import Tct.Core
import Tct.Core.Processor.Transform (transform)
import qualified Data.Rewriting.Rule as RT hiding (vars)
import qualified Tct.Trs.Encoding.ArgumentFiltering as AF
import qualified Tct.Its as I
import qualified Tct.Its.Config as I (fromString)
import Tct.Jbc.Data.Problem
import Tct.Jbc.Encoding.ArgumentFiltering (mkFilter)
-- import Debug.Trace
-- import qualified Tct.Core.Common.Pretty as PP
--- * ITS ------------------------------------------------------------------------------------------------------------
-- transformation from CTRSs to the ITSs
toIts :: Strategy CTrs I.Its
toIts = transform "We extract a pure ITS fragment from the current cTRS problem" toIts'
toIts' :: CTrs -> Either String I.Its
toIts' (CTrs gr rs) = I.fromString . show . J.prettyITS "a" $ toITS gr rs
-- toIts' (CTrs gr rs) = I.fromString . traceId . PP.display . J.prettyITS "a" $ toITS gr rs
toITS :: J.JGraph i a -> [J.PARule] -> [J.PARule]
toITS gr =
S.toList
. S.fromList
. addStartRule
. map (normalizevars . linearise)
-- . (\p -> trace (PP.display (J.prettyITS "f0" p)) p)
. padding
. elimUniv
. map substituteBVal
. simplify gr
. concatMap (expandNeq . substituteIFun)
. concatMap instantiateBVar
where
apply tau (RT.Rule (RT.Fun l ls) (RT.Fun r rs)) = let sigma = sig tau in RT.Rule (RT.Fun l $ sigma ls) (RT.Fun r $ sigma rs)
apply _ _ = error "apply: not a rule"
sig ls = map k where
k t@(RT.Fun _ _) = t
k t = t `fromMaybe` lookup t ls
top x = sig [(x,J.top)]
bot x = sig [(x,J.bot)]
    -- we consider constraints: b := a1 * a2 where b is a boolean variable and * is either a relational operator between integers or a boolean operation
    -- (i) we instantiate the result with True/False;
    -- (iia) if the result is obtained from a Boolean operation, we suitably instantiate the arguments
    -- (iib) if the result is obtained from a relational operation, we consider the case and the negation of it
-- f(X) -> f(Y) | b := x > 0 --> [f(X) -> f(Y) | x > 0 {b/True}, f(X) -> f(Y) | not(x > 0) {b /False}
-- f(X) -> f(Y) | b := b1 && b2 --> [f(X) -> f(Y) {b/True, b1/True,/b2/True}, f(X) -> f(Y) {b/False,b1/False} ...
instantiateBVar ( rule@(RT.Rule l (RT.Fun r rs)), [RT.Fun J.Ass [w@(RT.Var (J.BVar _ _)), f]] ) = case f of
(RT.Fun J.And [b1,b2]) ->
[ (apply [(b1,J.top),(b2,J.top),(w,J.top)] rule, [])
, (apply [(b1,J.bot), (w,J.bot)] rule, [])
, (apply [ (b2,J.bot),(w,J.bot)] rule, []) ]
(RT.Fun J.Or [b1,b2]) ->
[ (apply [(b1,J.bot),(b2,J.bot),(w,J.bot)] rule, [])
, (apply [(b1,J.top), (w,J.top)] rule, [])
, (apply [ (b2,J.top),(w,J.top)] rule, []) ]
(RT.Fun J.Not [b1]) ->
[ (apply [(b1,J.bot),(w,J.top)] rule, [])
, (apply [(b1,J.top),(w,J.bot)] rule, []) ]
rel | J.isRFun rel -> [( RT.Rule l (RT.Fun r $ top w rs), [f]), ( RT.Rule l (RT.Fun r $ bot w rs), [J.pushNot $ J.not f])]
_ -> error "instantitateBVar: toITS mistyped?"
instantiateBVar cr = [cr]
-- we lift constraints that are operations over integers into the rhs
-- f(X) -> f(Y) | x := x+1 --> f(X) -> f(Y {x/x+1})
substituteIFun (RT.Rule l (RT.Fun r rs) ,[RT.Fun J.Ass [v@(RT.Var (J.IVar _ _)), t]])
| J.isIFun t = (RT.Rule l (RT.Fun r $ sig [(v, t)] rs), [])
substituteIFun cr = cr
-- for some reason we do not accept the negation of equality; so we consider
-- f(X) -> f(Y) | x1 /= x2 --> f(X) -> f(Y) | x1 > x2, f(X) -> f(Y) | x1 < x2
expandNeq (r,[RT.Fun J.Neq [t1,t2]]) =
[ (r, [RT.Fun J.Gt [t1,t2]])
, (r, [RT.Fun J.Gt [t2,t1]]) ]
expandNeq r = [r]
-- simplifying on rhs/inlining
simplify = J.simplifyTRS
    -- we replace the Boolean values True/False with 1/0; this is a sound value abstraction as all boolean operations are already handled by instantiateBVar
-- f(X) -> f(Y) C --> f(X) -> f(Y) C {True/1,False/0,null/0}
substituteBVal (RT.Rule (RT.Fun l ls) (RT.Fun r rs), cr) = (RT.Rule (RT.Fun l (foldr k [] ls)) (RT.Fun r (foldr k [] rs)), cr)
where
k t
| t == J.top = (RT.Fun (J.IConst 1) []:)
| t == J.bot = (RT.Fun (J.IConst 0) []:)
| otherwise = (t:)
substituteBVal _ = error "substituteBVal: not a rule"
    -- start rules do not have incoming edges; this could happen if the first statement is a loop
addStartRule (r@(RT.Rule (RT.Fun _ as) _ ,_) :rs) = (RT.Rule (RT.Fun (J.UFun "a") as) (RT.Fun (J.UFun "f0") as),[]) :r:rs
addStartRule _ = []
-- environment variables are initialised with null, therefore we can have for example f1(x) -> f1(null); f1(x) -> f1(x+1)
-- then af(f)={}; so we first ignore null; then replace it with 0;
    -- alternatively we could have replaced all non-theory terms with fresh variables; but then we have all these fresh variables in the system
elimUniv rs = [ (replaceNull $ af r,cs) | (r,cs) <- rs ]
where af = AF.filterRule . theoryFilter . fst $ unzip rs
theoryFilter = mkFilter (not . isUTerm) where
isUTerm (RT.Fun (J.UFun "null") []) = False
isUTerm (RT.Fun (J.UFun _) _) = True
isUTerm (RT.Var (J.UVar _ _)) = True
isUTerm _ = False
replaceNull (RT.Rule (RT.Fun l ls) (RT.Fun r rs)) = RT.Rule (RT.Fun l $ replaceNull' `fmap` ls) (RT.Fun r $ replaceNull' `fmap` rs) where
replaceNull' (RT.Fun (J.UFun "null") []) = RT.Fun (J.IConst 0) []
replaceNull' t = t
replaceNull _ = error "replaceNull: not a rule"
    -- the ITS format requires that all function symbols have the same arity
    -- so we introduce fresh variables on the lhs and 0 on the rhs w.r.t. the max arity of the system
padding rs = [ (c,cs) | (RT.Rule l r ,cs) <- rs, let c = RT.Rule (padLeft l) (padRight r)]
where
padLeft (RT.Fun s fs) = RT.Fun s (take m $ fs ++ varsl)
padLeft _ = error "padding: oh no"
padRight (RT.Fun s fs) = RT.Fun s (take m $ fs ++ varsr)
padRight _ = error "padding: oh no"
m = max 1 $ maximum [ n | (RT.Rule l r ,_) <- rs, let n = max (len l) (len r)]
len (RT.Fun _ fs) = length fs
len _ = 0
varsl = map (RT.Var . J.IVar "fl") [1..]
-- varsr = map (RT.Var . J.IVar "fr") [1..]
varsr = repeat (RT.Fun (J.IConst 0) [])
-- the lhs of ITS is linear; so we add additional constraints
-- f(x,0,x) -> f(Y) | C == f(x,fresh1,fresh2) -> f(Y) | C && fresh1 == 0 && fresh2 == x
linearise (RT.Rule (RT.Fun f as) r, cs) = (RT.Rule (RT.Fun f as') r, cs')
where
(as',cs',_) = foldr k ([],cs,0) as
k a (asx,csx,i) = case a of
c@(RT.Fun (J.IConst _) []) -> let v = RT.Var (J.IVar "fresh" i) in (v:asx, J.eq v c : csx, succ i)
c@(RT.Var _) ->
if c `elem` asx
then let v = RT.Var (J.IVar "fresh" i) in (v:asx, J.eq v c : csx, succ i)
else (c:asx, csx, succ i)
c -> error $ "linearise: oh no" ++ show c
linearise _ = error "oh no"
-- make sure that rules range over same variables
normalizevars (RT.Rule l r, cs) =
let
(ml,il,l') = norm (M.empty,0,l)
(mr,ir,r') = norm (ml,il,r)
(_,_,cs') = norms (mr,ir,cs)
in (RT.Rule l' r', cs')
where
norm (m,i,RT.Var v) = case v `M.lookup` m of
Just v' -> (m,i,RT.Var v')
Nothing -> (M.insert v v' m, i+1, RT.Var v')
where v' = J.IVar "" i
norm (m,i,RT.Fun f ts) = (m', i', RT.Fun f ts') where
(m',i',ts') = norms (m,i,ts)
norms (m,i,ts) = foldr k (m,i,[]) ts
where k t (n,j,ss) = let (n',j',t') = norm (n,j,t) in (n',j',t':ss)
| ComputationWithBoundedResources/tct-jbc | src/Tct/Jbc/Processor/ToIts.hs | bsd-3-clause | 8,894 | 0 | 20 | 2,635 | 3,127 | 1,705 | 1,422 | 115 | 26 |
module Language.Modelica.Parser where
import Language.Modelica.Syntax.Programme
import Language.Modelica.Syntax.Modelica
import Language.Modelica.Syntax.ToString (toString)
import Language.Modelica.Parser.ClassDefinition
import Language.Modelica.Parser.Programme
import Language.Modelica.Parser.Option (OptionSet, defaultOptions)
import Language.Modelica.Parser.Utility (stringParser)
import Text.Parsec.Prim (runP)
commentsAndCode ::
OptionSet -> FilePath -> String -> [TextSegment]
commentsAndCode opts file str =
case runP (stringParser modelica_programme) opts file str of
Right x -> x
Left err -> error (show err)
simple :: FilePath -> String -> StoredDefinition
simple = withOptions defaultOptions
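-- A minimal usage sketch (the file name is an assumption for the example):
--
-- > do src <- readFile "BouncingBall.mo"
-- >    let ast = simple "BouncingBall.mo" src
-- >    -- ... work with the StoredDefinition 'ast' ...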
withOptions :: OptionSet -> FilePath -> String -> StoredDefinition
withOptions opts file str =
let txt = concatMap f $ commentsAndCode opts file str
f xs@(Str _) = toString xs
f xs@(Code _) = toString xs
f (LineComment _) = "\n"
f (BlockComment xs) = filter (== '\n') xs
in case runP stored_definition opts file txt of
Right ast -> ast
Left err -> error $ show err
| xie-dongping/modelicaparser | src/Language/Modelica/Parser.hs | bsd-3-clause | 1,146 | 0 | 12 | 212 | 353 | 189 | 164 | 27 | 5 |
module Asteroids.GameLogic.Constants
( module Asteroids.GameLogic.Constants
) where
import Asteroids.GameLogic.Physical
import Asteroids.UILogic.Drawable
asteroidColor, shipColor, bulletColor, thrustColor, explosionColor :: PixelColor
lifeColor, scoreColor :: PixelColor
asteroidColor = pixelColor 0.5 0.5 0.5
shipColor = pixelColor 1 1 1
bulletColor = pixelColor 0.9 1 1
thrustColor = pixelColor 1 0 0
explosionColor = pixelColor 1 0.5 0.2
lifeColor = pixelColor 0.5 0 1
scoreColor = pixelColor 0.9 1 1
explosionSize :: Coord
explosionSpeed :: Coord
burstAngle :: Coord
burstCount :: Int
explosionSize = 0.25
burstAngle = 2.0 * pi / fromIntegral burstCount
explosionSpeed = 1
burstCount = 7
lifeSpacing :: Pt2 Coord
lifePos :: Pt2 Coord
lifeSize :: Coord
lifePos = Pt2 (-0.95, 0.95)
lifeSpacing = Pt2 (0.05,0)
lifeSize = 0.015
scorePos :: Pt2 Coord
scorePos = Pt2 (0.95,0.95)
shipSize :: Coord
shipTurnRate :: Coord
shipSize = 0.04
shipTurnRate = 360
bulletSize :: Coord
bulletSize = 0.005
bulletSpeed :: Coord
bulletRange :: Coord
maxBulletAge :: TimeDelta
bulletSpeed = 0.5
bulletRange = 1.4
maxBulletAge = bulletRange / bulletSpeed
startingAsteroidCount :: Int
startingAsteroidCount = 3
largeAsteroid :: Coord
tinyAsteroid :: Coord
largeAsteroid = 0.10
tinyAsteroid = 0.01 | trenttobler/hs-asteroids | src/Asteroids/GameLogic/Constants.hs | bsd-3-clause | 1,369 | 0 | 7 | 272 | 355 | 205 | 150 | 47 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
-- {-# OPTIONS_GHC -cpp -DPiForallInstalled #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <[email protected]>
-- Stability : experimental
-- Portability: non-portable
--
-- This module tests Pire's parser: implicit products
--------------------------------------------------------------------
module ParserTests.ImpProd where
import Test.Tasty
-- import Test.Tasty.SmallCheck as SC
-- import qualified Test.Tasty.QuickCheck as QC
import Test.Tasty.HUnit
-- import qualified Test.Tasty.Runners as R
-- import Test.Tasty.Options as O
import Pire.Syntax.Eps
import Pire.Syntax
import Pire.NoPos
import Pire.Parser.Expr
import Pire.Forget
import Bound
import Pire.Untie
import Pire.Parser.ParseUtils
import Pire.Parser.PiParseUtils
#ifdef PiForallInstalled
import qualified PiForall.Parser as P
#endif
-- "the ? is towards the actual"
-- actual @?= expected = assertEqual "" expected actual
-- expected @=? actual = assertEqual "" expected actual
-- main' "basics"
-- implicit products, as e.g. in the beginning of the test/Logic.pi file
-- new tests as of May 2015
-- test with:
-- main' "implicit"
parsingImpProdU = testGroup "parsing implicit products - unit tests" $ tail [
undefined
-- [x:a] -> b
-- or
-- [a] -> b
, let s = "[x:A] -> B"
in
testGroup ("imp prod '"++s++"'") $ tail [
undefined
-- as an imp prod
, testCase ("parsing impProd '"++s++"'")
$ (nopos $ parse impProd s) @?= (PiP ErasedP "x" (V "A") (Scope (V (F (V "B")))))
-- and as an expr as well, of course
, testCase ("parsing expr '"++s++"'")
$ (nopos $ parse expr s) @?= (PiP ErasedP "x" (V "A") (Scope (V (F (V "B")))))
-- todo: parse as impProd_
, testCase ("parse & forget impProd_ '"++s++"'")
$ (forget $ parseP impProd_ s) @?= (parseP impProd s)
]
#ifdef PiForallInstalled
++ tail [
undefined
, testCase ("parse & untie expr '"++s++"'")
$ (untie $ parse expr s) @?= (piParse P.expr s)
]
#endif
, let s = "[a] -> b"
in
testGroup ("imp prod '"++s++"'") $ tail [
undefined
, testCase ("parsing impProd '"++s++"'")
$ (nopos $ parse impProd s) @?= (PiP ErasedP "_" (V "a") (Scope (V (F (V "b")))))
-- as an expr as well
, testCase ("parsing expr '"++s++"'")
$ (nopos $ parse expr s) @?= (PiP ErasedP "_" (V "a") (Scope (V (F (V "b")))))
, testCase ("parse & forget impProd_ '"++s++"'")
$ (forget $ parseP impProd_ s) @?= (parseP impProd s)
]
#ifdef PiForallInstalled
++ tail [
undefined
, testCase ("parse & untie expr '"++s++"'")
$ (untie $ parse expr s) @?= (piParse P.expr s)
]
#endif
-- indentation!
]
| reuleaux/pire | tests/ParserTests/ImpProd.hs | bsd-3-clause | 3,032 | 0 | 24 | 805 | 725 | 397 | 328 | 34 | 1 |
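-- The comments in the test module above record the orientation of HUnit's
-- (@?=): the actual value goes on the left, the expected value on the right,
-- and (@=?) flips that.  A small stand-alone example (the test names here are
-- made up for illustration only):
import Test.Tasty
import Test.Tasty.HUnit

orientationExamples :: TestTree
orientationExamples = testGroup "assertEqual orientation"
  [ testCase "actual @?= expected" $
      (1 + 1 :: Int) @?= 2
  , testCase "expected @=? actual" $
      2 @=? (1 + 1 :: Int)
  ]

main :: IO ()
main = defaultMain orientationExamples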
module Main where
import Control.Monad
main :: IO ()
main = getLine >>= \n -> replicateM (read n) getLine >>= \acts -> mapM_ (putStrLn . echo) acts
echo :: String -> String
echo x = x
| everyevery/programming_study | template/haskell/stdin_num_cases.hs | mit | 212 | 0 | 12 | 46 | 87 | 47 | 40 | 6 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Test.AWS.Gen.OpsWorks
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Test.AWS.Gen.OpsWorks where
import Data.Proxy
import Test.AWS.Fixture
import Test.AWS.Prelude
import Test.Tasty
import Network.AWS.OpsWorks
import Test.AWS.OpsWorks.Internal
-- Auto-generated: the actual test selection needs to be manually placed into
-- the top-level so that real test data can be incrementally added.
--
-- This commented snippet is what the entire set should look like:
-- fixtures :: TestTree
-- fixtures =
-- [ testGroup "request"
-- [ testDescribeRDSDBInstances $
-- describeRDSDBInstances
--
-- , testDeleteStack $
-- deleteStack
--
-- , testUpdateStack $
-- updateStack
--
-- , testCreateLayer $
-- createLayer
--
-- , testSetLoadBasedAutoScaling $
-- setLoadBasedAutoScaling
--
-- , testDeregisterRDSDBInstance $
-- deregisterRDSDBInstance
--
-- , testUnassignVolume $
-- unassignVolume
--
-- , testCreateInstance $
-- createInstance
--
-- , testDescribeLayers $
-- describeLayers
--
-- , testRegisterElasticIP $
-- registerElasticIP
--
-- , testDescribeAgentVersions $
-- describeAgentVersions
--
-- , testCreateDeployment $
-- createDeployment
--
-- , testAssignInstance $
-- assignInstance
--
-- , testDescribeStacks $
-- describeStacks
--
-- , testDeleteInstance $
-- deleteInstance
--
-- , testUpdateInstance $
-- updateInstance
--
-- , testDeregisterVolume $
-- deregisterVolume
--
-- , testRebootInstance $
-- rebootInstance
--
-- , testDeleteApp $
-- deleteApp
--
-- , testUpdateApp $
-- updateApp
--
-- , testUpdateRDSDBInstance $
-- updateRDSDBInstance
--
-- , testDescribeTimeBasedAutoScaling $
-- describeTimeBasedAutoScaling
--
-- , testStopStack $
-- stopStack
--
-- , testDescribeVolumes $
-- describeVolumes
--
-- , testDisassociateElasticIP $
-- disassociateElasticIP
--
-- , testRegisterEcsCluster $
-- registerEcsCluster
--
-- , testStopInstance $
-- stopInstance
--
-- , testRegisterVolume $
-- registerVolume
--
-- , testSetTimeBasedAutoScaling $
-- setTimeBasedAutoScaling
--
-- , testDescribeUserProfiles $
-- describeUserProfiles
--
-- , testAttachElasticLoadBalancer $
-- attachElasticLoadBalancer
--
-- , testDeregisterElasticIP $
-- deregisterElasticIP
--
-- , testDeregisterEcsCluster $
-- deregisterEcsCluster
--
-- , testDescribeApps $
-- describeApps
--
-- , testUpdateMyUserProfile $
-- updateMyUserProfile
--
-- , testDescribeStackSummary $
-- describeStackSummary
--
-- , testDescribeInstances $
-- describeInstances
--
-- , testDescribeDeployments $
-- describeDeployments
--
-- , testDescribeElasticIPs $
-- describeElasticIPs
--
-- , testGrantAccess $
-- grantAccess
--
-- , testDeleteLayer $
-- deleteLayer
--
-- , testUpdateLayer $
-- updateLayer
--
-- , testCreateStack $
-- createStack
--
-- , testUpdateElasticIP $
-- updateElasticIP
--
-- , testCreateApp $
-- createApp
--
-- , testGetHostnameSuggestion $
-- getHostnameSuggestion
--
-- , testCloneStack $
-- cloneStack
--
-- , testDescribePermissions $
-- describePermissions
--
-- , testDetachElasticLoadBalancer $
-- detachElasticLoadBalancer
--
-- , testRegisterInstance $
-- registerInstance
--
-- , testAssociateElasticIP $
-- associateElasticIP
--
-- , testDescribeLoadBasedAutoScaling $
-- describeLoadBasedAutoScaling
--
-- , testDescribeStackProvisioningParameters $
-- describeStackProvisioningParameters
--
-- , testUnassignInstance $
-- unassignInstance
--
-- , testDescribeMyUserProfile $
-- describeMyUserProfile
--
-- , testDeleteUserProfile $
-- deleteUserProfile
--
-- , testUpdateUserProfile $
-- updateUserProfile
--
-- , testDescribeServiceErrors $
-- describeServiceErrors
--
-- , testRegisterRDSDBInstance $
-- registerRDSDBInstance
--
-- , testStartStack $
-- startStack
--
-- , testCreateUserProfile $
-- createUserProfile
--
-- , testDescribeCommands $
-- describeCommands
--
-- , testAssignVolume $
-- assignVolume
--
-- , testDescribeElasticLoadBalancers $
-- describeElasticLoadBalancers
--
-- , testSetPermission $
-- setPermission
--
-- , testDeregisterInstance $
-- deregisterInstance
--
-- , testDescribeEcsClusters $
-- describeEcsClusters
--
-- , testDescribeRAIdArrays $
-- describeRAIdArrays
--
-- , testUpdateVolume $
-- updateVolume
--
-- , testStartInstance $
-- startInstance
--
-- ]
-- , testGroup "response"
-- [ testDescribeRDSDBInstancesResponse $
-- describeRDSDBInstancesResponse
--
-- , testDeleteStackResponse $
-- deleteStackResponse
--
-- , testUpdateStackResponse $
-- updateStackResponse
--
-- , testCreateLayerResponse $
-- createLayerResponse
--
-- , testSetLoadBasedAutoScalingResponse $
-- setLoadBasedAutoScalingResponse
--
-- , testDeregisterRDSDBInstanceResponse $
-- deregisterRDSDBInstanceResponse
--
-- , testUnassignVolumeResponse $
-- unassignVolumeResponse
--
-- , testCreateInstanceResponse $
-- createInstanceResponse
--
-- , testDescribeLayersResponse $
-- describeLayersResponse
--
-- , testRegisterElasticIPResponse $
-- registerElasticIPResponse
--
-- , testDescribeAgentVersionsResponse $
-- describeAgentVersionsResponse
--
-- , testCreateDeploymentResponse $
-- createDeploymentResponse
--
-- , testAssignInstanceResponse $
-- assignInstanceResponse
--
-- , testDescribeStacksResponse $
-- describeStacksResponse
--
-- , testDeleteInstanceResponse $
-- deleteInstanceResponse
--
-- , testUpdateInstanceResponse $
-- updateInstanceResponse
--
-- , testDeregisterVolumeResponse $
-- deregisterVolumeResponse
--
-- , testRebootInstanceResponse $
-- rebootInstanceResponse
--
-- , testDeleteAppResponse $
-- deleteAppResponse
--
-- , testUpdateAppResponse $
-- updateAppResponse
--
-- , testUpdateRDSDBInstanceResponse $
-- updateRDSDBInstanceResponse
--
-- , testDescribeTimeBasedAutoScalingResponse $
-- describeTimeBasedAutoScalingResponse
--
-- , testStopStackResponse $
-- stopStackResponse
--
-- , testDescribeVolumesResponse $
-- describeVolumesResponse
--
-- , testDisassociateElasticIPResponse $
-- disassociateElasticIPResponse
--
-- , testRegisterEcsClusterResponse $
-- registerEcsClusterResponse
--
-- , testStopInstanceResponse $
-- stopInstanceResponse
--
-- , testRegisterVolumeResponse $
-- registerVolumeResponse
--
-- , testSetTimeBasedAutoScalingResponse $
-- setTimeBasedAutoScalingResponse
--
-- , testDescribeUserProfilesResponse $
-- describeUserProfilesResponse
--
-- , testAttachElasticLoadBalancerResponse $
-- attachElasticLoadBalancerResponse
--
-- , testDeregisterElasticIPResponse $
-- deregisterElasticIPResponse
--
-- , testDeregisterEcsClusterResponse $
-- deregisterEcsClusterResponse
--
-- , testDescribeAppsResponse $
-- describeAppsResponse
--
-- , testUpdateMyUserProfileResponse $
-- updateMyUserProfileResponse
--
-- , testDescribeStackSummaryResponse $
-- describeStackSummaryResponse
--
-- , testDescribeInstancesResponse $
-- describeInstancesResponse
--
-- , testDescribeDeploymentsResponse $
-- describeDeploymentsResponse
--
-- , testDescribeElasticIPsResponse $
-- describeElasticIPsResponse
--
-- , testGrantAccessResponse $
-- grantAccessResponse
--
-- , testDeleteLayerResponse $
-- deleteLayerResponse
--
-- , testUpdateLayerResponse $
-- updateLayerResponse
--
-- , testCreateStackResponse $
-- createStackResponse
--
-- , testUpdateElasticIPResponse $
-- updateElasticIPResponse
--
-- , testCreateAppResponse $
-- createAppResponse
--
-- , testGetHostnameSuggestionResponse $
-- getHostnameSuggestionResponse
--
-- , testCloneStackResponse $
-- cloneStackResponse
--
-- , testDescribePermissionsResponse $
-- describePermissionsResponse
--
-- , testDetachElasticLoadBalancerResponse $
-- detachElasticLoadBalancerResponse
--
-- , testRegisterInstanceResponse $
-- registerInstanceResponse
--
-- , testAssociateElasticIPResponse $
-- associateElasticIPResponse
--
-- , testDescribeLoadBasedAutoScalingResponse $
-- describeLoadBasedAutoScalingResponse
--
-- , testDescribeStackProvisioningParametersResponse $
-- describeStackProvisioningParametersResponse
--
-- , testUnassignInstanceResponse $
-- unassignInstanceResponse
--
-- , testDescribeMyUserProfileResponse $
-- describeMyUserProfileResponse
--
-- , testDeleteUserProfileResponse $
-- deleteUserProfileResponse
--
-- , testUpdateUserProfileResponse $
-- updateUserProfileResponse
--
-- , testDescribeServiceErrorsResponse $
-- describeServiceErrorsResponse
--
-- , testRegisterRDSDBInstanceResponse $
-- registerRDSDBInstanceResponse
--
-- , testStartStackResponse $
-- startStackResponse
--
-- , testCreateUserProfileResponse $
-- createUserProfileResponse
--
-- , testDescribeCommandsResponse $
-- describeCommandsResponse
--
-- , testAssignVolumeResponse $
-- assignVolumeResponse
--
-- , testDescribeElasticLoadBalancersResponse $
-- describeElasticLoadBalancersResponse
--
-- , testSetPermissionResponse $
-- setPermissionResponse
--
-- , testDeregisterInstanceResponse $
-- deregisterInstanceResponse
--
-- , testDescribeEcsClustersResponse $
-- describeEcsClustersResponse
--
-- , testDescribeRAIdArraysResponse $
-- describeRAIdArraysResponse
--
-- , testUpdateVolumeResponse $
-- updateVolumeResponse
--
-- , testStartInstanceResponse $
-- startInstanceResponse
--
-- ]
-- ]
-- Requests
testDescribeRDSDBInstances :: DescribeRDSDBInstances -> TestTree
testDescribeRDSDBInstances = req
"DescribeRDSDBInstances"
"fixture/DescribeRDSDBInstances.yaml"
testDeleteStack :: DeleteStack -> TestTree
testDeleteStack = req
"DeleteStack"
"fixture/DeleteStack.yaml"
testUpdateStack :: UpdateStack -> TestTree
testUpdateStack = req
"UpdateStack"
"fixture/UpdateStack.yaml"
testCreateLayer :: CreateLayer -> TestTree
testCreateLayer = req
"CreateLayer"
"fixture/CreateLayer.yaml"
testSetLoadBasedAutoScaling :: SetLoadBasedAutoScaling -> TestTree
testSetLoadBasedAutoScaling = req
"SetLoadBasedAutoScaling"
"fixture/SetLoadBasedAutoScaling.yaml"
testDeregisterRDSDBInstance :: DeregisterRDSDBInstance -> TestTree
testDeregisterRDSDBInstance = req
"DeregisterRDSDBInstance"
"fixture/DeregisterRDSDBInstance.yaml"
testUnassignVolume :: UnassignVolume -> TestTree
testUnassignVolume = req
"UnassignVolume"
"fixture/UnassignVolume.yaml"
testCreateInstance :: CreateInstance -> TestTree
testCreateInstance = req
"CreateInstance"
"fixture/CreateInstance.yaml"
testDescribeLayers :: DescribeLayers -> TestTree
testDescribeLayers = req
"DescribeLayers"
"fixture/DescribeLayers.yaml"
testRegisterElasticIP :: RegisterElasticIP -> TestTree
testRegisterElasticIP = req
"RegisterElasticIP"
"fixture/RegisterElasticIP.yaml"
testDescribeAgentVersions :: DescribeAgentVersions -> TestTree
testDescribeAgentVersions = req
"DescribeAgentVersions"
"fixture/DescribeAgentVersions.yaml"
testCreateDeployment :: CreateDeployment -> TestTree
testCreateDeployment = req
"CreateDeployment"
"fixture/CreateDeployment.yaml"
testAssignInstance :: AssignInstance -> TestTree
testAssignInstance = req
"AssignInstance"
"fixture/AssignInstance.yaml"
testDescribeStacks :: DescribeStacks -> TestTree
testDescribeStacks = req
"DescribeStacks"
"fixture/DescribeStacks.yaml"
testDeleteInstance :: DeleteInstance -> TestTree
testDeleteInstance = req
"DeleteInstance"
"fixture/DeleteInstance.yaml"
testUpdateInstance :: UpdateInstance -> TestTree
testUpdateInstance = req
"UpdateInstance"
"fixture/UpdateInstance.yaml"
testDeregisterVolume :: DeregisterVolume -> TestTree
testDeregisterVolume = req
"DeregisterVolume"
"fixture/DeregisterVolume.yaml"
testRebootInstance :: RebootInstance -> TestTree
testRebootInstance = req
"RebootInstance"
"fixture/RebootInstance.yaml"
testDeleteApp :: DeleteApp -> TestTree
testDeleteApp = req
"DeleteApp"
"fixture/DeleteApp.yaml"
testUpdateApp :: UpdateApp -> TestTree
testUpdateApp = req
"UpdateApp"
"fixture/UpdateApp.yaml"
testUpdateRDSDBInstance :: UpdateRDSDBInstance -> TestTree
testUpdateRDSDBInstance = req
"UpdateRDSDBInstance"
"fixture/UpdateRDSDBInstance.yaml"
testDescribeTimeBasedAutoScaling :: DescribeTimeBasedAutoScaling -> TestTree
testDescribeTimeBasedAutoScaling = req
"DescribeTimeBasedAutoScaling"
"fixture/DescribeTimeBasedAutoScaling.yaml"
testStopStack :: StopStack -> TestTree
testStopStack = req
"StopStack"
"fixture/StopStack.yaml"
testDescribeVolumes :: DescribeVolumes -> TestTree
testDescribeVolumes = req
"DescribeVolumes"
"fixture/DescribeVolumes.yaml"
testDisassociateElasticIP :: DisassociateElasticIP -> TestTree
testDisassociateElasticIP = req
"DisassociateElasticIP"
"fixture/DisassociateElasticIP.yaml"
testRegisterEcsCluster :: RegisterEcsCluster -> TestTree
testRegisterEcsCluster = req
"RegisterEcsCluster"
"fixture/RegisterEcsCluster.yaml"
testStopInstance :: StopInstance -> TestTree
testStopInstance = req
"StopInstance"
"fixture/StopInstance.yaml"
testRegisterVolume :: RegisterVolume -> TestTree
testRegisterVolume = req
"RegisterVolume"
"fixture/RegisterVolume.yaml"
testSetTimeBasedAutoScaling :: SetTimeBasedAutoScaling -> TestTree
testSetTimeBasedAutoScaling = req
"SetTimeBasedAutoScaling"
"fixture/SetTimeBasedAutoScaling.yaml"
testDescribeUserProfiles :: DescribeUserProfiles -> TestTree
testDescribeUserProfiles = req
"DescribeUserProfiles"
"fixture/DescribeUserProfiles.yaml"
testAttachElasticLoadBalancer :: AttachElasticLoadBalancer -> TestTree
testAttachElasticLoadBalancer = req
"AttachElasticLoadBalancer"
"fixture/AttachElasticLoadBalancer.yaml"
testDeregisterElasticIP :: DeregisterElasticIP -> TestTree
testDeregisterElasticIP = req
"DeregisterElasticIP"
"fixture/DeregisterElasticIP.yaml"
testDeregisterEcsCluster :: DeregisterEcsCluster -> TestTree
testDeregisterEcsCluster = req
"DeregisterEcsCluster"
"fixture/DeregisterEcsCluster.yaml"
testDescribeApps :: DescribeApps -> TestTree
testDescribeApps = req
"DescribeApps"
"fixture/DescribeApps.yaml"
testUpdateMyUserProfile :: UpdateMyUserProfile -> TestTree
testUpdateMyUserProfile = req
"UpdateMyUserProfile"
"fixture/UpdateMyUserProfile.yaml"
testDescribeStackSummary :: DescribeStackSummary -> TestTree
testDescribeStackSummary = req
"DescribeStackSummary"
"fixture/DescribeStackSummary.yaml"
testDescribeInstances :: DescribeInstances -> TestTree
testDescribeInstances = req
"DescribeInstances"
"fixture/DescribeInstances.yaml"
testDescribeDeployments :: DescribeDeployments -> TestTree
testDescribeDeployments = req
"DescribeDeployments"
"fixture/DescribeDeployments.yaml"
testDescribeElasticIPs :: DescribeElasticIPs -> TestTree
testDescribeElasticIPs = req
"DescribeElasticIPs"
"fixture/DescribeElasticIPs.yaml"
testGrantAccess :: GrantAccess -> TestTree
testGrantAccess = req
"GrantAccess"
"fixture/GrantAccess.yaml"
testDeleteLayer :: DeleteLayer -> TestTree
testDeleteLayer = req
"DeleteLayer"
"fixture/DeleteLayer.yaml"
testUpdateLayer :: UpdateLayer -> TestTree
testUpdateLayer = req
"UpdateLayer"
"fixture/UpdateLayer.yaml"
testCreateStack :: CreateStack -> TestTree
testCreateStack = req
"CreateStack"
"fixture/CreateStack.yaml"
testUpdateElasticIP :: UpdateElasticIP -> TestTree
testUpdateElasticIP = req
"UpdateElasticIP"
"fixture/UpdateElasticIP.yaml"
testCreateApp :: CreateApp -> TestTree
testCreateApp = req
"CreateApp"
"fixture/CreateApp.yaml"
testGetHostnameSuggestion :: GetHostnameSuggestion -> TestTree
testGetHostnameSuggestion = req
"GetHostnameSuggestion"
"fixture/GetHostnameSuggestion.yaml"
testCloneStack :: CloneStack -> TestTree
testCloneStack = req
"CloneStack"
"fixture/CloneStack.yaml"
testDescribePermissions :: DescribePermissions -> TestTree
testDescribePermissions = req
"DescribePermissions"
"fixture/DescribePermissions.yaml"
testDetachElasticLoadBalancer :: DetachElasticLoadBalancer -> TestTree
testDetachElasticLoadBalancer = req
"DetachElasticLoadBalancer"
"fixture/DetachElasticLoadBalancer.yaml"
testRegisterInstance :: RegisterInstance -> TestTree
testRegisterInstance = req
"RegisterInstance"
"fixture/RegisterInstance.yaml"
testAssociateElasticIP :: AssociateElasticIP -> TestTree
testAssociateElasticIP = req
"AssociateElasticIP"
"fixture/AssociateElasticIP.yaml"
testDescribeLoadBasedAutoScaling :: DescribeLoadBasedAutoScaling -> TestTree
testDescribeLoadBasedAutoScaling = req
"DescribeLoadBasedAutoScaling"
"fixture/DescribeLoadBasedAutoScaling.yaml"
testDescribeStackProvisioningParameters :: DescribeStackProvisioningParameters -> TestTree
testDescribeStackProvisioningParameters = req
"DescribeStackProvisioningParameters"
"fixture/DescribeStackProvisioningParameters.yaml"
testUnassignInstance :: UnassignInstance -> TestTree
testUnassignInstance = req
"UnassignInstance"
"fixture/UnassignInstance.yaml"
testDescribeMyUserProfile :: DescribeMyUserProfile -> TestTree
testDescribeMyUserProfile = req
"DescribeMyUserProfile"
"fixture/DescribeMyUserProfile.yaml"
testDeleteUserProfile :: DeleteUserProfile -> TestTree
testDeleteUserProfile = req
"DeleteUserProfile"
"fixture/DeleteUserProfile.yaml"
testUpdateUserProfile :: UpdateUserProfile -> TestTree
testUpdateUserProfile = req
"UpdateUserProfile"
"fixture/UpdateUserProfile.yaml"
testDescribeServiceErrors :: DescribeServiceErrors -> TestTree
testDescribeServiceErrors = req
"DescribeServiceErrors"
"fixture/DescribeServiceErrors.yaml"
testRegisterRDSDBInstance :: RegisterRDSDBInstance -> TestTree
testRegisterRDSDBInstance = req
"RegisterRDSDBInstance"
"fixture/RegisterRDSDBInstance.yaml"
testStartStack :: StartStack -> TestTree
testStartStack = req
"StartStack"
"fixture/StartStack.yaml"
testCreateUserProfile :: CreateUserProfile -> TestTree
testCreateUserProfile = req
"CreateUserProfile"
"fixture/CreateUserProfile.yaml"
testDescribeCommands :: DescribeCommands -> TestTree
testDescribeCommands = req
"DescribeCommands"
"fixture/DescribeCommands.yaml"
testAssignVolume :: AssignVolume -> TestTree
testAssignVolume = req
"AssignVolume"
"fixture/AssignVolume.yaml"
testDescribeElasticLoadBalancers :: DescribeElasticLoadBalancers -> TestTree
testDescribeElasticLoadBalancers = req
"DescribeElasticLoadBalancers"
"fixture/DescribeElasticLoadBalancers.yaml"
testSetPermission :: SetPermission -> TestTree
testSetPermission = req
"SetPermission"
"fixture/SetPermission.yaml"
testDeregisterInstance :: DeregisterInstance -> TestTree
testDeregisterInstance = req
"DeregisterInstance"
"fixture/DeregisterInstance.yaml"
testDescribeEcsClusters :: DescribeEcsClusters -> TestTree
testDescribeEcsClusters = req
"DescribeEcsClusters"
"fixture/DescribeEcsClusters.yaml"
testDescribeRAIdArrays :: DescribeRAIdArrays -> TestTree
testDescribeRAIdArrays = req
"DescribeRAIdArrays"
"fixture/DescribeRAIdArrays.yaml"
testUpdateVolume :: UpdateVolume -> TestTree
testUpdateVolume = req
"UpdateVolume"
"fixture/UpdateVolume.yaml"
testStartInstance :: StartInstance -> TestTree
testStartInstance = req
"StartInstance"
"fixture/StartInstance.yaml"
-- Responses
testDescribeRDSDBInstancesResponse :: DescribeRDSDBInstancesResponse -> TestTree
testDescribeRDSDBInstancesResponse = res
"DescribeRDSDBInstancesResponse"
"fixture/DescribeRDSDBInstancesResponse.proto"
opsWorks
(Proxy :: Proxy DescribeRDSDBInstances)
testDeleteStackResponse :: DeleteStackResponse -> TestTree
testDeleteStackResponse = res
"DeleteStackResponse"
"fixture/DeleteStackResponse.proto"
opsWorks
(Proxy :: Proxy DeleteStack)
testUpdateStackResponse :: UpdateStackResponse -> TestTree
testUpdateStackResponse = res
"UpdateStackResponse"
"fixture/UpdateStackResponse.proto"
opsWorks
(Proxy :: Proxy UpdateStack)
testCreateLayerResponse :: CreateLayerResponse -> TestTree
testCreateLayerResponse = res
"CreateLayerResponse"
"fixture/CreateLayerResponse.proto"
opsWorks
(Proxy :: Proxy CreateLayer)
testSetLoadBasedAutoScalingResponse :: SetLoadBasedAutoScalingResponse -> TestTree
testSetLoadBasedAutoScalingResponse = res
"SetLoadBasedAutoScalingResponse"
"fixture/SetLoadBasedAutoScalingResponse.proto"
opsWorks
(Proxy :: Proxy SetLoadBasedAutoScaling)
testDeregisterRDSDBInstanceResponse :: DeregisterRDSDBInstanceResponse -> TestTree
testDeregisterRDSDBInstanceResponse = res
"DeregisterRDSDBInstanceResponse"
"fixture/DeregisterRDSDBInstanceResponse.proto"
opsWorks
(Proxy :: Proxy DeregisterRDSDBInstance)
testUnassignVolumeResponse :: UnassignVolumeResponse -> TestTree
testUnassignVolumeResponse = res
"UnassignVolumeResponse"
"fixture/UnassignVolumeResponse.proto"
opsWorks
(Proxy :: Proxy UnassignVolume)
testCreateInstanceResponse :: CreateInstanceResponse -> TestTree
testCreateInstanceResponse = res
"CreateInstanceResponse"
"fixture/CreateInstanceResponse.proto"
opsWorks
(Proxy :: Proxy CreateInstance)
testDescribeLayersResponse :: DescribeLayersResponse -> TestTree
testDescribeLayersResponse = res
"DescribeLayersResponse"
"fixture/DescribeLayersResponse.proto"
opsWorks
(Proxy :: Proxy DescribeLayers)
testRegisterElasticIPResponse :: RegisterElasticIPResponse -> TestTree
testRegisterElasticIPResponse = res
"RegisterElasticIPResponse"
"fixture/RegisterElasticIPResponse.proto"
opsWorks
(Proxy :: Proxy RegisterElasticIP)
testDescribeAgentVersionsResponse :: DescribeAgentVersionsResponse -> TestTree
testDescribeAgentVersionsResponse = res
"DescribeAgentVersionsResponse"
"fixture/DescribeAgentVersionsResponse.proto"
opsWorks
(Proxy :: Proxy DescribeAgentVersions)
testCreateDeploymentResponse :: CreateDeploymentResponse -> TestTree
testCreateDeploymentResponse = res
"CreateDeploymentResponse"
"fixture/CreateDeploymentResponse.proto"
opsWorks
(Proxy :: Proxy CreateDeployment)
testAssignInstanceResponse :: AssignInstanceResponse -> TestTree
testAssignInstanceResponse = res
"AssignInstanceResponse"
"fixture/AssignInstanceResponse.proto"
opsWorks
(Proxy :: Proxy AssignInstance)
testDescribeStacksResponse :: DescribeStacksResponse -> TestTree
testDescribeStacksResponse = res
"DescribeStacksResponse"
"fixture/DescribeStacksResponse.proto"
opsWorks
(Proxy :: Proxy DescribeStacks)
testDeleteInstanceResponse :: DeleteInstanceResponse -> TestTree
testDeleteInstanceResponse = res
"DeleteInstanceResponse"
"fixture/DeleteInstanceResponse.proto"
opsWorks
(Proxy :: Proxy DeleteInstance)
testUpdateInstanceResponse :: UpdateInstanceResponse -> TestTree
testUpdateInstanceResponse = res
"UpdateInstanceResponse"
"fixture/UpdateInstanceResponse.proto"
opsWorks
(Proxy :: Proxy UpdateInstance)
testDeregisterVolumeResponse :: DeregisterVolumeResponse -> TestTree
testDeregisterVolumeResponse = res
"DeregisterVolumeResponse"
"fixture/DeregisterVolumeResponse.proto"
opsWorks
(Proxy :: Proxy DeregisterVolume)
testRebootInstanceResponse :: RebootInstanceResponse -> TestTree
testRebootInstanceResponse = res
"RebootInstanceResponse"
"fixture/RebootInstanceResponse.proto"
opsWorks
(Proxy :: Proxy RebootInstance)
testDeleteAppResponse :: DeleteAppResponse -> TestTree
testDeleteAppResponse = res
"DeleteAppResponse"
"fixture/DeleteAppResponse.proto"
opsWorks
(Proxy :: Proxy DeleteApp)
testUpdateAppResponse :: UpdateAppResponse -> TestTree
testUpdateAppResponse = res
"UpdateAppResponse"
"fixture/UpdateAppResponse.proto"
opsWorks
(Proxy :: Proxy UpdateApp)
testUpdateRDSDBInstanceResponse :: UpdateRDSDBInstanceResponse -> TestTree
testUpdateRDSDBInstanceResponse = res
"UpdateRDSDBInstanceResponse"
"fixture/UpdateRDSDBInstanceResponse.proto"
opsWorks
(Proxy :: Proxy UpdateRDSDBInstance)
testDescribeTimeBasedAutoScalingResponse :: DescribeTimeBasedAutoScalingResponse -> TestTree
testDescribeTimeBasedAutoScalingResponse = res
"DescribeTimeBasedAutoScalingResponse"
"fixture/DescribeTimeBasedAutoScalingResponse.proto"
opsWorks
(Proxy :: Proxy DescribeTimeBasedAutoScaling)
testStopStackResponse :: StopStackResponse -> TestTree
testStopStackResponse = res
"StopStackResponse"
"fixture/StopStackResponse.proto"
opsWorks
(Proxy :: Proxy StopStack)
testDescribeVolumesResponse :: DescribeVolumesResponse -> TestTree
testDescribeVolumesResponse = res
"DescribeVolumesResponse"
"fixture/DescribeVolumesResponse.proto"
opsWorks
(Proxy :: Proxy DescribeVolumes)
testDisassociateElasticIPResponse :: DisassociateElasticIPResponse -> TestTree
testDisassociateElasticIPResponse = res
"DisassociateElasticIPResponse"
"fixture/DisassociateElasticIPResponse.proto"
opsWorks
(Proxy :: Proxy DisassociateElasticIP)
testRegisterEcsClusterResponse :: RegisterEcsClusterResponse -> TestTree
testRegisterEcsClusterResponse = res
"RegisterEcsClusterResponse"
"fixture/RegisterEcsClusterResponse.proto"
opsWorks
(Proxy :: Proxy RegisterEcsCluster)
testStopInstanceResponse :: StopInstanceResponse -> TestTree
testStopInstanceResponse = res
"StopInstanceResponse"
"fixture/StopInstanceResponse.proto"
opsWorks
(Proxy :: Proxy StopInstance)
testRegisterVolumeResponse :: RegisterVolumeResponse -> TestTree
testRegisterVolumeResponse = res
"RegisterVolumeResponse"
"fixture/RegisterVolumeResponse.proto"
opsWorks
(Proxy :: Proxy RegisterVolume)
testSetTimeBasedAutoScalingResponse :: SetTimeBasedAutoScalingResponse -> TestTree
testSetTimeBasedAutoScalingResponse = res
"SetTimeBasedAutoScalingResponse"
"fixture/SetTimeBasedAutoScalingResponse.proto"
opsWorks
(Proxy :: Proxy SetTimeBasedAutoScaling)
testDescribeUserProfilesResponse :: DescribeUserProfilesResponse -> TestTree
testDescribeUserProfilesResponse = res
"DescribeUserProfilesResponse"
"fixture/DescribeUserProfilesResponse.proto"
opsWorks
(Proxy :: Proxy DescribeUserProfiles)
testAttachElasticLoadBalancerResponse :: AttachElasticLoadBalancerResponse -> TestTree
testAttachElasticLoadBalancerResponse = res
"AttachElasticLoadBalancerResponse"
"fixture/AttachElasticLoadBalancerResponse.proto"
opsWorks
(Proxy :: Proxy AttachElasticLoadBalancer)
testDeregisterElasticIPResponse :: DeregisterElasticIPResponse -> TestTree
testDeregisterElasticIPResponse = res
"DeregisterElasticIPResponse"
"fixture/DeregisterElasticIPResponse.proto"
opsWorks
(Proxy :: Proxy DeregisterElasticIP)
testDeregisterEcsClusterResponse :: DeregisterEcsClusterResponse -> TestTree
testDeregisterEcsClusterResponse = res
"DeregisterEcsClusterResponse"
"fixture/DeregisterEcsClusterResponse.proto"
opsWorks
(Proxy :: Proxy DeregisterEcsCluster)
testDescribeAppsResponse :: DescribeAppsResponse -> TestTree
testDescribeAppsResponse = res
"DescribeAppsResponse"
"fixture/DescribeAppsResponse.proto"
opsWorks
(Proxy :: Proxy DescribeApps)
testUpdateMyUserProfileResponse :: UpdateMyUserProfileResponse -> TestTree
testUpdateMyUserProfileResponse = res
"UpdateMyUserProfileResponse"
"fixture/UpdateMyUserProfileResponse.proto"
opsWorks
(Proxy :: Proxy UpdateMyUserProfile)
testDescribeStackSummaryResponse :: DescribeStackSummaryResponse -> TestTree
testDescribeStackSummaryResponse = res
"DescribeStackSummaryResponse"
"fixture/DescribeStackSummaryResponse.proto"
opsWorks
(Proxy :: Proxy DescribeStackSummary)
testDescribeInstancesResponse :: DescribeInstancesResponse -> TestTree
testDescribeInstancesResponse = res
"DescribeInstancesResponse"
"fixture/DescribeInstancesResponse.proto"
opsWorks
(Proxy :: Proxy DescribeInstances)
testDescribeDeploymentsResponse :: DescribeDeploymentsResponse -> TestTree
testDescribeDeploymentsResponse = res
"DescribeDeploymentsResponse"
"fixture/DescribeDeploymentsResponse.proto"
opsWorks
(Proxy :: Proxy DescribeDeployments)
testDescribeElasticIPsResponse :: DescribeElasticIPsResponse -> TestTree
testDescribeElasticIPsResponse = res
"DescribeElasticIPsResponse"
"fixture/DescribeElasticIPsResponse.proto"
opsWorks
(Proxy :: Proxy DescribeElasticIPs)
testGrantAccessResponse :: GrantAccessResponse -> TestTree
testGrantAccessResponse = res
"GrantAccessResponse"
"fixture/GrantAccessResponse.proto"
opsWorks
(Proxy :: Proxy GrantAccess)
testDeleteLayerResponse :: DeleteLayerResponse -> TestTree
testDeleteLayerResponse = res
"DeleteLayerResponse"
"fixture/DeleteLayerResponse.proto"
opsWorks
(Proxy :: Proxy DeleteLayer)
testUpdateLayerResponse :: UpdateLayerResponse -> TestTree
testUpdateLayerResponse = res
"UpdateLayerResponse"
"fixture/UpdateLayerResponse.proto"
opsWorks
(Proxy :: Proxy UpdateLayer)
testCreateStackResponse :: CreateStackResponse -> TestTree
testCreateStackResponse = res
"CreateStackResponse"
"fixture/CreateStackResponse.proto"
opsWorks
(Proxy :: Proxy CreateStack)
testUpdateElasticIPResponse :: UpdateElasticIPResponse -> TestTree
testUpdateElasticIPResponse = res
"UpdateElasticIPResponse"
"fixture/UpdateElasticIPResponse.proto"
opsWorks
(Proxy :: Proxy UpdateElasticIP)
testCreateAppResponse :: CreateAppResponse -> TestTree
testCreateAppResponse = res
"CreateAppResponse"
"fixture/CreateAppResponse.proto"
opsWorks
(Proxy :: Proxy CreateApp)
testGetHostnameSuggestionResponse :: GetHostnameSuggestionResponse -> TestTree
testGetHostnameSuggestionResponse = res
"GetHostnameSuggestionResponse"
"fixture/GetHostnameSuggestionResponse.proto"
opsWorks
(Proxy :: Proxy GetHostnameSuggestion)
testCloneStackResponse :: CloneStackResponse -> TestTree
testCloneStackResponse = res
"CloneStackResponse"
"fixture/CloneStackResponse.proto"
opsWorks
(Proxy :: Proxy CloneStack)
testDescribePermissionsResponse :: DescribePermissionsResponse -> TestTree
testDescribePermissionsResponse = res
"DescribePermissionsResponse"
"fixture/DescribePermissionsResponse.proto"
opsWorks
(Proxy :: Proxy DescribePermissions)
testDetachElasticLoadBalancerResponse :: DetachElasticLoadBalancerResponse -> TestTree
testDetachElasticLoadBalancerResponse = res
"DetachElasticLoadBalancerResponse"
"fixture/DetachElasticLoadBalancerResponse.proto"
opsWorks
(Proxy :: Proxy DetachElasticLoadBalancer)
testRegisterInstanceResponse :: RegisterInstanceResponse -> TestTree
testRegisterInstanceResponse = res
"RegisterInstanceResponse"
"fixture/RegisterInstanceResponse.proto"
opsWorks
(Proxy :: Proxy RegisterInstance)
testAssociateElasticIPResponse :: AssociateElasticIPResponse -> TestTree
testAssociateElasticIPResponse = res
"AssociateElasticIPResponse"
"fixture/AssociateElasticIPResponse.proto"
opsWorks
(Proxy :: Proxy AssociateElasticIP)
testDescribeLoadBasedAutoScalingResponse :: DescribeLoadBasedAutoScalingResponse -> TestTree
testDescribeLoadBasedAutoScalingResponse = res
"DescribeLoadBasedAutoScalingResponse"
"fixture/DescribeLoadBasedAutoScalingResponse.proto"
opsWorks
(Proxy :: Proxy DescribeLoadBasedAutoScaling)
testDescribeStackProvisioningParametersResponse :: DescribeStackProvisioningParametersResponse -> TestTree
testDescribeStackProvisioningParametersResponse = res
"DescribeStackProvisioningParametersResponse"
"fixture/DescribeStackProvisioningParametersResponse.proto"
opsWorks
(Proxy :: Proxy DescribeStackProvisioningParameters)
testUnassignInstanceResponse :: UnassignInstanceResponse -> TestTree
testUnassignInstanceResponse = res
"UnassignInstanceResponse"
"fixture/UnassignInstanceResponse.proto"
opsWorks
(Proxy :: Proxy UnassignInstance)
testDescribeMyUserProfileResponse :: DescribeMyUserProfileResponse -> TestTree
testDescribeMyUserProfileResponse = res
"DescribeMyUserProfileResponse"
"fixture/DescribeMyUserProfileResponse.proto"
opsWorks
(Proxy :: Proxy DescribeMyUserProfile)
testDeleteUserProfileResponse :: DeleteUserProfileResponse -> TestTree
testDeleteUserProfileResponse = res
"DeleteUserProfileResponse"
"fixture/DeleteUserProfileResponse.proto"
opsWorks
(Proxy :: Proxy DeleteUserProfile)
testUpdateUserProfileResponse :: UpdateUserProfileResponse -> TestTree
testUpdateUserProfileResponse = res
"UpdateUserProfileResponse"
"fixture/UpdateUserProfileResponse.proto"
opsWorks
(Proxy :: Proxy UpdateUserProfile)
testDescribeServiceErrorsResponse :: DescribeServiceErrorsResponse -> TestTree
testDescribeServiceErrorsResponse = res
"DescribeServiceErrorsResponse"
"fixture/DescribeServiceErrorsResponse.proto"
opsWorks
(Proxy :: Proxy DescribeServiceErrors)
testRegisterRDSDBInstanceResponse :: RegisterRDSDBInstanceResponse -> TestTree
testRegisterRDSDBInstanceResponse = res
"RegisterRDSDBInstanceResponse"
"fixture/RegisterRDSDBInstanceResponse.proto"
opsWorks
(Proxy :: Proxy RegisterRDSDBInstance)
testStartStackResponse :: StartStackResponse -> TestTree
testStartStackResponse = res
"StartStackResponse"
"fixture/StartStackResponse.proto"
opsWorks
(Proxy :: Proxy StartStack)
testCreateUserProfileResponse :: CreateUserProfileResponse -> TestTree
testCreateUserProfileResponse = res
"CreateUserProfileResponse"
"fixture/CreateUserProfileResponse.proto"
opsWorks
(Proxy :: Proxy CreateUserProfile)
testDescribeCommandsResponse :: DescribeCommandsResponse -> TestTree
testDescribeCommandsResponse = res
"DescribeCommandsResponse"
"fixture/DescribeCommandsResponse.proto"
opsWorks
(Proxy :: Proxy DescribeCommands)
testAssignVolumeResponse :: AssignVolumeResponse -> TestTree
testAssignVolumeResponse = res
"AssignVolumeResponse"
"fixture/AssignVolumeResponse.proto"
opsWorks
(Proxy :: Proxy AssignVolume)
testDescribeElasticLoadBalancersResponse :: DescribeElasticLoadBalancersResponse -> TestTree
testDescribeElasticLoadBalancersResponse = res
"DescribeElasticLoadBalancersResponse"
"fixture/DescribeElasticLoadBalancersResponse.proto"
opsWorks
(Proxy :: Proxy DescribeElasticLoadBalancers)
testSetPermissionResponse :: SetPermissionResponse -> TestTree
testSetPermissionResponse = res
"SetPermissionResponse"
"fixture/SetPermissionResponse.proto"
opsWorks
(Proxy :: Proxy SetPermission)
testDeregisterInstanceResponse :: DeregisterInstanceResponse -> TestTree
testDeregisterInstanceResponse = res
"DeregisterInstanceResponse"
"fixture/DeregisterInstanceResponse.proto"
opsWorks
(Proxy :: Proxy DeregisterInstance)
testDescribeEcsClustersResponse :: DescribeEcsClustersResponse -> TestTree
testDescribeEcsClustersResponse = res
"DescribeEcsClustersResponse"
"fixture/DescribeEcsClustersResponse.proto"
opsWorks
(Proxy :: Proxy DescribeEcsClusters)
testDescribeRAIdArraysResponse :: DescribeRAIdArraysResponse -> TestTree
testDescribeRAIdArraysResponse = res
"DescribeRAIdArraysResponse"
"fixture/DescribeRAIdArraysResponse.proto"
opsWorks
(Proxy :: Proxy DescribeRAIdArrays)
testUpdateVolumeResponse :: UpdateVolumeResponse -> TestTree
testUpdateVolumeResponse = res
"UpdateVolumeResponse"
"fixture/UpdateVolumeResponse.proto"
opsWorks
(Proxy :: Proxy UpdateVolume)
testStartInstanceResponse :: StartInstanceResponse -> TestTree
testStartInstanceResponse = res
"StartInstanceResponse"
"fixture/StartInstanceResponse.proto"
opsWorks
(Proxy :: Proxy StartInstance)
| fmapfmapfmap/amazonka | amazonka-opsworks/test/Test/AWS/Gen/OpsWorks.hs | mpl-2.0 | 38,968 | 0 | 7 | 7,544 | 4,057 | 2,363 | 1,694 | 709 | 1 |
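-- The header comment of the generated module above says the actual test
-- selection has to be written by hand at the top level.  A minimal sketch of
-- such a hand-written selection, reusing a few of the generated helpers; it
-- mirrors (and simply uncomments a subset of) the scaffold shown in the
-- module's own comment.  Only zero-argument request constructors are used
-- here, since constructors with required fields would need arguments filled
-- in before this compiles against real fixture data.
-- (This would live in a module importing Test.Tasty, Network.AWS.OpsWorks
-- and the generated Test.AWS.Gen.OpsWorks.)
fixtures :: TestTree
fixtures = testGroup "OpsWorks"
  [ testGroup "request"
      [ testDescribeStacks describeStacks
      , testDescribeApps describeApps
      , testDescribeInstances describeInstances
      ]
  ]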
{- |
Module : Data.PDRS.Input
Copyright : (c) Harm Brouwer and Noortje Venhuizen
License : Apache-2.0
Maintainer : [email protected], [email protected]
Stability : provisional
Portability : portable
PDRS input
-}
module Data.PDRS.Input
(
module Input
) where
import Data.PDRS.Input.Boxer as Input
import Data.PDRS.Input.String as Input
| hbrouwer/pdrt-sandbox | src/Data/PDRS/Input.hs | apache-2.0 | 362 | 0 | 4 | 70 | 32 | 24 | 8 | 5 | 0 |
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2015 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-----------------------------------------------------------------------------
module Data.Struct.Label
( Label
, new
, insertAfter
, delete
, least
, greatest
, cutAfter
, cutBefore
, compareM
) where
import Data.Struct.Internal.Label
| bitemyapp/structs | src/Data/Struct/Label.hs | bsd-2-clause | 574 | 0 | 4 | 97 | 53 | 39 | 14 | 12 | 0 |