Dataset column schema (each pipe-separated row following a source file below uses these columns):

| column | dtype | range / values |
|---|---|---|
| code | string | lengths 5 to 1.03M |
| repo_name | string | lengths 5 to 90 |
| path | string | lengths 4 to 158 |
| license | string | 15 classes |
| size | int64 | 5 to 1.03M |
| n_ast_errors | int64 | 0 to 53.9k |
| ast_max_depth | int64 | 2 to 4.17k |
| n_whitespaces | int64 | 0 to 365k |
| n_ast_nodes | int64 | 3 to 317k |
| n_ast_terminals | int64 | 1 to 171k |
| n_ast_nonterminals | int64 | 1 to 146k |
| loc | int64 | -1 to 37.3k |
| cycloplexity | int64 | -1 to 1.31k |
module Lambia.Combi () where
import Prelude hiding (lookup)
import Lambia.Types hiding (simple,apply)
import qualified Lambia.Types as T (simple,apply)
simple :: Int -> Combi -> (Bool, Combi)
simple n l = s l n where
s :: Combi -> Int -> (Bool, Combi)
s e 0
| size e == size l = (True,e)
| size e < size l = (True,snd $ simple n e)
| otherwise = (False,l)
s e x = case repl e of
(False,_) -> s e 0
(True,e') -> s e' (x-1)
size :: Combi -> Int
size (A x y) = size x + size y
size _ = 1
apply :: Combi -> (Combi, [Combi])
apply i = a i [] where
a x xs = case repl x of
(True, x') -> a x' (x:xs)
(False, _) -> (x,xs)
repl :: Combi -> (Bool, Combi)
repl (A I x) = (True, snd $ repl x)
repl (A (A K x) y) = (True, snd $ repl x)
repl (A (A (A S x) y) z) = (True, A (A x z) (A y z))
repl (A (A (A C x) y) z) = (True, snd $ repl $ A (A x z) y)
repl (A (A (A B x) y) z) = (True, snd $ repl $ A x (A y z))
repl (A x y) = case repl x of
(True, x') -> (True, A x' y)
(False, _) -> case repl y of
(True, y') -> (True, A x y')
(False, _) -> (False, A x y)
repl x = (False, x)
instance Store Combi where
simple = simple
apply = apply
fromSyn = sToC
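-- Hedged usage sketch (not part of the original module), written in terms of
-- the 'Combi' constructors @A@, @S@, @K@ and @I@ from "Lambia.Types": the
-- term @S K K x@ behaves like the identity, so with @x = I@
--
-- > simple 16 (A (A (A S K) K) I)
--
-- should reduce to @(True, I)@ after a couple of 'repl' rewrite steps.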
| phi16/Lambia | src/Lambia/Combi.hs | gpl-3.0 | 1,201 | 0 | 12 | 342 | 785 | 417 | 368 | 38 | 4 |
module Fp05LamTest where
-- compactly written variables
[a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z] =
map (Var . (:[])) "abcdefghijklmnopqrstuvwxyz"
-- application to two arguments
app2 f x y = f :@ x :@ y
-- application to three arguments
app3 f x y z = f :@ x :@ y :@ z
-- combinators
cI = Lam "x" x
cK = Lam "x" $ Lam "y" x
cK_ast = Lam "x" $ Lam "y" y
cB = Lam "f" $ Lam "g" $ Lam "x" $ f :@ (g :@ x)
cS = Lam "f" $ Lam "g" $ Lam "x" $ f :@ x :@ (g :@ x)
-- Boolean values
fls = Lam "t" $ Lam "f" f
tru = Lam "t" $ Lam "f" t
iif = Lam "b" $ Lam "x" $ Lam "y" $ b :@ x :@ y
not' = Lam "b" $ Lam "t" $ Lam "f" $ app2 b f t
and' = Lam "x" $ Lam "y" $ app2 x y fls
or' = Lam "x" $ Lam "y" $ app2 x tru y
-- pairs
pair = Lam "x" $ Lam "y" $ Lam "f" $ app2 f x y
fst' = Lam "p" $ p :@ tru
snd' = Lam "p" $ p :@ fls
-- Church numerals
zero = Lam "s" $ Lam "z" z
one = Lam "s" $ Lam "z" $ s :@ z
two = Lam "s" $ Lam "z" $ s :@ (s :@ z)
three = Lam "s" $ Lam "z" $ s :@ (s :@ (s :@ z))
four = Lam "s" $ Lam "z" $ s :@ (s :@ (s :@ (s :@ z)))
five = Lam "s" $ Lam "z" $ s :@ (s :@ (s :@ (s :@ (s :@ z))))
six = Lam "s" $ Lam "z" $ s :@ (s :@ (s :@ (s :@ (s :@ (s :@ z)))))
seven = Lam "s" $ Lam "z" $ s :@ (s :@ (s :@ (s :@ (s :@ (s :@ (s :@ z))))))
eight = Lam "s" $ Lam "z" $ s :@ (s :@ (s :@ (s :@ (s :@ (s :@ (s :@ (s :@ z)))))))
nine = Lam "s" $ Lam "z" $ s :@ (s :@ (s :@ (s :@ (s :@ (s :@ (s :@ (s :@ (s :@ z))))))))
ten = Lam "s" $ Lam "z" $ s :@ (s :@ (s :@ (s :@ (s :@ (s :@ (s :@ (s :@ (s :@ (s :@ z)))))))))
iszro = Lam "n" $ n :@ (Lam "x" fls) :@ tru
suc = Lam "n" $ Lam "s" $ Lam "z" $ s :@ (n :@ s :@ z)
plus = Lam "m" $ Lam "n" $ Lam "s" $ Lam "z" $ app2 m s (app2 n s z)
mult = Lam "m" $ Lam "n" $ Lam "s" $ m :@ (n :@ s)
pow = Lam "m" $ Lam "n" $ n :@ m
omega = Lam "x" $ x :@ x
zp = pair :@ zero :@ zero
sp = Lam "p" $ pair :@ (snd' :@ p) :@ (suc :@ (snd' :@ p))
pred' = Lam "m" $ fst' :@ (m :@ sp :@ zp)
-- factorial
zf = pair :@ one :@ zero
sf = Lam "p" $ pair :@ (mult :@ (fst' :@ p) :@ (suc :@ (snd' :@ p))) :@ (suc :@ (snd' :@ p))
fac = Lam "m" $ fst' :@ (m :@ sf :@ zf)
-- general scheme of primitive recursion
xz = Lam "x" $ pair :@ x :@ zero
fs = Lam "f" $ Lam "p" $ pair :@ (f :@ (fst' :@ p) :@ (snd' :@ p)) :@ (suc :@ (snd' :@ p))
rec = Lam "m" $ Lam "f" $ Lam "x" $ fst' :@ (m :@ (fs :@ f) :@ (xz :@ x))
pred'' = Lam "n" $ rec :@ n :@ cK_ast :@ zero
minus = Lam "k" $ Lam "l" $ l :@ pred' :@ k
lt = Lam "n" $ Lam "m" $ not' :@ (iszro :@ (minus :@ m :@ n))
ge = Lam "n" $ Lam "m" $ iszro :@ (app2 minus m n)
gt = Lam "n" $ Lam "m" $ not' :@ (iszro :@ (app2 minus n m))
le = Lam "n" $ Lam "m" $ iszro :@ (app2 minus n m)
eq = Lam "n" $ Lam "m" $ and' :@ (le :@ m :@ n) :@ (ge :@ m :@ n)
fac'step = Lam "u" $ Lam "v" $ app2 mult u (suc :@ v)
fac' = Lam "n" $ app3 rec n fac'step one
-- Y combinator
cY = Lam "f" $ (Lam "z" $ f :@ (z :@ z)) :@ (Lam "z" $ f :@ (z :@ z))
cTheta = aa :@ aa
where aa = Lam "a" $ Lam "b" $ b :@ (a :@ a :@ b)
fac''step = Lam "f" $ Lam "n" $ iif :@ (iszro :@ n) :@ one :@ (mult :@ n :@ (f :@ (pred' :@ n)))
fac'' = cY :@ fac''step
fac''' = cTheta :@ fac''step
-- lists
nil = Lam "c" $ Lam "n" n
cons = Lam "e" $ Lam "l" $ Lam "c" $ Lam "n" $ app2 c e (app2 l c n)
l532 = app2 cons five (app2 cons three (app2 cons two nil))
l2 = Lam "c" $ Lam "n" $ c :@ two :@ n
empty = Lam "l" $ app2 l (Lam "h" $ Lam "t" fls) tru
length' = Lam "l" $ app2 l (Lam "x" $ Lam "y" $ suc :@ y) zero
length'' = Lam "l" $ app2 l (Lam "y" $ suc) zero
head' = Lam "l" $ app2 l cK cI
zpl = app2 pair nil nil
spl = Lam "e" $ Lam "p" $ app2 pair (snd' :@ p) (app2 cons e (snd' :@ p))
tail' = Lam "l" $ fst' :@ (app2 l spl zpl)
sum' = Lam "l" $ app2 l plus zero
sum'' = Lam "l" $ app2 l (Lam "h" $ Lam "t" $ app2 plus h t) zero
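-- example terms (not in the original file): the sum and the length of the
-- sample list l532; they should normalise to the Church numerals ten and
-- three respectively
sumL532 = sum' :@ l532
lenL532 = length' :@ l532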
| ItsLastDay/academic_university_2016-2018 | subjects/Haskell/5/Fp05LamTest.hs | gpl-3.0 | 3,942 | 0 | 23 | 1,225 | 2,471 | 1,264 | 1,207 | 73 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.RegionBackendServices.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates the specified regional BackendService resource with the data
-- included in the request. There are several restrictions and guidelines
-- to keep in mind when updating a backend service. Read Restrictions and
-- Guidelines for more information. This method supports patch semantics.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.regionBackendServices.patch@.
module Network.Google.Resource.Compute.RegionBackendServices.Patch
(
-- * REST Resource
RegionBackendServicesPatchResource
-- * Creating a Request
, regionBackendServicesPatch
, RegionBackendServicesPatch
-- * Request Lenses
, rbspProject
, rbspPayload
, rbspRegion
, rbspBackendService
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.regionBackendServices.patch@ method which the
-- 'RegionBackendServicesPatch' request conforms to.
type RegionBackendServicesPatchResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"backendServices" :>
Capture "backendService" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] BackendService :>
Patch '[JSON] Operation
-- | Updates the specified regional BackendService resource with the data
-- included in the request. There are several restrictions and guidelines
-- to keep in mind when updating a backend service. Read Restrictions and
-- Guidelines for more information. This method supports patch semantics.
--
-- /See:/ 'regionBackendServicesPatch' smart constructor.
data RegionBackendServicesPatch = RegionBackendServicesPatch'
{ _rbspProject :: !Text
, _rbspPayload :: !BackendService
, _rbspRegion :: !Text
, _rbspBackendService :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RegionBackendServicesPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rbspProject'
--
-- * 'rbspPayload'
--
-- * 'rbspRegion'
--
-- * 'rbspBackendService'
regionBackendServicesPatch
:: Text -- ^ 'rbspProject'
-> BackendService -- ^ 'rbspPayload'
-> Text -- ^ 'rbspRegion'
-> Text -- ^ 'rbspBackendService'
-> RegionBackendServicesPatch
regionBackendServicesPatch pRbspProject_ pRbspPayload_ pRbspRegion_ pRbspBackendService_ =
RegionBackendServicesPatch'
{ _rbspProject = pRbspProject_
, _rbspPayload = pRbspPayload_
, _rbspRegion = pRbspRegion_
, _rbspBackendService = pRbspBackendService_
}
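-- Hedged usage sketch (not part of the generated module): building the
-- request from a 'BackendService' payload assembled elsewhere; the project,
-- region and backend service names below are placeholders.
--
-- > regionBackendServicesPatch "my-project" myBackendService
-- >   "us-central1" "my-backend-service"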
-- | Project ID for this request.
rbspProject :: Lens' RegionBackendServicesPatch Text
rbspProject
= lens _rbspProject (\ s a -> s{_rbspProject = a})
-- | Multipart request metadata.
rbspPayload :: Lens' RegionBackendServicesPatch BackendService
rbspPayload
= lens _rbspPayload (\ s a -> s{_rbspPayload = a})
-- | Name of the region scoping this request.
rbspRegion :: Lens' RegionBackendServicesPatch Text
rbspRegion
= lens _rbspRegion (\ s a -> s{_rbspRegion = a})
-- | Name of the BackendService resource to update.
rbspBackendService :: Lens' RegionBackendServicesPatch Text
rbspBackendService
= lens _rbspBackendService
(\ s a -> s{_rbspBackendService = a})
instance GoogleRequest RegionBackendServicesPatch
where
type Rs RegionBackendServicesPatch = Operation
type Scopes RegionBackendServicesPatch =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient RegionBackendServicesPatch'{..}
= go _rbspProject _rbspRegion _rbspBackendService
(Just AltJSON)
_rbspPayload
computeService
where go
= buildClient
(Proxy :: Proxy RegionBackendServicesPatchResource)
mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/RegionBackendServices/Patch.hs | mpl-2.0 | 4,937 | 0 | 17 | 1,108 | 549 | 328 | 221 | 88 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.S3.GetBucketLifecycleConfiguration
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the lifecycle configuration information set on the bucket.
--
-- /See:/ <http://docs.aws.amazon.com/AmazonS3/latest/API/GetBucketLifecycleConfiguration.html AWS API Reference> for GetBucketLifecycleConfiguration.
module Network.AWS.S3.GetBucketLifecycleConfiguration
(
-- * Creating a Request
getBucketLifecycleConfiguration
, GetBucketLifecycleConfiguration
-- * Request Lenses
, gblcBucket
-- * Destructuring the Response
, getBucketLifecycleConfigurationResponse
, GetBucketLifecycleConfigurationResponse
-- * Response Lenses
, gblcrsRules
, gblcrsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.S3.Types
import Network.AWS.S3.Types.Product
-- | /See:/ 'getBucketLifecycleConfiguration' smart constructor.
newtype GetBucketLifecycleConfiguration = GetBucketLifecycleConfiguration'
{ _gblcBucket :: BucketName
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetBucketLifecycleConfiguration' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gblcBucket'
getBucketLifecycleConfiguration
:: BucketName -- ^ 'gblcBucket'
-> GetBucketLifecycleConfiguration
getBucketLifecycleConfiguration pBucket_ =
GetBucketLifecycleConfiguration'
{ _gblcBucket = pBucket_
}
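-- Hedged usage sketch (not part of the generated module): the request needs
-- only the bucket name (a placeholder here, via @OverloadedStrings@); after
-- running it with your preferred "Network.AWS" runner, the lifecycle rules
-- can be read back through 'gblcrsRules'.
--
-- > getBucketLifecycleConfiguration "my-bucket"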
-- | Undocumented member.
gblcBucket :: Lens' GetBucketLifecycleConfiguration BucketName
gblcBucket = lens _gblcBucket (\ s a -> s{_gblcBucket = a});
instance AWSRequest GetBucketLifecycleConfiguration
where
type Rs GetBucketLifecycleConfiguration =
GetBucketLifecycleConfigurationResponse
request = get s3
response
= receiveXML
(\ s h x ->
GetBucketLifecycleConfigurationResponse' <$>
(may (parseXMLList "Rule") x) <*>
(pure (fromEnum s)))
instance ToHeaders GetBucketLifecycleConfiguration
where
toHeaders = const mempty
instance ToPath GetBucketLifecycleConfiguration where
toPath GetBucketLifecycleConfiguration'{..}
= mconcat ["/", toBS _gblcBucket]
instance ToQuery GetBucketLifecycleConfiguration
where
toQuery = const (mconcat ["lifecycle"])
-- | /See:/ 'getBucketLifecycleConfigurationResponse' smart constructor.
data GetBucketLifecycleConfigurationResponse = GetBucketLifecycleConfigurationResponse'
{ _gblcrsRules :: !(Maybe [LifecycleRule])
, _gblcrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetBucketLifecycleConfigurationResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gblcrsRules'
--
-- * 'gblcrsResponseStatus'
getBucketLifecycleConfigurationResponse
:: Int -- ^ 'gblcrsResponseStatus'
-> GetBucketLifecycleConfigurationResponse
getBucketLifecycleConfigurationResponse pResponseStatus_ =
GetBucketLifecycleConfigurationResponse'
{ _gblcrsRules = Nothing
, _gblcrsResponseStatus = pResponseStatus_
}
-- | Undocumented member.
gblcrsRules :: Lens' GetBucketLifecycleConfigurationResponse [LifecycleRule]
gblcrsRules = lens _gblcrsRules (\ s a -> s{_gblcrsRules = a}) . _Default . _Coerce;
-- | The response status code.
gblcrsResponseStatus :: Lens' GetBucketLifecycleConfigurationResponse Int
gblcrsResponseStatus = lens _gblcrsResponseStatus (\ s a -> s{_gblcrsResponseStatus = a});
| olorin/amazonka | amazonka-s3/gen/Network/AWS/S3/GetBucketLifecycleConfiguration.hs | mpl-2.0 | 4,357 | 0 | 14 | 822 | 544 | 325 | 219 | 69 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.PublicAdvertisedPrefixes.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the PublicAdvertisedPrefixes for a project.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.publicAdvertisedPrefixes.list@.
module Network.Google.Resource.Compute.PublicAdvertisedPrefixes.List
(
-- * REST Resource
PublicAdvertisedPrefixesListResource
-- * Creating a Request
, publicAdvertisedPrefixesList
, PublicAdvertisedPrefixesList
-- * Request Lenses
, paplReturnPartialSuccess
, paplOrderBy
, paplProject
, paplFilter
, paplPageToken
, paplMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.publicAdvertisedPrefixes.list@ method which the
-- 'PublicAdvertisedPrefixesList' request conforms to.
type PublicAdvertisedPrefixesListResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"publicAdvertisedPrefixes" :>
QueryParam "returnPartialSuccess" Bool :>
QueryParam "orderBy" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :>
Get '[JSON] PublicAdvertisedPrefixList
-- | Lists the PublicAdvertisedPrefixes for a project.
--
-- /See:/ 'publicAdvertisedPrefixesList' smart constructor.
data PublicAdvertisedPrefixesList =
PublicAdvertisedPrefixesList'
{ _paplReturnPartialSuccess :: !(Maybe Bool)
, _paplOrderBy :: !(Maybe Text)
, _paplProject :: !Text
, _paplFilter :: !(Maybe Text)
, _paplPageToken :: !(Maybe Text)
, _paplMaxResults :: !(Textual Word32)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PublicAdvertisedPrefixesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'paplReturnPartialSuccess'
--
-- * 'paplOrderBy'
--
-- * 'paplProject'
--
-- * 'paplFilter'
--
-- * 'paplPageToken'
--
-- * 'paplMaxResults'
publicAdvertisedPrefixesList
:: Text -- ^ 'paplProject'
-> PublicAdvertisedPrefixesList
publicAdvertisedPrefixesList pPaplProject_ =
PublicAdvertisedPrefixesList'
{ _paplReturnPartialSuccess = Nothing
, _paplOrderBy = Nothing
, _paplProject = pPaplProject_
, _paplFilter = Nothing
, _paplPageToken = Nothing
, _paplMaxResults = 500
}
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
paplReturnPartialSuccess :: Lens' PublicAdvertisedPrefixesList (Maybe Bool)
paplReturnPartialSuccess
= lens _paplReturnPartialSuccess
(\ s a -> s{_paplReturnPartialSuccess = a})
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
paplOrderBy :: Lens' PublicAdvertisedPrefixesList (Maybe Text)
paplOrderBy
= lens _paplOrderBy (\ s a -> s{_paplOrderBy = a})
-- | Project ID for this request.
paplProject :: Lens' PublicAdvertisedPrefixesList Text
paplProject
= lens _paplProject (\ s a -> s{_paplProject = a})
-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
paplFilter :: Lens' PublicAdvertisedPrefixesList (Maybe Text)
paplFilter
= lens _paplFilter (\ s a -> s{_paplFilter = a})
-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
paplPageToken :: Lens' PublicAdvertisedPrefixesList (Maybe Text)
paplPageToken
= lens _paplPageToken
(\ s a -> s{_paplPageToken = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
paplMaxResults :: Lens' PublicAdvertisedPrefixesList Word32
paplMaxResults
= lens _paplMaxResults
(\ s a -> s{_paplMaxResults = a})
. _Coerce
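-- Hedged usage sketch (not part of the generated module): narrowing the
-- listing with the lenses above. The operators @(&)@, @(.~)@ and @(?~)@ come
-- from a lens library such as Control.Lens, and the project name and filter
-- expression are placeholders.
--
-- > publicAdvertisedPrefixesList "my-project"
-- >   & paplFilter ?~ "name != example-prefix"
-- >   & paplMaxResults .~ 100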
instance GoogleRequest PublicAdvertisedPrefixesList
where
type Rs PublicAdvertisedPrefixesList =
PublicAdvertisedPrefixList
type Scopes PublicAdvertisedPrefixesList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient PublicAdvertisedPrefixesList'{..}
= go _paplProject _paplReturnPartialSuccess
_paplOrderBy
_paplFilter
_paplPageToken
(Just _paplMaxResults)
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy PublicAdvertisedPrefixesListResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/PublicAdvertisedPrefixes/List.hs | mpl-2.0 | 7,509 | 0 | 19 | 1,580 | 756 | 452 | 304 | 112 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetGrpcProxies.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the specified TargetGrpcProxy resource in the given scope.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetGrpcProxies.get@.
module Network.Google.Resource.Compute.TargetGrpcProxies.Get
(
-- * REST Resource
TargetGrpcProxiesGetResource
-- * Creating a Request
, targetGrpcProxiesGet
, TargetGrpcProxiesGet
-- * Request Lenses
, tgpgProject
, tgpgTargetGrpcProxy
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetGrpcProxies.get@ method which the
-- 'TargetGrpcProxiesGet' request conforms to.
type TargetGrpcProxiesGetResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"targetGrpcProxies" :>
Capture "targetGrpcProxy" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] TargetGrpcProxy
-- | Returns the specified TargetGrpcProxy resource in the given scope.
--
-- /See:/ 'targetGrpcProxiesGet' smart constructor.
data TargetGrpcProxiesGet =
TargetGrpcProxiesGet'
{ _tgpgProject :: !Text
, _tgpgTargetGrpcProxy :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TargetGrpcProxiesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tgpgProject'
--
-- * 'tgpgTargetGrpcProxy'
targetGrpcProxiesGet
:: Text -- ^ 'tgpgProject'
-> Text -- ^ 'tgpgTargetGrpcProxy'
-> TargetGrpcProxiesGet
targetGrpcProxiesGet pTgpgProject_ pTgpgTargetGrpcProxy_ =
TargetGrpcProxiesGet'
{_tgpgProject = pTgpgProject_, _tgpgTargetGrpcProxy = pTgpgTargetGrpcProxy_}
-- | Project ID for this request.
tgpgProject :: Lens' TargetGrpcProxiesGet Text
tgpgProject
= lens _tgpgProject (\ s a -> s{_tgpgProject = a})
-- | Name of the TargetGrpcProxy resource to return.
tgpgTargetGrpcProxy :: Lens' TargetGrpcProxiesGet Text
tgpgTargetGrpcProxy
= lens _tgpgTargetGrpcProxy
(\ s a -> s{_tgpgTargetGrpcProxy = a})
instance GoogleRequest TargetGrpcProxiesGet where
type Rs TargetGrpcProxiesGet = TargetGrpcProxy
type Scopes TargetGrpcProxiesGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient TargetGrpcProxiesGet'{..}
= go _tgpgProject _tgpgTargetGrpcProxy (Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy TargetGrpcProxiesGetResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/TargetGrpcProxies/Get.hs | mpl-2.0 | 3,591 | 0 | 15 | 792 | 391 | 235 | 156 | 67 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Instances.DeleteAccessConfig
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes an access config from an instance\'s network interface.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.instances.deleteAccessConfig@.
module Network.Google.Resource.Compute.Instances.DeleteAccessConfig
(
-- * REST Resource
InstancesDeleteAccessConfigResource
-- * Creating a Request
, instancesDeleteAccessConfig
, InstancesDeleteAccessConfig
-- * Request Lenses
, idacRequestId
, idacProject
, idacNetworkInterface
, idacZone
, idacAccessConfig
, idacInstance
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.instances.deleteAccessConfig@ method which the
-- 'InstancesDeleteAccessConfig' request conforms to.
type InstancesDeleteAccessConfigResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
"instances" :>
Capture "instance" Text :>
"deleteAccessConfig" :>
QueryParam "accessConfig" Text :>
QueryParam "networkInterface" Text :>
QueryParam "requestId" Text :>
QueryParam "alt" AltJSON :> Post '[JSON] Operation
-- | Deletes an access config from an instance\'s network interface.
--
-- /See:/ 'instancesDeleteAccessConfig' smart constructor.
data InstancesDeleteAccessConfig =
InstancesDeleteAccessConfig'
{ _idacRequestId :: !(Maybe Text)
, _idacProject :: !Text
, _idacNetworkInterface :: !Text
, _idacZone :: !Text
, _idacAccessConfig :: !Text
, _idacInstance :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'InstancesDeleteAccessConfig' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'idacRequestId'
--
-- * 'idacProject'
--
-- * 'idacNetworkInterface'
--
-- * 'idacZone'
--
-- * 'idacAccessConfig'
--
-- * 'idacInstance'
instancesDeleteAccessConfig
:: Text -- ^ 'idacProject'
-> Text -- ^ 'idacNetworkInterface'
-> Text -- ^ 'idacZone'
-> Text -- ^ 'idacAccessConfig'
-> Text -- ^ 'idacInstance'
-> InstancesDeleteAccessConfig
instancesDeleteAccessConfig pIdacProject_ pIdacNetworkInterface_ pIdacZone_ pIdacAccessConfig_ pIdacInstance_ =
InstancesDeleteAccessConfig'
{ _idacRequestId = Nothing
, _idacProject = pIdacProject_
, _idacNetworkInterface = pIdacNetworkInterface_
, _idacZone = pIdacZone_
, _idacAccessConfig = pIdacAccessConfig_
, _idacInstance = pIdacInstance_
}
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
idacRequestId :: Lens' InstancesDeleteAccessConfig (Maybe Text)
idacRequestId
= lens _idacRequestId
(\ s a -> s{_idacRequestId = a})
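-- Hedged usage sketch (not part of the generated module): attaching a
-- request ID to a freshly built request. The operators @(&)@ and @(?~)@ come
-- from a lens library such as Control.Lens, and every argument value is a
-- placeholder.
--
-- > instancesDeleteAccessConfig "my-project" "nic0" "us-central1-a"
-- >     "External NAT" "my-instance"
-- >   & idacRequestId ?~ "123e4567-e89b-12d3-a456-426614174000"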
-- | Project ID for this request.
idacProject :: Lens' InstancesDeleteAccessConfig Text
idacProject
= lens _idacProject (\ s a -> s{_idacProject = a})
-- | The name of the network interface.
idacNetworkInterface :: Lens' InstancesDeleteAccessConfig Text
idacNetworkInterface
= lens _idacNetworkInterface
(\ s a -> s{_idacNetworkInterface = a})
-- | The name of the zone for this request.
idacZone :: Lens' InstancesDeleteAccessConfig Text
idacZone = lens _idacZone (\ s a -> s{_idacZone = a})
-- | The name of the access config to delete.
idacAccessConfig :: Lens' InstancesDeleteAccessConfig Text
idacAccessConfig
= lens _idacAccessConfig
(\ s a -> s{_idacAccessConfig = a})
-- | The instance name for this request.
idacInstance :: Lens' InstancesDeleteAccessConfig Text
idacInstance
= lens _idacInstance (\ s a -> s{_idacInstance = a})
instance GoogleRequest InstancesDeleteAccessConfig
where
type Rs InstancesDeleteAccessConfig = Operation
type Scopes InstancesDeleteAccessConfig =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient InstancesDeleteAccessConfig'{..}
= go _idacProject _idacZone _idacInstance
(Just _idacAccessConfig)
(Just _idacNetworkInterface)
_idacRequestId
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy InstancesDeleteAccessConfigResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/Instances/DeleteAccessConfig.hs | mpl-2.0 | 5,983 | 0 | 20 | 1,349 | 718 | 424 | 294 | 111 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Games.Rooms.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns invitations to join rooms.
--
-- /See:/ <https://developers.google.com/games/services/ Google Play Game Services API Reference> for @games.rooms.list@.
module Network.Google.Resource.Games.Rooms.List
(
-- * REST Resource
RoomsListResource
-- * Creating a Request
, roomsList
, RoomsList
-- * Request Lenses
, rConsistencyToken
, rLanguage
, rPageToken
, rMaxResults
) where
import Network.Google.Games.Types
import Network.Google.Prelude
-- | A resource alias for @games.rooms.list@ method which the
-- 'RoomsList' request conforms to.
type RoomsListResource =
"games" :>
"v1" :>
"rooms" :>
QueryParam "consistencyToken" (Textual Int64) :>
QueryParam "language" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Int32) :>
QueryParam "alt" AltJSON :> Get '[JSON] RoomList
-- | Returns invitations to join rooms.
--
-- /See:/ 'roomsList' smart constructor.
data RoomsList = RoomsList'
{ _rConsistencyToken :: !(Maybe (Textual Int64))
, _rLanguage :: !(Maybe Text)
, _rPageToken :: !(Maybe Text)
, _rMaxResults :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RoomsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rConsistencyToken'
--
-- * 'rLanguage'
--
-- * 'rPageToken'
--
-- * 'rMaxResults'
roomsList
:: RoomsList
roomsList =
RoomsList'
{ _rConsistencyToken = Nothing
, _rLanguage = Nothing
, _rPageToken = Nothing
, _rMaxResults = Nothing
}
-- | The last-seen mutation timestamp.
rConsistencyToken :: Lens' RoomsList (Maybe Int64)
rConsistencyToken
= lens _rConsistencyToken
(\ s a -> s{_rConsistencyToken = a})
. mapping _Coerce
-- | The preferred language to use for strings returned by this method.
rLanguage :: Lens' RoomsList (Maybe Text)
rLanguage
= lens _rLanguage (\ s a -> s{_rLanguage = a})
-- | The token returned by the previous request.
rPageToken :: Lens' RoomsList (Maybe Text)
rPageToken
= lens _rPageToken (\ s a -> s{_rPageToken = a})
-- | The maximum number of rooms to return in the response, used for paging.
-- For any response, the actual number of rooms to return may be less than
-- the specified maxResults.
rMaxResults :: Lens' RoomsList (Maybe Int32)
rMaxResults
= lens _rMaxResults (\ s a -> s{_rMaxResults = a}) .
mapping _Coerce
instance GoogleRequest RoomsList where
type Rs RoomsList = RoomList
type Scopes RoomsList =
'["https://www.googleapis.com/auth/games",
"https://www.googleapis.com/auth/plus.login"]
requestClient RoomsList'{..}
= go _rConsistencyToken _rLanguage _rPageToken
_rMaxResults
(Just AltJSON)
gamesService
where go
= buildClient (Proxy :: Proxy RoomsListResource)
mempty
| rueshyna/gogol | gogol-games/gen/Network/Google/Resource/Games/Rooms/List.hs | mpl-2.0 | 3,885 | 0 | 15 | 951 | 589 | 343 | 246 | 82 | 1 |
module GLuaFixer.BadSequenceFinder (sequenceWarnings, checkFromString) where
import GLua.AG.Token
import GLua.PSParser
import GLuaFixer.LintMessage
import GLuaFixer.LintSettings
import Text.Parsec
import qualified GLua.PSLexer as PSL
-- | Satisfy for normal tokens
pTSatisfy :: (Token -> Bool) -> AParser MToken
pTSatisfy f = pMSatisfy f'
where
f' :: MToken -> Bool
f' (MToken _ t) = f t
-- | Parse an identifier
ident :: String -> AParser MToken
ident s = pMSatisfy isIdent
where
isIdent :: MToken -> Bool
isIdent (MToken _ (Identifier s')) = s == s'
isIdent _ = False
-- | Parse any kind of whitespace
whitespace :: AParser MToken
whitespace = pTSatisfy isWhitespace
-- | Whether a token is whitespace
isWhitespace :: Token -> Bool
isWhitespace (Whitespace _) = True
isWhitespace _ = False
-- | Parse anything that isn't whitespace
notWhitespace :: AParser MToken
notWhitespace = pMSatisfy isNotWhitespace
where
isNotWhitespace :: MToken -> Bool
isNotWhitespace (MToken _ (Whitespace _)) = False
isNotWhitespace _ = True
-- | Warnings for deprecated library functions
libraryWarnings :: String -> AParser String -> AParser String
libraryWarnings s p = do
ident s
pMTok Dot
p
-- | Warnings for the ai library
aiWarnings :: AParser String
aiWarnings = libraryWarnings "ai" $
const "The function is broken" <$> ident "GetScheduleID" <|>
const "The function is broken" <$> ident "GetTaskID"
-- | Warnings for the math library
mathWarnings :: AParser String
mathWarnings = libraryWarnings "math" $
const "Use math.Distance instead" <$> ident "Dist" <|>
const "Use math.fmod instead" <$> ident "mod"
-- | Warnings for the spawnmenu library
spawnmenuWarnings :: AParser String
spawnmenuWarnings = libraryWarnings "spawnmenu" $
const "Use spawnmenu.SaveToTextFiles instead" <$> ident "DoSaveToTextFiles" <|>
const "Use spawnmenu.PopulateFromTextFiles instead" <$> ident "PopulateFromEngineTextFiles" <|>
const "The function is broken" <$> ident "SwitchToolTab"
-- | Warnings for the string library
stringWarnings :: AParser String
stringWarnings = libraryWarnings "string" $
const "Use either string.sub(str, index, index) or str[index]" <$> ident "GetChar" <|>
const "Use string.gmatch instead" <$> ident "gfind"
-- | Warnings for the surface library
surfaceWarnings :: AParser String
surfaceWarnings = libraryWarnings "surface" $
const "Use ScrH instead" <$> ident "ScreenHeight" <|>
const "Use ScrW instead" <$> ident "ScreenWidth"
-- | Warnings for the table library
tableWarnings :: AParser String
tableWarnings = libraryWarnings "table" $
const "Use ipairs or something instead" <$> (
ident "FindNext" <|>
ident "FindPrev" <|>
ident "foreach" <|>
ident "ForEach" <|>
ident "foreachi"
) <|>
const "Use next instead" <$> (
ident "GetFirstKey" <|>
ident "GetFirstValue"
) <|>
const "Use #tbl instead" <$> ident "GetLastKey" <|>
const "Use tbl[#tbl] instead" <$> ident "GetLastValue" <|>
const "Use #tbl instead" <$> ident "getn"
-- | Warnings for the timer library
timerWarnings :: AParser String
timerWarnings = libraryWarnings "timer" $
const "The function is broken" <$> ident "Check" <|>
const "Use timer.Remove instead" <$> ident "Destroy"
-- | Warnings for the umsg library
umsgWarnings :: AParser String
umsgWarnings = libraryWarnings "umsg" $
const "Use net messages." <$> ident "Start"
-- | Warnings for the util library
utilWarnings :: AParser String
utilWarnings = libraryWarnings "util" $
const "Use tobool, without the util bit" <$> ident "tobool" <|>
const "The function is broken" <$> ident "TraceEntityHull"
-- | Warnings for things to do with self
selfWarnings :: AParser String
selfWarnings = libraryWarnings "self" $
const "Use self:GetOwner() instead" <$> ident "Owner"
-- | Warnings for meta functions
metaFuncWarnings :: AParser String
metaFuncWarnings = do
pMTok Colon
-- CLuaLocomotion functions
const "Use :IsUsingLadder instead" <$> ident "IsAscendingOrDescendingLadder" <|>
-- Panel functions
const "Use :GetPaintBackground instead" <$> ident "GetDrawBackground" <|>
const "Use :SetPaintBackground instead" <$> ident "SetDrawBackground" <|>
const "The function is broken" <$> ident "AddText" <|>
const "Only used by deprecated Derma controls" <$> ident "PostMessage" <|>
const "Only used in deprecated Derma controls" <$> ident "SetActionFunction" <|>
const "Use :SetKeyboardInputEnabled instead" <$> ident "SetKeyBoardInputEnabled" <|>
const "The function is broken" <$> ident "SetPaintFunction" <|>
const "Use :SetTooltip instead, notice the lowercase fucking t" <$> ident "SetToolTip" <|>
const "use :SetTooltipPanel instead, notice the lowercase fucking t" <$> ident "SetToolTipPanel" <|>
const "Use :IsValid instead" <$> ident "Valid" <|>
-- Entity functions
const "Use :GetHitBoxBone instead, note the capital fucking B" <$> ident "GetHitboxBone" <|>
const "Use :GetNWAngle instead" <$> ident "GetNetworkedAngle" <|>
const "Use :GetNWBool instead" <$> ident "GetNetworkedBool" <|>
const "Use :GetNWEntity instead" <$> ident "GetNetworkedEntity" <|>
const "Use :GetNWFloat instead" <$> ident "GetNetworkedFloat" <|>
const "Use :GetNWInt instead" <$> ident "GetNetworkedInt" <|>
const "Use :GetNWString instead" <$> ident "GetNetworkedString" <|>
const "Use :GetNWVarProxy instead" <$> ident "GetNetworkedVarProxy" <|>
const "Use :GetNWVarTable instead" <$> ident "GetNetworkedVarTable" <|>
const "Use :GetNWVector instead" <$> ident "GetNetworkedVector" <|>
const "The function is broken" <$> ident "GetWorkshopID" <|>
--const "Use :SetParent instead" <$> ident "SetAttachment" <|>
const "Use :SetNWAngle instead" <$> ident "SetNetworkedAngle" <|>
const "Use :SetNWBool instead" <$> ident "SetNetworkedBool" <|>
const "Use :SetNWEntity instead" <$> ident "SetNetworkedEntity" <|>
const "Use :SetNWFloat instead" <$> ident "SetNetworkedFloat" <|>
const "Use :SetNWInt instead" <$> ident "SetNetworkedInt" <|>
const "Use :SetNWString instead" <$> ident "SetNetworkedString" <|>
const "Use :SetNWVarProxy instead" <$> ident "SetNetworkedVarProxy" <|>
const "Use :SetNWVector instead" <$> ident "SetNetworkedVector" <|>
-- Player functions
const "Use :GetViewPunchAngles instead" <$> ident "GetPunchAngle" <|>
-- Material functions
const "The function is broken" <$> ident "SetShader" <|>
-- Vector functions
const "Use :Dot instead" <$> ident "DotProduct"
-- | Parser for all deprecated sequences of tokens
deprecatedSequence :: LintSettings -> AParser String
deprecatedSequence opts = if not (lint_deprecated opts) then parserZero else (++) "Deprecated: " <$> (
-- Deprecated meta functions
try metaFuncWarnings <|>
-- Library functions
try aiWarnings <|>
try mathWarnings <|>
try spawnmenuWarnings <|>
try stringWarnings <|>
try surfaceWarnings <|>
try tableWarnings <|>
try timerWarnings <|>
try umsgWarnings <|>
try utilWarnings <|>
try selfWarnings <|>
-- Global functions
const "Use collectgarbage(\"count\") instead" <$> ident "gcinfo" <|>
const "Use ConVar objects instead" <$> ident "GetConVarNumber" <|>
const "Use ConVar objects instead" <$> ident "GetConVarString" <|>
const "Use AddCSLuaFile in the file itself instead" <$> ident "IncludeCS" <|>
const "Use ScreenScale instead" <$> ident "SScale" <|>
const "Use IsUselessModel instead" <$> ident "UTIL_IsUselessModel" <|>
const "Use IsValid instead" <$> ident "ValidPanel" <|>
const "Use net messages." <$> ident "SendUserMessage" )
-- | Parser for all beginner mistakes
beginnerMistakeSequence :: LintSettings -> AParser String
beginnerMistakeSequence opts = if not (lint_beginnerMistakes opts) then parserZero else
try (const "There's little fucking reason to use ';' in the first place, don't use it twice in a row" <$> pMTok Semicolon <* pMTok Semicolon) <|>
try (const "The server already knows who sent the net message, use the first parameter of net.Receive" <$> do
ident "net"
pMTok Dot
ident "WriteEntity"
pMTok LRound
optional whitespace
ident "LocalPlayer"
optional whitespace
pMTok LRound
optional whitespace
pMTok RRound
) <|>
try (const "Jesus christ fuck off already" <$> do
pMTok While
whitespace
pMTok TTrue
whitespace
pMTok Do
whitespace
pMTok End
)
whiteSpaceStyleSequence :: LintSettings -> AParser String
whiteSpaceStyleSequence opts = if not (lint_whitespaceStyle opts) then parserZero else (++) "Style: " <$> (
try (const "Please put some whitespace after 'if'" <$> pMTok If <* notFollowedBy whitespace) <|>
try (const "Please put some whitespace after 'elseif'" <$> pMTok Elseif <* notFollowedBy whitespace) <|>
try (const "Please put some whitespace after 'while'" <$> pMTok While <* notFollowedBy whitespace) <|>
try (const "Please put some whitespace after 'until'" <$> pMTok Until <* notFollowedBy whitespace) <|>
try (const "Please put some whitespace after ')'" <$> do
pMTok RRound
pTSatisfy (\t -> not (isWhitespace t) && t `notElem` [Colon, RRound, LRound, LSquare, RSquare, LCurly, RCurly, Comma, Dot, Semicolon])
) <|>
try (const "Please put some whitespace before the operator" <$> do
notWhitespace
choice [pMTok Plus, pMTok Multiply, pMTok Divide, pMTok Modulus, pMTok TEq, pMTok TNEq, pMTok TCNEq, pMTok TLEQ, pMTok TGEQ, pMTok TLT, pMTok TGT, pMTok Equals, pMTok Concatenate, pMTok And, pMTok CAnd, pMTok Or, pMTok COr]
) <|>
try (const "Please put some whitespace after the operator" <$> do
choice [pMTok Plus, pMTok Multiply, pMTok Divide, pMTok Modulus, pMTok TEq, pMTok TNEq, pMTok TCNEq, pMTok TLEQ, pMTok TGEQ, pMTok TLT, pMTok TGT, pMTok Equals, pMTok Concatenate, pMTok And, pMTok CAnd, pMTok Or, pMTok COr]
notWhitespace
)
)
-- | Parser for all profanity
profanitySequence :: LintSettings -> AParser String
profanitySequence opts = if not (lint_profanity opts) then parserZero else const "Watch your profanity" <$> (
ident "anus" <|>
ident "bitch" <|>
ident "cock" <|>
ident "cocks" <|>
ident "cunt" <|>
ident "dick" <|>
ident "dicks" <|>
ident "fuck" <|>
ident "fucking" <|>
ident "goddamnit" <|>
ident "knob" <|>
ident "knobs" <|>
ident "motherfucker" <|>
ident "nigger" <|>
ident "niggers" <|>
ident "niggertits" <|>
ident "nipple" <|>
ident "shit"
)
-- | Parses for any bad sequence
badSequence :: LintSettings -> AParser String
badSequence opts = deprecatedSequence opts <|>
profanitySequence opts <|>
beginnerMistakeSequence opts <|>
whiteSpaceStyleSequence opts
-- | Creates a warning for a certain sequence at any position
badSequenceWarning :: Region -> String -> [FilePath -> LintMessage] -> [FilePath -> LintMessage]
badSequenceWarning pos message = (:) (LintMessage LintWarning pos message)
-- | Searches for all the bad sequences
badSequenceParser :: LintSettings -> AParser [String -> LintMessage]
badSequenceParser opts =
-- A bad sequence
annotated badSequenceWarning (badSequence opts) <*> badSequenceParser opts <|>
-- Continue searching
anyToken *> badSequenceParser opts <|>
-- end of input
return []
-- | Returns all the warnings for a lexicon
sequenceWarnings :: LintSettings -> [MToken] -> [String -> LintMessage]
sequenceWarnings opts mts = case execAParser "source.lua" (badSequenceParser opts) mts of
Left _ -> error "[Error] line 1, column 1: Sequence finding error! Report an issue!"
Right warnings -> warnings
-- | Helper function: check from string
checkFromString :: AParser a -> String -> Either ParseError a
checkFromString p inp = do
lexed <- PSL.execParseTokens inp
execAParser "source.lua" p lexed
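-- Hedged usage sketch (not part of the original module), assuming some
-- @settings :: LintSettings@ with the deprecation lint enabled:
--
-- > checkFromString (deprecatedSequence settings) "math.Dist(a, b)"
--
-- should yield @Right "Deprecated: Use math.Distance instead"@.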
| FPtje/GLuaFixer | src/GLuaFixer/BadSequenceFinder.hs | lgpl-2.1 | 15,750 | 0 | 72 | 6,084 | 2,725 | 1,282 | 1,443 | 219 | 2 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances #-}
-- |
-- Module : Data.Extra
-- Copyright : (c) Alexandru Scvortov 2009
-- License : LGPL (see LICENSE file)
-- Maintainer : [email protected]
--
module Data.Extra
( merge
, randint
, randdigit
, takeRandom
, Paddable(..)
, makeGroupedSum, mgsConfigTime, mgsConfigDiskSpace
, showPrettyFilesize, showPrettyTime
) where
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Char ( chr, ord )
import Data.List (nub)
import System.Random (getStdRandom, randomR)
import Numeric ( readHex, showHex )
-- | Merge two SORTED lists discarding duplicates.
merge :: Ord a => [a] -> [a] -> [a]
merge xs ys = nub $ go [] xs ys
where
go acc xs [] = reverse acc ++ xs
go acc [] ys = reverse acc ++ ys
go acc allx@(x:xs) ally@(y:ys)
| x <= y = go (x:acc) xs ally
| otherwise = go (y:acc) allx ys
-- FIXME: Try using MonadRandom (see Hackage) instead of getStdRandom
-- and co. in the Data.Extra functions.
-- | Return an int from inclusive range.
randint :: (Int, Int) -> IO Int
randint (l, u) = getStdRandom (randomR (l, u))
-- | Return a random digit.
randdigit :: IO Char
randdigit = getStdRandom (randomR ('0', '9'))
-- | Return a random element from a list.
takeRandom :: [a] -> IO (Maybe a)
takeRandom xs = do
if length xs > 0
then do
i <- randint (0, length xs - 1)
return $ Just $ xs !! i
else
return Nothing
-- FIXME: Is class Paddable really necessary?
class Paddable s c | s -> c where
padFront :: Int -> c -> s -> s
padBack :: Int -> c -> s -> s
instance Paddable L.ByteString Char where
padFront n c s = let l = fromIntegral . L.length $ s
in if l >= n
then s
else L.pack (replicate (fromIntegral (n-l)) c) `L.append` s
padBack n c s = let l = fromIntegral . L.length $ s
in if l >= n
then s
else s `L.append` L.pack (replicate (fromIntegral (n-l)) c)
instance Paddable [a] a where
padFront n c s = let l = length s
in if l >= n
then s
else replicate (n-l) c ++ s
padBack n c s = let l = length s
in if l >= n
then s
else s ++ replicate (n-l) c
mgsConfigDiskSpace = [ (1024000000, "GB")
, (1024000, "MB")
, (1024, "KB")
, (1, "B") ]
mgsConfigTime = [ (86400, "d")
, (3600, "h")
, (60, "min")
, (1, "s") ]
-- | Perform conversions like 1536 -> "1KB 512B" or 367 -> "6min 7s".
makeGroupedSum :: [(Integer, String)] -> Integer -> [(Integer, String)]
makeGroupedSum = go [] 0
where
go acc cur ((i, s):_) 0 = reverse $ upAcc acc cur s
go acc cur os@((i, s):ss) left = if i <= left
then go acc (cur+1) os (left-i)
else go (upAcc acc cur s) 0 ss left
upAcc acc 0 _ = acc
upAcc acc cur s = (cur, s) : acc
showPrettyFilesize = unwords . map (\(i, s) -> show i ++ s) . take 2 . makeGroupedSum mgsConfigDiskSpace
showPrettyTime = tryTail . concat . map (\(i, _) -> ':' : show i) . makeGroupedSum mgsConfigTime
where
tryTail :: [a] -> [a]
tryTail [] = []
tryTail xs = tail xs
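-- Hedged examples (not part of the original module), derived from the
-- definitions above:
--
-- > makeGroupedSum mgsConfigTime 367 == [(6,"min"),(7,"s")]
-- > showPrettyFilesize 1536 == "1KB 512B"
-- > showPrettyTime 367 == "6:7"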
| scvalex/ltorrent | Data/Extra.hs | lgpl-3.0 | 3,526 | 0 | 16 | 1,232 | 1,208 | 659 | 549 | 74 | 4 |
-- | Net module, defining functions to work on a neural network, which is a
-- list of list of neurons
module AI.HNN.Net where
import AI.HNN.Layer
import AI.HNN.Neuron
import Control.Arrow
import Data.List as L
import Data.Vector as V
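-- | A network is considered valid only if it has exactly two layers
-- (one hidden layer and one output layer).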
check :: [[Neuron]] -> Bool
check nss = let l = Prelude.length nss in l > 1 && l < 3
nn :: [[Neuron]] -> [[Neuron]]
nn nss | check nss = nss
| otherwise = error "Invalid nn"
-- * Computation
-- | Computes the output of the given neural net on the given inputs
computeNetU :: [[Neuron]] -> Vector Double -> Vector Double
computeNetU neuralss xs =
computeLayerU (nss !! 1) $ computeLayerU (Prelude.head nss) xs
where nss = nn neuralss
-- | Computes the output of the given neural net on the given inputs
computeNet :: [[Neuron]] -> [Double] -> [Double]
computeNet neuralss = toList . computeNetU neuralss . fromList
-- * Quadratic Error
-- | Returns the quadratic error of the neural network on the given sample
quadErrorNetU :: [[Neuron]] -> (Vector Double, Vector Double) -> Double
quadErrorNetU nss (xs,ys) = (/2.0) $
V.sum . V.zipWith (\y s -> (y - s)**2) ys $
computeNetU nss xs
-- | Returns the quadratic error of the neural network on the given sample
quadErrorNet :: [[Neuron]] -> ([Double], [Double]) -> Double
quadErrorNet nss = quadErrorNetU nss . (fromList *** fromList)
-- | Returns the quadratic error of the neural network on the given samples
globalQuadErrorNetU :: [[Neuron]] -> [(Vector Double, Vector Double)] -> Double
globalQuadErrorNetU nss = L.sum . L.map (quadErrorNetU nss)
-- | Returns the quadratic error of the neural network on the given samples
globalQuadErrorNet :: [[Neuron]] -> [([Double], [Double])] -> Double
globalQuadErrorNet nss = globalQuadErrorNetU nss . L.map (fromList *** fromList)
-- * Learning
-- | Train the given neural network using the backpropagation algorithm on the
-- given sample with the given learning ratio (alpha)
backPropU :: Double -> [[Neuron]] -> (Vector Double, Vector Double) -> [[Neuron]]
backPropU alpha nss (xs, ys) = [aux (L.head nss) ds_hidden xs
,aux (nss !! 1) ds_out output_hidden]
where
output_hidden = computeLayerU (L.head nss) xs
output_out = computeLayerU (nss !! 1) output_hidden
ds_out = V.zipWith (\s y -> s * (1 - s) * (y - s)) output_out ys
ds_hidden = V.zipWith (\x s -> x * (1-x) * s) output_hidden
. fromList $
L.map (V.sum . V.zipWith (*) ds_out)
. L.map fromList
. transpose
. L.map (toList . weights) $
(nss !! 1)
aux ns ds xs = L.zipWith (\n d -> n { weights = V.zipWith (\w x -> w + alpha * d * x) (weights n) xs }) ns (toList ds)
-- | Train the given neural network using the backpropagation algorithm on the
-- given sample with the given learning ratio (alpha)
backProp :: Double -> [[Neuron]] -> ([Double], [Double]) -> [[Neuron]]
backProp alpha nss = backPropU alpha nss . (fromList *** fromList)
trainAux :: Double -> [[Neuron]] -> [(Vector Double, Vector Double)] -> [[Neuron]]
trainAux alpha = L.foldl' (backPropU alpha)
-- | Train the given neural network on the given samples using the
-- backpropagation algorithm using the given learning ratio (alpha) and the
-- given desired maximal bound for the global quadratic error on the samples
-- (epsilon)
trainU :: Double -> Double -> [[Neuron]] -> [(Vector Double, Vector Double)] -> [[Neuron]]
trainU alpha epsilon nss samples = until
(\nss' -> globalQuadErrorNetU nss' samples < epsilon)
(\nss' -> trainAux alpha nss' samples)
nss
-- | Train the given neural network on the given samples using the
-- backpropagation algorithm using the given learning ratio (alpha) and the
-- given desired maximal bound for the global quadratic error on the samples
-- (epsilon)
train :: Double -> Double -> [[Neuron]] -> [([Double], [Double])] -> [[Neuron]]
train alpha epsilon nss = trainU alpha epsilon nss . L.map (fromList *** fromList)
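-- | Hedged usage sketch (not part of the original module): train a given
-- two-layer network (built elsewhere, e.g. with the helpers in
-- "AI.HNN.Layer") on the XOR truth table, with learning rate 0.8 and a
-- target quadratic error of 0.01.
trainXor :: [[Neuron]] -> [[Neuron]]
trainXor net = train 0.8 0.01 net
  [ ([0, 0], [0])
  , ([0, 1], [1])
  , ([1, 0], [1])
  , ([1, 1], [0])
  ]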
| alpmestan/HNN-0.1 | AI/HNN/Net.hs | lgpl-3.0 | 4,053 | 0 | 19 | 888 | 1,230 | 674 | 556 | 52 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative ((<$>))
import Data.Default (def)
import System.Environment
import Text.HTML.Scalpel
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Client.TLS as HTTP
import qualified Network.HTTP.Types.Header as HTTP
-- Create a new manager settings based on the default TLS manager that updates
-- the request headers to include a custom user agent.
managerSettings :: HTTP.ManagerSettings
managerSettings = HTTP.tlsManagerSettings {
HTTP.managerModifyRequest = \req -> do
req' <- HTTP.managerModifyRequest HTTP.tlsManagerSettings req
return $ req' {
HTTP.requestHeaders = (HTTP.hUserAgent, "My Custom UA")
: HTTP.requestHeaders req'
}
}
main :: IO ()
main = getArgs >>= handleArgs
handleArgs :: [String] -> IO ()
handleArgs [url] = listUrlsForSite url
handleArgs _ = putStrLn "usage: custom-user-agent URL"
listUrlsForSite :: URL -> IO ()
listUrlsForSite url = do
manager <- Just <$> HTTP.newManager managerSettings
images <- scrapeURLWithConfig (def { manager }) url (attrs "src" "img")
maybe printError printImages images
where
printError = putStrLn "ERROR: Could not scrape the URL!"
printImages = mapM_ putStrLn
| fimad/scalpel | examples/custom-user-agent/Main.hs | apache-2.0 | 1,314 | 0 | 15 | 250 | 303 | 167 | 136 | 28 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ParallelListComp #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
module Language.K3.Codegen.CPP.Collections where
import Data.Char
import Data.List (elemIndex, find, intercalate, partition, sort, isInfixOf)
import Language.K3.Core.Annotation
import Language.K3.Core.Common
import Language.K3.Core.Declaration
import Language.K3.Core.Expression
import Language.K3.Core.Type
import Language.K3.Codegen.CPP.Types
import Language.K3.Codegen.CPP.CollectionMembers (indexes,polybuffer)
import qualified Language.K3.Codegen.CPP.Representation as R
-- | An Annotation Combination Composite should contain the following:
-- - Inlined implementations for all provided methods.
-- - Declarations for all provided data members.
-- - Declaration for the dataspace of the collection.
-- - Implementations for at least the following constructors:
-- - Default constructor, which creates an empty collection.
-- - Dataspace constructor, which creates a collection from an empty dataspace (e.g. vector).
-- - Additionally a move dataspace constructor which uses a temporary dataspace?
-- - Copy constructor.
-- - Superclass constructor.
-- - Serialization function, which should proxy the dataspace serialization.
composite :: Identifier -> [(Identifier, [AnnMemDecl])] -> [K3 Type] -> CPPGenM [R.Definition]
composite name ans content_ts = do
let overrideGeneratedName n = case find (`isInfixOf` n) reservedGeneratedAnnotations of
Nothing -> n
Just i -> i
let isReserved (aname, _) = overrideGeneratedName aname `elem` reservedAnnotations
let (ras, as) = partition isReserved ans
-- Inlining is only done for provided (positive) declarations.
-- let positives = filter isPositiveDecl (concat . snd $ unzip nras)
-- Split data and method declarations, for access specifiers.
-- let (dataDecls, methDecls) = partition isDataDecl positives
-- For arrays, extract the array size template parameter.
let arraySize mdecls = case mdecls of
Nothing -> []
Just l -> (\f -> foldl f [] l) $ \acc m ->
case m of
Lifted _ "array_size" _ (Just (tag -> EConstant (CInt i))) _ ->
acc ++ [R.Named $ R.Name $ show i]
_ -> acc
-- MultiIndex member generation.
-- When dealing with Indexes, we need to specialize the MultiIndex* classes on each index type
(indexTypes, indexDefns) <- indexes name as content_ts
let selfType = R.Named $ R.Specialized [R.Named $ R.Name "__CONTENT"] $ R.Name name
let addnSpecializations n = if "Array" `isInfixOf` n then arraySize $ lookup n ans
else if "MultiIndex" `isInfixOf` n then indexTypes
else if "FlatPolyBuffer" `isInfixOf` n then [selfType]
else if "UniquePolyBuffer" `isInfixOf` n then [selfType]
else []
let baseClass (n,_) = R.Qualified (R.Name "K3")
(R.Specialized ((R.Named $ R.Name "__CONTENT"): addnSpecializations n)
(R.Name $ overrideGeneratedName n))
let baseClasses = map baseClass ras
-- FlatPolyBuffer member generation.
pbufDefns <- polybuffer name ras
let defaultConstructor
= R.FunctionDefn (R.Name name) [] Nothing [R.Call (R.Variable b) [] | b <- baseClasses] False []
let superConstructor = R.FunctionDefn (R.Name name)
[ (Just $ "__other" ++ show i, R.Reference $ R.Const $ R.Named b)
| (b,i) <- zip baseClasses ([1..] :: [Integer])
] Nothing
[ R.Call (R.Variable b)
[ R.Variable $ R.Name $ "__other" ++ show i]
| (b,i) <- zip baseClasses ([1..] :: [Integer])
] False []
let superMoveConstructor
= R.FunctionDefn (R.Name name)
[ (Just $ "__other" ++ show i, R.RValueReference $ R.Named b)
| (b,i) <- zip baseClasses ([1..] :: [Integer])
] Nothing
[ R.Call
(R.Variable b)
[ R.Call
(R.Variable $ R.Qualified (R.Name "std") (R.Name "move"))
[R.Variable $ R.Name $ "__other" ++ show i]
]
| (b,i) <- zip baseClasses ([1..] :: [Integer])
] False []
let mkXmlTagName s = map (\c -> if isAlphaNum c || c `elem` ['-','_','.'] then c else '_') s
let serializationName asYas n =
if asYas then R.Qualified (R.Name "yas") n
else R.Qualified (R.Name "boost") $ R.Qualified (R.Name "serialization") n
let serializeParent asYas (p, (q, _)) =
        -- TODO re-enable nvp
--let nvp_wrap e = if asYas then e
-- else R.Call (R.Variable $ serializationName asYas $ R.Name "make_nvp")
-- [ R.Literal $ R.LString $ mkXmlTagName q, e ]
--in
R.Ignore $ R.Binary "&" (R.Variable $ R.Name "_archive")
(R.Call (R.Variable $ serializationName asYas $ R.Specialized [R.Named p] $ R.Name "base_object")
[R.Dereference $ R.Variable $ R.Name "this"])
let serializeStatements asYas = map (serializeParent asYas) $ zip baseClasses ras
let serializeFn asYas =
R.TemplateDefn [("archive", Nothing)]
(R.FunctionDefn (R.Name "serialize")
([(Just "_archive", R.Reference (R.Parameter "archive"))]
++ (if asYas then [] else [ (Just "_version", R.Const $ R.Named (R.Name "unsigned int")) ]))
(Just $ R.Named $ R.Name "void")
[] False $ serializeStatements asYas)
let methods = [defaultConstructor, superConstructor, superMoveConstructor]
++ [serializeFn False, serializeFn True]
++ indexDefns ++ pbufDefns
sentinelDefn <- withLifetimeProfiling [] $ return [
R.GlobalDefn $
R.Forward $
R.ScalarDecl (R.Name "__lifetime_sentinel")
(R.Named $ R.Qualified (R.Name "K3") $ R.Qualified (R.Name "lifetime") (R.Name "sentinel"))
(Just $ R.Initialization (R.Named $ R.Qualified (R.Name "K3") $ R.Qualified (R.Name "lifetime") (R.Name "sentinel"))
[foldr1 (R.Binary "+") [R.Call (R.Variable (R.Name "sizeof")) [R.ExprOnType $ R.Named bc] | bc <- baseClasses]])
]
let members = sentinelDefn
let collectionClassDefn = R.TemplateDefn [("__CONTENT", Nothing)]
(R.ClassDefn (R.Name name) [] (map R.Named baseClasses) (members ++ methods) [] [])
let parent = head baseClasses
let compactSerializationDefn
= R.NamespaceDefn "boost" [ R.NamespaceDefn "serialization" [
R.TemplateDefn [("__CONTENT", Nothing)] $
R.ClassDefn (R.Name "implementation_level") [selfType] []
[ R.TypeDefn (R.Named $ R.Qualified (R.Name "mpl") $ R.Name "integral_c_tag") "tag"
, R.TypeDefn (R.Named $ R.Qualified (R.Name "mpl")
$ R.Specialized [R.Named $ R.Name "object_serializable"] $ R.Name "int_") "type"
, R.GlobalDefn $ R.Ignore $ R.Call (R.Variable $ R.Name "BOOST_STATIC_CONSTANT")
[ R.Variable $ R.Name "int"
, R.Binary "=" (R.Variable $ R.Name "value")
(R.Variable $ R.Qualified (R.Name "implementation_level") $ R.Qualified (R.Name "type") $ R.Name "value")]
]
[] []
]]
let yamlStructDefn = R.NamespaceDefn "YAML"
[ R.TemplateDefn [("__CONTENT", Nothing)] $
R.ClassDefn (R.Name "convert") [selfType] []
[ R.FunctionDefn (R.Name "encode")
[(Just "c", R.Reference $ R.Const $ selfType)]
(Just $ R.Static $ R.Named $ R.Name "Node") [] False
[R.Return $ R.Call
(R.Variable $ R.Qualified
(R.Specialized [R.Named $ parent] (R.Name "convert"))
(R.Name "encode"))
[R.Variable $ R.Name "c"]]
, R.FunctionDefn (R.Name "decode")
[ (Just "node", R.Reference $ R.Const $ R.Named $ R.Name "Node")
, (Just "c", R.Reference selfType)
] (Just $ R.Static $ R.Primitive $ R.PBool) [] False
[R.Return $ R.Call
(R.Variable $ R.Qualified
(R.Specialized [R.Named $ parent] (R.Name "convert"))
(R.Name "decode"))
[R.Variable $ R.Name "node", R.Variable $ R.Name "c"]]
]
[] []
]
let jsonStructDefn = R.NamespaceDefn "JSON"
[ R.TemplateDefn [("__CONTENT", Nothing)] $
R.ClassDefn (R.Name "convert") [selfType] []
[ R.TemplateDefn [("Allocator", Nothing)] $
R.FunctionDefn (R.Name "encode")
[(Just "c", R.Reference $ R.Const $ selfType)
,(Just "al", R.Reference $ R.Named $ R.Name "Allocator")
]
(Just $ R.Static $ R.Named $ R.Name "rapidjson::Value") [] False
[R.Return $ R.Call
(R.Variable $ R.Qualified
(R.Specialized [R.Named $ parent] (R.Name "convert"))
(R.Name "encode"))
[R.Variable $ R.Name "c",
R.Variable $ R.Name "al"]
]
]
[] []
]
return [collectionClassDefn, compactSerializationDefn, yamlStructDefn, jsonStructDefn]
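-- | Generate the C++ definitions for a K3 record over the given field names:
-- the templated struct itself plus serialization, comparison, hashing and
-- YAML/JSON conversion helpers. Fields are sorted so that the generated name
-- ("R_" followed by the fields joined with "_") is canonical.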
record :: [Identifier] -> CPPGenM [R.Definition]
record (sort -> ids) = do
let recordName = "R_" ++ intercalate "_" ids
let templateVars = ["_T" ++ show n | _ <- ids | n <- [0..] :: [Int]]
let fullName = R.Specialized (map (R.Named . R.Name) templateVars) (R.Name recordName)
let formalVars = ["_" ++ i | i <- ids]
let recordType = R.Named $ R.Specialized [R.Named $ R.Name t | t <- templateVars] $ R.Name recordName
let defaultConstructor
= R.FunctionDefn (R.Name recordName) [] Nothing
[R.Call (R.Variable $ R.Name i) [] | i <- ids] False []
-- Forwarding constructor. One should be sufficient to handle all field-based constructions.
let forwardTemplateVars = map ('_':) templateVars
let init1Const fv tv i = R.FunctionDefn (R.Name recordName) [(Just fv, R.Reference $ R.Const $ R.Named $ R.Name tv)]
Nothing [R.Call (R.Variable $ R.Name i) [R.Variable $ R.Name fv]] False []
let init1Move fv tv i = R.FunctionDefn (R.Name recordName) [(Just fv, R.RValueReference $ R.Named $ R.Name tv)]
Nothing [R.Call (R.Variable $ R.Name i)
[R.Call (R.Variable $ R.Qualified (R.Name "std") (R.Name "move"))
[R.Variable $ R.Name fv]]] False []
let initConstructor
= R.TemplateDefn (zip forwardTemplateVars $ repeat Nothing) $
R.FunctionDefn (R.Name recordName)
[(Just fv, R.RValueReference $ R.Named $ R.Name tv) | fv <- formalVars | tv <- forwardTemplateVars] Nothing
[ R.Call (R.Variable $ R.Name i)
[R.Call (R.Variable $ R.Qualified (R.Name "std")
(R.Specialized [R.Named $ R.Name t] (R.Name "forward")))
[R.Variable $ R.Name f]]
| i <- ids
| f <- formalVars
| t <- forwardTemplateVars
] False []
let initConstructors = case (formalVars, templateVars, ids) of
([fv], [tv], [i]) -> [init1Const fv tv i, init1Move fv tv i]
_ -> [initConstructor]
let equalityOperator
= R.FunctionDefn (R.Name "operator==")
[(Just "__other", R.Reference $ R.Const recordType)] (Just $ R.Primitive R.PBool) []
True
[R.Return $ foldr1 (R.Binary "&&")
[ R.Binary "==" (R.Variable $ R.Name i) (R.Project (R.Variable $ R.Name "__other") (R.Name i))
| i <- ids
]]
let tieSelf = R.Call (R.Variable $ R.Qualified (R.Name "std") (R.Name "tie")) [R.Variable $ R.Name i | i <- ids]
let tieOther n = R.Call (R.Variable $ R.Qualified (R.Name "std") (R.Name "tie"))
[R.Project (R.Variable $ R.Name n) (R.Name i) | i <- ids]
let logicOp op
= R.FunctionDefn (R.Name $ "operator"++op)
[(Just "__other", R.Reference $ R.Const recordType)] (Just $ R.Primitive R.PBool)
[] True
[R.Return $ R.Binary op tieSelf (tieOther "__other")]
let fieldDecls = [ R.GlobalDefn (R.Forward $ R.ScalarDecl (R.Name i) (R.Named $ R.Name t) Nothing)
| i <- ids
| t <- templateVars
]
let serializeMember asYas m =
R.Ignore $ R.Binary "&" (R.Variable $ R.Name "_archive")
(if asYas then R.Variable $ R.Name m
else R.Call (R.Variable $ R.Name "BOOST_SERIALIZATION_NVP") [R.Variable $ R.Name m])
let serializeStatements asYas = map (serializeMember asYas) ids
let x_alizeMember fun m = R.Ignore $ R.Call (R.Project (R.Variable $ R.Name "arg") (R.Name fun)) [R.Variable $ R.Name m]
let x_alizeStatements fun = map (x_alizeMember fun) ids
let x_alize fun = R.TemplateDefn [("T", Nothing)] $
R.FunctionDefn (R.Name fun)
[ (Just "arg", R.Reference $ (R.Parameter "T")) ]
(Just $ R.Reference recordType)
[]
False
(x_alizeStatements fun ++ [R.Return $ R.Dereference $ R.Variable $ R.Name "this"])
let serializeFn asYas =
R.TemplateDefn [("archive", Nothing)]
(R.FunctionDefn (R.Name "serialize")
([ (Just "_archive", R.Reference (R.Parameter "archive")) ]
++ if asYas then [] else [ (Just "_version", R.Const $ R.Named (R.Name "unsigned int")) ])
(Just $ R.Named $ R.Name "void")
[] False $ serializeStatements asYas)
let typedefs = case "key" `elemIndex` ids of
Just idx -> [R.TypeDefn (R.Named $ R.Name $ templateVars !! idx) "KeyType"]
Nothing -> []
let constructors = (defaultConstructor:initConstructors)
let comparators = [equalityOperator, logicOp "!=", logicOp "<", logicOp ">", logicOp "<=", logicOp ">="]
sentinelDefn <- withLifetimeProfiling [] $ return [
R.GlobalDefn $
R.Forward $
R.ScalarDecl (R.Name "__lifetime_sentinel")
(R.Named $ R.Qualified (R.Name "K3") $ R.Qualified (R.Name "lifetime") (R.Name "sentinel"))
(Just $ R.Initialization (R.Named $ R.Qualified (R.Name "K3") $ R.Qualified (R.Name "lifetime") (R.Name "sentinel"))
[foldr1 (R.Binary "+") [R.Call (R.Variable (R.Name "sizeof"))
[R.ExprOnType $ R.Named $ R.Name bc] | bc <- templateVars]])
]
let members = sentinelDefn ++ typedefs ++ constructors ++ comparators ++
[serializeFn False, serializeFn True] ++ fieldDecls ++ [x_alize "internalize", x_alize "externalize"]
let recordStructDefn
= R.GuardedDefn ("K3_" ++ recordName) $
R.TemplateDefn (zip templateVars (repeat Nothing)) $
R.ClassDefn (R.Name recordName) [] [] members [] []
let compactSerializationDefn
= R.GuardedDefn ("K3_" ++ recordName ++ "_srimpl_lvl") $ R.NamespaceDefn "boost" [ R.NamespaceDefn "serialization" [
R.TemplateDefn (zip templateVars (repeat Nothing)) $
R.ClassDefn (R.Name "implementation_level") [recordType] []
[ R.TypeDefn (R.Named $ R.Qualified (R.Name "mpl") $ R.Name "integral_c_tag") "tag"
, R.TypeDefn (R.Named $ R.Qualified (R.Name "mpl")
$ R.Specialized [R.Named $ R.Name "object_serializable"] $ R.Name "int_") "type"
, R.GlobalDefn $ R.Ignore $ R.Call (R.Variable $ R.Name "BOOST_STATIC_CONSTANT")
[ R.Variable $ R.Name "int"
, R.Binary "=" (R.Variable $ R.Name "value")
(R.Variable $ R.Qualified (R.Name "implementation_level") $ R.Qualified (R.Name "type") $ R.Name "value")]
]
[] []
]]
{-
let noTrackingDefn
= R.GuardedDefn ("K3_" ++ recordName ++ "_srtrck_lvl") $ R.NamespaceDefn "boost" [ R.NamespaceDefn "serialization" [
R.TemplateDefn (zip templateVars (repeat Nothing)) $
R.ClassDefn (R.Name "tracking_level") [recordType] []
[ R.TypeDefn (R.Named $ R.Qualified (R.Name "mpl") $ R.Name "integral_c_tag") "tag"
, R.TypeDefn (R.Named $ R.Qualified (R.Name "mpl")
$ R.Specialized [R.Named $ R.Name "track_never"] $ R.Name "int_") "type"
, R.GlobalDefn $ R.Ignore $ R.Call (R.Variable $ R.Name "BOOST_STATIC_CONSTANT")
[ R.Variable $ R.Name "int"
, R.Binary "=" (R.Variable $ R.Name "value")
(R.Variable $ R.Qualified (R.Name "tracking_level") $ R.Qualified (R.Name "type") $ R.Name "value")]
]
[] []
]]
let bitwiseSerializableDefn
= R.GuardedDefn ("K3_" ++ recordName ++ "_srbitwise") $ R.NamespaceDefn "boost" [ R.NamespaceDefn "serialization" [
R.TemplateDefn (zip templateVars (repeat Nothing)) $
R.ClassDefn (R.Name "is_bitwise_serializable")
[recordType] [R.Named $ R.Qualified (R.Name "mpl") $ R.Name "true_"]
[] [] []
]]
-}
let isTypeFlat t = R.Variable $
R.Qualified
(R.Specialized [R.Named $ R.Name t] (R.Name "is_flat"))
(R.Name "value")
let isFlatDefn
= R.GuardedDefn ("K3_" ++ recordName ++ "_is_flat") $
R.NamespaceDefn "K3" [
R.TemplateDefn (zip templateVars (repeat Nothing)) $
R.ClassDefn
(R.Name "is_flat")
[R.Named $ fullName]
[]
[ R.GlobalDefn $ R.Forward $ R.ScalarDecl
(R.Name "value")
(R.Static $ R.Named $ R.Name "constexpr bool")
(Just $ foldl1 (R.Binary "&&") (map isTypeFlat templateVars))
]
[]
[]
]
let hashCombine x = R.Call (R.Variable $ (R.Name "hash_combine")) [R.Variable $ R.Name "seed", x]
let hashBody = [R.Forward $ R.ScalarDecl (R.Name "seed") (R.Named $ R.Qualified (R.Name "std") (R.Name "size_t")) (Just $ R.Literal $ R.LInt 0)]
++ (map (R.Ignore . hashCombine) [R.Project (R.Variable $ R.Name "r") (R.Name i) | i <- ids])
++ [R.Return $ R.Variable $ R.Name "seed" ]
let hashStructDefn
= R.NamespaceDefn "std" [
R.GuardedDefn ("K3_" ++ recordName ++ "_hash") $ R.TemplateDefn (zip templateVars (repeat Nothing)) $
R.ClassDefn (R.Name "hash") [recordType] []
[
R.FunctionDefn
(R.Name "operator()")
[(Just "r", R.Reference $ R.Const recordType)]
(Just $ R.Named $ R.Qualified (R.Name "std") (R.Name "size_t"))
[]
True
hashBody
]
[]
[]
]
let hashValueDefn = R.TemplateDefn (zip templateVars (repeat Nothing)) $ R.FunctionDefn
(R.Name "hash_value")
[(Just "r", R.Reference $ R.Const recordType)]
(Just $ R.Named $ R.Qualified (R.Name "std") (R.Name "size_t"))
[]
False
hashBody
let yamlStructDefn = R.NamespaceDefn "YAML"
[ R.TemplateDefn (zip templateVars (repeat Nothing)) $
R.ClassDefn (R.Name "convert") [recordType] []
[ R.FunctionDefn (R.Name "encode")
[(Just "r", R.Reference $ R.Const $ recordType)]
(Just $ R.Static $ R.Named $ R.Name "Node") [] False
([R.Forward $ R.ScalarDecl (R.Name "node") (R.Named $ R.Name "Node") Nothing] ++
[R.Assignment (R.Subscript
(R.Variable $ R.Name "node")
(R.Literal $ R.LString field))
(R.Call
(R.Variable $ R.Qualified (R.Specialized [R.Named $ R.Name fieldType] $ R.Name "convert") (R.Name "encode"))
[R.Project (R.Variable $ R.Name "r") (R.Name field)])
| field <- ids | fieldType <- templateVars
] ++ [R.Return $ R.Variable $ R.Name "node"])
, R.FunctionDefn (R.Name "decode")
[ (Just "node", R.Reference $ R.Const $ R.Named $ R.Name "Node")
, (Just "r", R.Reference recordType)
] (Just $ R.Static $ R.Primitive $ R.PBool) [] False
([ R.IfThenElse (R.Unary "!" $ R.Call (R.Project
(R.Variable $ R.Name "node")
(R.Name "IsMap")) [])
[R.Return $ R.Literal $ R.LBool False] []
] ++
[ R.IfThenElse (R.Subscript
(R.Variable $ R.Name "node")
(R.Literal $ R.LString field))
[ R.Assignment
(R.Project (R.Variable $ R.Name "r") (R.Name field))
(R.Call (R.Project
(R.Subscript
(R.Variable $ R.Name "node")
(R.Literal $ R.LString field))
(R.Specialized [R.Named $ R.Name templateVar] $ R.Name "as")) [])
] []
| field <- ids | templateVar <- templateVars
] ++ [R.Return $ R.Literal $ R.LBool True])
]
[] []
]
let addField (name,typ) = R.Call (R.Project (R.Variable $ R.Name "inner") (R.Name "AddMember"))
[ R.Literal $ R.LString name
, R.Call (R.Variable $ R.Qualified (R.Specialized [R.Named $ R.Name typ] (R.Name "convert")) (R.Name "encode"))
[R.Project (R.Variable $ R.Name "r") (R.Name name), R.Variable $ R.Name "al"]
, R.Variable $ R.Name "al"
]
let jsonStructDefn = R.NamespaceDefn "JSON"
[R.TemplateDefn (zip templateVars (repeat Nothing)) $
R.ClassDefn (R.Name "convert") [recordType] []
[
R.TemplateDefn [("Allocator", Nothing)] (R.FunctionDefn (R.Name "encode")
[(Just "r", R.Reference $ R.Const recordType)
,(Just "al", R.Reference $ R.Named $ R.Name "Allocator")
]
(Just $ R.Static $ R.Named $ R.Name "rapidjson::Value") [] False
(
[R.Forward $ R.UsingDecl (Left (R.Name "rapidjson")) Nothing] ++
[R.Forward $ R.ScalarDecl (R.Name "val") (R.Named $ R.Name "Value") Nothing] ++
[R.Forward $ R.ScalarDecl (R.Name "inner") (R.Named $ R.Name "Value") Nothing ] ++
[R.Ignore $ R.Call (R.Project (R.Variable $ R.Name "val") (R.Name "SetObject")) [] ] ++
[R.Ignore $ R.Call (R.Project (R.Variable $ R.Name "inner") (R.Name "SetObject")) [] ] ++
[R.Ignore $ addField tup | tup <- zip ids templateVars] ++
[R.Ignore $ R.Call (R.Project (R.Variable $ R.Name "val") (R.Name "AddMember"))
[R.Literal $ R.LString "type"
, R.Call (R.Variable $ R.Name "Value")
[R.Literal $ R.LString "record"]
, R.Variable $ R.Name "al"
]
] ++
[R.Ignore $ R.Call (R.Project (R.Variable $ R.Name "val") (R.Name "AddMember"))
[R.Literal $ R.LString "value"
, R.Call
(R.Project (R.Variable $ R.Name "inner") (R.Name "Move"))
[]
, R.Variable $ R.Name "al"
]
] ++
[R.Return $ R.Variable $ R.Name "val"]
))
] [] []
]
return [ recordStructDefn, compactSerializationDefn {-, noTrackingDefn, bitwiseSerializableDefn-}
, yamlStructDefn, hashStructDefn, hashValueDefn, isFlatDefn, jsonStructDefn]
reservedAnnotations :: [Identifier]
reservedAnnotations =
[ "Collection", "External", "Seq", "Set", "BitSet", "Sorted", "Map", "Vector", "Array"
, "IntSet", "SortedSet"
, "IntMap", "StrMap", "VMap", "SortedMap", "MapE", "SortedMapE", "MapCE"
, "MultiIndexBag", "MultiIndexMap", "MultiIndexVMap", "RealVector"
, "BulkFlatCollection", "FlatPolyBuffer", "UniquePolyBuffer"
, "BoxMap"
]
reservedGeneratedAnnotations :: [Identifier]
reservedGeneratedAnnotations = ["Array", "SortedMapE", "MapE", "MapCE", "FlatPolyBuffer", "UniquePolyBuffer"]
| DaMSL/K3 | src/Language/K3/Codegen/CPP/Collections.hs | apache-2.0 | 28,018 | 7 | 34 | 11,137 | 8,273 | 4,197 | 4,076 | 372 | 11 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QChildEvent.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:31
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Core.QChildEvent (
QqChildEvent(..)
,QqChildEvent_nf(..)
,added
,polished
,removed
,qChildEvent_delete
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Core.QEvent
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
class QqChildEvent x1 where
qChildEvent :: x1 -> IO (QChildEvent ())
instance QqChildEvent ((QChildEvent t1)) where
qChildEvent (x1)
= withQChildEventResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QChildEvent cobj_x1
foreign import ccall "qtc_QChildEvent" qtc_QChildEvent :: Ptr (TQChildEvent t1) -> IO (Ptr (TQChildEvent ()))
instance QqChildEvent ((QEventType, QObject t2)) where
qChildEvent (x1, x2)
= withQChildEventResult $
withObjectPtr x2 $ \cobj_x2 ->
qtc_QChildEvent1 (toCLong $ qEnum_toInt x1) cobj_x2
foreign import ccall "qtc_QChildEvent1" qtc_QChildEvent1 :: CLong -> Ptr (TQObject t2) -> IO (Ptr (TQChildEvent ()))
class QqChildEvent_nf x1 where
qChildEvent_nf :: x1 -> IO (QChildEvent ())
instance QqChildEvent_nf ((QChildEvent t1)) where
qChildEvent_nf (x1)
= withObjectRefResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QChildEvent cobj_x1
instance QqChildEvent_nf ((QEventType, QObject t2)) where
qChildEvent_nf (x1, x2)
= withObjectRefResult $
withObjectPtr x2 $ \cobj_x2 ->
qtc_QChildEvent1 (toCLong $ qEnum_toInt x1) cobj_x2
added :: QChildEvent a -> (()) -> IO (Bool)
added x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QChildEvent_added cobj_x0
foreign import ccall "qtc_QChildEvent_added" qtc_QChildEvent_added :: Ptr (TQChildEvent a) -> IO CBool
instance Qchild (QChildEvent a) (()) (IO (QObject ())) where
child x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QChildEvent_child cobj_x0
foreign import ccall "qtc_QChildEvent_child" qtc_QChildEvent_child :: Ptr (TQChildEvent a) -> IO (Ptr (TQObject ()))
polished :: QChildEvent a -> (()) -> IO (Bool)
polished x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QChildEvent_polished cobj_x0
foreign import ccall "qtc_QChildEvent_polished" qtc_QChildEvent_polished :: Ptr (TQChildEvent a) -> IO CBool
removed :: QChildEvent a -> (()) -> IO (Bool)
removed x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QChildEvent_removed cobj_x0
foreign import ccall "qtc_QChildEvent_removed" qtc_QChildEvent_removed :: Ptr (TQChildEvent a) -> IO CBool
qChildEvent_delete :: QChildEvent a -> IO ()
qChildEvent_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QChildEvent_delete cobj_x0
foreign import ccall "qtc_QChildEvent_delete" qtc_QChildEvent_delete :: Ptr (TQChildEvent a) -> IO ()
| uduki/hsQt | Qtc/Core/QChildEvent.hs | bsd-2-clause | 3,159 | 0 | 13 | 532 | 896 | 467 | 429 | -1 | -1 |
perm [] = []
perm (a : b : xs) = b : a : (perm xs)
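-- Note (added): 'perm' swaps characters pairwise and has no clause for a
-- singleton list, so it assumes the input length is even; the puzzle's input
-- evidently guarantees this, given that the solution was accepted.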
tst = getLine >>= (putStrLn . perm)
main = do
tstr <- getLine
mapM_ (const tst) [1 .. (read tstr)]
| pbl64k/HackerRank-Contests | 2014-03-21-FP/StringOPermute/sop.accepted.hs | bsd-2-clause | 161 | 0 | 10 | 48 | 100 | 51 | 49 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Common(
thingFixer,
listingFixer,
fieldFixer,
accountFixer,
linkFixer,
commentFixer,
moreChildrenFixer,
userAgent,
addUAString,
resultToEither,
eitherToException) where
import Control.Exception
import Data.Aeson
import qualified Data.ByteString as B
import Data.Char
import Network.HTTP.Conduit
import Network.HTTP.Types
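-- Convert a camelCase field name to snake_case, e.g. (illustrative input)
-- fieldFixer "linkFlairText" == "link_flair_text". The "over_18" special case
-- keeps its underscore, which removeUnderscore would otherwise strip.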
fieldFixer :: String -> String
fieldFixer s = if s == "over_18"
then s
else toSnakeCase $ removeUnderscore s where
removeUnderscore = filter (/= '_')
toSnakeCase = foldr (\c acc -> if isUpper c
then '_' : toLower c : acc
else c : acc) ""
linkFixer :: String -> String
linkFixer = genericFixer 5
commentFixer :: String -> String
commentFixer = genericFixer 8
accountFixer :: String -> String
accountFixer = genericFixer 8
thingFixer :: String -> String
thingFixer = genericFixer 6
listingFixer :: String -> String
listingFixer = genericFixer 8
moreChildrenFixer :: String -> String
moreChildrenFixer = genericFixer 13
userAgent :: B.ByteString
userAgent = "https://github.com/MarkJr94/sleuth"
addUAString :: Request -> Request
addUAString request = request { requestHeaders = fixedHeaders } where
oldHeaders = requestHeaders request
fixedHeaders = (hUserAgent, userAgent)
: filter (\(name, _) -> name /= hUserAgent) oldHeaders
resultToEither :: Result a -> Either String a
resultToEither x = case x of
Success good -> Right good
Error bad -> Left bad
eitherToException :: Either String a -> a
eitherToException (Right x) = x
eitherToException (Left err) = throw $ userError err
-- ============================================================================
-- =========================== PRIVATE FUNCTIONS BEGIN ========================
-- ============================================================================
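-- genericFixer n drops the first n characters (a record-name prefix such as
-- "link_" or "comment_"), lower-cases the first remaining character and then
-- snake-cases the rest; e.g. (hypothetical field)
-- genericFixer 5 "link_NumComments" == "num_comments".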
genericFixer :: Int -> String -> String
genericFixer n s = fieldFixer s' where
base = drop n s
newFirst = toLower $ head base
s' = newFirst : tail base | MarkJr94/sleuth | Common.hs | bsd-3-clause | 2,130 | 0 | 13 | 454 | 525 | 285 | 240 | 58 | 3 |
{-# LANGUAGE PatternGuards #-}
-- This is a quick hack for uploading build reports to Hackage.
module Distribution.Client.BuildReports.Upload
( BuildLog
, BuildReportId
, uploadReports
, postBuildReport
, putBuildLog
) where
import Network.Browser
( BrowserAction, request, setAllowRedirects )
import Network.HTTP
( Header(..), HeaderName(..)
, Request(..), RequestMethod(..), Response(..) )
import Network.TCP (HandleStream)
import Network.URI (URI, uriPath, parseRelativeReference, relativeTo)
import Control.Monad
( forM_ )
import System.FilePath.Posix
( (</>) )
import qualified Distribution.Client.BuildReports.Anonymous as BuildReport
import Distribution.Client.BuildReports.Anonymous (BuildReport)
import Distribution.Text (display)
type BuildReportId = URI
type BuildLog = String
uploadReports :: URI -> [(BuildReport, Maybe BuildLog)]
-> BrowserAction (HandleStream String) ()
-> BrowserAction (HandleStream BuildLog) ()
uploadReports uri reports auth = do
auth
forM_ reports $ \(report, mbBuildLog) -> do
buildId <- postBuildReport uri report
case mbBuildLog of
Just buildLog -> putBuildLog buildId buildLog
Nothing -> return ()
postBuildReport :: URI -> BuildReport
-> BrowserAction (HandleStream BuildLog) BuildReportId
postBuildReport uri buildReport = do
setAllowRedirects False
(_, response) <- request Request {
rqURI = uri { uriPath = "/package" </> display (BuildReport.package buildReport) </> "reports" },
rqMethod = POST,
rqHeaders = [Header HdrContentType ("text/plain"),
Header HdrContentLength (show (length body)),
Header HdrAccept ("text/plain")],
rqBody = body
}
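  -- The server is expected to answer 303 (See Other) with a Location header
  -- naming the new report; anything else is reported as an error below.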
case rspCode response of
(3,0,3) | [Just buildId] <- [ do rel <- parseRelativeReference location
relativeTo rel uri
| Header HdrLocation location <- rspHeaders response ]
-> return $ buildId
_ -> error "Unrecognised response from server."
where body = BuildReport.show buildReport
putBuildLog :: BuildReportId -> BuildLog
-> BrowserAction (HandleStream BuildLog) ()
putBuildLog reportId buildLog = do
--FIXME: do something if the request fails
(_, response) <- request Request {
rqURI = reportId{uriPath = uriPath reportId </> "log"},
rqMethod = PUT,
rqHeaders = [Header HdrContentType ("text/plain"),
Header HdrContentLength (show (length buildLog)),
Header HdrAccept ("text/plain")],
rqBody = buildLog
}
return ()
| yihuang/cabal-install | Distribution/Client/BuildReports/Upload.hs | bsd-3-clause | 2,741 | 0 | 18 | 726 | 714 | 391 | 323 | 62 | 2 |
module Win32Path
( beginPath, closeFigure, endPath, fillPath, flattenPath
, pathToRegion, strokeAndFillPath, strokePath, widenPath
) where
import StdDIS
import GDITypes
import Win32Types
----------------------------------------------------------------
-- Paths
----------------------------------------------------------------
-- %fun AbortPath :: HDC -> IO ()
beginPath :: HDC -> IO ()
beginPath arg1 =
prim_Win32Path_cpp_beginPath arg1 >>= \ (gc_failed,gc_failstring) ->
if ( gc_failed /= (0::Int))
then unmarshall_string_ gc_failstring >>= ioError . userError
else (return (()))
primitive prim_Win32Path_cpp_beginPath :: Addr -> IO (Int,Addr)
closeFigure :: HDC -> IO ()
closeFigure arg1 =
prim_Win32Path_cpp_closeFigure arg1 >>= \ (gc_failed,gc_failstring) ->
if ( gc_failed /= (0::Int))
then unmarshall_string_ gc_failstring >>= ioError . userError
else (return (()))
primitive prim_Win32Path_cpp_closeFigure :: Addr -> IO (Int,Addr)
endPath :: HDC -> IO ()
endPath arg1 =
prim_Win32Path_cpp_endPath arg1 >>= \ (gc_failed,gc_failstring) ->
if ( gc_failed /= (0::Int))
then unmarshall_string_ gc_failstring >>= ioError . userError
else (return (()))
primitive prim_Win32Path_cpp_endPath :: Addr -> IO (Int,Addr)
fillPath :: HDC -> IO ()
fillPath arg1 =
prim_Win32Path_cpp_fillPath arg1 >>= \ (gc_failed,gc_failstring) ->
if ( gc_failed /= (0::Int))
then unmarshall_string_ gc_failstring >>= ioError . userError
else (return (()))
primitive prim_Win32Path_cpp_fillPath :: Addr -> IO (Int,Addr)
flattenPath :: HDC -> IO ()
flattenPath arg1 =
prim_Win32Path_cpp_flattenPath arg1 >>= \ (gc_failed,gc_failstring) ->
if ( gc_failed /= (0::Int))
then unmarshall_string_ gc_failstring >>= ioError . userError
else (return (()))
primitive prim_Win32Path_cpp_flattenPath :: Addr -> IO (Int,Addr)
pathToRegion :: HDC -> IO HRGN
pathToRegion arg1 =
prim_Win32Path_cpp_pathToRegion arg1 >>= \ (gc_res3,gc_res1,gc_failed,gc_failstring) ->
if ( gc_failed /= (0::Int))
then unmarshall_string_ gc_failstring >>= ioError . userError
else (makeForeignObj gc_res1 gc_res3) >>= \ gc_res2 ->
(return (gc_res2))
primitive prim_Win32Path_cpp_pathToRegion :: Addr -> IO (Addr,Addr,Int,Addr)
strokeAndFillPath :: HDC -> IO ()
strokeAndFillPath arg1 =
prim_Win32Path_cpp_strokeAndFillPath arg1 >>= \ (gc_failed,gc_failstring) ->
if ( gc_failed /= (0::Int))
then unmarshall_string_ gc_failstring >>= ioError . userError
else (return (()))
primitive prim_Win32Path_cpp_strokeAndFillPath :: Addr -> IO (Int,Addr)
strokePath :: HDC -> IO ()
strokePath arg1 =
prim_Win32Path_cpp_strokePath arg1 >>= \ (gc_failed,gc_failstring) ->
if ( gc_failed /= (0::Int))
then unmarshall_string_ gc_failstring >>= ioError . userError
else (return (()))
primitive prim_Win32Path_cpp_strokePath :: Addr -> IO (Int,Addr)
widenPath :: HDC -> IO ()
widenPath arg1 =
prim_Win32Path_cpp_widenPath arg1 >>= \ (gc_failed,gc_failstring) ->
if ( gc_failed /= (0::Int))
then unmarshall_string_ gc_failstring >>= ioError . userError
else (return (()))
primitive prim_Win32Path_cpp_widenPath :: Addr -> IO (Int,Addr)
----------------------------------------------------------------
-- End
----------------------------------------------------------------
needPrims_hugs 2
| OS2World/DEV-UTIL-HUGS | libraries/win32/Win32Path.hs | bsd-3-clause | 3,334 | 39 | 13 | 499 | 1,010 | 558 | 452 | -1 | -1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, GADTs, RankNTypes #-}
module UI.PadKontrol (PadKontrol, runPadKontrol, module UI.PadKontrol.Types, module Control.Monad.IO.Class) where
import qualified System.MIDI as MIDI
import UI.PadKontrol.Types
import Control.Applicative
import Control.Monad.Operational.Mini
import Control.Monad.IO.Class
import Data.Word
import qualified Data.Map as M
import Data.Char
type PadKontrol = Program Message
instance MonadIO PadKontrol where
liftIO = singleton . LiftIO
showHex :: Word8 -> String
showHex n = intToDigit (fromEnum $ n `div` 16) : intToDigit (fromEnum $ n `mod` 16) : ""
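-- Decode a native-mode SysEx message from the padKONTROL into a timestamped
-- Event. The byte at offset 4 selects the control class (0x45 pads, 0x48
-- buttons and X/Y touch, 0x49 knobs, 0x4B X/Y position, 0x43 jog wheel,
-- 0x47 pedal); the 0x40/0x5f families appear to be acknowledgements and are
-- ignored (Nothing).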
convertEvent :: MIDI.MidiEvent -> Maybe (Int, Event)
convertEvent (MIDI.MidiEvent time (MIDI.SysEx msg)) = fmap ((,) (fromIntegral time)) $ case drop 4 msg of
[0x47, 0x00, 0x00] -> Just PedalUp
[0x47, 0x00, 0x7F] -> Just PedalDown
[0x45,s,t]
| s <= 0x0F -> Just $ PadUp (toEnum $ fromEnum s)
| s >= 0x40 -> Just $ PadDown (toEnum $ fromEnum (s - 0x40)) (fromIntegral (t - 0x30))
[0x48,0x20,0x7f] -> Just XYTouch
[0x48,0x20,0x00] -> Just XYRelease
[0x48,s,0x7f] -> Just $ ButtonDown $ toEnum $ fromEnum s
[0x48,s,0x00] -> Just $ ButtonUp $ toEnum $ fromEnum s
[0x49,0x00,v] -> Just $ Knob1 (fromIntegral v / 127)
[0x49,0x01,v] -> Just $ Knob2 (fromIntegral v / 127)
[0x4B,x,y] -> Just $ XYPad (fromIntegral x / 127) (fromIntegral y / 127)
[0x43,0x00,0x01] -> Just JogCW
[0x43,0x00,0x7F] -> Just JogCCW
[0x40,0x00,_] -> Nothing
[0x5f,_,_] -> Nothing
ev -> error $ "unknown message: " ++ show ev ++ " Please report this as a bug"
convertEvent ev = error $ "unknown event: " ++ show ev
runPadKontrol :: (Int -> Event -> Program Message ()) -> ((forall r. Program Message r -> IO r) -> IO a) -> IO a
runPadKontrol handle m = do
let g getName = fmap M.fromList . mapM (liftA2 (,) <$> getName <*> return)
srcs <- MIDI.enumerateSources >>= g MIDI.getName
dests <- MIDI.enumerateDestinations >>= g MIDI.getName
let devSrc = srcs M.! "padKONTROL PORT A"
devDest = dests M.! "padKONTROL CTRL"
dest <- MIDI.openDestination devDest
src <- MIDI.openSource devSrc $ Just
$ interpret (eval dest) . maybe (return ()) (uncurry handle) . convertEvent
MIDI.start dest
MIDI.start src
MIDI.sendSysEx dest $ [0x42, 0x49, 0x6E, 0x08, 0x00, 0x00, 0x01]
MIDI.sendSysEx dest $ [0x42, 0x49, 0x6E, 0x08, 0x3F, 0x0A, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x29, 0x29]
MIDI.sendSysEx dest $
[ 0x42, 0x49, 0x6E, 0x08, 0x3F, 0x2A, 0x00, 0x00
, 0x05, 0x05, 0x05, 0x7F, 0x7E, 0x7F, 0x7F, 0x03
, 0x0A, 0x0A, 0x0A, 0x0A, 0x0A, 0x0A, 0x0A, 0x0A
, 0x0A, 0x0A, 0x0A, 0x0A, 0x0A, 0x0A, 0x0A, 0x0A
, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08
, 0x09, 0x0A, 0x0B, 0x0C, 0x0d, 0x0E, 0x0F, 0x10]
result <- m $ interpret (eval dest)
MIDI.sendSysEx dest $ [0x42, 0x40, 0x6E, 0x08, 0x00, 0x00, 0x00]
MIDI.stop src
MIDI.stop dest
MIDI.close src
MIDI.close dest
return result
where
light :: Light -> Word8
light Off = 0x00
light On = 0x20
light Blink = 0x63
light (Flash f) = 0x41 + floor (f * (0x5f - 0x41))
eval :: MIDI.Connection -> Message a -> IO a
eval dest (Display x y z) = send dest [0x22, 0x04, 0x00, x, y, z]
eval dest (DisplayBlink x y z) = send dest [0x22, 0x04, 0x01, x, y, z]
eval dest (DisplayLeft seg l) = send dest [0x01, 0xB8 + fromIntegral (fromEnum seg), light l]
eval dest (DisplayCenter seg l) = send dest [0x01, 0xB0 + fromIntegral (fromEnum seg), light l]
eval dest (DisplayRight seg l) = send dest [0x01, 0xA8 + fromIntegral (fromEnum seg), light l]
eval dest (PadLight p l) = send dest [0x01, 0x00 + fromIntegral (fromEnum p), light l]
eval dest (ButtonLight b l) = send dest [0x01, 0x10 + fromIntegral (fromEnum b), light l]
eval _ (LiftIO m) = m
eval dest (AllLight w x y z) = send dest [0x3F, 0x0A, 0x01, g0, g1, g2, g3, g4, 0x00, x, y, z]
where
g = foldr (flip $ flip (+) . (*2)) 0 . map (toEnum . fromEnum . w)
g0 = g $ map Left [Pad01 .. Pad07]
g1 = g $ map Left [Pad08 .. Pad14]
g2 = g $ map Left [Pad15 .. Pad16] ++ map Right [ButtonScene ..ButtonMidiCh]
g3 = g $ map Right [ButtonSWType ..ButtonX]
g4 = g $ map Right [ButtonY .. ButtonHold]
send dest = MIDI.sendSysEx dest . ([0x42, 0x40, 0x6E, 0x08]++)
| fumieval/padKONTROL | UI/PadKontrol.hs | bsd-3-clause | 4,573 | 5 | 18 | 1,145 | 1,982 | 1,062 | 920 | 85 | 15 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- Determine which packages are already installed
module Stack.Build.Installed
( InstalledMap
, Installed (..)
, GetInstalledOpts (..)
, getInstalled
) where
import Control.Applicative
import Control.Arrow
import Control.Monad
import Control.Monad.Logger
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Foldable as F
import Data.Function
import qualified Data.HashSet as HashSet
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Maybe.Extra (mapMaybeM)
import Data.Monoid
import qualified Data.Text as T
import Path
import Prelude hiding (FilePath, writeFile)
import Stack.Build.Cache
import Stack.Constants
import Stack.PackageDump
import Stack.Types.Build
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.GhcPkgId
import Stack.Types.Package
import Stack.Types.PackageDump
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.StackT
import Stack.Types.Version
import System.Process.Read (EnvOverride)
-- | Options for 'getInstalled'.
data GetInstalledOpts = GetInstalledOpts
{ getInstalledProfiling :: !Bool
-- ^ Require profiling libraries?
, getInstalledHaddock :: !Bool
-- ^ Require haddocks?
, getInstalledSymbols :: !Bool
-- ^ Require debugging symbols?
}
-- | Returns the new InstalledMap and all of the locally registered packages.
getInstalled :: (StackM env m, HasEnvConfig env, PackageInstallInfo pii)
=> EnvOverride
-> GetInstalledOpts
-> Map PackageName pii -- ^ does not contain any installed information
-> m ( InstalledMap
, [DumpPackage () () ()] -- globally installed
, [DumpPackage () () ()] -- snapshot installed
, [DumpPackage () () ()] -- locally installed
)
getInstalled menv opts sourceMap = do
$logDebug "Finding out which packages are already installed"
snapDBPath <- packageDatabaseDeps
localDBPath <- packageDatabaseLocal
extraDBPaths <- packageDatabaseExtra
mcache <-
if getInstalledProfiling opts || getInstalledHaddock opts
then configInstalledCache >>= liftM Just . loadInstalledCache
else return Nothing
let loadDatabase' = loadDatabase menv opts mcache sourceMap
(installedLibs0, globalDumpPkgs) <- loadDatabase' Nothing []
(installedLibs1, _extraInstalled) <-
foldM (\lhs' pkgdb ->
loadDatabase' (Just (ExtraGlobal, pkgdb)) (fst lhs')
) (installedLibs0, globalDumpPkgs) extraDBPaths
(installedLibs2, snapshotDumpPkgs) <-
loadDatabase' (Just (InstalledTo Snap, snapDBPath)) installedLibs1
(installedLibs3, localDumpPkgs) <-
loadDatabase' (Just (InstalledTo Local, localDBPath)) installedLibs2
let installedLibs = M.fromList $ map lhPair installedLibs3
F.forM_ mcache $ \cache -> do
icache <- configInstalledCache
saveInstalledCache icache cache
-- Add in the executables that are installed, making sure to only trust a
-- listed installation under the right circumstances (see below)
let exesToSM loc = Map.unions . map (exeToSM loc)
exeToSM loc (PackageIdentifier name version) =
case Map.lookup name sourceMap of
-- Doesn't conflict with anything, so that's OK
Nothing -> m
Just pii
-- Not the version we want, ignore it
| version /= piiVersion pii || loc /= piiLocation pii -> Map.empty
| otherwise -> m
where
m = Map.singleton name (loc, Executable $ PackageIdentifier name version)
exesSnap <- getInstalledExes Snap
exesLocal <- getInstalledExes Local
let installedMap = Map.unions
[ exesToSM Local exesLocal
, exesToSM Snap exesSnap
, installedLibs
]
return ( installedMap
, globalDumpPkgs
, snapshotDumpPkgs
, localDumpPkgs
)
-- | Outputs both the modified InstalledMap and the Set of all installed packages in this database
--
-- The goal is to ascertain that the dependencies for a package are present,
-- that it has profiling if necessary, and that it matches the version and
-- location needed by the SourceMap
loadDatabase :: (StackM env m, HasEnvConfig env, PackageInstallInfo pii)
=> EnvOverride
-> GetInstalledOpts
-> Maybe InstalledCache -- ^ if Just, profiling or haddock is required
-> Map PackageName pii -- ^ to determine which installed things we should include
-> Maybe (InstalledPackageLocation, Path Abs Dir) -- ^ package database, Nothing for global
-> [LoadHelper] -- ^ from parent databases
-> m ([LoadHelper], [DumpPackage () () ()])
loadDatabase menv opts mcache sourceMap mdb lhs0 = do
wc <- view $ actualCompilerVersionL.to whichCompiler
(lhs1', dps) <- ghcPkgDump menv wc (fmap snd (maybeToList mdb))
$ conduitDumpPackage =$ sink
let ghcjsHack = wc == Ghcjs && isNothing mdb
lhs1 <- mapMaybeM (processLoadResult mdb ghcjsHack) lhs1'
let lhs = pruneDeps
id
lhId
lhDeps
const
(lhs0 ++ lhs1)
return (map (\lh -> lh { lhDeps = [] }) $ Map.elems lhs, dps)
where
conduitProfilingCache =
case mcache of
Just cache | getInstalledProfiling opts -> addProfiling cache
-- Just an optimization to avoid calculating the profiling
-- values when they aren't necessary
_ -> CL.map (\dp -> dp { dpProfiling = False })
conduitHaddockCache =
case mcache of
Just cache | getInstalledHaddock opts -> addHaddock cache
-- Just an optimization to avoid calculating the haddock
-- values when they aren't necessary
_ -> CL.map (\dp -> dp { dpHaddock = False })
conduitSymbolsCache =
case mcache of
Just cache | getInstalledSymbols opts -> addSymbols cache
-- Just an optimization to avoid calculating the debugging
-- symbol values when they aren't necessary
_ -> CL.map (\dp -> dp { dpSymbols = False })
mloc = fmap fst mdb
sinkDP = conduitProfilingCache
=$ conduitHaddockCache
=$ conduitSymbolsCache
=$ CL.map (isAllowed opts mcache sourceMap mloc &&& toLoadHelper mloc)
=$ CL.consume
sink = getZipSink $ (,)
<$> ZipSink sinkDP
<*> ZipSink CL.consume
processLoadResult :: MonadLogger m
=> Maybe (InstalledPackageLocation, Path Abs Dir)
-> Bool
-> (Allowed, LoadHelper)
-> m (Maybe LoadHelper)
processLoadResult _ _ (Allowed, lh) = return (Just lh)
processLoadResult _ True (WrongVersion actual wanted, lh)
-- Allow some packages in the ghcjs global DB to have the wrong
-- versions. Treat them as wired-ins by setting deps to [].
| fst (lhPair lh) `HashSet.member` ghcjsBootPackages = do
$logWarn $ T.concat
[ "Ignoring that the GHCJS boot package \""
, packageNameText (fst (lhPair lh))
, "\" has a different version, "
, versionText actual
, ", than the resolver's wanted version, "
, versionText wanted
]
return (Just lh)
processLoadResult mdb _ (reason, lh) = do
$logDebug $ T.concat $
[ "Ignoring package "
, packageNameText (fst (lhPair lh))
] ++
maybe [] (\db -> [", from ", T.pack (show db), ","]) mdb ++
[ " due to"
, case reason of
Allowed -> " the impossible?!?!"
NeedsProfiling -> " it needing profiling."
NeedsHaddock -> " it needing haddocks."
NeedsSymbols -> " it needing debugging symbols."
UnknownPkg -> " it being unknown to the resolver / extra-deps."
WrongLocation mloc loc -> " wrong location: " <> T.pack (show (mloc, loc))
WrongVersion actual wanted -> T.concat
[ " wanting version "
, versionText wanted
, " instead of "
, versionText actual
]
]
return Nothing
data Allowed
= Allowed
| NeedsProfiling
| NeedsHaddock
| NeedsSymbols
| UnknownPkg
| WrongLocation (Maybe InstalledPackageLocation) InstallLocation
| WrongVersion Version Version
deriving (Eq, Show)
-- | Check if a can be included in the set of installed packages or not, based
-- on the package selections made by the user. This does not perform any
-- dirtiness or flag change checks.
isAllowed :: PackageInstallInfo pii
=> GetInstalledOpts
-> Maybe InstalledCache
-> Map PackageName pii
-> Maybe InstalledPackageLocation
-> DumpPackage Bool Bool Bool
-> Allowed
isAllowed opts mcache sourceMap mloc dp
-- Check that it can do profiling if necessary
| getInstalledProfiling opts && isJust mcache && not (dpProfiling dp) = NeedsProfiling
-- Check that it has haddocks if necessary
| getInstalledHaddock opts && isJust mcache && not (dpHaddock dp) = NeedsHaddock
    -- Check that it has debugging symbols if necessary
| getInstalledSymbols opts && isJust mcache && not (dpSymbols dp) = NeedsSymbols
| otherwise =
case Map.lookup name sourceMap of
Nothing ->
case mloc of
-- The sourceMap has nothing to say about this global
-- package, so we can use it
Nothing -> Allowed
Just ExtraGlobal -> Allowed
-- For non-global packages, don't include unknown packages.
-- See:
-- https://github.com/commercialhaskell/stack/issues/292
Just _ -> UnknownPkg
Just pii
| not (checkLocation (piiLocation pii)) -> WrongLocation mloc (piiLocation pii)
| version /= piiVersion pii -> WrongVersion version (piiVersion pii)
| otherwise -> Allowed
where
PackageIdentifier name version = dpPackageIdent dp
-- Ensure that the installed location matches where the sourceMap says it
-- should be installed
checkLocation Snap = mloc /= Just (InstalledTo Local) -- we can allow either global or snap
checkLocation Local = mloc == Just (InstalledTo Local) || mloc == Just ExtraGlobal -- 'locally' installed snapshot packages can come from extra dbs
data LoadHelper = LoadHelper
{ lhId :: !GhcPkgId
, lhDeps :: ![GhcPkgId]
, lhPair :: !(PackageName, (InstallLocation, Installed))
}
deriving Show
toLoadHelper :: Maybe InstalledPackageLocation -> DumpPackage Bool Bool Bool -> LoadHelper
toLoadHelper mloc dp = LoadHelper
{ lhId = gid
, lhDeps =
-- We always want to consider the wired in packages as having all
-- of their dependencies installed, since we have no ability to
-- reinstall them. This is especially important for using different
-- minor versions of GHC, where the dependencies of wired-in
-- packages may change slightly and therefore not match the
-- snapshot.
if name `HashSet.member` wiredInPackages
then []
else dpDepends dp
, lhPair = (name, (toPackageLocation mloc, Library ident gid))
}
where
gid = dpGhcPkgId dp
ident@(PackageIdentifier name _) = dpPackageIdent dp
toPackageLocation :: Maybe InstalledPackageLocation -> InstallLocation
toPackageLocation Nothing = Snap
toPackageLocation (Just ExtraGlobal) = Snap
toPackageLocation (Just (InstalledTo loc)) = loc
| martin-kolinek/stack | src/Stack/Build/Installed.hs | bsd-3-clause | 12,424 | 0 | 19 | 3,755 | 2,528 | 1,324 | 1,204 | 239 | 7 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- | Report page view.
module HL.V.Report where
import HL.V
import HL.V.Template
-- | Report view.
reportV :: Int -> FilePath -> Html -> FromBlaze App
reportV year _ inner =
template
[DocumentationR
,ReportHomeR year]
"Report"
(\_ ->
container
(row
(span12 inner)))
| chrisdone/hl | src/HL/V/Report.hs | bsd-3-clause | 386 | 0 | 12 | 100 | 93 | 52 | 41 | 15 | 1 |
module Problem6Tests
( problem6Tests
) where
import Test.HUnit
import Problem6
testSumSquareDifference :: Test
testSumSquareDifference = TestCase $ do
  assertEqual "(solution 1) difference for sums of sequences up to 10" 2640 (sumSquareDifference 10)
  assertEqual "(solution 2) difference for sums of sequences up to 10" 2640 (sumSquareDifference' 10)
problem6Tests :: Test
problem6Tests = TestList [testSumSquareDifference]
| candidtim/euler | test/Problem6Tests.hs | bsd-3-clause | 416 | 0 | 10 | 65 | 74 | 39 | 35 | 9 | 1 |
-- | Keeps some general functions, not related to compilers
module Toy.Util
( module M
, (>:)
) where
import Toy.Util.Bits as M
import Toy.Util.Error as M
import Toy.Util.Instances as M ()
import Toy.Util.Parsable as M
-- | Convenient alias for creating tuples.
(>:) :: a -> b -> (a, b)
(>:) = (,)
infixr 0 >:
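-- For example (added illustration): @"answer" >: (42 :: Int)@ is just @("answer", 42)@.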
| Martoon-00/toy-compiler | src/Toy/Util.hs | bsd-3-clause | 384 | 0 | 7 | 131 | 90 | 63 | 27 | 10 | 1 |
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[SimplUtils]{The simplifier utilities}
-}
{-# LANGUAGE CPP #-}
module SimplUtils (
-- Rebuilding
mkLam, mkCase, prepareAlts, tryEtaExpandRhs,
-- Inlining,
preInlineUnconditionally, postInlineUnconditionally,
activeUnfolding, activeRule,
getUnfoldingInRuleMatch,
simplEnvForGHCi, updModeForStableUnfoldings, updModeForRules,
-- The continuation type
SimplCont(..), DupFlag(..), StaticEnv,
isSimplified, contIsStop,
contIsDupable, contResultType, contHoleType,
contIsTrivial, contArgs,
countArgs,
mkBoringStop, mkRhsStop, mkLazyArgStop, contIsRhsOrArg,
interestingCallContext,
-- ArgInfo
ArgInfo(..), ArgSpec(..), mkArgInfo,
addValArgTo, addCastTo, addTyArgTo,
argInfoExpr, argInfoAppArgs, pushSimplifiedArgs,
abstractFloats,
-- Utilities
isExitJoinId
) where
#include "HsVersions.h"
import GhcPrelude
import SimplEnv
import CoreMonad ( SimplMode(..), Tick(..) )
import DynFlags
import CoreSyn
import qualified CoreSubst
import PprCore
import TyCoPpr ( pprParendType )
import CoreFVs
import CoreUtils
import CoreArity
import CoreUnfold
import Name
import Id
import IdInfo
import Var
import Demand
import SimplMonad
import Type hiding( substTy )
import Coercion hiding( substCo )
import DataCon ( dataConWorkId, isNullaryRepDataCon )
import VarSet
import BasicTypes
import Util
import OrdList ( isNilOL )
import MonadUtils
import Outputable
import PrelRules
import FastString ( fsLit )
import Control.Monad ( when )
import Data.List ( sortBy )
{-
************************************************************************
* *
The SimplCont and DupFlag types
* *
************************************************************************
A SimplCont allows the simplifier to traverse the expression in a
zipper-like fashion. The SimplCont represents the rest of the expression,
"above" the point of interest.
You can also think of a SimplCont as an "evaluation context", using
that term in the way it is used for operational semantics. This is the
way I usually think of it, For example you'll often see a syntax for
evaluation context looking like
C ::= [] | C e | case C of alts | C `cast` co
That's the kind of thing we are doing here, and I use that syntax in
the comments.
Key points:
* A SimplCont describes a *strict* context (just like
evaluation contexts do). E.g. Just [] is not a SimplCont
* A SimplCont describes a context that *does not* bind
any variables. E.g. \x. [] is not a SimplCont
-}
data SimplCont
= Stop -- Stop[e] = e
OutType -- Type of the <hole>
CallCtxt -- Tells if there is something interesting about
-- the context, and hence the inliner
-- should be a bit keener (see interestingCallContext)
-- Specifically:
-- This is an argument of a function that has RULES
-- Inlining the call might allow the rule to fire
-- Never ValAppCxt (use ApplyToVal instead)
-- or CaseCtxt (use Select instead)
| CastIt -- (CastIt co K)[e] = K[ e `cast` co ]
OutCoercion -- The coercion simplified
-- Invariant: never an identity coercion
SimplCont
| ApplyToVal -- (ApplyToVal arg K)[e] = K[ e arg ]
{ sc_dup :: DupFlag -- See Note [DupFlag invariants]
, sc_arg :: InExpr -- The argument,
, sc_env :: StaticEnv -- see Note [StaticEnv invariant]
, sc_cont :: SimplCont }
| ApplyToTy -- (ApplyToTy ty K)[e] = K[ e ty ]
{ sc_arg_ty :: OutType -- Argument type
, sc_hole_ty :: OutType -- Type of the function, presumably (forall a. blah)
-- See Note [The hole type in ApplyToTy]
, sc_cont :: SimplCont }
| Select -- (Select alts K)[e] = K[ case e of alts ]
{ sc_dup :: DupFlag -- See Note [DupFlag invariants]
, sc_bndr :: InId -- case binder
, sc_alts :: [InAlt] -- Alternatives
, sc_env :: StaticEnv -- See Note [StaticEnv invariant]
, sc_cont :: SimplCont }
  -- The two strict forms carry a DupFlag as well (see Note [DupFlag invariants])
| StrictBind -- (StrictBind x xs b K)[e] = let x = e in K[\xs.b]
-- or, equivalently, = K[ (\x xs.b) e ]
{ sc_dup :: DupFlag -- See Note [DupFlag invariants]
, sc_bndr :: InId
, sc_bndrs :: [InBndr]
, sc_body :: InExpr
, sc_env :: StaticEnv -- See Note [StaticEnv invariant]
, sc_cont :: SimplCont }
| StrictArg -- (StrictArg (f e1 ..en) K)[e] = K[ f e1 .. en e ]
{ sc_dup :: DupFlag -- Always Simplified or OkToDup
, sc_fun :: ArgInfo -- Specifies f, e1..en, Whether f has rules, etc
-- plus strictness flags for *further* args
, sc_cci :: CallCtxt -- Whether *this* argument position is interesting
, sc_cont :: SimplCont }
| TickIt -- (TickIt t K)[e] = K[ tick t e ]
(Tickish Id) -- Tick tickish <hole>
SimplCont
type StaticEnv = SimplEnv -- Just the static part is relevant
data DupFlag = NoDup -- Unsimplified, might be big
| Simplified -- Simplified
| OkToDup -- Simplified and small
isSimplified :: DupFlag -> Bool
isSimplified NoDup = False
isSimplified _ = True -- Invariant: the subst-env is empty
perhapsSubstTy :: DupFlag -> StaticEnv -> Type -> Type
perhapsSubstTy dup env ty
| isSimplified dup = ty
| otherwise = substTy env ty
{- Note [StaticEnv invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We pair up an InExpr or InAlts with a StaticEnv, which establishes the
lexical scope for that InExpr. When we simplify that InExpr/InAlts, we
use
- Its captured StaticEnv
- Overriding its InScopeSet with the larger one at the
simplification point.
Why override the InScopeSet? Example:
(let y = ey in f) ex
By the time we simplify ex, 'y' will be in scope.
However the InScopeSet in the StaticEnv is not irrelevant: it should
include all the free vars of applying the substitution to the InExpr.
Reason: contHoleType uses perhapsSubstTy to apply the substitution to
the expression, and that (rightly) gives ASSERT failures if the InScopeSet
isn't big enough.
Note [DupFlag invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~
In both (ApplyToVal dup _ env k)
and (Select dup _ _ env k)
the following invariants hold
(a) if dup = OkToDup, then continuation k is also ok-to-dup
(b) if dup = OkToDup or Simplified, the subst-env is empty
(and and hence no need to re-simplify)
-}
instance Outputable DupFlag where
ppr OkToDup = text "ok"
ppr NoDup = text "nodup"
ppr Simplified = text "simpl"
instance Outputable SimplCont where
ppr (Stop ty interesting) = text "Stop" <> brackets (ppr interesting) <+> ppr ty
ppr (CastIt co cont ) = (text "CastIt" <+> pprOptCo co) $$ ppr cont
ppr (TickIt t cont) = (text "TickIt" <+> ppr t) $$ ppr cont
ppr (ApplyToTy { sc_arg_ty = ty, sc_cont = cont })
= (text "ApplyToTy" <+> pprParendType ty) $$ ppr cont
ppr (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_cont = cont })
= (text "ApplyToVal" <+> ppr dup <+> pprParendExpr arg)
$$ ppr cont
ppr (StrictBind { sc_bndr = b, sc_cont = cont })
= (text "StrictBind" <+> ppr b) $$ ppr cont
ppr (StrictArg { sc_fun = ai, sc_cont = cont })
= (text "StrictArg" <+> ppr (ai_fun ai)) $$ ppr cont
ppr (Select { sc_dup = dup, sc_bndr = bndr, sc_alts = alts, sc_env = se, sc_cont = cont })
= (text "Select" <+> ppr dup <+> ppr bndr) $$
whenPprDebug (nest 2 $ vcat [ppr (seTvSubst se), ppr alts]) $$ ppr cont
{- Note [The hole type in ApplyToTy]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The sc_hole_ty field of ApplyToTy records the type of the "hole" in the
continuation. It is absolutely necessary to compute contHoleType, but it is
not used for anything else (and hence may not be evaluated).
Why is it necessary for contHoleType? Consider the continuation
ApplyToType Int (Stop Int)
corresponding to
(<hole> @Int) :: Int
What is the type of <hole>? It could be (forall a. Int) or (forall a. a),
and there is no way to know which, so we must record it.
In a chain of applications (f @t1 @t2 @t3) we'll lazily compute exprType
for (f @t1) and (f @t1 @t2), which is potentially non-linear; but it probably
doesn't matter because we'll never compute them all.
************************************************************************
* *
ArgInfo and ArgSpec
* *
************************************************************************
-}
data ArgInfo
= ArgInfo {
ai_fun :: OutId, -- The function
ai_args :: [ArgSpec], -- ...applied to these args (which are in *reverse* order)
ai_type :: OutType, -- Type of (f a1 ... an)
ai_rules :: FunRules, -- Rules for this function
ai_encl :: Bool, -- Flag saying whether this function
-- or an enclosing one has rules (recursively)
-- True => be keener to inline in all args
ai_strs :: [Bool], -- Strictness of remaining arguments
-- Usually infinite, but if it is finite it guarantees
-- that the function diverges after being given
-- that number of args
ai_discs :: [Int] -- Discounts for remaining arguments; non-zero => be keener to inline
-- Always infinite
}
data ArgSpec
= ValArg OutExpr -- Apply to this (coercion or value); c.f. ApplyToVal
| TyArg { as_arg_ty :: OutType -- Apply to this type; c.f. ApplyToTy
, as_hole_ty :: OutType } -- Type of the function (presumably forall a. blah)
| CastBy OutCoercion -- Cast by this; c.f. CastIt
instance Outputable ArgSpec where
ppr (ValArg e) = text "ValArg" <+> ppr e
ppr (TyArg { as_arg_ty = ty }) = text "TyArg" <+> ppr ty
ppr (CastBy c) = text "CastBy" <+> ppr c
addValArgTo :: ArgInfo -> OutExpr -> ArgInfo
addValArgTo ai arg = ai { ai_args = ValArg arg : ai_args ai
, ai_type = applyTypeToArg (ai_type ai) arg
, ai_rules = decRules (ai_rules ai) }
addTyArgTo :: ArgInfo -> OutType -> ArgInfo
addTyArgTo ai arg_ty = ai { ai_args = arg_spec : ai_args ai
, ai_type = piResultTy poly_fun_ty arg_ty
, ai_rules = decRules (ai_rules ai) }
where
poly_fun_ty = ai_type ai
arg_spec = TyArg { as_arg_ty = arg_ty, as_hole_ty = poly_fun_ty }
addCastTo :: ArgInfo -> OutCoercion -> ArgInfo
addCastTo ai co = ai { ai_args = CastBy co : ai_args ai
, ai_type = coercionRKind co }
argInfoAppArgs :: [ArgSpec] -> [OutExpr]
argInfoAppArgs [] = []
argInfoAppArgs (CastBy {} : _) = [] -- Stop at a cast
argInfoAppArgs (ValArg e : as) = e : argInfoAppArgs as
argInfoAppArgs (TyArg { as_arg_ty = ty } : as) = Type ty : argInfoAppArgs as
pushSimplifiedArgs :: SimplEnv -> [ArgSpec] -> SimplCont -> SimplCont
pushSimplifiedArgs _env [] k = k
pushSimplifiedArgs env (arg : args) k
= case arg of
TyArg { as_arg_ty = arg_ty, as_hole_ty = hole_ty }
-> ApplyToTy { sc_arg_ty = arg_ty, sc_hole_ty = hole_ty, sc_cont = rest }
ValArg e -> ApplyToVal { sc_arg = e, sc_env = env, sc_dup = Simplified, sc_cont = rest }
CastBy c -> CastIt c rest
where
rest = pushSimplifiedArgs env args k
-- The env has an empty SubstEnv
argInfoExpr :: OutId -> [ArgSpec] -> OutExpr
-- NB: the [ArgSpec] is reversed so that the first arg
-- in the list is the last one in the application
argInfoExpr fun rev_args
= go rev_args
where
go [] = Var fun
go (ValArg a : as) = go as `App` a
go (TyArg { as_arg_ty = ty } : as) = go as `App` Type ty
go (CastBy co : as) = mkCast (go as) co
type FunRules = Maybe (Int, [CoreRule]) -- Remaining rules for this function
-- Nothing => No rules
-- Just (n, rules) => some rules, requiring at least n more type/value args
decRules :: FunRules -> FunRules
decRules (Just (n, rules)) = Just (n-1, rules)
decRules Nothing = Nothing
mkFunRules :: [CoreRule] -> FunRules
mkFunRules [] = Nothing
mkFunRules rs = Just (n_required, rs)
where
n_required = maximum (map ruleArity rs)
{-
************************************************************************
* *
Functions on SimplCont
* *
************************************************************************
-}
mkBoringStop :: OutType -> SimplCont
mkBoringStop ty = Stop ty BoringCtxt
mkRhsStop :: OutType -> SimplCont -- See Note [RHS of lets] in CoreUnfold
mkRhsStop ty = Stop ty RhsCtxt
mkLazyArgStop :: OutType -> CallCtxt -> SimplCont
mkLazyArgStop ty cci = Stop ty cci
-------------------
contIsRhsOrArg :: SimplCont -> Bool
contIsRhsOrArg (Stop {}) = True
contIsRhsOrArg (StrictBind {}) = True
contIsRhsOrArg (StrictArg {}) = True
contIsRhsOrArg _ = False
contIsRhs :: SimplCont -> Bool
contIsRhs (Stop _ RhsCtxt) = True
contIsRhs _ = False
-------------------
contIsStop :: SimplCont -> Bool
contIsStop (Stop {}) = True
contIsStop _ = False
contIsDupable :: SimplCont -> Bool
contIsDupable (Stop {}) = True
contIsDupable (ApplyToTy { sc_cont = k }) = contIsDupable k
contIsDupable (ApplyToVal { sc_dup = OkToDup }) = True -- See Note [DupFlag invariants]
contIsDupable (Select { sc_dup = OkToDup }) = True -- ...ditto...
contIsDupable (StrictArg { sc_dup = OkToDup }) = True -- ...ditto...
contIsDupable (CastIt _ k) = contIsDupable k
contIsDupable _ = False
-------------------
contIsTrivial :: SimplCont -> Bool
contIsTrivial (Stop {}) = True
contIsTrivial (ApplyToTy { sc_cont = k }) = contIsTrivial k
contIsTrivial (ApplyToVal { sc_arg = Coercion _, sc_cont = k }) = contIsTrivial k
contIsTrivial (CastIt _ k) = contIsTrivial k
contIsTrivial _ = False
-------------------
contResultType :: SimplCont -> OutType
contResultType (Stop ty _) = ty
contResultType (CastIt _ k) = contResultType k
contResultType (StrictBind { sc_cont = k }) = contResultType k
contResultType (StrictArg { sc_cont = k }) = contResultType k
contResultType (Select { sc_cont = k }) = contResultType k
contResultType (ApplyToTy { sc_cont = k }) = contResultType k
contResultType (ApplyToVal { sc_cont = k }) = contResultType k
contResultType (TickIt _ k) = contResultType k
contHoleType :: SimplCont -> OutType
contHoleType (Stop ty _) = ty
contHoleType (TickIt _ k) = contHoleType k
contHoleType (CastIt co _) = coercionLKind co
contHoleType (StrictBind { sc_bndr = b, sc_dup = dup, sc_env = se })
= perhapsSubstTy dup se (idType b)
contHoleType (StrictArg { sc_fun = ai }) = funArgTy (ai_type ai)
contHoleType (ApplyToTy { sc_hole_ty = ty }) = ty -- See Note [The hole type in ApplyToTy]
contHoleType (ApplyToVal { sc_arg = e, sc_env = se, sc_dup = dup, sc_cont = k })
= mkVisFunTy (perhapsSubstTy dup se (exprType e))
(contHoleType k)
contHoleType (Select { sc_dup = d, sc_bndr = b, sc_env = se })
= perhapsSubstTy d se (idType b)
-------------------
countArgs :: SimplCont -> Int
-- Count all arguments, including types, coercions, and other values
countArgs (ApplyToTy { sc_cont = cont }) = 1 + countArgs cont
countArgs (ApplyToVal { sc_cont = cont }) = 1 + countArgs cont
countArgs _ = 0
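-- Illustrative sketch: seen from f, the call (f @ty x y) gives the
-- continuation ApplyToTy (ApplyToVal x (ApplyToVal y (Stop ...))), for
-- which countArgs returns 3: type and value arguments both count.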
contArgs :: SimplCont -> (Bool, [ArgSummary], SimplCont)
-- Summarises value args, discards type args and coercions
-- The returned continuation of the call is only used to
-- answer questions like "are you interesting?"
contArgs cont
| lone cont = (True, [], cont)
| otherwise = go [] cont
where
lone (ApplyToTy {}) = False -- See Note [Lone variables] in CoreUnfold
lone (ApplyToVal {}) = False
lone (CastIt {}) = False
lone _ = True
go args (ApplyToVal { sc_arg = arg, sc_env = se, sc_cont = k })
= go (is_interesting arg se : args) k
go args (ApplyToTy { sc_cont = k }) = go args k
go args (CastIt _ k) = go args k
go args k = (False, reverse args, k)
is_interesting arg se = interestingArg se arg
-- Do *not* use short-cutting substitution here
-- because we want to get as much IdInfo as possible
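-- Illustrative sketch: for the same call (f @ty x y), contArgs returns
--      (False, [summary-of-x, summary-of-y], k)
-- where k is the continuation after the application; the type argument is
-- dropped.  A lone variable (e.g. just a Stop continuation) instead returns
-- (True, [], cont) without summarising anything.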
-------------------
mkArgInfo :: SimplEnv
-> Id
-> [CoreRule] -- Rules for function
-> Int -- Number of value args
-> SimplCont -- Context of the call
-> ArgInfo
mkArgInfo env fun rules n_val_args call_cont
| n_val_args < idArity fun -- Note [Unsaturated functions]
= ArgInfo { ai_fun = fun, ai_args = [], ai_type = fun_ty
, ai_rules = fun_rules
, ai_encl = False
, ai_strs = vanilla_stricts
, ai_discs = vanilla_discounts }
| otherwise
= ArgInfo { ai_fun = fun, ai_args = [], ai_type = fun_ty
, ai_rules = fun_rules
, ai_encl = interestingArgContext rules call_cont
, ai_strs = arg_stricts
, ai_discs = arg_discounts }
where
fun_ty = idType fun
fun_rules = mkFunRules rules
vanilla_discounts, arg_discounts :: [Int]
vanilla_discounts = repeat 0
arg_discounts = case idUnfolding fun of
CoreUnfolding {uf_guidance = UnfIfGoodArgs {ug_args = discounts}}
-> discounts ++ vanilla_discounts
_ -> vanilla_discounts
vanilla_stricts, arg_stricts :: [Bool]
vanilla_stricts = repeat False
arg_stricts
| not (sm_inline (seMode env))
= vanilla_stricts -- See Note [Do not expose strictness if sm_inline=False]
| otherwise
= add_type_str fun_ty $
case splitStrictSig (idStrictness fun) of
(demands, result_info)
| not (demands `lengthExceeds` n_val_args)
-> -- Enough args, use the strictness given.
-- For bottoming functions we used to pretend that the arg
-- is lazy, so that we don't treat the arg as an
-- interesting context. This avoids substituting
-- top-level bindings for (say) strings into
-- calls to error. But now we are more careful about
-- inlining lone variables, so its ok (see SimplUtils.analyseCont)
if isBotRes result_info then
map isStrictDmd demands -- Finite => result is bottom
else
map isStrictDmd demands ++ vanilla_stricts
| otherwise
-> WARN( True, text "More demands than arity" <+> ppr fun <+> ppr (idArity fun)
<+> ppr n_val_args <+> ppr demands )
vanilla_stricts -- Not enough args, or no strictness
add_type_str :: Type -> [Bool] -> [Bool]
-- If the function arg types are strict, record that in the 'strictness bits'
-- No need to instantiate because unboxed types (which dominate the strict
-- types) can't instantiate type variables.
-- add_type_str is done repeatedly (for each call);
-- might be better once-for-all in the function
-- But beware primops/datacons with no strictness
add_type_str _ [] = []
add_type_str fun_ty all_strs@(str:strs)
| Just (arg_ty, fun_ty') <- splitFunTy_maybe fun_ty -- Add strict-type info
= (str || Just False == isLiftedType_maybe arg_ty)
: add_type_str fun_ty' strs
-- If the type is levity-polymorphic, we can't know whether it's
-- strict. isLiftedType_maybe will return Just False only when
-- we're sure the type is unlifted.
| Just (_, fun_ty') <- splitForAllTy_maybe fun_ty
= add_type_str fun_ty' all_strs -- Look through foralls
| otherwise
= all_strs
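-- Illustrative sketch: for a function of type  Int# -> Bool -> r  with
-- computed strictness [False, True],
--      add_type_str (Int# -> Bool -> r) [False, True]  =  [True, True]
-- because Int# is unlifted (isLiftedType_maybe gives Just False), so the
-- first argument is recorded as strict even though demand analysis did
-- not say so.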
{- Note [Unsaturated functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (test eyeball/inline4)
x = a:as
y = f x
where f has arity 2. Then we do not want to inline 'x', because
it'll just be floated out again. Even if f has lots of discounts
on its first argument -- it must be saturated for these to kick in
Note [Do not expose strictness if sm_inline=False]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#15163 showed a case in which we had
{-# INLINE [1] zip #-}
zip = undefined
{-# RULES "foo" forall as bs. stream (zip as bs) = ..blah... #-}
If we expose zip's bottoming nature when simplifying the LHS of the
RULE we get
{-# RULES "foo" forall as bs.
stream (case zip of {}) = ..blah... #-}
discarding the arguments to zip. Usually this is fine, but on the
LHS of a rule it's not, because 'as' and 'bs' are now not bound on
the LHS.
This is a pretty pathological example, so I'm not losing sleep over
it, but the simplest solution was to check sm_inline; if it is False,
which it is on the LHS of a rule (see updModeForRules), then don't
make use of the strictness info for the function.
-}
{-
************************************************************************
* *
Interesting arguments
* *
************************************************************************
Note [Interesting call context]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to avoid inlining an expression where there can't possibly be
any gain, such as in an argument position. Hence, if the continuation
is interesting (eg. a case scrutinee, application etc.) then we
inline, otherwise we don't.
Previously some_benefit used to return True only if the variable was
applied to some value arguments. This didn't work:
let x = _coerce_ (T Int) Int (I# 3) in
case _coerce_ Int (T Int) x of
I# y -> ....
we want to inline x, but can't see that it's a constructor in a case
scrutinee position, and some_benefit is False.
Another example:
dMonadST = _/\_ t -> :Monad (g1 _@_ t, g2 _@_ t, g3 _@_ t)
.... case dMonadST _@_ x0 of (a,b,c) -> ....
we'd really like to inline dMonadST here, but we *don't* want to
inline if the case expression is just
case x of y { DEFAULT -> ... }
since we can just eliminate this case instead (x is in WHNF). Similar
applies when x is bound to a lambda expression. Hence
contIsInteresting looks for case expressions with just a single
default case.
Note [No case of case is boring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we see
case f x of <alts>
we'd usually treat the context as interesting, to encourage 'f' to
inline. But if case-of-case is off, it's really not so interesting
after all, because we are unlikely to be able to push the case
expression into the branches of any case in f's unfolding. So, to
reduce unnecessary code expansion, we just make the context look boring.
This made a small compile-time perf improvement in perf/compiler/T6048,
and it looks plausible to me.
-}
interestingCallContext :: SimplEnv -> SimplCont -> CallCtxt
-- See Note [Interesting call context]
interestingCallContext env cont
= interesting cont
where
interesting (Select {})
| sm_case_case (getMode env) = CaseCtxt
| otherwise = BoringCtxt
-- See Note [No case of case is boring]
interesting (ApplyToVal {}) = ValAppCtxt
-- Can happen if we have (f Int |> co) y
-- If f has an INLINE prag we need to give it some
-- motivation to inline. See Note [Cast then apply]
-- in CoreUnfold
interesting (StrictArg { sc_cci = cci }) = cci
interesting (StrictBind {}) = BoringCtxt
interesting (Stop _ cci) = cci
interesting (TickIt _ k) = interesting k
interesting (ApplyToTy { sc_cont = k }) = interesting k
interesting (CastIt _ k) = interesting k
-- If this call is the arg of a strict function, the context
-- is a bit interesting. If we inline here, we may get useful
-- evaluation information to avoid repeated evals: e.g.
-- x + (y * z)
-- Here the contIsInteresting makes the '*' keener to inline,
-- which in turn exposes a constructor which makes the '+' inline.
-- Assuming that +,* aren't small enough to inline regardless.
--
-- It's also very important to inline in a strict context for things
-- like
-- foldr k z (f x)
-- Here, the context of (f x) is strict, and if f's unfolding is
-- a build it's *great* to inline it here. So we must ensure that
-- the context for (f x) is not totally uninteresting.
interestingArgContext :: [CoreRule] -> SimplCont -> Bool
-- If the argument has form (f x y), where x,y are boring,
-- and f is marked INLINE, then we don't want to inline f.
-- But if the context of the argument is
-- g (f x y)
-- where g has rules, then we *do* want to inline f, in case it
-- exposes a rule that might fire. Similarly, if the context is
-- h (g (f x x))
-- where h has rules, then we do want to inline f; hence the
-- call_cont argument to interestingArgContext
--
-- The ai_encl flag makes this happen; if it's
-- set, the inliner gets just enough keener to inline f
-- regardless of how boring f's arguments are, if it's marked INLINE
--
-- The alternative would be to *always* inline an INLINE function,
-- regardless of how boring its context is; but that seems overkill
-- For example, it'd mean that wrapper functions were always inlined
--
-- The call_cont passed to interestingArgContext is the context of
-- the call itself, e.g. g <hole> in the example above
interestingArgContext rules call_cont
= notNull rules || enclosing_fn_has_rules
where
enclosing_fn_has_rules = go call_cont
go (Select {}) = False
go (ApplyToVal {}) = False -- Shouldn't really happen
go (ApplyToTy {}) = False -- Ditto
go (StrictArg { sc_cci = cci }) = interesting cci
go (StrictBind {}) = False -- ??
go (CastIt _ c) = go c
go (Stop _ cci) = interesting cci
go (TickIt _ c) = go c
interesting RuleArgCtxt = True
interesting _ = False
{- Note [Interesting arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An argument is interesting if it deserves a discount for unfoldings
with a discount in that argument position. The idea is to avoid
unfolding a function that is applied only to variables that have no
unfolding (i.e. they are probably lambda bound):
    f x y z
There is little point in inlining f here.
Generally, *values* (like (C a b) and (\x.e)) deserve discounts. But
we must look through lets, eg (let x = e in C a b), because the let will
float, exposing the value, if we inline. That makes it different to
exprIsHNF.
Before 2009 we said it was interesting if the argument had *any* structure
at all; i.e. (hasSomeUnfolding v). But that does too much inlining; see #3016.
But we don't regard (f x y) as interesting, unless f is unsaturated.
If it's saturated and f hasn't inlined, then it's probably not going
to now!
Note [Conlike is interesting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f d = ...((*) d x y)...
... f (df d')...
where df is con-like. Then we'd really like to inline 'f' so that the
rule for (*) (df d) can fire. To do this
a) we give a discount for being an argument of a class-op (eg (*) d)
b) we say that a con-like argument (eg (df d)) is interesting
-}
interestingArg :: SimplEnv -> CoreExpr -> ArgSummary
-- See Note [Interesting arguments]
interestingArg env e = go env 0 e
where
-- n is # value args to which the expression is applied
go env n (Var v)
= case substId env v of
DoneId v' -> go_var n v'
DoneEx e _ -> go (zapSubstEnv env) n e
ContEx tvs cvs ids e -> go (setSubstEnv env tvs cvs ids) n e
go _ _ (Lit {}) = ValueArg
go _ _ (Type _) = TrivArg
go _ _ (Coercion _) = TrivArg
go env n (App fn (Type _)) = go env n fn
go env n (App fn _) = go env (n+1) fn
go env n (Tick _ a) = go env n a
go env n (Cast e _) = go env n e
go env n (Lam v e)
| isTyVar v = go env n e
| n>0 = NonTrivArg -- (\x.b) e is NonTriv
| otherwise = ValueArg
go _ _ (Case {}) = NonTrivArg
go env n (Let b e) = case go env' n e of
ValueArg -> ValueArg
_ -> NonTrivArg
where
env' = env `addNewInScopeIds` bindersOf b
go_var n v
      | isConLikeId v                          = ValueArg   -- Experimenting with 'conlike' rather than
-- data constructors here
| idArity v > n = ValueArg -- Catches (eg) primops with arity but no unfolding
| n > 0 = NonTrivArg -- Saturated or unknown call
| conlike_unfolding = ValueArg -- n==0; look for an interesting unfolding
-- See Note [Conlike is interesting]
| otherwise = TrivArg -- n==0, no useful unfolding
where
conlike_unfolding = isConLikeUnfolding (idUnfolding v)
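-- Illustrative sketch of the classification (assuming no substitution is
-- pending for the variables involved):
--      interestingArg env (Lit 3)       = ValueArg
--      interestingArg env (Type ty)     = TrivArg
--      interestingArg env (\x -> e)     = ValueArg    -- a value lambda
--      interestingArg env (Just x)      = ValueArg    -- constructor application
--      interestingArg env (f x)         = NonTrivArg  -- f local, arity 0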
{-
************************************************************************
* *
SimplMode
* *
************************************************************************
The SimplMode controls several switches; see its definition in
CoreMonad
sm_rules :: Bool -- Whether RULES are enabled
sm_inline :: Bool -- Whether inlining is enabled
sm_case_case :: Bool -- Whether case-of-case is enabled
sm_eta_expand :: Bool -- Whether eta-expansion is enabled
-}
simplEnvForGHCi :: DynFlags -> SimplEnv
simplEnvForGHCi dflags
= mkSimplEnv $ SimplMode { sm_names = ["GHCi"]
, sm_phase = InitialPhase
, sm_dflags = dflags
, sm_rules = rules_on
, sm_inline = False
, sm_eta_expand = eta_expand_on
, sm_case_case = True }
where
rules_on = gopt Opt_EnableRewriteRules dflags
eta_expand_on = gopt Opt_DoLambdaEtaExpansion dflags
-- Do not do any inlining, in case we expose some unboxed
-- tuple stuff that confuses the bytecode interpreter
updModeForStableUnfoldings :: Activation -> SimplMode -> SimplMode
-- See Note [Simplifying inside stable unfoldings]
updModeForStableUnfoldings inline_rule_act current_mode
= current_mode { sm_phase = phaseFromActivation inline_rule_act
, sm_inline = True
, sm_eta_expand = False }
-- sm_eta_expand: see Note [No eta expansion in stable unfoldings]
-- For sm_rules, just inherit; sm_rules might be "off"
-- because of -fno-enable-rewrite-rules
where
phaseFromActivation (ActiveAfter _ n) = Phase n
phaseFromActivation _ = InitialPhase
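-- Illustrative sketch: for {-# INLINE [2] f #-} the activation is
-- (ActiveAfter _ 2), so inside f's stable unfolding we simplify with
-- sm_phase = Phase 2 (and sm_inline = True, sm_eta_expand = False);
-- an unconditional {-# INLINE f #-} (AlwaysActive) keeps InitialPhase.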
updModeForRules :: SimplMode -> SimplMode
-- See Note [Simplifying rules]
updModeForRules current_mode
= current_mode { sm_phase = InitialPhase
, sm_inline = False -- See Note [Do not expose strictness if sm_inline=False]
, sm_rules = False
, sm_eta_expand = False }
{- Note [Simplifying rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When simplifying a rule LHS, refrain from /any/ inlining or applying
of other RULES.
Doing anything to the LHS is plain confusing, because it means that what the
rule matches is not what the user wrote. c.f. #10595, and #10528.
Moreover, inlining (or applying rules) on rule LHSs risks introducing
Ticks into the LHS, which makes matching trickier. #10665, #10745.
Doing this to either side confounds tools like HERMIT, which seek to reason
about and apply the RULES as originally written. See #10829.
Note [No eta expansion in stable unfoldings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have a stable unfolding
f :: Ord a => a -> IO ()
-- Unfolding template
-- = /\a \(d:Ord a) (x:a). bla
we do not want to eta-expand to
f :: Ord a => a -> IO ()
-- Unfolding template
-- = (/\a \(d:Ord a) (x:a) (eta:State#). bla eta) |> co
because then specialisation of the overloading doesn't work properly
(see Note [Specialisation shape] in Specialise), #9509.
So we disable eta-expansion in stable unfoldings.
Note [Inlining in gentle mode]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Something is inlined if
(i) the sm_inline flag is on, AND
(ii) the thing has an INLINE pragma, AND
(iii) the thing is inlinable in the earliest phase.
Example of why (iii) is important:
{-# INLINE [~1] g #-}
g = ...
{-# INLINE f #-}
f x = g (g x)
If we were to inline g into f's inlining, then an importing module would
never be able to do
f e --> g (g e) ---> RULE fires
because the stable unfolding for f has had g inlined into it.
On the other hand, it is bad not to do ANY inlining into an
stable unfolding, because then recursive knots in instance declarations
don't get unravelled.
However, *sometimes* SimplGently must do no call-site inlining at all
(hence sm_inline = False). Before full laziness we must be careful
not to inline wrappers, because doing so inhibits floating
e.g. ...(case f x of ...)...
==> ...(case (case x of I# x# -> fw x#) of ...)...
==> ...(case x of I# x# -> case fw x# of ...)...
and now the redex (f x) isn't floatable any more.
The no-inlining thing is also important for Template Haskell. You might be
compiling in one-shot mode with -O2; but when TH compiles a splice before
running it, we don't want to use -O2. Indeed, we don't want to inline
anything, because the byte-code interpreter might get confused about
unboxed tuples and suchlike.
Note [Simplifying inside stable unfoldings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must take care with simplification inside stable unfoldings (which come from
INLINE pragmas).
First, consider the following example
let f = \pq -> BIG
in
let g = \y -> f y y
{-# INLINE g #-}
in ...g...g...g...g...g...
Now, if that's the ONLY occurrence of f, it might be inlined inside g,
and thence copied multiple times when g is inlined. HENCE we treat
any occurrence in a stable unfolding as a multiple occurrence, not a single
one; see OccurAnal.addRuleUsage.
Second, we *do* want to do some modest rules/inlining stuff in stable
unfoldings, partly to eliminate senseless crap, and partly to break
the recursive knots generated by instance declarations.
However, suppose we have
{-# INLINE <act> f #-}
f = <rhs>
meaning "inline f in phases p where activation <act>(p) holds".
Then what inlinings/rules can we apply to the copy of <rhs> captured in
f's stable unfolding? Our model is that literally <rhs> is substituted for
f when it is inlined. So our conservative plan (implemented by
updModeForStableUnfoldings) is this:
-------------------------------------------------------------
When simplifying the RHS of a stable unfolding, set the phase
to the phase in which the stable unfolding first becomes active
-------------------------------------------------------------
That ensures that
a) Rules/inlinings that *cease* being active before p will
not apply to the stable unfolding, consistent with it being
inlined in its *original* form in phase p.
b) Rules/inlinings that only become active *after* p will
not apply to the stable unfolding, again to be consistent with
inlining the *original* rhs in phase p.
For example,
{-# INLINE f #-}
f x = ...g...
{-# NOINLINE [1] g #-}
g y = ...
{-# RULE h g = ... #-}
Here we must not inline g into f's RHS, even when we get to phase 0,
because when f is later inlined into some other module we want the
rule for h to fire.
Similarly, consider
{-# INLINE f #-}
f x = ...g...
g y = ...
and suppose that there are auto-generated specialisations and a strictness
wrapper for g. The specialisations get activation AlwaysActive, and the
strictness wrapper gets activation (ActiveAfter 0). So the strictness
wrapper fails the test and won't be inlined into f's stable unfolding. That
means f can inline, expose the specialised call to g, so the specialisation
rules can fire.
A note about wrappers
~~~~~~~~~~~~~~~~~~~~~
It's also important not to inline a worker back into a wrapper.
A wrapper looks like
        wrapper = inline_me (\x -> ...worker... )
Normally, the inline_me prevents the worker getting inlined into
the wrapper (initially, the worker's only call site!). But,
if the wrapper is sure to be called, the strictness analyser will
mark it 'demanded', so when the RHS is simplified, it'll get an ArgOf
continuation.
-}
activeUnfolding :: SimplMode -> Id -> Bool
activeUnfolding mode id
| isCompulsoryUnfolding (realIdUnfolding id)
= True -- Even sm_inline can't override compulsory unfoldings
| otherwise
= isActive (sm_phase mode) (idInlineActivation id)
&& sm_inline mode
-- `or` isStableUnfolding (realIdUnfolding id)
-- Inline things when
-- (a) they are active
-- (b) sm_inline says so, except that for stable unfoldings
-- (ie pragmas) we inline anyway
getUnfoldingInRuleMatch :: SimplEnv -> InScopeEnv
-- When matching in RULE, we want to "look through" an unfolding
-- (to see a constructor) if *rules* are on, even if *inlinings*
-- are not. A notable example is DFuns, which really we want to
-- match in rules like (op dfun) in gentle mode. Another example
-- is 'otherwise' which we want exprIsConApp_maybe to be able to
-- see very early on
getUnfoldingInRuleMatch env
= (in_scope, id_unf)
where
in_scope = seInScope env
mode = getMode env
id_unf id | unf_is_active id = idUnfolding id
| otherwise = NoUnfolding
unf_is_active id
| not (sm_rules mode) = -- active_unfolding_minimal id
isStableUnfolding (realIdUnfolding id)
-- Do we even need to test this? I think this InScopeEnv
-- is only consulted if activeRule returns True, which
-- never happens if sm_rules is False
| otherwise = isActive (sm_phase mode) (idInlineActivation id)
----------------------
activeRule :: SimplMode -> Activation -> Bool
-- Nothing => No rules at all
activeRule mode
| not (sm_rules mode) = \_ -> False -- Rewriting is off
| otherwise = isActive (sm_phase mode)
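-- Illustrative sketch:
--      activeRule (mode { sm_rules = False }) act = False   -- rewriting off
--      activeRule mode AlwaysActive               = True    -- if sm_rules is on
--      activeRule mode NeverActive                = False
-- otherwise the rule's activation is compared against sm_phase.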
{-
************************************************************************
* *
preInlineUnconditionally
* *
************************************************************************
preInlineUnconditionally
~~~~~~~~~~~~~~~~~~~~~~~~
@preInlineUnconditionally@ examines a bndr to see if it is used just
once in a completely safe way, so that it is safe to discard the
binding and inline its RHS at the (unique) usage site, REGARDLESS of how
big the RHS might be. If this is the case we don't simplify the RHS
first, but just inline it un-simplified.
This is much better than first simplifying a perhaps-huge RHS and then
inlining and re-simplifying it. Indeed, it can be at least quadratically
better. Consider
x1 = e1
x2 = e2[x1]
x3 = e3[x2]
...etc...
xN = eN[xN-1]
We may end up simplifying e1 N times, e2 N-1 times, e3 N-2 times etc.
This can happen with cascades of functions too:
f1 = \x1.e1
f2 = \xs.e2[f1]
  f3 = \xs.e3[f2]
...etc...
THE MAIN INVARIANT is this:
---- preInlineUnconditionally invariant -----
IF preInlineUnconditionally chooses to inline x = <rhs>
THEN doing the inlining should not change the occurrence
info for the free vars of <rhs>
----------------------------------------------
For example, it's tempting to look at trivial binding like
x = y
and inline it unconditionally. But suppose x is used many times,
but this is the unique occurrence of y. Then inlining x would change
y's occurrence info, which breaks the invariant. It matters: y
might have a BIG rhs, which will now be dup'd at every occurrence of x.
Even RHSs labelled InlineMe aren't caught here, because there might be
no benefit from inlining at the call site.
[Sept 01] Don't unconditionally inline a top-level thing, because that
can simply make a static thing into something built dynamically. E.g.
x = (a,b)
main = \s -> h x
[Remember that we treat \s as a one-shot lambda.] No point in
inlining x unless there is something interesting about the call site.
But watch out: if you aren't careful, some useful foldr/build fusion
can be lost (most notably in spectral/hartel/parstof) because the
foldr didn't see the build. Doing the dynamic allocation isn't a big
deal, in fact, but losing the fusion can be. But the right thing here
seems to be to do a callSiteInline based on the fact that there is
something interesting about the call site (it's strict). Hmm. That
seems a bit fragile.
Conclusion: inline top level things gaily until Phase 0 (the last
phase), at which point don't.
Note [pre/postInlineUnconditionally in gentle mode]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Even in gentle mode we want to do preInlineUnconditionally. The
reason is that too little clean-up happens if you don't inline
use-once things. Also a bit of inlining is *good* for full laziness;
it can expose constant sub-expressions. Example in
spectral/mandel/Mandel.hs, where the mandelset function gets a useful
let-float if you inline windowToViewport
However, as usual for Gentle mode, do not inline things that are
inactive in the initial stages. See Note [Gentle mode].
Note [Stable unfoldings and preInlineUnconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Surprisingly, do not pre-inline-unconditionally Ids with INLINE pragmas!
Example
{-# INLINE f #-}
f :: Eq a => a -> a
f x = ...
fInt :: Int -> Int
fInt = f Int dEqInt
...fInt...fInt...fInt...
Here f occurs just once, in the RHS of fInt. But if we inline it there
it might make fInt look big, and we'll lose the opportunity to inline f
at each of fInt's call sites. The INLINE pragma will only inline when
the application is saturated for exactly this reason; and we don't
want PreInlineUnconditionally to second-guess it. A live example is
#3736.
c.f. Note [Stable unfoldings and postInlineUnconditionally]
NB: if the pragma is INLINEABLE, then we don't want to behave in
this special way -- an INLINEABLE pragma just says to GHC "inline this
if you like". But if there is a unique occurrence, we want to inline
the stable unfolding, not the RHS.
Note [Top-level bottoming Ids]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Don't inline top-level Ids that are bottoming, even if they are used just
once, because FloatOut has gone to some trouble to extract them out.
Inlining them won't make the program run faster!
Note [Do not inline CoVars unconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Coercion variables appear inside coercions, and the RHS of a let-binding
is a term (not a coercion) so we can't necessarily inline the latter in
the former.
-}
preInlineUnconditionally
:: SimplEnv -> TopLevelFlag -> InId
-> InExpr -> StaticEnv -- These two go together
-> Maybe SimplEnv -- Returned env has extended substitution
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
-- Reason: we don't want to inline single uses, or discard dead bindings,
-- for unlifted, side-effect-ful bindings
preInlineUnconditionally env top_lvl bndr rhs rhs_env
| not pre_inline_unconditionally = Nothing
| not active = Nothing
| isTopLevel top_lvl && isBottomingId bndr = Nothing -- Note [Top-level bottoming Ids]
| isCoVar bndr = Nothing -- Note [Do not inline CoVars unconditionally]
| isExitJoinId bndr = Nothing -- Note [Do not inline exit join points]
-- in module Exitify
| not (one_occ (idOccInfo bndr)) = Nothing
| not (isStableUnfolding unf) = Just (extend_subst_with rhs)
-- Note [Stable unfoldings and preInlineUnconditionally]
| isInlinablePragma inline_prag
, Just inl <- maybeUnfoldingTemplate unf = Just (extend_subst_with inl)
| otherwise = Nothing
where
unf = idUnfolding bndr
extend_subst_with inl_rhs = extendIdSubst env bndr (mkContEx rhs_env inl_rhs)
one_occ IAmDead = True -- Happens in ((\x.1) v)
one_occ OneOcc{ occ_one_br = InOneBranch
, occ_in_lam = NotInsideLam } = isNotTopLevel top_lvl || early_phase
one_occ OneOcc{ occ_one_br = InOneBranch
, occ_in_lam = IsInsideLam
, occ_int_cxt = IsInteresting } = canInlineInLam rhs
one_occ _ = False
pre_inline_unconditionally = gopt Opt_SimplPreInlining (seDynFlags env)
mode = getMode env
active = isActive (sm_phase mode) (inlinePragmaActivation inline_prag)
-- See Note [pre/postInlineUnconditionally in gentle mode]
inline_prag = idInlinePragma bndr
-- Be very careful before inlining inside a lambda, because (a) we must not
-- invalidate occurrence information, and (b) we want to avoid pushing a
-- single allocation (here) into multiple allocations (inside lambda).
-- Inlining a *function* with a single *saturated* call would be ok, mind you.
-- || (if is_cheap && not (canInlineInLam rhs) then pprTrace "preinline" (ppr bndr <+> ppr rhs) ok else ok)
-- where
-- is_cheap = exprIsCheap rhs
-- ok = is_cheap && int_cxt
-- int_cxt The context isn't totally boring
-- E.g. let f = \ab.BIG in \y. map f xs
-- Don't want to substitute for f, because then we allocate
-- its closure every time the \y is called
-- But: let f = \ab.BIG in \y. map (f y) xs
-- Now we do want to substitute for f, even though it's not
-- saturated, because we're going to allocate a closure for
-- (f y) every time round the loop anyhow.
-- canInlineInLam => free vars of rhs are (Once in_lam) or Many,
-- so substituting rhs inside a lambda doesn't change the occ info.
-- Sadly, not quite the same as exprIsHNF.
canInlineInLam (Lit _) = True
canInlineInLam (Lam b e) = isRuntimeVar b || canInlineInLam e
canInlineInLam (Tick t e) = not (tickishIsCode t) && canInlineInLam e
canInlineInLam _ = False
-- not ticks. Counting ticks cannot be duplicated, and non-counting
-- ticks around a Lam will disappear anyway.
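-- Illustrative sketch:
--      canInlineInLam (Lit 1)       = True
--      canInlineInLam (\x -> BIG)   = True    -- x is a runtime binder
--      canInlineInLam (/\a -> f a)  = False   -- only a type lambda over an app
--      canInlineInLam (f y)         = False   -- would duplicate work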
early_phase = case sm_phase mode of
Phase 0 -> False
_ -> True
-- If we don't have this early_phase test, consider
-- x = length [1,2,3]
-- The full laziness pass carefully floats all the cons cells to
-- top level, and preInlineUnconditionally floats them all back in.
-- Result is (a) static allocation replaced by dynamic allocation
-- (b) many simplifier iterations because this tickles
-- a related problem; only one inlining per pass
--
-- On the other hand, I have seen cases where top-level fusion is
-- lost if we don't inline top level thing (e.g. string constants)
-- Hence the test for phase zero (which is the phase for all the final
-- simplifications). Until phase zero we take no special notice of
-- top level things, but then we become more leery about inlining
-- them.
{-
************************************************************************
* *
postInlineUnconditionally
* *
************************************************************************
postInlineUnconditionally
~~~~~~~~~~~~~~~~~~~~~~~~~
@postInlineUnconditionally@ decides whether to unconditionally inline
a thing based on the form of its RHS; in particular if it has a
trivial RHS. If so, we can inline and discard the binding altogether.
NB: a loop breaker has must_keep_binding = True and non-loop-breakers
only have *forward* references. Hence, it's safe to discard the binding
NOTE: This isn't our last opportunity to inline. We're at the binding
site right now, and we'll get another opportunity when we get to the
occurrence(s)
Note that we do this unconditional inlining only for trivial RHSs.
Don't inline even WHNFs inside lambdas; doing so may simply increase
allocation when the function is called. This isn't the last chance; see
NOTE above.
NB: Even inline pragmas (e.g. IMustBeINLINEd) are ignored here Why?
Because we don't even want to inline them into the RHS of constructor
arguments. See NOTE above
NB: At one time even NOINLINE was ignored here: if the rhs is trivial
it's best to inline it anyway. We often get a=E; b=a from desugaring,
with both a and b marked NOINLINE. But that seems incompatible with
our new view that inlining is like a RULE, so I'm sticking to the 'active'
story for now.
-}
postInlineUnconditionally
:: SimplEnv -> TopLevelFlag
-> OutId -- The binder (*not* a CoVar), including its unfolding
-> OccInfo -- From the InId
-> OutExpr
-> Bool
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
-- Reason: we don't want to inline single uses, or discard dead bindings,
-- for unlifted, side-effect-ful bindings
postInlineUnconditionally env top_lvl bndr occ_info rhs
| not active = False
| isWeakLoopBreaker occ_info = False -- If it's a loop-breaker of any kind, don't inline
-- because it might be referred to "earlier"
| isStableUnfolding unfolding = False -- Note [Stable unfoldings and postInlineUnconditionally]
| isTopLevel top_lvl = False -- Note [Top level and postInlineUnconditionally]
| exprIsTrivial rhs = True
| otherwise
= case occ_info of
-- The point of examining occ_info here is that for *non-values*
-- that occur outside a lambda, the call-site inliner won't have
-- a chance (because it doesn't know that the thing
-- only occurs once). The pre-inliner won't have gotten
-- it either, if the thing occurs in more than one branch
-- So the main target is things like
-- let x = f y in
-- case v of
-- True -> case x of ...
-- False -> case x of ...
-- This is very important in practice; e.g. wheel-seive1 doubles
-- in allocation if you miss this out
OneOcc { occ_in_lam = in_lam, occ_int_cxt = int_cxt }
-- OneOcc => no code-duplication issue
-> smallEnoughToInline dflags unfolding -- Small enough to dup
-- ToDo: consider discount on smallEnoughToInline if int_cxt is true
--
-- NB: Do NOT inline arbitrarily big things, even if one_br is True
-- Reason: doing so risks exponential behaviour. We simplify a big
-- expression, inline it, and simplify it again. But if the
-- very same thing happens in the big expression, we get
-- exponential cost!
-- PRINCIPLE: when we've already simplified an expression once,
-- make sure that we only inline it if it's reasonably small.
&& (in_lam == NotInsideLam ||
-- Outside a lambda, we want to be reasonably aggressive
-- about inlining into multiple branches of case
-- e.g. let x = <non-value>
-- in case y of { C1 -> ..x..; C2 -> ..x..; C3 -> ... }
-- Inlining can be a big win if C3 is the hot-spot, even if
-- the uses in C1, C2 are not 'interesting'
-- An example that gets worse if you add int_cxt here is 'clausify'
(isCheapUnfolding unfolding && int_cxt == IsInteresting))
-- isCheap => acceptable work duplication; in_lam may be true
-- int_cxt to prevent us inlining inside a lambda without some
-- good reason. See the notes on int_cxt in preInlineUnconditionally
IAmDead -> True -- This happens; for example, the case_bndr during case of
-- known constructor: case (a,b) of x { (p,q) -> ... }
-- Here x isn't mentioned in the RHS, so we don't want to
-- create the (dead) let-binding let x = (a,b) in ...
_ -> False
-- Here's an example that we don't handle well:
-- let f = if b then Left (\x.BIG) else Right (\y.BIG)
-- in \y. ....case f of {...} ....
-- Here f is used just once, and duplicating the case work is fine (exprIsCheap).
-- But
-- - We can't preInlineUnconditionally because that would invalidate
-- the occ info for b.
-- - We can't postInlineUnconditionally because the RHS is big, and
-- that risks exponential behaviour
-- - We can't call-site inline, because the rhs is big
-- Alas!
where
unfolding = idUnfolding bndr
dflags = seDynFlags env
active = isActive (sm_phase (getMode env)) (idInlineActivation bndr)
-- See Note [pre/postInlineUnconditionally in gentle mode]
{-
Note [Top level and postInlineUnconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't do postInlineUnconditionally for top-level things (even for
ones that are trivial):
* Doing so will inline top-level error expressions that have been
carefully floated out by FloatOut. More generally, it might
replace static allocation with dynamic.
* Even for trivial expressions there's a problem. Consider
{-# RULE "foo" forall (xs::[T]). reverse xs = ruggle xs #-}
blah xs = reverse xs
ruggle = sort
In one simplifier pass we might fire the rule, getting
blah xs = ruggle xs
but in *that* simplifier pass we must not do postInlineUnconditionally
on 'ruggle' because then we'll have an unbound occurrence of 'ruggle'
If the rhs is trivial it'll be inlined by callSiteInline, and then
the binding will be dead and discarded by the next use of OccurAnal
* There is less point, because the main goal is to get rid of local
bindings used in multiple case branches.
* The inliner should inline trivial things at call sites anyway.
* The Id might be exported. We could check for that separately,
but since we aren't going to postInlineUnconditionally /any/
top-level bindings, we don't need to test.
Note [Stable unfoldings and postInlineUnconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Do not do postInlineUnconditionally if the Id has a stable unfolding,
otherwise we lose the unfolding. Example
-- f has stable unfolding with rhs (e |> co)
-- where 'e' is big
f = e |> co
Then there's a danger we'll optimise to
f' = e
f = f' |> co
and now postInlineUnconditionally, losing the stable unfolding on f. Now f'
won't inline because 'e' is too big.
c.f. Note [Stable unfoldings and preInlineUnconditionally]
************************************************************************
* *
Rebuilding a lambda
* *
************************************************************************
-}
mkLam :: SimplEnv -> [OutBndr] -> OutExpr -> SimplCont -> SimplM OutExpr
-- mkLam tries three things
--      a) eta reduction, if that gives a trivial expression
--      b) eta expansion [only if there are some value lambdas]
--      c) floating casts out of the lambda body (see Note [Casts and lambdas])
mkLam _env [] body _cont
= return body
mkLam env bndrs body cont
= do { dflags <- getDynFlags
; mkLam' dflags bndrs body }
where
mkLam' :: DynFlags -> [OutBndr] -> OutExpr -> SimplM OutExpr
mkLam' dflags bndrs (Cast body co)
| not (any bad bndrs)
-- Note [Casts and lambdas]
= do { lam <- mkLam' dflags bndrs body
; return (mkCast lam (mkPiCos Representational bndrs co)) }
where
co_vars = tyCoVarsOfCo co
bad bndr = isCoVar bndr && bndr `elemVarSet` co_vars
mkLam' dflags bndrs body@(Lam {})
= mkLam' dflags (bndrs ++ bndrs1) body1
where
(bndrs1, body1) = collectBinders body
mkLam' dflags bndrs (Tick t expr)
| tickishFloatable t
= mkTick t <$> mkLam' dflags bndrs expr
mkLam' dflags bndrs body
| gopt Opt_DoEtaReduction dflags
, Just etad_lam <- tryEtaReduce bndrs body
= do { tick (EtaReduction (head bndrs))
; return etad_lam }
| not (contIsRhs cont) -- See Note [Eta-expanding lambdas]
, sm_eta_expand (getMode env)
, any isRuntimeVar bndrs
, let body_arity = exprEtaExpandArity dflags body
, body_arity > 0
= do { tick (EtaExpansion (head bndrs))
; let res = mkLams bndrs (etaExpand body_arity body)
; traceSmpl "eta expand" (vcat [text "before" <+> ppr (mkLams bndrs body)
, text "after" <+> ppr res])
; return res }
| otherwise
= return (mkLams bndrs body)
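-- Illustrative sketch: with Opt_DoEtaReduction on,
--      mkLam env [x] (f x) cont   ==>   f
-- (eta reduction; x must not occur free in f).  If instead the body can use
-- more value arguments, sm_eta_expand is on, and we are not on the RHS of a
-- let, the lambda is eta-expanded with fresh binders.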
{-
Note [Eta-expanding lambdas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general we *do* want to eta-expand lambdas. Consider
f (\x -> case x of (a,b) -> \s -> blah)
where 's' is a state token, and hence can be eta expanded. This
showed up in the code for GHC.IO.Handle.Text.hPutChar, a rather
important function!
The eta-expansion will never happen unless we do it now. (Well, it's
possible that CorePrep will do it, but CorePrep only has a half-baked
eta-expander that can't deal with casts. So it's much better to do it
here.)
However, when the lambda is let-bound, as the RHS of a let, we have a
better eta-expander (in the form of tryEtaExpandRhs), so we don't
bother to try expansion in mkLam in that case; hence the contIsRhs
guard.
NB: We check the SimplEnv (sm_eta_expand), not DynFlags.
See Note [No eta expansion in stable unfoldings]
Note [Casts and lambdas]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider
(\x. (\y. e) `cast` g1) `cast` g2
There is a danger here that the two lambdas look separated, and the
full laziness pass might float an expression to between the two.
So this equation in mkLam' floats the g1 out, thus:
(\x. e `cast` g1) --> (\x.e) `cast` (tx -> g1)
where x:tx.
In general, this floats casts outside lambdas, where (I hope) they
might meet and cancel with some other cast:
\x. e `cast` co ===> (\x. e) `cast` (tx -> co)
/\a. e `cast` co ===> (/\a. e) `cast` (/\a. co)
/\g. e `cast` co ===> (/\g. e) `cast` (/\g. co)
(if not (g `in` co))
Notice that it works regardless of 'e'. Originally it worked only
if 'e' was itself a lambda, but in some cases that resulted in
fruitless iteration in the simplifier. A good example was when
compiling Text.ParserCombinators.ReadPrec, where we had a definition
like (\x. Get `cast` g)
where Get is a constructor with nonzero arity. Then mkLam eta-expanded
the Get, and the next iteration eta-reduced it, and then eta-expanded
it again.
Note also the side condition for the case of coercion binders.
It does not make sense to transform
/\g. e `cast` g ==> (/\g.e) `cast` (/\g.g)
because the latter is not well-kinded.
************************************************************************
* *
Eta expansion
* *
************************************************************************
-}
tryEtaExpandRhs :: SimplMode -> OutId -> OutExpr
-> SimplM (Arity, Bool, OutExpr)
-- See Note [Eta-expanding at let bindings]
-- If tryEtaExpandRhs rhs = (n, is_bot, rhs') then
-- (a) rhs' has manifest arity n
-- (b) if is_bot is True then rhs' applied to n args is guaranteed bottom
tryEtaExpandRhs mode bndr rhs
| Just join_arity <- isJoinId_maybe bndr
= do { let (join_bndrs, join_body) = collectNBinders join_arity rhs
; return (count isId join_bndrs, exprIsBottom join_body, rhs) }
-- Note [Do not eta-expand join points]
-- But do return the correct arity and bottom-ness, because
-- these are used to set the bndr's IdInfo (#15517)
-- Note [Invariants on join points] invariant 2b, in CoreSyn
| otherwise
= do { (new_arity, is_bot, new_rhs) <- try_expand
; WARN( new_arity < old_id_arity,
(text "Arity decrease:" <+> (ppr bndr <+> ppr old_id_arity
<+> ppr old_arity <+> ppr new_arity) $$ ppr new_rhs) )
-- Note [Arity decrease] in Simplify
return (new_arity, is_bot, new_rhs) }
where
try_expand
| exprIsTrivial rhs
= return (exprArity rhs, False, rhs)
| sm_eta_expand mode -- Provided eta-expansion is on
, new_arity > old_arity -- And the current manifest arity isn't enough
= do { tick (EtaExpansion bndr)
; return (new_arity, is_bot, etaExpand new_arity rhs) }
| otherwise
= return (old_arity, is_bot && new_arity == old_arity, rhs)
dflags = sm_dflags mode
    old_arity    = exprArity rhs -- See Note [Do not eta-expand PAPs]
old_id_arity = idArity bndr
(new_arity1, is_bot) = findRhsArity dflags bndr rhs old_arity
new_arity2 = idCallArity bndr
new_arity = max new_arity1 new_arity2
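-- Illustrative sketch (ignoring join points): for
--      f = \x -> case x of { True -> \y -> e1; False -> \y -> e2 }
-- the manifest exprArity is 1, but findRhsArity can see arity 2; with
-- sm_eta_expand on, tryEtaExpandRhs then eta-expands the RHS and returns
-- arity 2.  A trivial RHS (e.g. f = g) is left alone; we just report its
-- exprArity.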
{-
Note [Eta-expanding at let bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We now eta expand at let-bindings, which is where the payoff comes.
The most significant thing is that we can do a simple arity analysis
(in CoreArity.findRhsArity), which we can't do for free-floating lambdas.
One useful consequence of not eta-expanding lambdas is this example:
genMap :: C a => ...
{-# INLINE genMap #-}
genMap f xs = ...
myMap :: D a => ...
{-# INLINE myMap #-}
myMap = genMap
Notice that 'genMap' should only inline if applied to two arguments.
In the stable unfolding for myMap we'll have the unfolding
(\d -> genMap Int (..d..))
We do not want to eta-expand to
(\d f xs -> genMap Int (..d..) f xs)
because then 'genMap' will inline, and it really shouldn't: at least
as far as the programmer is concerned, it's not applied to two
arguments!
Note [Do not eta-expand join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Similarly to CPR (see Note [Don't CPR join points] in WorkWrap), a join point
stands well to gain from its outer binding's eta-expansion, and eta-expanding a
join point is fraught with issues like how to deal with a cast:
let join $j1 :: IO ()
$j1 = ...
$j2 :: Int -> IO ()
$j2 n = if n > 0 then $j1
else ...
=>
let join $j1 :: IO ()
$j1 = (\eta -> ...)
`cast` N:IO :: State# RealWorld -> (# State# RealWorld, ())
~ IO ()
$j2 :: Int -> IO ()
$j2 n = (\eta -> if n > 0 then $j1
else ...)
`cast` N:IO :: State# RealWorld -> (# State# RealWorld, ())
~ IO ()
The cast here can't be pushed inside the lambda (since it's not casting to a
function type), so the lambda has to stay, but it can't because it contains a
reference to a join point. In fact, $j2 can't be eta-expanded at all. Rather
than try and detect this situation (and whatever other situations crop up!), we
don't bother; again, any surrounding eta-expansion will improve these join
points anyway, since an outer cast can *always* be pushed inside. By the time
CorePrep comes around, the code is very likely to look more like this:
let join $j1 :: State# RealWorld -> (# State# RealWorld, ())
$j1 = (...) eta
$j2 :: Int -> State# RealWorld -> (# State# RealWorld, ())
$j2 = if n > 0 then $j1
else (...) eta
Note [Do not eta-expand PAPs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used to have old_arity = manifestArity rhs, which meant that we
would eta-expand even PAPs. But this gives no particular advantage,
and can lead to a massive blow-up in code size, exhibited by #9020.
Suppose we have a PAP
foo :: IO ()
foo = returnIO ()
Then we can eta-expand it to
foo = (\eta. (returnIO () |> sym g) eta) |> g
where
g :: IO () ~ State# RealWorld -> (# State# RealWorld, () #)
But there is really no point in doing this, and it generates masses of
coercions and whatnot that eventually disappear again. For T9020, GHC
allocated 6.6G before, and 0.8G afterwards; and residency dropped from
1.8G to 45M.
But note that this won't eta-expand, say
f = \g -> map g
Does it matter not eta-expanding such functions? I'm not sure. Perhaps
strictness analysis will have less to bite on?
************************************************************************
* *
\subsection{Floating lets out of big lambdas}
* *
************************************************************************
Note [Floating and type abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
x = /\a. C e1 e2
We'd like to float this to
y1 = /\a. e1
y2 = /\a. e2
x = /\a. C (y1 a) (y2 a)
for the usual reasons: we want to inline x rather vigorously.
You may think that this kind of thing is rare. But in some programs it is
common. For example, if you do closure conversion you might get:
data a :-> b = forall e. (e -> a -> b) :$ e
f_cc :: forall a. a :-> a
f_cc = /\a. (\e. id a) :$ ()
Now we really want to inline that f_cc thing so that the
construction of the closure goes away.
So I have elaborated simplLazyBind to understand right-hand sides that look
like
/\ a1..an. body
and treat them specially. The real work is done in SimplUtils.abstractFloats,
but there is quite a bit of plumbing in simplLazyBind as well.
The same transformation is good when there are lets in the body:
/\abc -> let(rec) x = e in b
==>
let(rec) x' = /\abc -> let x = x' a b c in e
in
/\abc -> let x = x' a b c in b
This is good because it can turn things like:
let f = /\a -> letrec g = ... g ... in g
into
letrec g' = /\a -> ... g' a ...
in
let f = /\ a -> g' a
which is better. In effect, it means that big lambdas don't impede
let-floating.
This optimisation is CRUCIAL in eliminating the junk introduced by
desugaring mutually recursive definitions. Don't eliminate it lightly!
[May 1999] If we do this transformation *regardless* then we can
end up with some pretty silly stuff. For example,
let
st = /\ s -> let { x1=r1 ; x2=r2 } in ...
in ..
becomes
let y1 = /\s -> r1
y2 = /\s -> r2
st = /\s -> ...[y1 s/x1, y2 s/x2]
in ..
Unless the "..." is a WHNF there is really no point in doing this.
Indeed it can make things worse. Suppose x1 is used strictly,
and is of the form
x1* = case f y of { (a,b) -> e }
If we abstract this wrt the tyvar we then can't do the case inline
as we would normally do.
That's why the whole transformation is part of the same process that
floats let-bindings and constructor arguments out of RHSs. In particular,
it is guarded by the doFloatFromRhs call in simplLazyBind.
Note [Which type variables to abstract over]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Abstract only over the type variables free in the rhs wrt which the
new binding is abstracted. Note that
* The naive approach of abstracting wrt the
tyvars free in the Id's /type/ fails. Consider:
/\ a b -> let t :: (a,b) = (e1, e2)
x :: a = fst t
in ...
Here, b isn't free in x's type, but we must nevertheless
abstract wrt b as well, because t's type mentions b.
Since t is floated too, we'd end up with the bogus:
poly_t = /\ a b -> (e1, e2)
poly_x = /\ a -> fst (poly_t a *b*)
* We must do closeOverKinds. Example (#10934):
f = /\k (f:k->*) (a:k). let t = AccFailure @ (f a) in ...
Here we want to float 't', but we must remember to abstract over
'k' as well, even though it is not explicitly mentioned in the RHS,
otherwise we get
t = /\ (f:k->*) (a:k). AccFailure @ (f a)
which is obviously bogus.
-}
abstractFloats :: DynFlags -> TopLevelFlag -> [OutTyVar] -> SimplFloats
-> OutExpr -> SimplM ([OutBind], OutExpr)
abstractFloats dflags top_lvl main_tvs floats body
= ASSERT( notNull body_floats )
ASSERT( isNilOL (sfJoinFloats floats) )
do { (subst, float_binds) <- mapAccumLM abstract empty_subst body_floats
; return (float_binds, CoreSubst.substExpr (text "abstract_floats1") subst body) }
where
is_top_lvl = isTopLevel top_lvl
main_tv_set = mkVarSet main_tvs
body_floats = letFloatBinds (sfLetFloats floats)
empty_subst = CoreSubst.mkEmptySubst (sfInScope floats)
abstract :: CoreSubst.Subst -> OutBind -> SimplM (CoreSubst.Subst, OutBind)
abstract subst (NonRec id rhs)
= do { (poly_id1, poly_app) <- mk_poly1 tvs_here id
; let (poly_id2, poly_rhs) = mk_poly2 poly_id1 tvs_here rhs'
subst' = CoreSubst.extendIdSubst subst id poly_app
; return (subst', NonRec poly_id2 poly_rhs) }
where
rhs' = CoreSubst.substExpr (text "abstract_floats2") subst rhs
-- tvs_here: see Note [Which type variables to abstract over]
tvs_here = scopedSort $
filter (`elemVarSet` main_tv_set) $
closeOverKindsList $
exprSomeFreeVarsList isTyVar rhs'
abstract subst (Rec prs)
= do { (poly_ids, poly_apps) <- mapAndUnzipM (mk_poly1 tvs_here) ids
; let subst' = CoreSubst.extendSubstList subst (ids `zip` poly_apps)
poly_pairs = [ mk_poly2 poly_id tvs_here rhs'
| (poly_id, rhs) <- poly_ids `zip` rhss
, let rhs' = CoreSubst.substExpr (text "abstract_floats")
subst' rhs ]
; return (subst', Rec poly_pairs) }
where
(ids,rhss) = unzip prs
-- For a recursive group, it's a bit of a pain to work out the minimal
-- set of tyvars over which to abstract:
-- /\ a b c. let x = ...a... in
-- letrec { p = ...x...q...
-- q = .....p...b... } in
-- ...
-- Since 'x' is abstracted over 'a', the {p,q} group must be abstracted
-- over 'a' (because x is replaced by (poly_x a)) as well as 'b'.
-- Since it's a pain, we just use the whole set, which is always safe
--
-- If you ever want to be more selective, remember this bizarre case too:
-- x::a = x
-- Here, we must abstract 'x' over 'a'.
tvs_here = scopedSort main_tvs
mk_poly1 :: [TyVar] -> Id -> SimplM (Id, CoreExpr)
mk_poly1 tvs_here var
= do { uniq <- getUniqueM
; let poly_name = setNameUnique (idName var) uniq -- Keep same name
poly_ty = mkInvForAllTys tvs_here (idType var) -- But new type of course
poly_id = transferPolyIdInfo var tvs_here $ -- Note [transferPolyIdInfo] in Id.hs
mkLocalId poly_name poly_ty
; return (poly_id, mkTyApps (Var poly_id) (mkTyVarTys tvs_here)) }
-- In the olden days, it was crucial to copy the occInfo of the original var,
-- because we were looking at occurrence-analysed but as yet unsimplified code!
-- In particular, we mustn't lose the loop breakers. BUT NOW we are looking
-- at already simplified code, so it doesn't matter
--
-- It's even right to retain single-occurrence or dead-var info:
-- Suppose we started with /\a -> let x = E in B
-- where x occurs once in B. Then we transform to:
-- let x' = /\a -> E in /\a -> let x* = x' a in B
-- where x* has an INLINE prag on it. Now, once x* is inlined,
-- the occurrences of x' will be just the occurrences originally
-- pinned on x.
mk_poly2 :: Id -> [TyVar] -> CoreExpr -> (Id, CoreExpr)
mk_poly2 poly_id tvs_here rhs
= (poly_id `setIdUnfolding` unf, poly_rhs)
where
poly_rhs = mkLams tvs_here rhs
unf = mkUnfolding dflags InlineRhs is_top_lvl False poly_rhs
-- We want the unfolding. Consider
-- let
-- x = /\a. let y = ... in Just y
-- in body
-- Then we float the y-binding out (via abstractFloats and addPolyBind)
-- but 'x' may well then be inlined in 'body' in which case we'd like the
-- opportunity to inline 'y' too.
{-
Note [Abstract over coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a coercion variable (g :: a ~ Int) is free in the RHS, then so is the
type variable a. Rather than sort this mess out, we simply bale out and abstract
wrt all the type variables if any of them are coercion variables.
Historical note: if you use let-bindings instead of a substitution, beware of this:
-- Suppose we start with:
--
-- x = /\ a -> let g = G in E
--
-- Then we'll float to get
--
-- x = let poly_g = /\ a -> G
-- in /\ a -> let g = poly_g a in E
--
-- But now the occurrence analyser will see just one occurrence
-- of poly_g, not inside a lambda, so the simplifier will
  --      PreInlineUnconditionally poly_g back into g!  Back to square 1!
-- (I used to think that the "don't inline lone occurrences" stuff
-- would stop this happening, but since it's the *only* occurrence,
-- PreInlineUnconditionally kicks in first!)
--
-- Solution: put an INLINE note on g's RHS, so that poly_g seems
-- to appear many times. (NB: mkInlineMe eliminates
-- such notes on trivial RHSs, so do it manually.)
************************************************************************
* *
prepareAlts
* *
************************************************************************
prepareAlts tries these things:
1. Eliminate alternatives that cannot match, including the
DEFAULT alternative.
2. If the DEFAULT alternative can match only one possible constructor,
then make that constructor explicit.
e.g.
case e of x { DEFAULT -> rhs }
===>
case e of x { (a,b) -> rhs }
where the type is a single constructor type. This gives better code
when rhs also scrutinises x or e.
     3. Returns a list of the constructors that cannot match in the
        DEFAULT alternative (if there is one)
Here "cannot match" includes knowledge from GADTs
It's a good idea to do this stuff before simplifying the alternatives, to
avoid simplifying alternatives we know can't happen, and to come up with
the list of constructors that are handled, to put into the IdInfo of the
case binder, for use when simplifying the alternatives.
Eliminating the default alternative in (1) isn't so obvious, but it can
happen:
data Colour = Red | Green | Blue
f x = case x of
Red -> ..
Green -> ..
DEFAULT -> h x
h y = case y of
Blue -> ..
DEFAULT -> [ case y of ... ]
If we inline h into f, the default case of the inlined h can't happen.
If we don't notice this, we may end up filtering out *all* the cases
of the inner case y, which give us nowhere to go!
-}
prepareAlts :: OutExpr -> OutId -> [InAlt] -> SimplM ([AltCon], [InAlt])
-- The returned alternatives can be empty, none are possible
prepareAlts scrut case_bndr' alts
| Just (tc, tys) <- splitTyConApp_maybe (varType case_bndr')
-- Case binder is needed just for its type. Note that as an
-- OutId, it has maximum information; this is important.
-- Test simpl013 is an example
= do { us <- getUniquesM
; let (idcs1, alts1) = filterAlts tc tys imposs_cons alts
(yes2, alts2) = refineDefaultAlt us tc tys idcs1 alts1
(yes3, idcs3, alts3) = combineIdenticalAlts idcs1 alts2
-- "idcs" stands for "impossible default data constructors"
-- i.e. the constructors that can't match the default case
; when yes2 $ tick (FillInCaseDefault case_bndr')
; when yes3 $ tick (AltMerge case_bndr')
; return (idcs3, alts3) }
| otherwise -- Not a data type, so nothing interesting happens
= return ([], alts)
where
imposs_cons = case scrut of
Var v -> otherCons (idUnfolding v)
_ -> []
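-- Illustrative sketch (hypothetical data type): for
--      case (x :: Colour) of b { DEFAULT -> e2; Red -> e1 }
-- where Colour = Red | Green | Blue and x has no unfolding, the DEFAULT
-- still covers Green and Blue so nothing can be refined; prepareAlts keeps
-- both alternatives and returns [DataAlt Red] as the constructors that
-- cannot match in the DEFAULT branch.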
{-
************************************************************************
* *
mkCase
* *
************************************************************************
mkCase tries these things
        * Note [Merge Nested Cases]
        * Note [Eliminate Identity Case]
        * Note [Scrutinee Constant Folding]
Note [Merge Nested Cases]
~~~~~~~~~~~~~~~~~~~~~~~~~
case e of b { ==> case e of b {
p1 -> rhs1 p1 -> rhs1
... ...
pm -> rhsm pm -> rhsm
_ -> case b of b' { pn -> let b'=b in rhsn
pn -> rhsn ...
... po -> let b'=b in rhso
po -> rhso _ -> let b'=b in rhsd
_ -> rhsd
}
which merges two cases into one case when the default alternative of
the outer case scrutinises the same variable as the outer case binder.
This transformation is called Case Merging. It avoids scrutinising the
same variable multiple times.
Note [Eliminate Identity Case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
case e of ===> e
True -> True;
False -> False
and similar friends.
Note [Scrutinee Constant Folding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
case x op# k# of _ { ===> case x of _ {
a1# -> e1 (a1# inv_op# k#) -> e1
a2# -> e2 (a2# inv_op# k#) -> e2
... ...
DEFAULT -> ed DEFAULT -> ed
where (x op# k#) inv_op# k# == x
And similarly for commuted arguments and for some unary operations.
The purpose of this transformation is not only to avoid an arithmetic
operation at runtime but to allow other transformations to apply in cascade.
Example with the "Merge Nested Cases" optimization (from #12877):
main = case t of t0
0## -> ...
DEFAULT -> case t0 `minusWord#` 1## of t1
0## -> ...
          DEFAULT -> case t1 `minusWord#` 1## of t2
0## -> ...
DEFAULT -> case t2 `minusWord#` 1## of _
0## -> ...
DEFAULT -> ...
becomes:
main = case t of _
0## -> ...
1## -> ...
2## -> ...
3## -> ...
DEFAULT -> ...
There are some wrinkles
* Do not apply caseRules if there is just a single DEFAULT alternative
case e +# 3# of b { DEFAULT -> rhs }
If we applied the transformation here we would (stupidly) get
        case e of b' { DEFAULT -> let b = b' +# 3# in rhs }
and now the process may repeat, because that let will really
be a case.
* The type of the scrutinee might change. E.g.
case tagToEnum (x :: Int#) of (b::Bool)
False -> e1
True -> e2
==>
case x of (b'::Int#)
DEFAULT -> e1
1# -> e2
* The case binder may be used in the right hand sides, so we need
to make a local binding for it, if it is alive. e.g.
case e +# 10# of b
DEFAULT -> blah...b...
44# -> blah2...b...
===>
case e of b'
DEFAULT -> let b = b' +# 10# in blah...b...
34# -> let b = 44# in blah2...b...
Note that in the non-DEFAULT cases we know what to bind 'b' to,
whereas in the DEFAULT case we must reconstruct the original value.
But NB: we use b'; we do not duplicate 'e'.
* In dataToTag we might need to make up some fake binders;
see Note [caseRules for dataToTag] in PrelRules
-}
mkCase, mkCase1, mkCase2, mkCase3
:: DynFlags
-> OutExpr -> OutId
-> OutType -> [OutAlt] -- Alternatives in standard (increasing) order
-> SimplM OutExpr
--------------------------------------------------
-- 1. Merge Nested Cases
--------------------------------------------------
mkCase dflags scrut outer_bndr alts_ty ((DEFAULT, _, deflt_rhs) : outer_alts)
| gopt Opt_CaseMerge dflags
, (ticks, Case (Var inner_scrut_var) inner_bndr _ inner_alts)
<- stripTicksTop tickishFloatable deflt_rhs
, inner_scrut_var == outer_bndr
= do { tick (CaseMerge outer_bndr)
; let wrap_alt (con, args, rhs) = ASSERT( outer_bndr `notElem` args )
(con, args, wrap_rhs rhs)
-- Simplifier's no-shadowing invariant should ensure
-- that outer_bndr is not shadowed by the inner patterns
wrap_rhs rhs = Let (NonRec inner_bndr (Var outer_bndr)) rhs
-- The let is OK even for unboxed binders,
wrapped_alts | isDeadBinder inner_bndr = inner_alts
| otherwise = map wrap_alt inner_alts
merged_alts = mergeAlts outer_alts wrapped_alts
-- NB: mergeAlts gives priority to the left
-- case x of
-- A -> e1
-- DEFAULT -> case x of
-- A -> e2
-- B -> e3
-- When we merge, we must ensure that e1 takes
-- precedence over e2 as the value for A!
; fmap (mkTicks ticks) $
mkCase1 dflags scrut outer_bndr alts_ty merged_alts
}
-- Warning: don't call mkCase recursively!
-- Firstly, there's no point, because inner alts have already had
-- mkCase applied to them, so they won't have a case in their default
-- Secondly, if you do, you get an infinite loop, because the bindCaseBndr
-- in munge_rhs may put a case into the DEFAULT branch!
mkCase dflags scrut bndr alts_ty alts = mkCase1 dflags scrut bndr alts_ty alts
--------------------------------------------------
-- 2. Eliminate Identity Case
--------------------------------------------------
mkCase1 _dflags scrut case_bndr _ alts@((_,_,rhs1) : _) -- Identity case
| all identity_alt alts
= do { tick (CaseIdentity case_bndr)
; return (mkTicks ticks $ re_cast scrut rhs1) }
where
ticks = concatMap (stripTicksT tickishFloatable . thdOf3) (tail alts)
identity_alt (con, args, rhs) = check_eq rhs con args
check_eq (Cast rhs co) con args -- See Note [RHS casts]
= not (any (`elemVarSet` tyCoVarsOfCo co) args) && check_eq rhs con args
check_eq (Tick t e) alt args
= tickishFloatable t && check_eq e alt args
check_eq (Lit lit) (LitAlt lit') _ = lit == lit'
check_eq (Var v) _ _ | v == case_bndr = True
check_eq (Var v) (DataAlt con) args
| null arg_tys, null args = v == dataConWorkId con
-- Optimisation only
check_eq rhs (DataAlt con) args = cheapEqExpr' tickishFloatable rhs $
mkConApp2 con arg_tys args
check_eq _ _ _ = False
arg_tys = tyConAppArgs (idType case_bndr)
-- Note [RHS casts]
-- ~~~~~~~~~~~~~~~~
-- We've seen this:
-- case e of x { _ -> x `cast` c }
-- And we definitely want to eliminate this case, to give
-- e `cast` c
-- So we throw away the cast from the RHS, and reconstruct
-- it at the other end. All the RHS casts must be the same
-- if (all identity_alt alts) holds.
--
-- Don't worry about nested casts, because the simplifier combines them
re_cast scrut (Cast rhs co) = Cast (re_cast scrut rhs) co
re_cast scrut _ = scrut
mkCase1 dflags scrut bndr alts_ty alts = mkCase2 dflags scrut bndr alts_ty alts
--------------------------------------------------
-- 3. Scrutinee Constant Folding
--------------------------------------------------
mkCase2 dflags scrut bndr alts_ty alts
| -- See Note [Scrutinee Constant Folding]
case alts of -- Not if there is just a DEFAULT alternative
[(DEFAULT,_,_)] -> False
_ -> True
, gopt Opt_CaseFolding dflags
, Just (scrut', tx_con, mk_orig) <- caseRules dflags scrut
= do { bndr' <- newId (fsLit "lwild") (exprType scrut')
; alts' <- mapMaybeM (tx_alt tx_con mk_orig bndr') alts
-- mapMaybeM: discard unreachable alternatives
-- See Note [Unreachable caseRules alternatives]
-- in PrelRules
; mkCase3 dflags scrut' bndr' alts_ty $
add_default (re_sort alts')
}
| otherwise
= mkCase3 dflags scrut bndr alts_ty alts
where
-- We need to keep the correct association between the scrutinee and its
-- binder if the latter isn't dead. Hence we wrap rhs of alternatives with
-- "let bndr = ... in":
--
-- case v + 10 of y =====> case v of y
-- 20 -> e1 10 -> let y = 20 in e1
-- DEFAULT -> e2 DEFAULT -> let y = v + 10 in e2
--
-- Other transformations give: =====> case v of y'
-- 10 -> let y = 20 in e1
-- DEFAULT -> let y = y' + 10 in e2
--
-- This wrapping is done in tx_alt; we use mk_orig, returned by caseRules,
-- to construct an expression equivalent to the original one, for use
-- in the DEFAULT case
tx_alt :: (AltCon -> Maybe AltCon) -> (Id -> CoreExpr) -> Id
-> CoreAlt -> SimplM (Maybe CoreAlt)
tx_alt tx_con mk_orig new_bndr (con, bs, rhs)
= case tx_con con of
Nothing -> return Nothing
Just con' -> do { bs' <- mk_new_bndrs new_bndr con'
; return (Just (con', bs', rhs')) }
where
rhs' | isDeadBinder bndr = rhs
| otherwise = bindNonRec bndr orig_val rhs
orig_val = case con of
DEFAULT -> mk_orig new_bndr
LitAlt l -> Lit l
DataAlt dc -> mkConApp2 dc (tyConAppArgs (idType bndr)) bs
mk_new_bndrs new_bndr (DataAlt dc)
| not (isNullaryRepDataCon dc)
= -- For non-nullary data cons we must invent some fake binders
-- See Note [caseRules for dataToTag] in PrelRules
do { us <- getUniquesM
; let (ex_tvs, arg_ids) = dataConRepInstPat us dc
(tyConAppArgs (idType new_bndr))
; return (ex_tvs ++ arg_ids) }
mk_new_bndrs _ _ = return []
re_sort :: [CoreAlt] -> [CoreAlt]
-- Sort the alternatives to re-establish
-- CoreSyn Note [Case expression invariants]
re_sort alts = sortBy cmpAlt alts
add_default :: [CoreAlt] -> [CoreAlt]
-- See Note [Literal cases]
add_default ((LitAlt {}, bs, rhs) : alts) = (DEFAULT, bs, rhs) : alts
add_default alts = alts
{- Note [Literal cases]
~~~~~~~~~~~~~~~~~~~~~~~
If we have
case tagToEnum (a ># b) of
False -> e1
True -> e2
then caseRules for TagToEnum will turn it into
case tagToEnum (a ># b) of
0# -> e1
1# -> e2
Since the case is exhaustive (all cases are) we can convert it to
case tagToEnum (a ># b) of
DEFAULT -> e1
1# -> e2
This may generate slightly better code (although it should not, since
all cases are exhaustive) and/or optimise better. I'm not certain that
it's necessary, but currently we do make this change. We do it here,
NOT in the TagToEnum rules (see "Beware" in Note [caseRules for tagToEnum]
in PrelRules)
-}
--------------------------------------------------
-- Catch-all
--------------------------------------------------
mkCase3 _dflags scrut bndr alts_ty alts
= return (Case scrut bndr alts_ty alts)
-- See Note [Exitification] and Note [Do not inline exit join points] in Exitify.hs
-- This lives here (and not in Id) because occurrence info is only valid on
-- InIds, so it's crucial that isExitJoinId is only called on freshly
-- occ-analysed code. It's not a generic function you can call anywhere.
isExitJoinId :: Var -> Bool
isExitJoinId id
= isJoinId id
&& isOneOcc (idOccInfo id)
&& occ_in_lam (idOccInfo id) == IsInsideLam
{-
Note [Dead binders]
~~~~~~~~~~~~~~~~~~~~
Note that dead-ness is maintained by the simplifier, so that it is
accurate after simplification as well as before.
Note [Cascading case merge]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Case merging should cascade in one sweep, because it
happens bottom-up
case e of a {
DEFAULT -> case a of b
DEFAULT -> case b of c {
DEFAULT -> e
A -> ea
B -> eb
C -> ec
==>
case e of a {
DEFAULT -> case a of b
DEFAULT -> let c = b in e
A -> let c = b in ea
B -> eb
C -> ec
==>
case e of a {
DEFAULT -> let b = a in let c = b in e
A -> let b = a in let c = b in ea
B -> let b = a in eb
C -> ec
However here's a tricky case that we still don't catch, and I don't
see how to catch it in one pass:
case x of c1 { I# a1 ->
case a1 of c2 ->
0 -> ...
DEFAULT -> case x of c3 { I# a2 ->
case a2 of ...
After occurrence analysis (and its binder-swap) we get this
case x of c1 { I# a1 ->
let x = c1 in -- Binder-swap addition
case a1 of c2 ->
0 -> ...
DEFAULT -> case x of c3 { I# a2 ->
case a2 of ...
When we simplify the inner case x, we'll see that
x=c1=I# a1. So we'll bind a2 to a1, and get
case x of c1 { I# a1 ->
case a1 of c2 ->
0 -> ...
DEFAULT -> case a1 of ...
This is correct, but we can't do a case merge in this sweep
because c2 /= a1. Reason: the binding c1=I# a1 went inwards
without getting changed to c1=I# c2.
I don't think this is worth fixing, even if I knew how. It'll
all come out in the next pass anyway.
-}
| sdiehl/ghc | compiler/simplCore/SimplUtils.hs | bsd-3-clause | 94,895 | 0 | 20 | 28,907 | 9,672 | 5,204 | 4,468 | -1 | -1 |
import qualified Spec.B.Build
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "Spec.B.Build" Spec.B.Build.spec
| strager/b | test/Spec.hs | bsd-3-clause | 145 | 0 | 6 | 24 | 50 | 28 | 22 | 6 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE GADTs #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.IndexUtils
-- Copyright : (c) Duncan Coutts 2008
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Extra utils related to the package indexes.
-----------------------------------------------------------------------------
module Distribution.Client.IndexUtils (
getIndexFileAge,
getInstalledPackages,
getSourcePackages,
Index(..),
PackageEntry(..),
parsePackageIndex,
updateRepoIndexCache,
updatePackageIndexCacheFile,
readCacheStrict,
BuildTreeRefType(..), refTypeFromTypeCode, typeCodeFromRefType
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Codec.Archive.Tar.Index as Tar
import qualified Distribution.Client.Tar as Tar
import Distribution.Client.Types
import Distribution.Package
( PackageId, PackageIdentifier(..), PackageName(..)
, Package(..), packageVersion, packageName
, Dependency(Dependency) )
import Distribution.Client.PackageIndex (PackageIndex)
import qualified Distribution.Client.PackageIndex as PackageIndex
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.PackageDescription.Parse as PackageDesc.Parse
import Distribution.PackageDescription
( GenericPackageDescription )
import Distribution.PackageDescription.Parse
( parsePackageDescription )
import Distribution.Simple.Compiler
( Compiler, PackageDBStack )
import Distribution.Simple.Program
( ProgramConfiguration )
import qualified Distribution.Simple.Configure as Configure
( getInstalledPackages )
import Distribution.ParseUtils
( ParseResult(..) )
import Distribution.Version
( Version(Version), intersectVersionRanges )
import Distribution.Text
( display, simpleParse )
import Distribution.Verbosity
( Verbosity, normal, lessVerbose )
import Distribution.Simple.Utils
( die, warn, info, fromUTF8, ignoreBOM )
import Distribution.Client.Setup
( RepoContext(..) )
import Data.Char (isAlphaNum)
import Data.Maybe (mapMaybe, catMaybes, maybeToList)
import Data.List (isPrefixOf)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid(..))
#endif
import qualified Data.Map as Map
import Control.Monad (when, liftM)
import Control.Exception (evaluate)
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
import qualified Data.ByteString.Char8 as BSS
import Data.ByteString.Lazy (ByteString)
import Distribution.Client.GZipUtils (maybeDecompress)
import Distribution.Client.Utils ( byteStringToFilePath
, tryFindAddSourcePackageDesc )
import Distribution.Compat.Exception (catchIO)
import Distribution.Client.Compat.Time (getFileAge, getModTime)
import System.Directory (doesFileExist, doesDirectoryExist)
import System.FilePath
( (</>), takeExtension, replaceExtension, splitDirectories, normalise )
import System.FilePath.Posix as FilePath.Posix
( takeFileName )
import System.IO
import System.IO.Unsafe (unsafeInterleaveIO)
import System.IO.Error (isDoesNotExistError)
import qualified Hackage.Security.Client as Sec
import qualified Hackage.Security.Util.Some as Sec
-- | Reduced-verbosity version of 'Configure.getInstalledPackages'
getInstalledPackages :: Verbosity -> Compiler
-> PackageDBStack -> ProgramConfiguration
-> IO InstalledPackageIndex
getInstalledPackages verbosity comp packageDbs conf =
Configure.getInstalledPackages verbosity' comp packageDbs conf
where
verbosity' = lessVerbose verbosity
------------------------------------------------------------------------
-- Reading the source package index
--
-- | Read a repository index from disk, from the local files specified by
-- a list of 'Repo's.
--
-- All the 'SourcePackage's are marked as having come from the appropriate
-- 'Repo'.
--
-- This is a higher level wrapper used internally in cabal-install.
getSourcePackages :: Verbosity -> RepoContext -> IO SourcePackageDb
getSourcePackages verbosity repoCtxt | null (repoContextRepos repoCtxt) = do
warn verbosity $ "No remote package servers have been specified. Usually "
++ "you would have one specified in the config file."
return SourcePackageDb {
packageIndex = mempty,
packagePreferences = mempty
}
getSourcePackages verbosity repoCtxt = do
info verbosity "Reading available packages..."
pkgss <- mapM (\r -> readRepoIndex verbosity repoCtxt r) (repoContextRepos repoCtxt)
let (pkgs, prefs) = mconcat pkgss
prefs' = Map.fromListWith intersectVersionRanges
[ (name, range) | Dependency name range <- prefs ]
_ <- evaluate pkgs
_ <- evaluate prefs'
return SourcePackageDb {
packageIndex = pkgs,
packagePreferences = prefs'
}
readCacheStrict :: Verbosity -> Index -> (PackageEntry -> pkg) -> IO ([pkg], [Dependency])
readCacheStrict verbosity index mkPkg = do
updateRepoIndexCache verbosity index
cache <- liftM readIndexCache $ BSS.readFile (cacheFile index)
withFile (indexFile index) ReadMode $ \indexHnd ->
packageListFromCache mkPkg indexHnd cache ReadPackageIndexStrict
-- | Read a repository index from disk, from the local file specified by
-- the 'Repo'.
--
-- All the 'SourcePackage's are marked as having come from the given 'Repo'.
--
-- This is a higher level wrapper used internally in cabal-install.
--
readRepoIndex :: Verbosity -> RepoContext -> Repo
-> IO (PackageIndex SourcePackage, [Dependency])
readRepoIndex verbosity repoCtxt repo =
handleNotFound $ do
warnIfIndexIsOld =<< getIndexFileAge repo
updateRepoIndexCache verbosity (RepoIndex repoCtxt repo)
readPackageIndexCacheFile mkAvailablePackage (RepoIndex repoCtxt repo)
where
mkAvailablePackage pkgEntry =
SourcePackage {
packageInfoId = pkgid,
packageDescription = packageDesc pkgEntry,
packageSource = case pkgEntry of
NormalPackage _ _ _ _ -> RepoTarballPackage repo pkgid Nothing
BuildTreeRef _ _ _ path _ -> LocalUnpackedPackage path,
packageDescrOverride = case pkgEntry of
NormalPackage _ _ pkgtxt _ -> Just pkgtxt
_ -> Nothing
}
where
pkgid = packageId pkgEntry
handleNotFound action = catchIO action $ \e -> if isDoesNotExistError e
then do
case repo of
RepoRemote{..} -> warn verbosity $ errMissingPackageList repoRemote
RepoSecure{..} -> warn verbosity $ errMissingPackageList repoRemote
RepoLocal{..} -> warn verbosity $
"The package list for the local repo '" ++ repoLocalDir
++ "' is missing. The repo is invalid."
return mempty
else ioError e
isOldThreshold = 15 --days
warnIfIndexIsOld dt = do
when (dt >= isOldThreshold) $ case repo of
RepoRemote{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt
RepoSecure{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt
RepoLocal{..} -> return ()
errMissingPackageList repoRemote =
"The package list for '" ++ remoteRepoName repoRemote
++ "' does not exist. Run 'cabal update' to download it."
errOutdatedPackageList repoRemote dt =
"The package list for '" ++ remoteRepoName repoRemote
++ "' is " ++ shows (floor dt :: Int) " days old.\nRun "
++ "'cabal update' to get the latest list of available packages."
-- | Return the age of the index file in days (as a Double).
getIndexFileAge :: Repo -> IO Double
getIndexFileAge repo = getFileAge $ repoLocalDir repo </> "00-index.tar"
-- | It is not necessary to call this, as the cache will be updated when the
-- index is read normally. However you can do the work earlier if you like.
--
updateRepoIndexCache :: Verbosity -> Index -> IO ()
updateRepoIndexCache verbosity index =
whenCacheOutOfDate index $ do
updatePackageIndexCacheFile verbosity index
whenCacheOutOfDate :: Index -> IO () -> IO ()
whenCacheOutOfDate index action = do
exists <- doesFileExist $ cacheFile index
if not exists
then action
else do
indexTime <- getModTime $ indexFile index
cacheTime <- getModTime $ cacheFile index
when (indexTime > cacheTime) action
------------------------------------------------------------------------
-- Reading the index file
--
-- | An index entry is either a normal package, or a local build tree reference.
data PackageEntry =
NormalPackage PackageId GenericPackageDescription ByteString BlockNo
| BuildTreeRef BuildTreeRefType
PackageId GenericPackageDescription FilePath BlockNo
-- | A build tree reference is either a link or a snapshot.
data BuildTreeRefType = SnapshotRef | LinkRef
deriving Eq
refTypeFromTypeCode :: Tar.TypeCode -> BuildTreeRefType
refTypeFromTypeCode t
| t == Tar.buildTreeRefTypeCode = LinkRef
| t == Tar.buildTreeSnapshotTypeCode = SnapshotRef
| otherwise =
error "Distribution.Client.IndexUtils.refTypeFromTypeCode: unknown type code"
typeCodeFromRefType :: BuildTreeRefType -> Tar.TypeCode
typeCodeFromRefType LinkRef = Tar.buildTreeRefTypeCode
typeCodeFromRefType SnapshotRef = Tar.buildTreeSnapshotTypeCode
instance Package PackageEntry where
packageId (NormalPackage pkgid _ _ _) = pkgid
packageId (BuildTreeRef _ pkgid _ _ _) = pkgid
packageDesc :: PackageEntry -> GenericPackageDescription
packageDesc (NormalPackage _ descr _ _) = descr
packageDesc (BuildTreeRef _ _ descr _ _) = descr
-- | Parse an uncompressed \"00-index.tar\" repository index file represented
-- as a 'ByteString'.
--
data PackageOrDep = Pkg PackageEntry | Dep Dependency
-- | Read @00-index.tar.gz@ and extract @.cabal@ and @preferred-versions@ files
--
-- We read the index using 'Tar.read', which gives us a lazily constructed
-- 'TarEntries'. We translate it to a list of entries using 'tarEntriesList',
-- which preserves the lazy nature of 'TarEntries', and finally 'concatMap' a
-- function over this to translate it to a list of IO actions returning
-- 'PackageOrDep's. We can use 'lazySequence' to turn this into a list of
-- 'PackageOrDep's, still maintaining the lazy nature of the original tar read.
parsePackageIndex :: ByteString -> [IO (Maybe PackageOrDep)]
parsePackageIndex = concatMap (uncurry extract) . tarEntriesList . Tar.read
where
extract :: BlockNo -> Tar.Entry -> [IO (Maybe PackageOrDep)]
extract blockNo entry = tryExtractPkg ++ tryExtractPrefs
where
tryExtractPkg = do
mkPkgEntry <- maybeToList $ extractPkg entry blockNo
return $ fmap (fmap Pkg) mkPkgEntry
tryExtractPrefs = do
prefs' <- maybeToList $ extractPrefs entry
fmap (return . Just . Dep) prefs'
-- | Turn the 'Entries' data structure from the @tar@ package into a list,
-- and pair each entry with its block number.
--
-- NOTE: This preserves the lazy nature of 'Entries': the tar file is only read
-- as far as the list is evaluated.
tarEntriesList :: Show e => Tar.Entries e -> [(BlockNo, Tar.Entry)]
tarEntriesList = go 0
where
go !_ Tar.Done = []
go !_ (Tar.Fail e) = error ("tarEntriesList: " ++ show e)
go !n (Tar.Next e es') = (n, e) : go (Tar.nextEntryOffset e n) es'
extractPkg :: Tar.Entry -> BlockNo -> Maybe (IO (Maybe PackageEntry))
extractPkg entry blockNo = case Tar.entryContent entry of
Tar.NormalFile content _
| takeExtension fileName == ".cabal"
-> case splitDirectories (normalise fileName) of
[pkgname,vers,_] -> case simpleParse vers of
Just ver -> Just . return $ Just (NormalPackage pkgid descr content blockNo)
where
pkgid = PackageIdentifier (PackageName pkgname) ver
parsed = parsePackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack
$ content
descr = case parsed of
ParseOk _ d -> d
_ -> error $ "Couldn't read cabal file "
++ show fileName
_ -> Nothing
_ -> Nothing
Tar.OtherEntryType typeCode content _
| Tar.isBuildTreeRefTypeCode typeCode ->
Just $ do
let path = byteStringToFilePath content
dirExists <- doesDirectoryExist path
result <- if not dirExists then return Nothing
else do
cabalFile <- tryFindAddSourcePackageDesc path "Error reading package index."
descr <- PackageDesc.Parse.readPackageDescription normal cabalFile
return . Just $ BuildTreeRef (refTypeFromTypeCode typeCode) (packageId descr)
descr path blockNo
return result
_ -> Nothing
where
fileName = Tar.entryPath entry
extractPrefs :: Tar.Entry -> Maybe [Dependency]
extractPrefs entry = case Tar.entryContent entry of
Tar.NormalFile content _
| takeFileName entrypath == "preferred-versions"
-> Just prefs
where
entrypath = Tar.entryPath entry
prefs = parsePreferredVersions content
_ -> Nothing
parsePreferredVersions :: ByteString -> [Dependency]
parsePreferredVersions = mapMaybe simpleParse
. filter (not . isPrefixOf "--")
. lines
. BS.Char8.unpack -- TODO: Are we sure no unicode?
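-- For illustration (not in the original source): a @preferred-versions@
-- file containing
--
-- > -- comment lines like this one are dropped
-- > foo >=1.2 && <1.3
--
-- parses to a single 'Dependency' constraint on the package @foo@.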
------------------------------------------------------------------------
-- Reading and updating the index cache
--
-- | Variation on 'sequence' which evaluates the actions lazily
--
-- Pattern matching on the result list will execute just the first action;
-- more generally pattern matching on the first @n@ '(:)' nodes will execute
-- the first @n@ actions.
lazySequence :: [IO a] -> IO [a]
lazySequence = unsafeInterleaveIO . go
where
go [] = return []
go (x:xs) = do x' <- x
xs' <- lazySequence xs
return (x' : xs')
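-- A rough illustration of the intended laziness (assumed behaviour, not
-- from the original source):
--
-- > (x:_) <- lazySequence [print 1 >> return 1, print 2 >> return 2]
--
-- runs only the first action (printing @1@); the second action runs when
-- the tail of the result list is demanded.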
-- | Which index do we mean?
data Index =
-- | The main index for the specified repository
RepoIndex RepoContext Repo
-- | A sandbox-local repository
-- Argument is the location of the index file
| SandboxIndex FilePath
indexFile :: Index -> FilePath
indexFile (RepoIndex _ctxt repo) = repoLocalDir repo </> "00-index.tar"
indexFile (SandboxIndex index) = index
cacheFile :: Index -> FilePath
cacheFile (RepoIndex _ctxt repo) = repoLocalDir repo </> "00-index.cache"
cacheFile (SandboxIndex index) = index `replaceExtension` "cache"
updatePackageIndexCacheFile :: Verbosity -> Index -> IO ()
updatePackageIndexCacheFile verbosity index = do
info verbosity ("Updating index cache file " ++ cacheFile index)
withIndexEntries index $ \entries -> do
let cache = Cache { cacheEntries = entries }
writeFile (cacheFile index) (showIndexCache cache)
-- | Read the index (for the purpose of building a cache)
--
-- The callback is provided with list of cache entries, which is guaranteed to
-- be lazily constructed. This list must ONLY be used in the scope of the
-- callback; when the callback is terminated the file handle to the index will
-- be closed and further attempts to read from the list will result in (pure)
-- I/O exceptions.
--
-- In the construction of the index for a secure repo we take advantage of the
-- index built by the @hackage-security@ library to avoid reading the @.tar@
-- file as much as possible (we need to read it only to extract preferred
-- versions). This helps performance, but is also required for correctness:
-- the new @01-index.tar.gz@ may have multiple versions of preferred-versions
-- files, and 'parsePackageIndex' does not correctly deal with that (see #2956);
-- by reading the already-built cache from the security library we will be sure
-- to only read the latest versions of all files.
--
-- TODO: It would be nicer if we actually incrementally updated @cabal@'s
-- cache, rather than reconstruct it from zero on each update. However, this
-- would require a change in the cache format.
withIndexEntries :: Index -> ([IndexCacheEntry] -> IO a) -> IO a
withIndexEntries (RepoIndex repoCtxt repo@RepoSecure{..}) callback =
repoContextWithSecureRepo repoCtxt repo $ \repoSecure ->
Sec.withIndex repoSecure $ \Sec.IndexCallbacks{..} -> do
let mk :: (Sec.DirectoryEntry, fp, Maybe (Sec.Some Sec.IndexFile))
-> IO [IndexCacheEntry]
mk (_, _fp, Nothing) =
return [] -- skip unrecognized file
mk (_, _fp, Just (Sec.Some (Sec.IndexPkgMetadata _pkgId))) =
return [] -- skip metadata
mk (dirEntry, _fp, Just (Sec.Some (Sec.IndexPkgCabal pkgId))) = do
let blockNo = fromIntegral (Sec.directoryEntryBlockNo dirEntry)
return [CachePackageId pkgId blockNo]
mk (dirEntry, _fp, Just (Sec.Some file@(Sec.IndexPkgPrefs _pkgName))) = do
content <- Sec.indexEntryContent `fmap` indexLookupFileEntry dirEntry file
return $ map CachePreference (parsePreferredVersions content)
entriess <- lazySequence $ map mk (Sec.directoryEntries indexDirectory)
callback $ concat entriess
withIndexEntries index callback = do
withFile (indexFile index) ReadMode $ \h -> do
bs <- maybeDecompress `fmap` BS.hGetContents h
pkgsOrPrefs <- lazySequence $ parsePackageIndex bs
callback $ map toCache (catMaybes pkgsOrPrefs)
where
toCache :: PackageOrDep -> IndexCacheEntry
toCache (Pkg (NormalPackage pkgid _ _ blockNo)) = CachePackageId pkgid blockNo
toCache (Pkg (BuildTreeRef refType _ _ _ blockNo)) = CacheBuildTreeRef refType blockNo
toCache (Dep d) = CachePreference d
data ReadPackageIndexMode = ReadPackageIndexStrict
| ReadPackageIndexLazyIO
readPackageIndexCacheFile :: Package pkg
=> (PackageEntry -> pkg)
-> Index
-> IO (PackageIndex pkg, [Dependency])
readPackageIndexCacheFile mkPkg index = do
cache <- liftM readIndexCache $ BSS.readFile (cacheFile index)
indexHnd <- openFile (indexFile index) ReadMode
packageIndexFromCache mkPkg indexHnd cache ReadPackageIndexLazyIO
packageIndexFromCache :: Package pkg
=> (PackageEntry -> pkg)
-> Handle
-> Cache
-> ReadPackageIndexMode
-> IO (PackageIndex pkg, [Dependency])
packageIndexFromCache mkPkg hnd cache mode = do
(pkgs, prefs) <- packageListFromCache mkPkg hnd cache mode
pkgIndex <- evaluate $ PackageIndex.fromList pkgs
return (pkgIndex, prefs)
-- | Read package list
--
-- The result packages (though not the preferences) are guaranteed to be listed
-- in the same order as they are in the tar file (because later entries in a tar
-- file mask earlier ones).
packageListFromCache :: (PackageEntry -> pkg)
-> Handle
-> Cache
-> ReadPackageIndexMode
-> IO ([pkg], [Dependency])
packageListFromCache mkPkg hnd Cache{..} mode = accum mempty [] cacheEntries
where
accum srcpkgs prefs [] = return (reverse srcpkgs, prefs)
accum srcpkgs prefs (CachePackageId pkgid blockno : entries) = do
-- Given the cache entry, make a package index entry.
-- The magic here is that we use lazy IO to read the .cabal file
-- from the index tarball if it turns out that we need it.
-- Most of the time we only need the package id.
~(pkg, pkgtxt) <- unsafeInterleaveIO $ do
pkgtxt <- getEntryContent blockno
pkg <- readPackageDescription pkgtxt
return (pkg, pkgtxt)
let srcpkg = case mode of
ReadPackageIndexLazyIO ->
mkPkg (NormalPackage pkgid pkg pkgtxt blockno)
ReadPackageIndexStrict ->
pkg `seq` pkgtxt `seq` mkPkg (NormalPackage pkgid pkg
pkgtxt blockno)
accum (srcpkg:srcpkgs) prefs entries
accum srcpkgs prefs (CacheBuildTreeRef refType blockno : entries) = do
-- We have to read the .cabal file eagerly here because we can't cache the
-- package id for build tree references - the user might edit the .cabal
-- file after the reference was added to the index.
path <- liftM byteStringToFilePath . getEntryContent $ blockno
pkg <- do let err = "Error reading package index from cache."
file <- tryFindAddSourcePackageDesc path err
PackageDesc.Parse.readPackageDescription normal file
let srcpkg = mkPkg (BuildTreeRef refType (packageId pkg) pkg path blockno)
accum (srcpkg:srcpkgs) prefs entries
accum srcpkgs prefs (CachePreference pref : entries) =
accum srcpkgs (pref:prefs) entries
getEntryContent :: BlockNo -> IO ByteString
getEntryContent blockno = do
entry <- Tar.hReadEntry hnd blockno
case Tar.entryContent entry of
Tar.NormalFile content _size -> return content
Tar.OtherEntryType typecode content _size
| Tar.isBuildTreeRefTypeCode typecode
-> return content
_ -> interror "unexpected tar entry type"
readPackageDescription :: ByteString -> IO GenericPackageDescription
readPackageDescription content =
case parsePackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack $ content of
ParseOk _ d -> return d
_ -> interror "failed to parse .cabal file"
interror msg = die $ "internal error when reading package index: " ++ msg
++ "The package index or index cache is probably "
++ "corrupt. Running cabal update might fix it."
------------------------------------------------------------------------
-- Index cache data structure
--
-- | Tar files are block structured with 512 byte blocks. Every header and file
-- content starts on a block boundary.
--
type BlockNo = Tar.TarEntryOffset
data IndexCacheEntry = CachePackageId PackageId BlockNo
| CacheBuildTreeRef BuildTreeRefType BlockNo
| CachePreference Dependency
deriving (Eq)
installedUnitId, blocknoKey, buildTreeRefKey, preferredVersionKey :: String
installedUnitId = "pkg:"
blocknoKey = "b#"
buildTreeRefKey = "build-tree-ref:"
preferredVersionKey = "pref-ver:"
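-- Illustrative cache-file lines, reconstructed from the read/show
-- functions below (shown only as an example):
--
-- > pkg: foo 1.2.3 b# 42
-- > build-tree-ref: <single type-code character> 17
-- > pref-ver: foo >=1.2 && <1.3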
readIndexCacheEntry :: BSS.ByteString -> Maybe IndexCacheEntry
readIndexCacheEntry = \line ->
case BSS.words line of
[key, pkgnamestr, pkgverstr, sep, blocknostr]
| key == BSS.pack installedUnitId && sep == BSS.pack blocknoKey ->
case (parseName pkgnamestr, parseVer pkgverstr [],
parseBlockNo blocknostr) of
(Just pkgname, Just pkgver, Just blockno)
-> Just (CachePackageId (PackageIdentifier pkgname pkgver) blockno)
_ -> Nothing
[key, typecodestr, blocknostr] | key == BSS.pack buildTreeRefKey ->
case (parseRefType typecodestr, parseBlockNo blocknostr) of
(Just refType, Just blockno)
-> Just (CacheBuildTreeRef refType blockno)
_ -> Nothing
(key: remainder) | key == BSS.pack preferredVersionKey ->
fmap CachePreference (simpleParse (BSS.unpack (BSS.unwords remainder)))
_ -> Nothing
where
parseName str
| BSS.all (\c -> isAlphaNum c || c == '-') str
= Just (PackageName (BSS.unpack str))
| otherwise = Nothing
parseVer str vs =
case BSS.readInt str of
Nothing -> Nothing
Just (v, str') -> case BSS.uncons str' of
Just ('.', str'') -> parseVer str'' (v:vs)
Just _ -> Nothing
Nothing -> Just (Version (reverse (v:vs)) [])
parseBlockNo str =
case BSS.readInt str of
Just (blockno, remainder)
| BSS.null remainder -> Just (fromIntegral blockno)
_ -> Nothing
parseRefType str =
case BSS.uncons str of
Just (typeCode, remainder)
| BSS.null remainder && Tar.isBuildTreeRefTypeCode typeCode
-> Just (refTypeFromTypeCode typeCode)
_ -> Nothing
showIndexCacheEntry :: IndexCacheEntry -> String
showIndexCacheEntry entry = unwords $ case entry of
CachePackageId pkgid b -> [ installedUnitId
, display (packageName pkgid)
, display (packageVersion pkgid)
, blocknoKey
, show b
]
CacheBuildTreeRef t b -> [ buildTreeRefKey
, [typeCodeFromRefType t]
, show b
]
CachePreference dep -> [ preferredVersionKey
, display dep
]
-- | Cabal caches various information about the Hackage index
data Cache = Cache {
cacheEntries :: [IndexCacheEntry]
}
readIndexCache :: BSS.ByteString -> Cache
readIndexCache bs = Cache {
cacheEntries = mapMaybe readIndexCacheEntry $ BSS.lines bs
}
showIndexCache :: Cache -> String
showIndexCache Cache{..} = unlines $ map showIndexCacheEntry cacheEntries
| edsko/cabal | cabal-install/Distribution/Client/IndexUtils.hs | bsd-3-clause | 25,773 | 0 | 22 | 6,359 | 5,460 | 2,849 | 2,611 | 424 | 11 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{- | __Operator Client API__
The functions in this module correspond to
the [Consul Operator API](https://www.consul.io/api/operator).
This module is a WIP, feel free to contribute via
the [repo on GitHub](https://github.com/AlphaHeavy/consul-haskell).
__Missing Functions__
* All API functions
-}
module Network.Consul.Client.Operator
(
) where
import Import
| alphaHeavy/consul-haskell | src/Network/Consul/Client/Operator.hs | bsd-3-clause | 511 | 0 | 3 | 72 | 19 | 15 | 4 | 7 | 0 |
module Main where
import Test.Framework as TF (defaultMain, testGroup, Test)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Control.Monad.MissingM
import Data.Maybe (listToMaybe)
import Data.Functor.Identity
main :: IO ()
main = defaultMain tests
tests :: [TF.Test]
tests = [
testGroup "The" [
testProperty "prop_findMvsfilter" prop_findMvsfilter,
testProperty "prop_findMapMvsfilter" prop_findMapMvsfilter
]
]
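-- Orphan instance, presumably present only so that QuickCheck can display
-- counterexamples involving the randomly generated functions used in the
-- properties below.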
instance Show (a -> b) where
show = const "(function)"
-- just testing with the Identity monad for now.
prop_findMvsfilter :: (Int -> Bool) -> [Int] -> Bool
prop_findMvsfilter f as =
let f' = return . f
in runIdentity (findM f' as) == listToMaybe (filter f as)
prop_findMapMvsfilter :: (Int -> Int) -> (Int -> Bool) -> [Int] -> Bool
prop_findMapMvsfilter mapper tester as =
let amb x = let b = mapper x in if tester b then Just b else Nothing
in runIdentity (findMapM (return . amb) as) == (listToMaybe (filter tester (fmap mapper as)))
| techtangents/missingm | tests/Main.hs | bsd-3-clause | 1,007 | 0 | 13 | 183 | 343 | 183 | 160 | 23 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MonadComprehensions #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE ViewPatterns #-}
module Queries.SIGMOD.Nested
( nationCount
, allRegionsNations
, shippingDelay
, expectedRevenueFor
) where
import Database.DSH
import Schema.TPCH
--------------------------------------------------------------------------------
-- Constructing and flattening (via aggregation) nested data
nationCount :: Q [(Text, Integer)]
nationCount =
[ pair (r_nameQ r)
(length [ n | n <- nations, n_regionkeyQ n == r_regionkeyQ r ])
| r <- regions
]
-- A query that constructs nested data
allRegionsNations :: Q [(Text, [Text])]
allRegionsNations =
[ pair (r_nameQ r)
[ n_nameQ n | n <- nations, n_regionkeyQ n == r_regionkeyQ r ]
| r <- regions
]
--------------------------------------------------------------------------------
-- Complex reports with nested results
-- Return all orders with a given status
ordersWithStatus :: Text -> Q [Order]
ordersWithStatus status =
[ o | o <- orders, o_orderstatusQ o == toQ status ]
-- Has the order been ordered by a given customer?
orderedBy :: Q Order -> Q Customer -> Q Bool
orderedBy o c = o_custkeyQ o == c_custkeyQ c
-- | Does the customer originate from the given nation?
custFromNation :: Q Customer -> Text -> Q Bool
custFromNation c nn =
or [ n_nameQ n == toQ nn && n_nationkeyQ n == c_nationkeyQ c
| n <- nations
]
-- The discounted price of an item
discPrice :: Q LineItem -> Q Decimal
discPrice l = l_extendedpriceQ l * (1 - l_discountQ l)
-- The price of an item after taxes
chargedPrice :: Q LineItem -> Q Decimal
chargedPrice l = discPrice l * (1 + l_taxQ l)
-- The total price of a number of items.
revenue :: Q [LineItem] -> Q Decimal
revenue ls = sum $ map discPrice ls
-- Revenue from a given order.
orderRevenue :: Q Order -> Q Decimal
orderRevenue o = revenue $ orderItems o
-- All lineitems of one particular order
orderItems :: Q Order -> Q [LineItem]
orderItems o = [ l | l <- lineitems, l_orderkeyQ l == o_orderkeyQ o ]
-- Expected revenue report
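-- For every customer from the given nation with at least one order of
-- status "P", list the date and discounted revenue of each such order.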
expectedRevenueFor :: Text -> Q [(Text, [(Day, Decimal)])]
expectedRevenueFor nationName =
[ pair (c_nameQ c) [ pair (o_orderdateQ o) (orderRevenue o)
| o <- ordersWithStatus "P"
, o `orderedBy` c
]
| c <- customers
, c `custFromNation` nationName
, any (\o -> o `orderedBy` c) (ordersWithStatus "P")
]
--------------------------------------------------------------------------------
-- A simpler running example
-- For all orders, compute item quantities and average shipping time
shippingDelay :: Q [(Integer, [Decimal], Double)]
shippingDelay =
[ let ls = orderItems o
in tup3 (o_orderkeyQ o)
[ l_quantityQ l | l <- sortWith l_shipdateQ ls ]
(avg [ integerToDouble $ diffDays (o_orderdateQ o) (l_shipdateQ l) | l <- ls ])
| o <- orders
, o_totalpriceQ o > 500000
]
| ulricha/dsh-example-queries | Queries/SIGMOD/Nested.hs | bsd-3-clause | 3,159 | 0 | 15 | 746 | 831 | 433 | 398 | 58 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Database.DSH.Backend.Sql.Common
( SqlVector(..)
, SqlCode(..)
, generateSqlShape
) where
import Data.Maybe
import qualified Database.Algebra.Dag as D
import qualified Database.Algebra.Table.Lang as TA
import Database.DSH.Common.Impossible
import Database.DSH.Common.QueryPlan
import Database.DSH.Backend.Sql.Vector
-- | An abstraction over SQL code for a particular system (e.g. PostgreSQL,
-- MonetDB, HyPer).
class SqlCode c where
-- | Generate code from a table algebra DAG. Implementations provide a list
-- of queries and an optional prelude which might set up shared temporary
-- tables or indexing.
genSqlCode :: D.AlgebraDag TA.TableAlgebra -> (Maybe c, [c])
-- | In a query shape, render each root node for the algebraic plan into a
-- separate PostgreSQL SQL query.
generateSqlShape :: SqlCode c => QueryPlan TA.TableAlgebra TADVec -> Shape (SqlVector c)
generateSqlShape taPlan = renderSql $ queryShape taPlan
where
roots = D.rootNodes $ queryDag taPlan
(_sqlShared, sqlQueries) = genSqlCode (queryDag taPlan)
nodeToQuery = zip roots sqlQueries
lookupNode n = fromMaybe $impossible $ lookup n nodeToQuery
-- We do not need order columns to reconstruct results: order information is
-- encoded in the SQL queries' ORDER BY clause. We rely on the physical
-- order of the result table.
renderSql = fmap (\(TADVec q _ k r i) -> SqlVector (lookupNode q) k r i)
| ulricha/dsh-sql | src/Database/DSH/Backend/Sql/Common.hs | bsd-3-clause | 1,596 | 0 | 12 | 394 | 282 | 162 | 120 | 20 | 1 |
module QuestionTwo where
| rossng/COMS22201-lab2 | src/QuestionTwo.hs | bsd-3-clause | 25 | 0 | 2 | 3 | 4 | 3 | 1 | 1 | 0 |
{-# LANGUAGE TemplateHaskell, CPP #-}
{-# OPTIONS_GHC -w #-}
-----------------------------------------------------------------------------
-- |
-- Module : Generics.Instant.TH
-- Copyright : (c) 2011 Universiteit Utrecht
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable
--
-- This module contains Template Haskell code that can be used to
-- automatically generate the boilerplate code for the generic deriving
-- library.
-----------------------------------------------------------------------------
-- Adapted from Generics.Deriving.TH
module Generics.Instant.TH (
-- * Main generator
deriveAll, deriveAllL
-- * Individual generators
, deriveConstructors
, deriveRepresentable
, deriveRep
-- * Utilities
, simplInstance, gadtInstance
, genRepName, typeVariables, tyVarBndrToName
) where
import Generics.Instant.Base
import Generics.SYB (everywhere, mkT, everything, mkQ, gshow)
import Language.Haskell.TH hiding (Fixity())
import Language.Haskell.TH.Syntax (Lift(..), showName)
import Data.List (intercalate, nub, elemIndex)
import qualified Data.Map as M
import Control.Monad
import Control.Arrow ((&&&))
-- Used by gadtInstance
data TypeArgsEqs = TypeArgsEqs { args :: [Type] -- ^ Constructor args
, vars :: [Name] -- ^ Variables
, teqs :: [(Type,Type)] -- ^ Type equalities
} deriving Show
-- | Given the names of a generic class, a type to instantiate, a function in
-- the class and the default implementation, generates the code for a basic
-- generic instance.
simplInstance :: Name -> Name -> Name -> Name -> Q [Dec]
simplInstance cl ty fn df = do
i <- reify ty
let typ = return (foldl (\a -> AppT a . VarT . tyVarBndrToName)
(ConT ty) (typeVariables i))
fmap (: []) $ instanceD (cxt []) (conT cl `appT` typ)
[funD fn [clause [] (normalB (varE df)) []]]
-- | Given the names of a generic class, a GADT type to instantiate, a function
-- in the class and the default implementation, generates the code for a basic
-- generic instance. This is tricky in general because we have to analyze the
-- return types of each of the GADT constructors and give instances accordingly.
gadtInstance :: Name -> Name -> Name -> Name -> Q [Dec]
gadtInstance cl ty fn df = do
i <- reify ty
let typ = (foldl (\a -> AppT a . VarT . tyVarBndrToName)
(ConT ty) (typeVariables i))
dt :: ([TyVarBndr],[Con])
dt = case i of
             TyConI (DataD _ _ vs cs _) -> (vs, cs)
             _ -> error ("gadtInstance: " ++ show ty ++ " is not a valid type")
-- List of index variable names
idxs :: [Name]
idxs = extractIndices (fst dt) (snd dt)
-- Get all the arguments, variables, and type equalities introduced by the
-- constructors
eqs :: [Name] -> [Con] -> [TypeArgsEqs]
eqs nms cs = map f cs where
f :: Con -> TypeArgsEqs
f (NormalC _ tys) = TypeArgsEqs (map snd tys) [] []
f (RecC _ tys) = TypeArgsEqs (map (\(_,_,t) -> t) tys) [] []
f (InfixC t1 _ t2) = TypeArgsEqs [snd t1, snd t2] [] []
f (ForallC vs cxt c) = case f c of
TypeArgsEqs ts vs' eqs' ->
TypeArgsEqs ts (tyVarBndrsToNames vs ++ vs')
((concatMap g cxt) ++ eqs')
g :: Pred -> [(Type,Type)]
#if MIN_VERSION_template_haskell(2,10,0)
g (AppT (AppT EqualityT (VarT t1)) t2)
#else
g (EqualP (VarT t1) t2)
#endif
| t1 `elem` nms = [(VarT t1,t2)]
| otherwise = []
g _ = []
subst :: [(Type,Type)] -> Type -> Type
subst s = everywhere (mkT f) where
f (VarT a) = case lookup (VarT a) s of
Nothing -> VarT a
Just t -> t
f x = x
mkInst :: TypeArgsEqs -> Dec
mkInst t = InstanceD (map mkCxt (args t))
(ConT cl `AppT` subst (teqs t) typ) instBody
mkCxt :: Type -> Pred
mkCxt =
#if MIN_VERSION_template_haskell(2,10,0)
AppT (ConT cl)
#else
ClassP cl . (:[])
#endif
-- The instance body is empty for regular cases
instBody :: [Dec]
instBody = [FunD fn [Clause [] (NormalB (VarE df)) []]]
update :: TypeArgsEqs -> [TypeArgsEqs] -> [TypeArgsEqs]
-- update True t1 [] = [t1]
update _ [] = []
update t1 (t2:ts) | teqs t1 == teqs t2 =
t2 {args = nub (args t1 ++ args t2)} : ts
| otherwise = t2 : update t1 ts
-- Types without any type equalities (not real GADTs) need to be handled
-- differently. Others are dealt with using filterMerge.
handleADTs :: ([TypeArgsEqs] -> [TypeArgsEqs])
-> [TypeArgsEqs] -> [TypeArgsEqs]
handleADTs f ts | and (map (null . teqs) ts)
= [TypeArgsEqs (concatMap args ts) [] []]
| otherwise = f ts
-- We need to
-- 1) ignore constructors that don't introduce any type equalities
-- 2) merge constructors with the same return type
-- This code is terribly inefficient and could easily be improved, btw.
filterMerge :: [TypeArgsEqs] -> [TypeArgsEqs]
filterMerge (t0@(TypeArgsEqs ts vs eqs):t)
| eqs == [] = update t0 (filterMerge t)
| otherwise = case filterMerge t of
l -> if or (concat
[ [ typeMatch vs (vars t2) eq1 eq2
| eq1 <- eqs, eq2 <- teqs t2 ] | t2 <- l ])
then update t0 l
else t0 : l
filterMerge [] = []
-- For (2) above, we need to consider type equality modulo
-- quantified-variable names
typeMatch :: [Name] -> [Name] -> (Type,Type) -> (Type,Type) -> Bool
typeMatch vs1 vs2 eq1 eq2 | length vs1 /= length vs2 = False
| otherwise
= eq1 == everywhere (mkT f) eq2
where f (VarT n) = case n `elemIndex` vs2 of
-- is not a quantified variable
Nothing -> VarT n
-- it is, replace it with the equivalent var
Just i -> VarT (vs1 !! i)
f x = x
allTypeArgsEqs = eqs idxs (snd dt)
normInsts = map mkInst (handleADTs filterMerge allTypeArgsEqs)
return $ normInsts
-- | Given the type and the name (as string) for the type to derive,
-- generate the 'Constructor' instances and the 'Representable' instance.
deriveAll :: Name -> Q [Dec]
deriveAll n =
do a <- deriveConstructors n
b <- deriveRepresentable n
return (a ++ b)
-- | Same as 'deriveAll', but taking a list as input.
deriveAllL :: [Name] -> Q [Dec]
deriveAllL = fmap concat . mapM deriveAll
-- | Given a datatype name, derive datatypes and
-- instances of class 'Constructor'.
deriveConstructors :: Name -> Q [Dec]
deriveConstructors = constrInstance
-- | Given the type and the name (as string) for the Representable type
-- synonym to derive, generate the 'Representable' instance.
deriveRepresentable :: Name -> Q [Dec]
deriveRepresentable n = do
rep <- deriveRep n
inst <- deriveInst n
return $ rep ++ inst
-- | Derive only the 'Rep' type synonym. Not needed if 'deriveRepresentable'
-- is used.
deriveRep :: Name -> Q [Dec]
deriveRep n = do
i <- reify n
let d = case i of
TyConI dec -> dec
_ -> error "unknown construct"
exTyFamsInsts <- genExTyFamInsts d
fmap (: exTyFamsInsts) $
tySynD (genRepName n) (typeVariables i) (repType d (typeVariables i))
deriveInst :: Name -> Q [Dec]
deriveInst t = do
i <- reify t
let typ q = return $ foldl (\a -> AppT a . VarT . tyVarBndrToName) (ConT q)
(typeVariables i)
-- inlPrg = pragInlD t (inlineSpecPhase True False True 1)
fcs <- mkFrom t 1 0 t
tcs <- mkTo t 1 0 t
liftM (:[]) $
instanceD (cxt [])
(conT ''Representable `appT` typ t)
[
#if __GLASGOW_HASKELL__ >= 707
tySynInstD ''Rep (tySynEqn [typ t] (typ (genRepName t)))
#else
tySynInstD ''Rep [typ t] (typ (genRepName t))
#endif
, {- inlPrg, -} funD 'from fcs, funD 'to tcs]
constrInstance :: Name -> Q [Dec]
constrInstance n = do
i <- reify n
case i of
TyConI (DataD _ n _ cs _) -> mkInstance n cs
TyConI (NewtypeD _ n _ c _) -> mkInstance n [c]
_ -> return []
where
mkInstance n cs = do
ds <- mapM (mkConstrData n) cs
is <- mapM (mkConstrInstance n) cs
return $ ds ++ is
typeVariables :: Info -> [TyVarBndr]
typeVariables (TyConI (DataD _ _ tv _ _)) = tv
typeVariables (TyConI (NewtypeD _ _ tv _ _)) = tv
typeVariables _ = []
tyVarBndrsToNames :: [TyVarBndr] -> [Name]
tyVarBndrsToNames = map tyVarBndrToName
tyVarBndrToName :: TyVarBndr -> Name
tyVarBndrToName (PlainTV name) = name
tyVarBndrToName (KindedTV name _) = name
stripRecordNames :: Con -> Con
stripRecordNames (RecC n f) =
NormalC n (map (\(_, s, t) -> (s, t)) f)
stripRecordNames c = c
genName :: [Name] -> Name
genName = mkName . (++"_") . intercalate "_" . map nameBase
genRepName :: Name -> Name
genRepName = mkName . (++"_") . ("Rep" ++) . nameBase
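-- For example (illustrative): for a datatype T with a constructor C,
-- 'genRepName' yields the name RepT_ and 'genName' [T, C] yields T_C_.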
mkConstrData :: Name -> Con -> Q Dec
mkConstrData dt (NormalC n _) =
dataD (cxt []) (genName [dt, n]) [] [] []
mkConstrData dt r@(RecC _ _) =
mkConstrData dt (stripRecordNames r)
mkConstrData dt (InfixC t1 n t2) =
mkConstrData dt (NormalC n [t1,t2])
-- Contexts are ignored
mkConstrData dt (ForallC _ _ c) = mkConstrData dt c
instance Lift Fixity where
lift Prefix = conE 'Prefix
lift (Infix a n) = conE 'Infix `appE` [| a |] `appE` [| n |]
instance Lift Associativity where
lift LeftAssociative = conE 'LeftAssociative
lift RightAssociative = conE 'RightAssociative
lift NotAssociative = conE 'NotAssociative
mkConstrInstance :: Name -> Con -> Q Dec
-- Contexts are ignored
mkConstrInstance dt (ForallC _ _ c) = mkConstrInstance dt c
mkConstrInstance dt (NormalC n _) = mkConstrInstanceWith dt n []
mkConstrInstance dt (RecC n _) = mkConstrInstanceWith dt n
[ funD 'conIsRecord [clause [wildP] (normalB (conE 'True)) []]]
mkConstrInstance dt (InfixC t1 n t2) =
do
i <- reify n
let fi = case i of
DataConI _ _ _ f -> convertFixity f
_ -> Prefix
instanceD (cxt []) (appT (conT ''Constructor) (conT $ genName [dt, n]))
[funD 'conName [clause [wildP] (normalB (stringE (nameBase n))) []],
funD 'conFixity [clause [wildP] (normalB [| fi |]) []]]
where
convertFixity (Fixity n d) = Infix (convertDirection d) n
convertDirection InfixL = LeftAssociative
convertDirection InfixR = RightAssociative
convertDirection InfixN = NotAssociative
mkConstrInstanceWith :: Name -> Name -> [Q Dec] -> Q Dec
mkConstrInstanceWith dt n extra =
instanceD (cxt []) (appT (conT ''Constructor) (conT $ genName [dt, n]))
(funD 'conName [clause [wildP] (normalB (stringE (nameBase n))) []] : extra)
repType :: Dec -> [TyVarBndr] -> Q Type
repType i repVs =
do let sum :: Q Type -> Q Type -> Q Type
sum a b = conT ''(:+:) `appT` a `appT` b
case i of
(DataD _ dt vs cs _) ->
(foldBal' sum (error "Empty datatypes are not supported.")
(map (repConGADT (dt, tyVarBndrsToNames vs) repVs
(extractIndices vs cs)) cs))
(NewtypeD _ dt vs c _) -> repConGADT (dt, tyVarBndrsToNames vs) repVs
(extractIndices vs [c]) c
(TySynD t _ _) -> error "type synonym?"
_ -> error "unknown construct"
-- Given a datatype declaration, returns a list of its type variables which are
-- used as index and not as data
extractIndices :: [TyVarBndr] -> [Con] -> [Name]
extractIndices vs = nub . everything (++) ([] `mkQ` isIndexEq) where
isIndexEq :: Pred -> [Name]
isIndexEq p = case p of
#if MIN_VERSION_template_haskell(2,10,0)
AppT (AppT EqualityT (VarT a)) (VarT b)
#else
EqualP (VarT a) (VarT b)
#endif
-> if a `elem` tyVarBndrsToNames vs
then (a:) (if b `elem` tyVarBndrsToNames vs then [b] else [])
else []
#if MIN_VERSION_template_haskell(2,10,0)
AppT (AppT EqualityT (VarT a)) _
#else
EqualP (VarT a) _
#endif
-> if a `elem` tyVarBndrsToNames vs then [a] else []
#if MIN_VERSION_template_haskell(2,10,0)
AppT (AppT EqualityT _) (VarT a)
#else
EqualP _ (VarT a)
#endif
-> if a `elem` tyVarBndrsToNames vs then [a] else []
_ -> []
repConGADT :: (Name, [Name]) -> [TyVarBndr] -> [Name] -> Con -> Q Type
-- We only accept one index variable, for now
repConGADT _ _ vs@(_:_:_) (ForallC _ _ _) =
error ("Datatype indexed over >1 variable: " ++ show vs)
-- Handle type equality constraints
repConGADT d@(dt, dtVs) repVs [indexVar] (ForallC vs ctx c) =
do
let
genTypeEqs p = case p of
#if MIN_VERSION_template_haskell(2,10,0)
(AppT (AppT EqualityT t1) t2:r)
#else
((EqualP t1 t2):r)
#endif
-> let (t1s,t2s) = genTypeEqs r
in ( TupleT 2 `AppT` (substTyVar vsN t1) `AppT` t1s
, TupleT 2 `AppT` (substTyVar vsN t2) `AppT` t2s)
(_:r) -> genTypeEqs r -- other constraints are ignored
[] -> baseEqs
substTyVar :: [Name] -> Type -> Type
substTyVar ns = everywhere (mkT f) where
f (VarT v) = case elemIndex v ns of
Nothing -> VarT v
Just i -> ConT ''X
`AppT` ConT (genName [dt,getConName c])
`AppT` int2TLNat i
`AppT` VarT indexVar
f x = x
vsN :: [Name]
vsN = tyVarBndrsToNames vs
-- Go on with generating the representation type, taking the equalities
repCon (dt, dtVs) (everywhere (mkT (substTyVar vsN)) c) (genTypeEqs ctx)
-- No constraints, go on as usual
repConGADT d _repVs _ c = repCon d c baseEqs
-- Extract the constructor name
getConName :: Con -> Name
getConName (NormalC n _) = n
getConName (RecC n _) = n
getConName (InfixC _ n _) = n
getConName (ForallC _ _ c) = getConName c
-- Generate a type-level natural from an Int
int2TLNat :: Int -> Type
int2TLNat 0 = ConT 'Ze
int2TLNat n = ConT 'Su `AppT` int2TLNat (n-1)
-- Generate the mobility rules for the existential type families
genExTyFamInsts :: Dec -> Q [Dec]
genExTyFamInsts (DataD _ n _ cs _) = fmap concat $
mapM (genExTyFamInsts' n) cs
genExTyFamInsts (NewtypeD _ n _ c _) = genExTyFamInsts' n c
genExTyFamInsts' :: Name -> Con -> Q [Dec]
genExTyFamInsts' dt (ForallC vs cxt c) =
do let mR = mobilityRules (tyVarBndrsToNames vs) cxt
conName = ConT (genName [dt,getConName c])
#if __GLASGOW_HASKELL__ >= 707
tySynInst ty n x = TySynInstD ''X (TySynEqn [conName, int2TLNat n, ty] x)
#else
tySynInst ty n x = TySynInstD ''X [conName, int2TLNat n, ty] x
#endif
return [ tySynInst ty n (VarT nm) | (n,(nm, ty)) <- zip [0..] mR ]
genExTyFamInsts' _ _ = return []
-- Compute the shape of the mobility rules
mobilityRules :: [Name] -> Cxt -> [(Name,Type)]
mobilityRules [] _ = []
mobilityRules vs cxt = concat [ mobilityRules' v p | v <- vs, p <- cxt ] where
mobilityRules' :: Name -> Pred -> [(Name,Type)]
mobilityRules' v p = case p of
#if MIN_VERSION_template_haskell(2,10,0)
AppT (AppT EqualityT (VarT _)) (VarT _)
#else
EqualP (VarT _) (VarT _)
#endif
-> []
#if MIN_VERSION_template_haskell(2,10,0)
AppT (AppT EqualityT (VarT a)) x
#else
EqualP (VarT a) x
#endif
| v `inComplex` x -> [(v,x)]
| otherwise -> []
#if MIN_VERSION_template_haskell(2,10,0)
AppT (AppT EqualityT x) (VarT a)
-> mobilityRules' v (AppT (AppT EqualityT (VarT a)) x)
#else
EqualP x (VarT a)
-> mobilityRules' v (EqualP (VarT a) x)
#endif
_ -> []
inComplex :: Name -> Type -> Bool
inComplex v (VarT _) = False
inComplex v x = everything (||) (False `mkQ` q) x where
    q (VarT x) = x == v
    q _        = False
flattenEqs :: (Type, Type) -> Q Type
flattenEqs (t1, t2) = return t1 `appT` return t2
-- () ~ ()
baseEqs :: (Type, Type)
baseEqs = (TupleT 0, TupleT 0)
repCon :: (Name, [Name]) -> Con -> (Type,Type) -> Q Type
repCon _ (ForallC _ _ _) _ = error "impossible"
repCon (dt, vs) (NormalC n []) (t1,t2) =
conT ''CEq `appT` (conT $ genName [dt, n]) `appT` return t1
`appT` return t2 `appT` conT ''U
repCon (dt, vs) (NormalC n fs) (t1,t2) =
conT ''CEq `appT` (conT $ genName [dt, n]) `appT` return t1
`appT` return t2 `appT`
(foldBal prod (map (repField (dt, vs) . snd) fs)) where
prod :: Q Type -> Q Type -> Q Type
prod a b = conT ''(:*:) `appT` a `appT` b
repCon (dt, vs) r@(RecC n []) (t1,t2) =
conT ''CEq `appT` (conT $ genName [dt, n]) `appT` return t1
`appT` return t2 `appT` conT ''U
repCon (dt, vs) r@(RecC n fs) (t1,t2) =
conT ''CEq `appT` (conT $ genName [dt, n]) `appT` return t1
`appT` return t2 `appT`
(foldBal prod (map (repField' (dt, vs) n) fs)) where
prod :: Q Type -> Q Type -> Q Type
prod a b = conT ''(:*:) `appT` a `appT` b
repCon d (InfixC t1 n t2) eqs = repCon d (NormalC n [t1,t2]) eqs
--dataDeclToType :: (Name, [Name]) -> Type
--dataDeclToType (dt, vs) = foldl (\a b -> AppT a (VarT b)) (ConT dt) vs
repField :: (Name, [Name]) -> Type -> Q Type
--repField d t | t == dataDeclToType d = conT ''I
repField d t = conT ''Rec `appT` return t
repField' :: (Name, [Name]) -> Name -> (Name, Strict, Type) -> Q Type
--repField' d ns (_, _, t) | t == dataDeclToType d = conT ''I
repField' (dt, vs) ns (f, _, t) = conT ''Rec `appT` return t
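-- Illustrative sketch (assuming a plain, non-GADT datatype): for
--
-- >   data List a = Nil | Cons a (List a)
--
-- 'deriveRep' generates, roughly,
--
-- >   type RepList_ a =      CEq List_Nil_  () () U
-- >                      :+: CEq List_Cons_ () () (Rec a :*: Rec (List a))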
-- Note: we should generate Var too, at some point
mkFrom :: Name -> Int -> Int -> Name -> Q [Q Clause]
mkFrom ns m i n =
do
-- runIO $ putStrLn $ "processing " ++ show n
let wrapE e = e -- lrE m i e
i <- reify n
let b = case i of
TyConI (DataD _ dt vs cs _) ->
zipWith (fromCon wrapE ns (dt, map tyVarBndrToName vs)
(length cs)) [1..] cs
TyConI (NewtypeD _ dt vs c _) ->
[fromCon wrapE ns (dt, map tyVarBndrToName vs) 1 0 c]
TyConI (TySynD t _ _) -> error "type synonym?"
-- [clause [varP (field 0)] (normalB (wrapE $ conE 'K1 `appE` varE (field 0))) []]
_ -> error "unknown construct"
return b
mkTo :: Name -> Int -> Int -> Name -> Q [Q Clause]
mkTo ns m i n =
do
-- runIO $ putStrLn $ "processing " ++ show n
let wrapP p = p -- lrP m i p
i <- reify n
let b = case i of
TyConI (DataD _ dt vs cs _) ->
zipWith (toCon wrapP ns (dt, map tyVarBndrToName vs)
(length cs)) [1..] cs
TyConI (NewtypeD _ dt vs c _) ->
[toCon wrapP ns (dt, map tyVarBndrToName vs) 1 0 c]
TyConI (TySynD t _ _) -> error "type synonym?"
-- [clause [wrapP $ conP 'K1 [varP (field 0)]] (normalB $ varE (field 0)) []]
_ -> error "unknown construct"
return b
fromCon :: (Q Exp -> Q Exp) -> Name -> (Name, [Name]) -> Int -> Int -> Con -> Q Clause
-- Contexts are ignored
fromCon wrap ns d m i (ForallC _ _ c) = fromCon wrap ns d m i c
fromCon wrap ns (dt, vs) m i (NormalC cn []) =
clause
[conP cn []]
(normalB $ wrap $ lrE m i $ appE (conE 'C) $ conE 'U) []
fromCon wrap ns (dt, vs) m i (NormalC cn fs) =
-- runIO (putStrLn ("constructor " ++ show ix)) >>
clause
[conP cn (map (varP . field) [0..length fs - 1])]
(normalB $ wrap $ lrE m i $ conE 'C `appE`
foldBal prod (zipWith (fromField (dt, vs)) [0..] (map snd fs))) []
where prod x y = conE '(:*:) `appE` x `appE` y
fromCon wrap ns (dt, vs) m i r@(RecC cn []) =
clause
[conP cn []]
(normalB $ wrap $ lrE m i $ conE 'C `appE` (conE 'U)) []
fromCon wrap ns (dt, vs) m i r@(RecC cn fs) =
clause
[conP cn (map (varP . field) [0..length fs - 1])]
(normalB $ wrap $ lrE m i $ conE 'C `appE`
foldBal prod (zipWith (fromField (dt, vs)) [0..] (map trd fs))) []
where prod x y = conE '(:*:) `appE` x `appE` y
fromCon wrap ns (dt, vs) m i (InfixC t1 cn t2) =
fromCon wrap ns (dt, vs) m i (NormalC cn [t1,t2])
fromField :: (Name, [Name]) -> Int -> Type -> Q Exp
--fromField (dt, vs) nr t | t == dataDeclToType (dt, vs) = conE 'I `appE` varE (field nr)
fromField (dt, vs) nr t = conE 'Rec `appE` varE (field nr)
toCon :: (Q Pat -> Q Pat) -> Name -> (Name, [Name]) -> Int -> Int -> Con -> Q Clause
-- Contexts are ignored
toCon wrap ns d m i (ForallC _ _ c) = toCon wrap ns d m i c
toCon wrap ns (dt, vs) m i (NormalC cn []) =
clause
[wrap $ lrP m i $ conP 'C [conP 'U []]]
(normalB $ conE cn) []
toCon wrap ns (dt, vs) m i (NormalC cn fs) =
-- runIO (putStrLn ("constructor " ++ show ix)) >>
clause
[wrap $ lrP m i $ conP 'C
[foldBal prod (zipWith (toField (dt, vs)) [0..] (map snd fs))]]
(normalB $ foldl appE (conE cn) (map (varE . field) [0..length fs - 1])) []
where prod x y = conP '(:*:) [x,y]
toCon wrap ns (dt, vs) m i r@(RecC cn []) =
clause
[wrap $ lrP m i $ conP 'U []]
(normalB $ conE cn) []
toCon wrap ns (dt, vs) m i r@(RecC cn fs) =
clause
[wrap $ lrP m i $ conP 'C
[foldBal prod (zipWith (toField (dt, vs)) [0..] (map trd fs))]]
(normalB $ foldl appE (conE cn) (map (varE . field) [0..length fs - 1])) []
where prod x y = conP '(:*:) [x,y]
toCon wrap ns (dt, vs) m i (InfixC t1 cn t2) =
toCon wrap ns (dt, vs) m i (NormalC cn [t1,t2])
toField :: (Name, [Name]) -> Int -> Type -> Q Pat
--toField (dt, vs) nr t | t == dataDeclToType (dt, vs) = conP 'I [varP (field nr)]
toField (dt, vs) nr t = conP 'Rec [varP (field nr)]
field :: Int -> Name
field n = mkName $ "f" ++ show n
lrP :: Int -> Int -> (Q Pat -> Q Pat)
{-
lrP 1 0 p = p
lrP m 0 p = conP 'L [p]
lrP m i p = conP 'R [lrP (m-1) (i-1) p]
-}
lrP m i p | m == 0 = error "lrP: no constructors"
| m == 1 = p
| i <= div m 2 = conP 'L [lrP (div m 2) i p]
| i > div m 2 = conP 'R [lrP (m - div m 2) (i - div m 2) p]
lrE :: Int -> Int -> (Q Exp -> Q Exp)
{-
lrE 1 0 e = e
lrE m 0 e = conE 'L `appE` e
lrE m i e = conE 'R `appE` lrE (m-1) (i-1) e
-}
lrE m i e | m == 0 = error "lrE: no constructors"
| m == 1 = e
| i <= div m 2 = conE 'L `appE` lrE (div m 2) i e
| i > div m 2 = conE 'R `appE` lrE (m - div m 2) (i - div m 2) e
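-- Added illustration (not part of the original module): for a datatype with
-- four constructors the balanced encoding produced by lrE/lrP is
--   i=1 -> L (L e),  i=2 -> L (R e),  i=3 -> R (L e),  i=4 -> R (R e)
-- i.e. a log-depth tree of sums, in contrast to the linear L/R chains built
-- by the commented-out versions above.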
trd (_,_,c) = c
-- | Variant of foldr1 which returns a special element for empty lists
foldr1' :: (a -> a -> a) -> a -> [a] -> a
foldr1' _ x []    = x
foldr1' _ _ [x]   = x
foldr1' f x (h:t) = f h (foldr1' f x t)
-- | Variant of foldr1 for producing balanced lists
foldBal :: (a -> a -> a) -> [a] -> a
foldBal op = foldBal' op (error "foldBal: empty list")
foldBal' :: (a -> a -> a) -> a -> [a] -> a
foldBal' _ x [] = x
foldBal' _ _ [y] = y
foldBal' op x l = let (a,b) = splitAt (length l `div` 2) l
in foldBal' op x a `op` foldBal' op x b
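-- A minimal sketch of the shape 'foldBal' produces, added for illustration
-- only; 'BalDemo' and 'balDemo' are hypothetical names that are not part of
-- the original module.  The list is split in half recursively, so the result
-- is a balanced tree of applications rather than a right-nested chain.
data BalDemo = BLeaf Int | BNode BalDemo BalDemo
  deriving Show

balDemo :: [Int] -> BalDemo
balDemo = foldBal BNode . map BLeaf
-- e.g. balDemo [1,2,3,4]
--        == BNode (BNode (BLeaf 1) (BLeaf 2)) (BNode (BLeaf 3) (BLeaf 4))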
| dreixel/instant-generics | src/Generics/Instant/TH.hs | bsd-3-clause | 23,771 | 0 | 22 | 7,277 | 8,732 | 4,566 | 4,166 | 422 | 13 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE FunctionalDependencies #-}
module Symengine.NumberTheory(
Symengine.NumberTheory.gcd,
Symengine.NumberTheory.lcm,
gcd_extended,
next_prime,
Symengine.NumberTheory.mod,
quotient,
quotient_and_mod,
mod_f,
quotient_f,
quotient_and_mod_f,
mod_inverse,
fibonacci,
fibonacci2,
lucas,
-- I do not understand exactly what lucas2 does. Clarify and then
-- export
-- lucas2,
binomial,
factorial
)
where
import Foreign.C.Types
import Foreign.Ptr
import Foreign.C.String
import Foreign.Storable
import Foreign.Marshal.Array
import Foreign.Marshal.Alloc
import Foreign.ForeignPtr
import Control.Applicative
import Control.Monad -- for foldM
import System.IO.Unsafe
import Control.Monad
import GHC.Real
import Symengine.Internal
import Symengine.BasicSym
gcd :: BasicSym -> BasicSym -> BasicSym
gcd = lift_basicsym_binaryop ntheory_gcd_ffi
lcm :: BasicSym -> BasicSym -> BasicSym
lcm = lift_basicsym_binaryop ntheory_lcm_ffi
gcd_extended :: BasicSym -> BasicSym -> (BasicSym, BasicSym, BasicSym)
gcd_extended a b = unsafePerformIO $ do
g <- basicsym_new
s <- basicsym_new
t <- basicsym_new
with4 g s t a (\g s t a ->
with b (\b ->
ntheory_gcd_ext_ffi g s t a b))
return (g, s, t)
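-- Added note: judging by the argument order passed to ntheory_gcd_ext_ffi,
-- the returned triple is presumably (g, s, t) with g = gcd a b and s, t the
-- Bezout coefficients, i.e. s*a + t*b = g.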
next_prime :: BasicSym -> BasicSym
next_prime = lift_basicsym_unaryop ntheory_nextprime_ffi
type Quotient = BasicSym
type Modulo = BasicSym
mod :: BasicSym -> BasicSym -> Modulo
mod = lift_basicsym_binaryop ntheory_mod_ffi
quotient :: BasicSym -> BasicSym -> BasicSym
quotient = lift_basicsym_binaryop ntheory_quotient_ffi
quotient_and_mod :: BasicSym -> BasicSym -> (Quotient, Modulo)
quotient_and_mod a b = unsafePerformIO $ do
quotient <- basicsym_new
modulo <- basicsym_new
with4 quotient modulo a b ntheory_quotient_mod_ffi
return $ (quotient, modulo)
mod_f :: BasicSym -> BasicSym -> Modulo
mod_f = lift_basicsym_binaryop ntheory_mod_f_ffi
quotient_f :: BasicSym -> BasicSym -> BasicSym
quotient_f = lift_basicsym_binaryop ntheory_quotient_f_ffi
quotient_and_mod_f :: BasicSym -> BasicSym -> (Quotient, Modulo)
quotient_and_mod_f a b = unsafePerformIO $ do
quotient <- basicsym_new
modulo <- basicsym_new
with4 quotient modulo a b ntheory_quotient_mod_f_ffi
return $ (quotient, modulo)
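-- Added note: per the FFI section below, the _f variants use floored
-- division (quotient rounded towards negative infinity).  For example,
-- floored division of -7 by 2 yields quotient -4 and remainder 1, so the
-- remainder takes the sign of the divisor.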
mod_inverse :: BasicSym -> BasicSym -> BasicSym
mod_inverse = lift_basicsym_binaryop ntheory_mod_inverse_ffi
fibonacci :: Int -> BasicSym
fibonacci i = unsafePerformIO $ do
fib <- basicsym_new
with fib (\fib -> ntheory_fibonacci_ffi fib (fromIntegral i))
return fib
fibonacci2 :: Int -> (BasicSym, BasicSym)
fibonacci2 n = unsafePerformIO $ do
g <- basicsym_new
s <- basicsym_new
with2 g s (\g s -> ntheory_fibonacci2_ffi g s (fromIntegral n))
return (g, s)
lucas :: Int -> BasicSym
lucas n = unsafePerformIO $ do
l <- basicsym_new
with l (\l -> ntheory_lucas_ffi l (fromIntegral n))
return l
{-
lucas2 :: BasicSym -> BasicSym -> (BasicSym, BasicSym)
lucas2 n n_prev = unsafePerformIO $ do
g <- basicsym_new
s <- basicsym_new
with4 g s n n_prev ntheory_lucas2_ffi
return (g, s)
-}
binomial :: BasicSym -> Int -> BasicSym
binomial n r = unsafePerformIO $ do
ncr <- basicsym_new
with2 ncr n (\ncr n -> ntheory_binomial_ffi ncr n (fromIntegral r))
return ncr
factorial :: Int -> BasicSym
factorial n = unsafePerformIO $ do
fact <- basicsym_new
with fact (\fact -> ntheory_factorial_ffi fact (fromIntegral n))
return fact
-- FFI Bindings
-- gcd, lcm
foreign import ccall "symengine/cwrapper.h ntheory_gcd" ntheory_gcd_ffi ::
Ptr CBasicSym -> Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
foreign import ccall "symengine/cwrapper.h ntheory_lcm" ntheory_lcm_ffi ::
Ptr CBasicSym -> Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
foreign import ccall "symengine/cwrapper.h ntheory_gcd_ext" ntheory_gcd_ext_ffi
:: Ptr CBasicSym -> Ptr CBasicSym -> Ptr CBasicSym ->
Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
-- prime
foreign import ccall "symengine/cwrapper.h ntheory_nextprime"
ntheory_nextprime_ffi :: Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
-- modulus
foreign import ccall "symengine/cwrapper.h ntheory_mod"
ntheory_mod_ffi :: Ptr CBasicSym -> Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
foreign import ccall "symengine/cwrapper.h ntheory_quotient"
ntheory_quotient_ffi :: Ptr CBasicSym -> Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
foreign import ccall "symengine/cwrapper.h ntheory_quotient_mod"
ntheory_quotient_mod_ffi :: Ptr CBasicSym -> Ptr CBasicSym ->
Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
-- _f versions (round towards -inf)
foreign import ccall "symengine/cwrapper.h ntheory_mod_f"
ntheory_mod_f_ffi :: Ptr CBasicSym -> Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
foreign import ccall "symengine/cwrapper.h ntheory_quotient_f"
ntheory_quotient_f_ffi :: Ptr CBasicSym -> Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
foreign import ccall "symengine/cwrapper.h ntheory_quotient_mod_f"
ntheory_quotient_mod_f_ffi :: Ptr CBasicSym -> Ptr CBasicSym ->
Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
-- mod inverse
foreign import ccall "symengine/cwrapper.h ntheory_mod_inverse"
ntheory_mod_inverse_ffi :: Ptr CBasicSym -> Ptr CBasicSym -> Ptr CBasicSym -> IO CInt
-- fibonacci
foreign import ccall "symengine/cwrapper.h ntheory_fibonacci"
ntheory_fibonacci_ffi :: Ptr CBasicSym ->
CULong -> IO CInt
foreign import ccall "symengine/cwrapper.h ntheory_fibonacci2"
ntheory_fibonacci2_ffi :: Ptr CBasicSym -> Ptr CBasicSym ->
CULong -> IO CInt
-- lucas
foreign import ccall "symengine/cwrapper.h ntheory_lucas"
ntheory_lucas_ffi :: Ptr CBasicSym ->
CULong -> IO CInt
foreign import ccall "symengine/cwrapper.h ntheory_lucas2"
ntheory_lucas2_ffi :: Ptr CBasicSym -> Ptr CBasicSym ->
CULong -> IO CInt
-- binomial
foreign import ccall "symengine/cwrapper.h ntheory_binomial"
ntheory_binomial_ffi :: Ptr CBasicSym -> Ptr CBasicSym ->
CULong -> IO CInt
-- factorial
foreign import ccall "symengine/cwrapper.h ntheory_factorial"
ntheory_factorial_ffi :: Ptr CBasicSym ->
CULong -> IO CInt
| bollu/symengine.hs-1 | src/Symengine/NumberTheory.hs | mit | 6,259 | 0 | 14 | 1,136 | 1,542 | 787 | 755 | 142 | 1 |
{-# LANGUAGE TypeOperators, MultiParamTypeClasses, DeriveGeneric, OverloadedStrings #-}
import Control.Exception
import qualified Data.ByteString as S
import Data.Int
import Data.Maybe
import Database.PostgreSQL.ORM.Association
import Database.PostgreSQL.ORM.CreateTable
import Database.PostgreSQL.ORM.Model
import Database.PostgreSQL.Simple
import GHC.Generics
import Control.Applicative
import Database.PostgreSQL.Devel
import Database.PostgreSQL.Keywords
import System.IO.Unsafe
import Data.GetField
import Database.PostgreSQL.ORM.DBSelect
data MyType = MyType { myString :: String -- position 0
, myInt :: Int -- position 1
, myBool :: Bool -- position 2
, myMaybeChar :: Maybe Char -- position 3
, myMaybeString :: Maybe String -- position 4
} deriving (Show, Generic)
myType :: MyType
myType = MyType "my type" 21 True Nothing (Just "maybe string")
data Foo = Foo {
foo_key :: !DBKey
, foo_name :: String
, parent :: !(Maybe (DBRef Bar))
} deriving (Show, Generic)
instance Model Foo
mkFoo :: String -> Foo
mkFoo name = Foo NullKey name Nothing
data Bar = Bar {
barId :: !DBKey
, barNone :: !(Maybe Int32)
, barName :: !String
, barParent :: !(Maybe (DBRef Bar))
} deriving (Show, Generic)
instance Model Bar where modelInfo = underscoreModelInfo "bar"
data ParentBar = ParentBar
instance RowAlias ParentBar where rowAliasName _ = "parent_bar"
selfJoinTable :: JoinTable Bar (As ParentBar Bar)
selfJoinTable = defaultJoinTable
selfJoin :: Association Bar (As ParentBar Bar)
otherSelfJoin :: Association (As ParentBar Bar) Bar
(selfJoin, otherSelfJoin) = jtAssocs selfJoinTable
toParent :: Association Bar (As ParentBar Bar)
toParent = belongsTo
toChild :: Association (As ParentBar Bar) Bar
toChild = has
mkBar :: String -> Bar
mkBar msg = Bar NullKey (Just n) msg Nothing
where n = foldl (+) 0 $ map (toEnum . fromEnum) msg
data Joiner = Joiner {
jkey :: !DBKey
, jcomment :: !String
, jfoo :: (DBRef Foo)
, jbar :: !(Maybe (DBRef Bar))
} deriving (Show, Generic)
instance Model Joiner
joiner :: Joiner
joiner = Joiner (DBKey 5) "join comment" (DBRef 1) Nothing
bar :: Bar
bar = Bar NullKey (Just 44) "hi" Nothing
mkc :: IO Connection
mkc = connectPostgreSQL ""
c :: Connection
{-# NOINLINE c #-}
c = unsafePerformIO mkc
bar' :: Bar
bar' = Bar NullKey (Just 75) "bye" Nothing
data X = X deriving (Generic)
instance RowAlias X
{-
selfjoin :: IO [Bar :. As X Bar]
selfjoin = bracket mkc close $ \c ->
findWhere "bar.id = x.parent" c () :: IO [Bar :. As X Bar]
selfjoin' :: IO [(Bar,Bar)]
selfjoin' = bracket mkc close $ \c ->
map (\(b1 :. b2) -> (b1, fromAs X b2)) <$>
findWhere "bar.bar_key = X.bar_parent" c ()
getOne :: (Model a) => DBKeyType -> IO a
getOne k = bracket mkc close $ \c ->
let r = fromJust <$> findRow c (DBRef k `gAsTypeOf1` r)
in r
-}
data T1 = T1 deriving (Show, Generic)
instance RowAlias T1
data Author = Author {
authorId :: DBKey
} deriving (Show, Generic)
instance Model Author where modelInfo = underscoreModelInfo "author"
data Post = Post {
postId :: DBKey
, postAuthorId :: DBRef Author
} deriving (Show, Generic)
instance Model Post where modelInfo = underscoreModelInfo "post"
data Comment = Comment {
commentId :: DBKey
, commentPostId :: DBRef Post
} deriving (Show, Generic)
instance Model Comment where modelInfo = underscoreModelInfo "comment"
author_posts :: Association Author Post
post_author :: Association Post Author
(post_author, author_posts) = dbrefAssocs defaultDBRefInfo
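-- Added note: 'dbrefAssocs defaultDBRefInfo' presumably derives both
-- directions of the association from the 'postAuthorId :: DBRef Author'
-- column, yielding the child-to-parent ('post_author') and
-- parent-to-children ('author_posts') sides in one go.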
post_comments :: Association Post Comment
post_comments = has
comment_post :: Association Comment Post
comment_post = belongsTo
comment_author :: Association Comment Author
comment_author = chainAssoc comment_post post_author
author_comments :: Association Author Comment
author_comments = chainAssoc author_posts post_comments
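-- Added note: 'chainAssoc' composes two associations, so 'comment_author'
-- and 'author_comments' let a single query walk Comment -> Post -> Author
-- (or Author -> Post -> Comment) through the intermediate table.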
{-
junk = do
foos <- findAll c :: IO [Foo]
bars <- findAll c :: IO [Bar]
-- sequence $ zipWith (addJoin c) foos (drop 4 bars)
-- sequence $ zipWith (addJoin c) foos (drop 19 bars)
-- sequence $ zipWith (addJoin c) (drop 41 foos) bars
-}
data Quizog = Quizog {
qId :: !DBKey
, qNone :: !(Maybe Int32)
, qName :: !String
, qParent :: !(Maybe (DBRef Bar))
, qEd :: !String
} deriving (Show, Generic)
instance Model Quizog
quizog :: Quizog
quizog = Quizog { qId = NullKey
, qNone = Just 3
, qName = "Mr. Quizog to you"
, qParent = Nothing
, qEd = "Q.E.D."
}
dumbdbs :: DBSelect (Only Int)
dumbdbs = expressionDBSelect "1 + 1"
postdbs :: DBSelect Post
postdbs = modelDBSelect
lastval :: DBSelect (Only DBKeyType)
lastval = expressionDBSelect "lastval ()"
rankFraction :: DBSelect (Int, Maybe Double)
rankFraction = expressionDBSelect
"rank() OVER (ORDER BY none), none::float4/SUM(none) OVER () AS fraction"
bardbs :: DBSelect Bar
bardbs = modelDBSelect
| charles-cooper/postgresql-orm | examples.hs | gpl-3.0 | 5,083 | 0 | 13 | 1,122 | 1,250 | 683 | 567 | 153 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Hans.Nat.Forward ( tryForwardUdp, tryForwardTcp ) where
import Hans.Addr.Types (Addr)
import Hans.Lens (view)
import Hans.Network (lookupRoute,RouteInfo(..))
import Hans.Tcp.Packet (TcpHeader(..),tcpSyn)
import Hans.Types
import Hans.Udp.Packet (UdpHeader(..))
-- TCP -------------------------------------------------------------------------
-- | Try to produce a new TCP packet that should be forwarded. Returns 'Nothing'
-- if the packet was destined for the local machine.
tryForwardTcp :: NetworkStack
-> Addr -- ^ Local addr
-> Addr -- ^ Remote addr
-> TcpHeader
-> IO (Maybe (RouteInfo Addr,Addr,TcpHeader))
tryForwardTcp ns local remote hdr =
do let key = Flow local (tcpDestPort hdr) remote (tcpSourcePort hdr)
mbEntry <- tcpForwardingActive ns key
case mbEntry of
-- forwarding is already established, rewrite the packet
Just entry -> return $! rewrite key entry
-- No forwarding entry exists. If it's a syn packet and there's a rule, start a
-- new session.
Nothing
| view tcpSyn hdr ->
do mbRule <- shouldForwardTcp ns key
case mbRule of
Nothing -> return Nothing
-- add an entry to the table, and rewrite the packet
Just rule ->
do mbSess <- newSession ns key rule
case mbSess of
Just entry -> do addTcpSession ns entry
return $! rewrite key entry
Nothing -> return Nothing
| otherwise ->
return Nothing
where
-- rewrite the source and destination in the header
rewrite key entry =
let other = otherSide key entry
hdr' = hdr { tcpSourcePort = flowLocalPort other
, tcpDestPort = flowRemotePort other
, tcpChecksum = 0 }
in hdr' `seq` Just (flowLocal other, flowRemote other, hdr')
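  -- Illustration (added, with made-up numbers): if a session forwards local
  -- port 8080 to an internal host's port 80, a segment that arrived at
  -- <local>:8080 is re-emitted from the session's other side with the
  -- source/destination ports taken from that side, and with the checksum
  -- zeroed, presumably so the outgoing path recomputes it.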
-- UDP -------------------------------------------------------------------------
-- | Try to produce a new UDP packet that should be forwarded. Returns 'Nothing'
-- if the packet was destined for the local machine.
tryForwardUdp :: NetworkStack
-> Addr -- ^ Local addr
-> Addr -- ^ Remote addr
-> UdpHeader
-> IO (Maybe (RouteInfo Addr,Addr,UdpHeader))
tryForwardUdp ns local remote hdr =
do let key = Flow local (udpDestPort hdr) remote (udpSourcePort hdr)
mbEntry <- udpForwardingActive ns key
case mbEntry of
-- forwarding is already established, rewrite the packet
Just entry -> return $! rewrite key entry
-- No forwarding entry exists. If a rule exists, add it to the table.
Nothing ->
do mbRule <- shouldForwardUdp ns key
case mbRule of
Nothing -> return Nothing
-- add an entry to the table, and rewrite the packet
Just rule ->
do mbSess <- newSession ns key rule
case mbSess of
Just entry -> do addUdpSession ns entry
return $! rewrite key entry
Nothing -> return Nothing
where
rewrite key entry =
let other = otherSide key entry
hdr' = hdr { udpSourcePort = flowLocalPort other
, udpDestPort = flowRemotePort other
, udpChecksum = 0 }
in hdr' `seq` Just (flowLocal other, flowRemote other, hdr')
newSession :: NetworkStack -> Flow Addr -> PortForward -> IO (Maybe Session)
newSession ns flow rule =
do l <- lookupRoute ns (flowRemote flow)
r <- lookupRoute ns (pfDestAddr rule)
p <- nextTcpPort ns (flowLocal flow) (pfDestAddr rule) (pfDestPort rule)
case (l,r,p) of
(Just riLeft, Just riRight, Just rightPort) ->
return $ Just
$ Session { sessLeft = flow { flowLocal = riLeft }
, sessRight = Flow { flowLocal = riRight
, flowLocalPort = rightPort
, flowRemote = pfDestAddr rule
, flowRemotePort = pfDestPort rule } }
_ -> return Nothing
| GaloisInc/HaNS | src/Hans/Nat/Forward.hs | bsd-3-clause | 4,376 | 0 | 30 | 1,534 | 972 | 495 | 477 | 77 | 4 |
module Tholos.App.Config where
import Control.Monad.Error.Class (MonadError)
import Control.Monad.IO.Class (MonadIO)
import Database.PostgreSQL.Simple (ConnectInfo (..), Connection,
close, connect)
import System.Environment (getEnv)
import Tholos.App.Environment
import Tholos.Business.Interface
data AppConfig = AppConfig
{ appEnv :: Environment
, appPort :: Int
, conn :: Connection
} --deriving (Show)
mkAppConfig :: Connection -> IO AppConfig
mkAppConfig conn = do
env <- read <$> getEnv "ENV"
port <- read <$> getEnv "PORT"
return $ AppConfig env port conn
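-- Hypothetical usage sketch (added): the ConnectInfo field names below come
-- from postgresql-simple and are assumed, not defined in this module.
--
--   main :: IO ()
--   main = do
--     conn <- connect ConnectInfo { connectHost     = "localhost"
--                                 , connectPort     = 5432
--                                 , connectUser     = "tholos"
--                                 , connectPassword = ""
--                                 , connectDatabase = "tholos" }
--     cfg  <- mkAppConfig conn
--     ...
--
-- Note that mkAppConfig expects the ENV and PORT environment variables to be
-- set to values readable as an Environment and an Int respectively.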
| charlescrain/tholos | src/Tholos/App/Config.hs | bsd-3-clause | 702 | 0 | 9 | 210 | 167 | 97 | 70 | 17 | 1 |
{-# LANGUAGE CPP, NondecreasingIndentation #-}
{-# OPTIONS_GHC -fno-cse #-}
--
-- (c) The University of Glasgow 2002-2006
--
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
-- | The dynamic linker for GHCi.
--
-- This module deals with the top-level issues of dynamic linking,
-- calling the object-code linker and the byte-code linker where
-- necessary.
module Linker ( getHValue, showLinkerState,
linkExpr, linkDecls, unload, withExtendedLinkEnv,
extendLinkEnv, deleteFromLinkEnv,
extendLoadedPkgs,
linkPackages,initDynLinker,linkModule,
linkCmdLineLibs,
-- Saving/restoring globals
PersistentLinkerState, saveLinkerGlobals, restoreLinkerGlobals
) where
#include "HsVersions.h"
import LoadIface
import ObjLink
import ByteCodeLink
import ByteCodeItbls
import ByteCodeAsm
import TcRnMonad
import Packages
import DriverPhases
import Finder
import HscTypes
import Name
import NameEnv
import NameSet
import UniqFM
import Module
import ListSetOps
import DynFlags
import BasicTypes
import Outputable
import Panic
import Util
import ErrUtils
import SrcLoc
import qualified Maybes
import UniqSet
import FastString
import Platform
import SysTools
-- Standard libraries
import Control.Monad
import Data.IORef
import Data.List
import Data.Maybe
import Control.Concurrent.MVar
import System.FilePath
import System.IO
import System.Directory
import Exception
{- **********************************************************************
The Linker's state
********************************************************************* -}
{-
The persistent linker state *must* match the actual state of the
C dynamic linker at all times, so we keep it in a private global variable.
The global IORef used for PersistentLinkerState actually contains another MVar.
The reason for this is that we want to allow another loaded copy of the GHC
library to side-effect the PLS and for those changes to be reflected here.
The PersistentLinkerState maps Names to actual closures (for
interpreted code only), for use during linking.
-}
GLOBAL_VAR_M(v_PersistentLinkerState, newMVar (panic "Dynamic linker not initialised"), MVar PersistentLinkerState)
GLOBAL_VAR(v_InitLinkerDone, False, Bool) -- Set True when dynamic linker is initialised
modifyPLS_ :: (PersistentLinkerState -> IO PersistentLinkerState) -> IO ()
modifyPLS_ f = readIORef v_PersistentLinkerState >>= flip modifyMVar_ f
modifyPLS :: (PersistentLinkerState -> IO (PersistentLinkerState, a)) -> IO a
modifyPLS f = readIORef v_PersistentLinkerState >>= flip modifyMVar f
data PersistentLinkerState
= PersistentLinkerState {
-- Current global mapping from Names to their true values
closure_env :: ClosureEnv,
-- The current global mapping from RdrNames of DataCons to
-- info table addresses.
-- When a new Unlinked is linked into the running image, or an existing
-- module in the image is replaced, the itbl_env must be updated
-- appropriately.
itbl_env :: !ItblEnv,
-- The currently loaded interpreted modules (home package)
bcos_loaded :: ![Linkable],
-- And the currently-loaded compiled modules (home package)
objs_loaded :: ![Linkable],
-- The currently-loaded packages; always object code
-- Held, as usual, in dependency order; though I am not sure if
-- that is really important
pkgs_loaded :: ![UnitId],
-- we need to remember the name of previous temporary DLL/.so
-- libraries so we can link them (see #10322)
temp_sos :: ![(FilePath, String)] }
emptyPLS :: DynFlags -> PersistentLinkerState
emptyPLS _ = PersistentLinkerState {
closure_env = emptyNameEnv,
itbl_env = emptyNameEnv,
pkgs_loaded = init_pkgs,
bcos_loaded = [],
objs_loaded = [],
temp_sos = [] }
-- Packages that don't need loading, because the compiler
-- shares them with the interpreted program.
--
-- The linker's symbol table is populated with RTS symbols using an
-- explicit list. See rts/Linker.c for details.
where init_pkgs = [rtsUnitId]
extendLoadedPkgs :: [UnitId] -> IO ()
extendLoadedPkgs pkgs =
modifyPLS_ $ \s ->
return s{ pkgs_loaded = pkgs ++ pkgs_loaded s }
extendLinkEnv :: [(Name,HValue)] -> IO ()
-- Automatically discards shadowed bindings
extendLinkEnv new_bindings =
modifyPLS_ $ \pls ->
let new_closure_env = extendClosureEnv (closure_env pls) new_bindings
in return pls{ closure_env = new_closure_env }
deleteFromLinkEnv :: [Name] -> IO ()
deleteFromLinkEnv to_remove =
modifyPLS_ $ \pls ->
let new_closure_env = delListFromNameEnv (closure_env pls) to_remove
in return pls{ closure_env = new_closure_env }
-- | Get the 'HValue' associated with the given name.
--
-- May cause loading the module that contains the name.
--
-- Throws a 'ProgramError' if loading fails or the name cannot be found.
getHValue :: HscEnv -> Name -> IO HValue
getHValue hsc_env name = do
initDynLinker (hsc_dflags hsc_env)
pls <- modifyPLS $ \pls -> do
if (isExternalName name) then do
(pls', ok) <- linkDependencies hsc_env pls noSrcSpan [nameModule name]
if (failed ok) then throwGhcExceptionIO (ProgramError "")
else return (pls', pls')
else
return (pls, pls)
lookupName (closure_env pls) name
linkDependencies :: HscEnv -> PersistentLinkerState
-> SrcSpan -> [Module]
-> IO (PersistentLinkerState, SuccessFlag)
linkDependencies hsc_env pls span needed_mods = do
-- initDynLinker (hsc_dflags hsc_env)
let hpt = hsc_HPT hsc_env
dflags = hsc_dflags hsc_env
-- The interpreter and dynamic linker can only handle object code built
-- the "normal" way, i.e. no non-std ways like profiling or ticky-ticky.
-- So here we check the build tag: if we're building a non-standard way
-- then we need to find & link object files built the "normal" way.
maybe_normal_osuf <- checkNonStdWay dflags span
-- Find what packages and linkables are required
(lnks, pkgs) <- getLinkDeps hsc_env hpt pls
maybe_normal_osuf span needed_mods
-- Link the packages and modules required
pls1 <- linkPackages' dflags pkgs pls
linkModules dflags pls1 lnks
-- | Temporarily extend the linker state.
withExtendedLinkEnv :: (ExceptionMonad m) =>
[(Name,HValue)] -> m a -> m a
withExtendedLinkEnv new_env action
= gbracket (liftIO $ extendLinkEnv new_env)
(\_ -> reset_old_env)
(\_ -> action)
where
-- Remember that the linker state might be side-effected
-- during the execution of the IO action, and we don't want to
-- lose those changes (we might have linked a new module or
-- package), so the reset action only removes the names we
-- added earlier.
reset_old_env = liftIO $ do
modifyPLS_ $ \pls ->
let cur = closure_env pls
new = delListFromNameEnv cur (map fst new_env)
in return pls{ closure_env = new }
-- filterNameMap removes from the environment all entries except
-- those for a given set of modules;
-- Note that this removes all *local* (i.e. non-isExternal) names too
-- (these are the temporary bindings from the command line).
-- Used to filter both the ClosureEnv and ItblEnv
filterNameMap :: [Module] -> NameEnv (Name, a) -> NameEnv (Name, a)
filterNameMap mods env
= filterNameEnv keep_elt env
where
keep_elt (n,_) = isExternalName n
&& (nameModule n `elem` mods)
-- | Display the persistent linker state.
showLinkerState :: DynFlags -> IO ()
showLinkerState dflags
= do pls <- readIORef v_PersistentLinkerState >>= readMVar
log_action dflags dflags SevDump noSrcSpan defaultDumpStyle
(vcat [text "----- Linker state -----",
text "Pkgs:" <+> ppr (pkgs_loaded pls),
text "Objs:" <+> ppr (objs_loaded pls),
text "BCOs:" <+> ppr (bcos_loaded pls)])
{- **********************************************************************
Initialisation
********************************************************************* -}
-- | Initialise the dynamic linker. This entails
--
-- a) Calling the C initialisation procedure,
--
-- b) Loading any packages specified on the command line,
--
-- c) Loading any libraries specified on the command line, now held in the
-- @-l@ options in @v_Opt_l@,
--
-- d) Loading any @.o\/.dll@ files specified on the command line, now held
-- in @ldInputs@,
--
-- e) Loading any MacOS frameworks.
--
-- NOTE: This function is idempotent; if called more than once, it does
-- nothing. This is useful in Template Haskell, where we call it before
-- trying to link.
--
initDynLinker :: DynFlags -> IO ()
initDynLinker dflags =
modifyPLS_ $ \pls0 -> do
done <- readIORef v_InitLinkerDone
if done then return pls0
else do writeIORef v_InitLinkerDone True
reallyInitDynLinker dflags
reallyInitDynLinker :: DynFlags -> IO PersistentLinkerState
reallyInitDynLinker dflags =
do { -- Initialise the linker state
let pls0 = emptyPLS dflags
-- (a) initialise the C dynamic linker
; initObjLinker
-- (b) Load packages from the command-line (Note [preload packages])
; pls <- linkPackages' dflags (preloadPackages (pkgState dflags)) pls0
-- steps (c), (d) and (e)
; linkCmdLineLibs' dflags pls
}
linkCmdLineLibs :: DynFlags -> IO ()
linkCmdLineLibs dflags = do
initDynLinker dflags
modifyPLS_ $ \pls -> do
linkCmdLineLibs' dflags pls
linkCmdLineLibs' :: DynFlags -> PersistentLinkerState -> IO PersistentLinkerState
linkCmdLineLibs' dflags@(DynFlags { ldInputs = cmdline_ld_inputs
, libraryPaths = lib_paths}) pls =
do { -- (c) Link libraries from the command-line
; let minus_ls = [ lib | Option ('-':'l':lib) <- cmdline_ld_inputs ]
; libspecs <- mapM (locateLib dflags False lib_paths) minus_ls
-- (d) Link .o files from the command-line
; classified_ld_inputs <- mapM (classifyLdInput dflags)
[ f | FileOption _ f <- cmdline_ld_inputs ]
-- (e) Link any MacOS frameworks
; let platform = targetPlatform dflags
; let (framework_paths, frameworks) =
if platformUsesFrameworks platform
then (frameworkPaths dflags, cmdlineFrameworks dflags)
else ([],[])
-- Finally do (c),(d),(e)
; let cmdline_lib_specs = catMaybes classified_ld_inputs
++ libspecs
++ map Framework frameworks
; if null cmdline_lib_specs then return pls
else do
{ pls1 <- foldM (preloadLib dflags lib_paths framework_paths) pls
cmdline_lib_specs
; maybePutStr dflags "final link ... "
; ok <- resolveObjs
; if succeeded ok then maybePutStrLn dflags "done"
else throwGhcExceptionIO (ProgramError "linking extra libraries/objects failed")
; return pls1
}}
{- Note [preload packages]
Why do we need to preload packages from the command line? This is an
explanation copied from #2437:
I tried to implement the suggestion from #3560, thinking it would be
easy, but there are two reasons we link in packages eagerly when they
are mentioned on the command line:
* So that you can link in extra object files or libraries that
depend on the packages. e.g. ghc -package foo -lbar where bar is a
C library that depends on something in foo. So we could link in
foo eagerly if and only if there are extra C libs or objects to
link in, but....
* Haskell code can depend on a C function exported by a package, and
the normal dependency tracking that TH uses can't know about these
dependencies. The test ghcilink004 relies on this, for example.
I conclude that we need two -package flags: one that says "this is a
package I want to make available", and one that says "this is a
package I want to link in eagerly". Would that be too complicated for
users?
-}
classifyLdInput :: DynFlags -> FilePath -> IO (Maybe LibrarySpec)
classifyLdInput dflags f
| isObjectFilename platform f = return (Just (Object f))
| isDynLibFilename platform f = return (Just (DLLPath f))
| otherwise = do
log_action dflags dflags SevInfo noSrcSpan defaultUserStyle
(text ("Warning: ignoring unrecognised input `" ++ f ++ "'"))
return Nothing
where platform = targetPlatform dflags
preloadLib :: DynFlags -> [String] -> [String] -> PersistentLinkerState
-> LibrarySpec -> IO PersistentLinkerState
preloadLib dflags lib_paths framework_paths pls lib_spec
= do maybePutStr dflags ("Loading object " ++ showLS lib_spec ++ " ... ")
case lib_spec of
Object static_ish
-> do (b, pls1) <- preload_static lib_paths static_ish
maybePutStrLn dflags (if b then "done"
else "not found")
return pls1
Archive static_ish
-> do b <- preload_static_archive lib_paths static_ish
maybePutStrLn dflags (if b then "done"
else "not found")
return pls
DLL dll_unadorned
-> do maybe_errstr <- loadDLL (mkSOName platform dll_unadorned)
case maybe_errstr of
Nothing -> maybePutStrLn dflags "done"
Just mm | platformOS platform /= OSDarwin ->
preloadFailed mm lib_paths lib_spec
Just mm | otherwise -> do
-- As a backup, on Darwin, try to also load a .so file
-- since (apparently) some things install that way - see
-- ticket #8770.
err2 <- loadDLL $ ("lib" ++ dll_unadorned) <.> "so"
case err2 of
Nothing -> maybePutStrLn dflags "done"
Just _ -> preloadFailed mm lib_paths lib_spec
return pls
DLLPath dll_path
-> do maybe_errstr <- loadDLL dll_path
case maybe_errstr of
Nothing -> maybePutStrLn dflags "done"
Just mm -> preloadFailed mm lib_paths lib_spec
return pls
Framework framework ->
if platformUsesFrameworks (targetPlatform dflags)
then do maybe_errstr <- loadFramework framework_paths framework
case maybe_errstr of
Nothing -> maybePutStrLn dflags "done"
Just mm -> preloadFailed mm framework_paths lib_spec
return pls
else panic "preloadLib Framework"
where
platform = targetPlatform dflags
preloadFailed :: String -> [String] -> LibrarySpec -> IO ()
preloadFailed sys_errmsg paths spec
= do maybePutStr dflags "failed.\n"
throwGhcExceptionIO $
CmdLineError (
"user specified .o/.so/.DLL could not be loaded ("
++ sys_errmsg ++ ")\nWhilst trying to load: "
++ showLS spec ++ "\nAdditional directories searched:"
++ (if null paths then " (none)" else
intercalate "\n" (map (" "++) paths)))
-- Not interested in the paths in the static case.
preload_static _paths name
= do b <- doesFileExist name
if not b then return (False, pls)
else if dynamicGhc
then do pls1 <- dynLoadObjs dflags pls [name]
return (True, pls1)
else do loadObj name
return (True, pls)
preload_static_archive _paths name
= do b <- doesFileExist name
if not b then return False
else do if dynamicGhc
then panic "Loading archives not supported"
else loadArchive name
return True
{- **********************************************************************
Link a byte-code expression
********************************************************************* -}
-- | Link a single expression, /including/ first linking packages and
-- modules that this expression depends on.
--
-- Raises an IO exception ('ProgramError') if it can't find a compiled
-- version of the dependents to link.
--
linkExpr :: HscEnv -> SrcSpan -> UnlinkedBCO -> IO HValue
linkExpr hsc_env span root_ul_bco
= do {
-- Initialise the linker (if it's not been done already)
let dflags = hsc_dflags hsc_env
; initDynLinker dflags
-- Take lock for the actual work.
; modifyPLS $ \pls0 -> do {
-- Link the packages and modules required
; (pls, ok) <- linkDependencies hsc_env pls0 span needed_mods
; if failed ok then
throwGhcExceptionIO (ProgramError "")
else do {
-- Link the expression itself
let ie = itbl_env pls
ce = closure_env pls
-- Link the necessary packages and linkables
; (_, (root_hval:_)) <- linkSomeBCOs dflags False ie ce [root_ul_bco]
; return (pls, root_hval)
}}}
where
free_names = nameSetElems (bcoFreeNames root_ul_bco)
needed_mods :: [Module]
needed_mods = [ nameModule n | n <- free_names,
isExternalName n, -- Names from other modules
not (isWiredInName n) -- Exclude wired-in names
] -- (see note below)
-- Exclude wired-in names because we may not have read
-- their interface files, so getLinkDeps will fail
-- All wired-in names are in the base package, which we link
-- by default, so we can safely ignore them here.
dieWith :: DynFlags -> SrcSpan -> MsgDoc -> IO a
dieWith dflags span msg = throwGhcExceptionIO (ProgramError (showSDoc dflags (mkLocMessage SevFatal span msg)))
checkNonStdWay :: DynFlags -> SrcSpan -> IO (Maybe FilePath)
checkNonStdWay dflags srcspan =
if interpWays == haskellWays
then return Nothing
-- see #3604: object files compiled for way "dyn" need to link to the
-- dynamic packages, so we can't load them into a statically-linked GHCi.
-- we have to treat "dyn" in the same way as "prof".
--
-- In the future when GHCi is dynamically linked we should be able to relax
-- this, but then we may have to make it possible to load either ordinary
-- .o files or -dynamic .o files into GHCi (currently that's not possible
-- because the dynamic objects contain refs to e.g. __stginit_base_Prelude_dyn
-- whereas we have __stginit_base_Prelude_.
else if objectSuf dflags == normalObjectSuffix && not (null haskellWays)
then failNonStd dflags srcspan
else return $ Just $ if dynamicGhc
then "dyn_o"
else "o"
where haskellWays = filter (not . wayRTSOnly) (ways dflags)
normalObjectSuffix :: String
normalObjectSuffix = phaseInputExt StopLn
failNonStd :: DynFlags -> SrcSpan -> IO (Maybe FilePath)
failNonStd dflags srcspan = dieWith dflags srcspan $
ptext (sLit "Dynamic linking required, but this is a non-standard build (eg. prof).") $$
ptext (sLit "You need to build the program twice: once the") <+> ghciWay <+> ptext (sLit "way, and then") $$
ptext (sLit "in the desired way using -osuf to set the object file suffix.")
where ghciWay = if dynamicGhc
then ptext (sLit "dynamic")
else ptext (sLit "normal")
getLinkDeps :: HscEnv -> HomePackageTable
-> PersistentLinkerState
-> Maybe FilePath -- replace object suffices?
-> SrcSpan -- for error messages
-> [Module] -- If you need these
-> IO ([Linkable], [UnitId]) -- ... then link these first
-- Fails with an IO exception if it can't find enough files
getLinkDeps hsc_env hpt pls replace_osuf span mods
-- Find all the packages and linkables that a set of modules depends on
= do {
-- 1. Find the dependent home-pkg-modules/packages from each iface
-- (omitting modules from the interactive package, which is already linked)
; (mods_s, pkgs_s) <- follow_deps (filterOut isInteractiveModule mods)
emptyUniqSet emptyUniqSet;
; let {
-- 2. Exclude ones already linked
-- Main reason: avoid findModule calls in get_linkable
mods_needed = mods_s `minusList` linked_mods ;
pkgs_needed = pkgs_s `minusList` pkgs_loaded pls ;
linked_mods = map (moduleName.linkableModule)
(objs_loaded pls ++ bcos_loaded pls) }
-- 3. For each dependent module, find its linkable
-- This will either be in the HPT or (in the case of one-shot
-- compilation) we may need to use maybe_getFileLinkable
; let { osuf = objectSuf dflags }
; lnks_needed <- mapM (get_linkable osuf) mods_needed
; return (lnks_needed, pkgs_needed) }
where
dflags = hsc_dflags hsc_env
this_pkg = thisPackage dflags
-- The ModIface contains the transitive closure of the module dependencies
-- within the current package, *except* for boot modules: if we encounter
-- a boot module, we have to find its real interface and discover the
-- dependencies of that. Hence we need to traverse the dependency
-- tree recursively. See bug #936, testcase ghci/prog007.
follow_deps :: [Module] -- modules to follow
-> UniqSet ModuleName -- accum. module dependencies
-> UniqSet UnitId -- accum. package dependencies
-> IO ([ModuleName], [UnitId]) -- result
follow_deps [] acc_mods acc_pkgs
= return (uniqSetToList acc_mods, uniqSetToList acc_pkgs)
follow_deps (mod:mods) acc_mods acc_pkgs
= do
mb_iface <- initIfaceCheck hsc_env $
loadInterface msg mod (ImportByUser False)
iface <- case mb_iface of
Maybes.Failed err -> throwGhcExceptionIO (ProgramError (showSDoc dflags err))
Maybes.Succeeded iface -> return iface
when (mi_boot iface) $ link_boot_mod_error mod
let
pkg = moduleUnitId mod
deps = mi_deps iface
pkg_deps = dep_pkgs deps
(boot_deps, mod_deps) = partitionWith is_boot (dep_mods deps)
where is_boot (m,True) = Left m
is_boot (m,False) = Right m
boot_deps' = filter (not . (`elementOfUniqSet` acc_mods)) boot_deps
acc_mods' = addListToUniqSet acc_mods (moduleName mod : mod_deps)
acc_pkgs' = addListToUniqSet acc_pkgs $ map fst pkg_deps
--
if pkg /= this_pkg
then follow_deps mods acc_mods (addOneToUniqSet acc_pkgs' pkg)
else follow_deps (map (mkModule this_pkg) boot_deps' ++ mods)
acc_mods' acc_pkgs'
where
msg = text "need to link module" <+> ppr mod <+>
text "due to use of Template Haskell"
link_boot_mod_error mod =
throwGhcExceptionIO (ProgramError (showSDoc dflags (
text "module" <+> ppr mod <+>
text "cannot be linked; it is only available as a boot module")))
no_obj :: Outputable a => a -> IO b
no_obj mod = dieWith dflags span $
ptext (sLit "cannot find object file for module ") <>
quotes (ppr mod) $$
while_linking_expr
while_linking_expr = ptext (sLit "while linking an interpreted expression")
-- This one is a build-system bug
get_linkable osuf mod_name -- A home-package module
| Just mod_info <- lookupUFM hpt mod_name
= adjust_linkable (Maybes.expectJust "getLinkDeps" (hm_linkable mod_info))
| otherwise
= do -- It's not in the HPT because we are in one shot mode,
-- so use the Finder to get a ModLocation...
mb_stuff <- findHomeModule hsc_env mod_name
case mb_stuff of
Found loc mod -> found loc mod
_ -> no_obj mod_name
where
found loc mod = do {
-- ...and then find the linkable for it
mb_lnk <- findObjectLinkableMaybe mod loc ;
case mb_lnk of {
Nothing -> no_obj mod ;
Just lnk -> adjust_linkable lnk
}}
adjust_linkable lnk
| Just new_osuf <- replace_osuf = do
new_uls <- mapM (adjust_ul new_osuf)
(linkableUnlinked lnk)
return lnk{ linkableUnlinked=new_uls }
| otherwise =
return lnk
adjust_ul new_osuf (DotO file) = do
MASSERT(osuf `isSuffixOf` file)
let file_base = dropTail (length osuf + 1) file
new_file = file_base <.> new_osuf
ok <- doesFileExist new_file
if (not ok)
then dieWith dflags span $
ptext (sLit "cannot find normal object file ")
<> quotes (text new_file) $$ while_linking_expr
else return (DotO new_file)
adjust_ul _ (DotA fp) = panic ("adjust_ul DotA " ++ show fp)
adjust_ul _ (DotDLL fp) = panic ("adjust_ul DotDLL " ++ show fp)
adjust_ul _ l@(BCOs {}) = return l
{- **********************************************************************
Loading a Decls statement
********************************************************************* -}
linkDecls :: HscEnv -> SrcSpan -> CompiledByteCode -> IO () --[HValue]
linkDecls hsc_env span (ByteCode unlinkedBCOs itblEnv) = do
-- Initialise the linker (if it's not been done already)
let dflags = hsc_dflags hsc_env
initDynLinker dflags
-- Take lock for the actual work.
modifyPLS $ \pls0 -> do
-- Link the packages and modules required
(pls, ok) <- linkDependencies hsc_env pls0 span needed_mods
if failed ok
then throwGhcExceptionIO (ProgramError "")
else do
-- Link the expression itself
let ie = plusNameEnv (itbl_env pls) itblEnv
ce = closure_env pls
-- Link the necessary packages and linkables
(final_gce, _) <- linkSomeBCOs dflags False ie ce unlinkedBCOs
let pls2 = pls { closure_env = final_gce,
itbl_env = ie }
return (pls2, ()) --hvals)
where
free_names = concatMap (nameSetElems . bcoFreeNames) unlinkedBCOs
needed_mods :: [Module]
needed_mods = [ nameModule n | n <- free_names,
isExternalName n, -- Names from other modules
not (isWiredInName n) -- Exclude wired-in names
] -- (see note below)
-- Exclude wired-in names because we may not have read
-- their interface files, so getLinkDeps will fail
-- All wired-in names are in the base package, which we link
-- by default, so we can safely ignore them here.
{- **********************************************************************
Loading a single module
********************************************************************* -}
linkModule :: HscEnv -> Module -> IO ()
linkModule hsc_env mod = do
initDynLinker (hsc_dflags hsc_env)
modifyPLS_ $ \pls -> do
(pls', ok) <- linkDependencies hsc_env pls noSrcSpan [mod]
if (failed ok) then throwGhcExceptionIO (ProgramError "could not link module")
else return pls'
{- **********************************************************************
Link some linkables
The linkables may consist of a mixture of
byte-code modules and object modules
********************************************************************* -}
linkModules :: DynFlags -> PersistentLinkerState -> [Linkable]
-> IO (PersistentLinkerState, SuccessFlag)
linkModules dflags pls linkables
= mask_ $ do -- don't want to be interrupted by ^C in here
let (objs, bcos) = partition isObjectLinkable
(concatMap partitionLinkable linkables)
-- Load objects first; they can't depend on BCOs
(pls1, ok_flag) <- dynLinkObjs dflags pls objs
if failed ok_flag then
return (pls1, Failed)
else do
pls2 <- dynLinkBCOs dflags pls1 bcos
return (pls2, Succeeded)
-- HACK to support f-x-dynamic in the interpreter; no other purpose
partitionLinkable :: Linkable -> [Linkable]
partitionLinkable li
= let li_uls = linkableUnlinked li
li_uls_obj = filter isObject li_uls
li_uls_bco = filter isInterpretable li_uls
in
case (li_uls_obj, li_uls_bco) of
(_:_, _:_) -> [li {linkableUnlinked=li_uls_obj},
li {linkableUnlinked=li_uls_bco}]
_ -> [li]
findModuleLinkable_maybe :: [Linkable] -> Module -> Maybe Linkable
findModuleLinkable_maybe lis mod
= case [LM time nm us | LM time nm us <- lis, nm == mod] of
[] -> Nothing
[li] -> Just li
_ -> pprPanic "findModuleLinkable" (ppr mod)
linkableInSet :: Linkable -> [Linkable] -> Bool
linkableInSet l objs_loaded =
case findModuleLinkable_maybe objs_loaded (linkableModule l) of
Nothing -> False
Just m -> linkableTime l == linkableTime m
{- **********************************************************************
The object-code linker
********************************************************************* -}
dynLinkObjs :: DynFlags -> PersistentLinkerState -> [Linkable]
-> IO (PersistentLinkerState, SuccessFlag)
dynLinkObjs dflags pls objs = do
-- Load the object files and link them
let (objs_loaded', new_objs) = rmDupLinkables (objs_loaded pls) objs
pls1 = pls { objs_loaded = objs_loaded' }
unlinkeds = concatMap linkableUnlinked new_objs
wanted_objs = map nameOfObject unlinkeds
if dynamicGhc
then do pls2 <- dynLoadObjs dflags pls1 wanted_objs
return (pls2, Succeeded)
else do mapM_ loadObj wanted_objs
-- Link them all together
ok <- resolveObjs
-- If resolving failed, unload all our
-- object modules and carry on
if succeeded ok then do
return (pls1, Succeeded)
else do
pls2 <- unload_wkr dflags [] pls1
return (pls2, Failed)
dynLoadObjs :: DynFlags -> PersistentLinkerState -> [FilePath]
-> IO PersistentLinkerState
dynLoadObjs _ pls [] = return pls
dynLoadObjs dflags pls objs = do
let platform = targetPlatform dflags
(soFile, libPath , libName) <- newTempLibName dflags (soExt platform)
let -- When running TH for a non-dynamic way, we still need to make
-- -l flags to link against the dynamic libraries, so we turn
-- Opt_Static off
dflags1 = gopt_unset dflags Opt_Static
dflags2 = dflags1 {
-- We don't want the original ldInputs in
-- (they're already linked in), but we do want
-- to link against previous dynLoadObjs
-- libraries if there were any, so that the linker
-- can resolve dependencies when it loads this
-- library.
ldInputs =
concatMap
(\(lp, l) ->
[ Option ("-L" ++ lp)
, Option ("-Wl,-rpath")
, Option ("-Wl," ++ lp)
, Option ("-l" ++ l)
])
(temp_sos pls),
-- Even if we're e.g. profiling, we still want
-- the vanilla dynamic libraries, so we set the
-- ways / build tag to be just WayDyn.
ways = [WayDyn],
buildTag = mkBuildTag [WayDyn],
outputFile = Just soFile
}
-- link all "loaded packages" so symbols in those can be resolved
-- Note: We are loading packages with local scope, so to see the
-- symbols in this link we must link all loaded packages again.
linkDynLib dflags2 objs (pkgs_loaded pls)
consIORef (filesToNotIntermediateClean dflags) soFile
m <- loadDLL soFile
case m of
Nothing -> return pls { temp_sos = (libPath, libName) : temp_sos pls }
Just err -> panic ("Loading temp shared object failed: " ++ err)
rmDupLinkables :: [Linkable] -- Already loaded
-> [Linkable] -- New linkables
-> ([Linkable], -- New loaded set (including new ones)
[Linkable]) -- New linkables (excluding dups)
rmDupLinkables already ls
= go already [] ls
where
go already extras [] = (already, extras)
go already extras (l:ls)
| linkableInSet l already = go already extras ls
| otherwise = go (l:already) (l:extras) ls
{- **********************************************************************
The byte-code linker
********************************************************************* -}
dynLinkBCOs :: DynFlags -> PersistentLinkerState -> [Linkable]
-> IO PersistentLinkerState
dynLinkBCOs dflags pls bcos = do
let (bcos_loaded', new_bcos) = rmDupLinkables (bcos_loaded pls) bcos
pls1 = pls { bcos_loaded = bcos_loaded' }
unlinkeds :: [Unlinked]
unlinkeds = concatMap linkableUnlinked new_bcos
cbcs :: [CompiledByteCode]
cbcs = map byteCodeOfObject unlinkeds
ul_bcos = [b | ByteCode bs _ <- cbcs, b <- bs]
ies = [ie | ByteCode _ ie <- cbcs]
gce = closure_env pls
final_ie = foldr plusNameEnv (itbl_env pls) ies
(final_gce, _linked_bcos) <- linkSomeBCOs dflags True final_ie gce ul_bcos
-- XXX What happens to these linked_bcos?
let pls2 = pls1 { closure_env = final_gce,
itbl_env = final_ie }
return pls2
-- Link a bunch of BCOs and return them + updated closure env.
linkSomeBCOs :: DynFlags
-> Bool -- False <=> add _all_ BCOs to returned closure env
-- True <=> add only toplevel BCOs to closure env
-> ItblEnv
-> ClosureEnv
-> [UnlinkedBCO]
-> IO (ClosureEnv, [HValue])
-- The returned HValues are associated 1-1 with
-- the incoming unlinked BCOs. Each gives the
-- value of the corresponding unlinked BCO
linkSomeBCOs dflags toplevs_only ie ce_in ul_bcos
= do let nms = map unlinkedBCOName ul_bcos
hvals <- fixIO
( \ hvs -> let ce_out = extendClosureEnv ce_in (zipLazy nms hvs)
in mapM (linkBCO dflags ie ce_out) ul_bcos )
let ce_all_additions = zip nms hvals
ce_top_additions = filter (isExternalName.fst) ce_all_additions
ce_additions = if toplevs_only then ce_top_additions
else ce_all_additions
ce_out = -- make sure we're not inserting duplicate names into the
-- closure environment, which leads to trouble.
ASSERT(all (not . (`elemNameEnv` ce_in)) (map fst ce_additions))
extendClosureEnv ce_in ce_additions
return (ce_out, hvals)
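-- Added note: the 'fixIO' above ties a recursive knot: the closure
-- environment handed to 'linkBCO' already contains (lazily, via 'zipLazy')
-- the HValues that this very call is about to produce, which is what allows
-- mutually recursive BCOs to refer to one another before linking finishes.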
{- **********************************************************************
Unload some object modules
********************************************************************* -}
-- ---------------------------------------------------------------------------
-- | Unloading old objects ready for a new compilation sweep.
--
-- The compilation manager provides us with a list of linkables that it
-- considers \"stable\", i.e. won't be recompiled this time around. For
-- each of the modules current linked in memory,
--
-- * if the linkable is stable (and it's the same one -- the user may have
-- recompiled the module on the side), we keep it,
--
-- * otherwise, we unload it.
--
-- * we also implicitly unload all temporary bindings at this point.
--
unload :: DynFlags
-> [Linkable] -- ^ The linkables to *keep*.
-> IO ()
unload dflags linkables
= mask_ $ do -- mask, so we're safe from Ctrl-C in here
-- Initialise the linker (if it's not been done already)
initDynLinker dflags
new_pls
<- modifyPLS $ \pls -> do
pls1 <- unload_wkr dflags linkables pls
return (pls1, pls1)
debugTraceMsg dflags 3 (text "unload: retaining objs" <+> ppr (objs_loaded new_pls))
debugTraceMsg dflags 3 (text "unload: retaining bcos" <+> ppr (bcos_loaded new_pls))
return ()
unload_wkr :: DynFlags
-> [Linkable] -- stable linkables
-> PersistentLinkerState
-> IO PersistentLinkerState
-- Does the core unload business
-- (the wrapper blocks exceptions and deals with the PLS get and put)
unload_wkr _ linkables pls
= do let (objs_to_keep, bcos_to_keep) = partition isObjectLinkable linkables
objs_loaded' <- filterM (maybeUnload objs_to_keep) (objs_loaded pls)
bcos_loaded' <- filterM (maybeUnload bcos_to_keep) (bcos_loaded pls)
let bcos_retained = map linkableModule bcos_loaded'
itbl_env' = filterNameMap bcos_retained (itbl_env pls)
closure_env' = filterNameMap bcos_retained (closure_env pls)
new_pls = pls { itbl_env = itbl_env',
closure_env = closure_env',
bcos_loaded = bcos_loaded',
objs_loaded = objs_loaded' }
return new_pls
where
maybeUnload :: [Linkable] -> Linkable -> IO Bool
maybeUnload keep_linkables lnk
| linkableInSet lnk keep_linkables = return True
-- We don't do any cleanup when linking objects with the dynamic linker.
-- Doing so introduces extra complexity for not much benefit.
| dynamicGhc = return False
| otherwise
= do mapM_ unloadObj [f | DotO f <- linkableUnlinked lnk]
-- The components of a BCO linkable may contain
-- dot-o files. Which is very confusing.
--
-- But the BCO parts can be unlinked just by
-- letting go of them (plus of course depopulating
-- the symbol table which is done in the main body)
return False
{- **********************************************************************
Loading packages
********************************************************************* -}
data LibrarySpec
= Object FilePath -- Full path name of a .o file, including trailing .o
-- For dynamic objects only, try to find the object
-- file in all the directories specified in
-- v_Library_paths before giving up.
| Archive FilePath -- Full path name of a .a file, including trailing .a
| DLL String -- "Unadorned" name of a .DLL/.so
-- e.g. On unix "qt" denotes "libqt.so"
-- On WinDoze "burble" denotes "burble.DLL"
-- loadDLL is platform-specific and adds the lib/.so/.DLL
-- suffixes platform-dependently
| DLLPath FilePath -- Absolute or relative pathname to a dynamic library
-- (ends with .dll or .so).
| Framework String -- Only used for darwin, but does no harm
-- If this package is already part of the GHCi binary, we'll already
-- have the right DLLs for this package loaded, so don't try to
-- load them again.
--
-- But on Win32 we must load them 'again'; doing so is a harmless no-op
-- as far as the loader is concerned, but it does initialise the list
-- of DLL handles that rts/Linker.c maintains, and that in turn is
-- used by lookupSymbol. So we must call addDLL for each library
-- just to get the DLL handle into the list.
partOfGHCi :: [PackageName]
partOfGHCi
| isWindowsHost || isDarwinHost = []
| otherwise = map (PackageName . mkFastString)
["base", "template-haskell", "editline"]
showLS :: LibrarySpec -> String
showLS (Object nm) = "(static) " ++ nm
showLS (Archive nm) = "(static archive) " ++ nm
showLS (DLL nm) = "(dynamic) " ++ nm
showLS (DLLPath nm) = "(dynamic) " ++ nm
showLS (Framework nm) = "(framework) " ++ nm
-- | Link exactly the specified packages, and their dependents (unless of
-- course they are already linked). The dependents are linked
-- automatically, and it doesn't matter what order you specify the input
-- packages.
--
linkPackages :: DynFlags -> [UnitId] -> IO ()
-- NOTE: in fact, since each module tracks all the packages it depends on,
-- we don't really need to use the package-config dependencies.
--
-- However we do need the package-config stuff (to find aux libs etc),
-- and following them lets us load libraries in the right order, which
-- perhaps makes the error message a bit more localised if we get a link
-- failure. So the dependency walking code is still here.
linkPackages dflags new_pkgs = do
-- It's probably not safe to try to load packages concurrently, so we take
-- a lock.
initDynLinker dflags
modifyPLS_ $ \pls -> do
linkPackages' dflags new_pkgs pls
linkPackages' :: DynFlags -> [UnitId] -> PersistentLinkerState
-> IO PersistentLinkerState
linkPackages' dflags new_pks pls = do
pkgs' <- link (pkgs_loaded pls) new_pks
return $! pls { pkgs_loaded = pkgs' }
where
link :: [UnitId] -> [UnitId] -> IO [UnitId]
link pkgs new_pkgs =
foldM link_one pkgs new_pkgs
link_one pkgs new_pkg
| new_pkg `elem` pkgs -- Already linked
= return pkgs
| Just pkg_cfg <- lookupPackage dflags new_pkg
= do { -- Link dependents first
pkgs' <- link pkgs (depends pkg_cfg)
-- Now link the package itself
; linkPackage dflags pkg_cfg
; return (new_pkg : pkgs') }
| otherwise
= throwGhcExceptionIO (CmdLineError ("unknown package: " ++ unitIdString new_pkg))
linkPackage :: DynFlags -> PackageConfig -> IO ()
linkPackage dflags pkg
= do
let platform = targetPlatform dflags
dirs = Packages.libraryDirs pkg
let hs_libs = Packages.hsLibraries pkg
-- The FFI GHCi import lib isn't needed as
-- compiler/ghci/Linker.hs + rts/Linker.c link the
-- interpreted references to FFI to the compiled FFI.
-- We therefore filter it out so that we don't get
-- duplicate symbol errors.
hs_libs' = filter ("HSffi" /=) hs_libs
-- Because of slight differences between the GHC dynamic linker and
-- the native system linker some packages have to link with a
-- different list of libraries when using GHCi. Examples include: libs
    -- that are actually GNU ld scripts, and the possibility that the .a
-- libs do not exactly match the .so/.dll equivalents. So if the
-- package file provides an "extra-ghci-libraries" field then we use
-- that instead of the "extra-libraries" field.
extra_libs =
(if null (Packages.extraGHCiLibraries pkg)
then Packages.extraLibraries pkg
else Packages.extraGHCiLibraries pkg)
++ [ lib | '-':'l':lib <- Packages.ldOptions pkg ]
hs_classifieds <- mapM (locateLib dflags True dirs) hs_libs'
extra_classifieds <- mapM (locateLib dflags False dirs) extra_libs
let classifieds = hs_classifieds ++ extra_classifieds
-- Complication: all the .so's must be loaded before any of the .o's.
let known_dlls = [ dll | DLLPath dll <- classifieds ]
dlls = [ dll | DLL dll <- classifieds ]
objs = [ obj | Object obj <- classifieds ]
archs = [ arch | Archive arch <- classifieds ]
maybePutStr dflags
("Loading package " ++ sourcePackageIdString pkg ++ " ... ")
-- See comments with partOfGHCi
when (packageName pkg `notElem` partOfGHCi) $ do
loadFrameworks platform pkg
mapM_ load_dyn (known_dlls ++ map (mkSOName platform) dlls)
-- After loading all the DLLs, we can load the static objects.
-- Ordering isn't important here, because we do one final link
-- step to resolve everything.
mapM_ loadObj objs
mapM_ loadArchive archs
maybePutStr dflags "linking ... "
ok <- resolveObjs
if succeeded ok
then maybePutStrLn dflags "done."
else let errmsg = "unable to load package `"
++ sourcePackageIdString pkg ++ "'"
in throwGhcExceptionIO (InstallationError errmsg)
-- we have already searched the filesystem; the strings passed to load_dyn
-- can be passed directly to loadDLL. They are either fully-qualified
-- ("/usr/lib/libfoo.so"), or unqualified ("libfoo.so"). In the latter case,
-- loadDLL is going to search the system paths to find the library.
--
load_dyn :: FilePath -> IO ()
load_dyn dll = do r <- loadDLL dll
case r of
Nothing -> return ()
Just err -> throwGhcExceptionIO (CmdLineError ("can't load .so/.DLL for: "
++ dll ++ " (" ++ err ++ ")" ))
loadFrameworks :: Platform -> PackageConfig -> IO ()
loadFrameworks platform pkg
= when (platformUsesFrameworks platform) $ mapM_ load frameworks
where
fw_dirs = Packages.frameworkDirs pkg
frameworks = Packages.frameworks pkg
load fw = do r <- loadFramework fw_dirs fw
case r of
Nothing -> return ()
Just err -> throwGhcExceptionIO (CmdLineError ("can't load framework: "
++ fw ++ " (" ++ err ++ ")" ))
-- Try to find an object file for a given library in the given paths.
-- If it isn't present, we assume that addDLL in the RTS can find it,
-- which generally means that it should be a dynamic library in the
-- standard system search path.
locateLib :: DynFlags -> Bool -> [FilePath] -> String -> IO LibrarySpec
locateLib dflags is_hs dirs lib
| not is_hs
-- For non-Haskell libraries (e.g. gmp, iconv):
-- first look in library-dirs for a dynamic library (libfoo.so)
-- then look in library-dirs for a static library (libfoo.a)
-- then try "gcc --print-file-name" to search gcc's search path
-- for a dynamic library (#5289)
-- otherwise, assume loadDLL can find it
--
= findDll `orElse` findArchive `orElse` tryGcc `orElse` tryGccPrefixed `orElse` assumeDll
| not dynamicGhc
-- When the GHC package was not compiled as dynamic library
-- (=DYNAMIC not set), we search for .o libraries or, if they
-- don't exist, .a libraries.
= findObject `orElse` findArchive `orElse` assumeDll
| otherwise
-- When the GHC package was compiled as dynamic library (=DYNAMIC set),
-- we search for .so libraries first.
= findHSDll `orElse` findDynObject `orElse` assumeDll
where
obj_file = lib <.> "o"
dyn_obj_file = lib <.> "dyn_o"
arch_file = "lib" ++ lib <.> "a"
hs_dyn_lib_name = lib ++ '-':programName dflags ++ projectVersion dflags
hs_dyn_lib_file = mkHsSOName platform hs_dyn_lib_name
so_name = mkSOName platform lib
lib_so_name = "lib" ++ so_name
dyn_lib_file = case (arch, os) of
(ArchX86_64, OSSolaris2) -> "64" </> so_name
_ -> so_name
findObject = liftM (fmap Object) $ findFile dirs obj_file
findDynObject = liftM (fmap Object) $ findFile dirs dyn_obj_file
findArchive = liftM (fmap Archive) $ findFile dirs arch_file
findHSDll = liftM (fmap DLLPath) $ findFile dirs hs_dyn_lib_file
findDll = liftM (fmap DLLPath) $ findFile dirs dyn_lib_file
tryGcc = liftM (fmap DLLPath) $ searchForLibUsingGcc dflags so_name dirs
tryGccPrefixed = liftM (fmap DLLPath) $ searchForLibUsingGcc dflags lib_so_name dirs
assumeDll = return (DLL lib)
infixr `orElse`
f `orElse` g = f >>= maybe g return
platform = targetPlatform dflags
arch = platformArch platform
os = platformOS platform
searchForLibUsingGcc :: DynFlags -> String -> [FilePath] -> IO (Maybe FilePath)
searchForLibUsingGcc dflags so dirs = do
-- GCC does not seem to extend the library search path (using -L) when using
-- --print-file-name. So instead pass it a new base location.
str <- askCc dflags (map (FileOption "-B") dirs
++ [Option "--print-file-name", Option so])
let file = case lines str of
[] -> ""
l:_ -> l
if (file == so)
then return Nothing
else return (Just file)
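-- A rough sketch of the query built above (the directory name is hypothetical):
-- it behaves much like running
--
--   gcc -B/some/lib/dir --print-file-name libfoo.so
--
-- gcc prints an absolute path if it can find the file under its (extended)
-- search prefixes; otherwise it just echoes the name back, which is why the
-- (file == so) comparison above is treated as "not found".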
-- ----------------------------------------------------------------------------
-- Loading a dynamic library (dlopen()-ish on Unix, LoadLibrary-ish on Win32)
-- Darwin / MacOS X only: load a framework
-- a framework is a dynamic library packaged inside a directory of the same
-- name. They are searched for in different paths than normal libraries.
loadFramework :: [FilePath] -> FilePath -> IO (Maybe String)
loadFramework extraPaths rootname
= do { either_dir <- tryIO getHomeDirectory
; let homeFrameworkPath = case either_dir of
Left _ -> []
Right dir -> [dir </> "Library/Frameworks"]
ps = extraPaths ++ homeFrameworkPath ++ defaultFrameworkPaths
; mb_fwk <- findFile ps fwk_file
; case mb_fwk of
Just fwk_path -> loadDLL fwk_path
Nothing -> return (Just "not found") }
-- Tried all our known library paths, but dlopen()
-- has no built-in paths for frameworks: give up
where
fwk_file = rootname <.> "framework" </> rootname
-- sorry for the hardcoded paths, I hope they won't change anytime soon:
defaultFrameworkPaths = ["/Library/Frameworks", "/System/Library/Frameworks"]
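-- As an illustration (the framework name is hypothetical): loadFramework [] "Cocoa"
-- probes <home>/Library/Frameworks/Cocoa.framework/Cocoa and then the two
-- default paths above, loading the first hit with loadDLL and otherwise
-- reporting Just "not found".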
{- **********************************************************************
Helper functions
********************************************************************* -}
maybePutStr :: DynFlags -> String -> IO ()
maybePutStr dflags s
= when (verbosity dflags > 1) $
do let act = log_action dflags
act dflags SevInteractive noSrcSpan defaultUserStyle (text s)
maybePutStrLn :: DynFlags -> String -> IO ()
maybePutStrLn dflags s = maybePutStr dflags (s ++ "\n")
{- **********************************************************************
Tunneling global variables into new instance of GHC library
********************************************************************* -}
saveLinkerGlobals :: IO (MVar PersistentLinkerState, Bool)
saveLinkerGlobals = liftM2 (,) (readIORef v_PersistentLinkerState) (readIORef v_InitLinkerDone)
restoreLinkerGlobals :: (MVar PersistentLinkerState, Bool) -> IO ()
restoreLinkerGlobals (pls, ild) = do
writeIORef v_PersistentLinkerState pls
writeIORef v_InitLinkerDone ild
| siddhanathan/ghc | compiler/ghci/Linker.hs | bsd-3-clause | 53,535 | 9 | 25 | 16,592 | 9,313 | 4,806 | 4,507 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Matching guarded right-hand-sides (GRHSs)
-}
{-# LANGUAGE CPP #-}
module Eta.DeSugar.DsGRHSs ( dsGuarded, dsGRHSs, dsGRHS, isTrueLHsExpr ) where
import {-# SOURCE #-} Eta.DeSugar.DsExpr ( dsLExpr, dsLocalBinds )
import {-# SOURCE #-} Eta.DeSugar.Match ( matchSinglePat )
import Eta.HsSyn.HsSyn
import Eta.Core.MkCore
import Eta.Core.CoreSyn
import Eta.BasicTypes.Var
import Eta.Types.Type
import Eta.DeSugar.DsMonad
import Eta.DeSugar.DsUtils
import Eta.Prelude.TysWiredIn
import Eta.Prelude.PrelNames
import Eta.BasicTypes.Module
import Eta.BasicTypes.Name
import Eta.BasicTypes.SrcLoc
import Eta.Utils.Outputable
import Eta.Utils.Util
#include "HsVersions.h"
{-
@dsGuarded@ is used for both @case@ expressions and pattern bindings.
It desugars:
\begin{verbatim}
| g1 -> e1
...
| gn -> en
where binds
\end{verbatim}
producing an expression with a runtime error in the corner if
necessary. The type argument gives the type of the @ei@.
-}
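-- A rough illustration (not actual compiler output): for a binding such as
--
--   f x | x > 0 = e1
--       | x < 0 = e2
--
-- the guards are tried in order and, if none matches, control falls through
-- to the runtime error built from nON_EXHAUSTIVE_GUARDS_ERROR_ID below.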
dsGuarded :: GRHSs Id (LHsExpr Id) -> Type -> DsM CoreExpr
dsGuarded grhss rhs_ty = do
match_result <- dsGRHSs PatBindRhs [] grhss rhs_ty
error_expr <- mkErrorAppDs nON_EXHAUSTIVE_GUARDS_ERROR_ID rhs_ty empty
extractMatchResult match_result error_expr
-- In contrast, @dsGRHSs@ produces a @MatchResult@.
dsGRHSs :: HsMatchContext Name -> [Pat Id] -- These are to build a MatchContext from
-> GRHSs Id (LHsExpr Id) -- Guarded RHSs
-> Type -- Type of RHS
-> DsM MatchResult
dsGRHSs hs_ctx _ (GRHSs grhss binds) rhs_ty
= ASSERT( notNull grhss )
do { match_results <- mapM (dsGRHS hs_ctx rhs_ty) grhss
; let match_result1 = foldr1 combineMatchResults match_results
match_result2 = adjustMatchResultDs (dsLocalBinds binds) match_result1
-- NB: nested dsLet inside matchResult
; return match_result2 }
dsGRHS :: HsMatchContext Name -> Type -> LGRHS Id (LHsExpr Id) -> DsM MatchResult
dsGRHS hs_ctx rhs_ty (L _ (GRHS guards rhs))
= matchGuards (map unLoc guards) (PatGuard hs_ctx) rhs rhs_ty
{-
************************************************************************
* *
* matchGuard : make a MatchResult from a guarded RHS *
* *
************************************************************************
-}
matchGuards :: [GuardStmt Id] -- Guard
-> HsStmtContext Name -- Context
-> LHsExpr Id -- RHS
-> Type -- Type of RHS of guard
-> DsM MatchResult
-- See comments with HsExpr.Stmt re what a BodyStmt means
-- Here we must be in a guard context (not do-expression, nor list-comp)
matchGuards [] _ rhs _
= do { core_rhs <- dsLExpr rhs
; return (cantFailMatchResult core_rhs) }
-- BodyStmts must be guards
-- An "otherwise" guard is turned into a no-op. This ensures that
-- you don't get a "non-exhaustive eqns" message when the guards
-- finish in "otherwise".
-- NB: The success of this clause depends on the typechecker not
-- wrapping the 'otherwise' in empty HsTyApp or HsWrap constructors
-- If it does, you'll get bogus overlap warnings
matchGuards (BodyStmt e _ _ _ : stmts) ctx rhs rhs_ty
| Just addTicks <- isTrueLHsExpr e = do
match_result <- matchGuards stmts ctx rhs rhs_ty
return (adjustMatchResultDs addTicks match_result)
matchGuards (BodyStmt expr _ _ _ : stmts) ctx rhs rhs_ty = do
match_result <- matchGuards stmts ctx rhs rhs_ty
pred_expr <- dsLExpr expr
return (mkGuardedMatchResult pred_expr match_result)
matchGuards (LetStmt binds : stmts) ctx rhs rhs_ty = do
match_result <- matchGuards stmts ctx rhs rhs_ty
return (adjustMatchResultDs (dsLocalBinds binds) match_result)
-- NB the dsLet occurs inside the match_result
-- Reason: dsLet takes the body expression as its argument
-- so we can't desugar the bindings without the
-- body expression in hand
matchGuards (BindStmt pat bind_rhs _ _ : stmts) ctx rhs rhs_ty = do
match_result <- matchGuards stmts ctx rhs rhs_ty
core_rhs <- dsLExpr bind_rhs
matchSinglePat core_rhs (StmtCtxt ctx) pat rhs_ty match_result
matchGuards (LastStmt {} : _) _ _ _ = panic "matchGuards LastStmt"
matchGuards (ParStmt {} : _) _ _ _ = panic "matchGuards ParStmt"
matchGuards (TransStmt {} : _) _ _ _ = panic "matchGuards TransStmt"
matchGuards (RecStmt {} : _) _ _ _ = panic "matchGuards RecStmt"
matchGuards (ApplicativeStmt {} : _) _ _ _ =
panic "matchGuards ApplicativeLastStmt"
isTrueLHsExpr :: LHsExpr Id -> Maybe (CoreExpr -> DsM CoreExpr)
-- Returns Just {..} if we're sure that the expression is True
-- I.e. * 'True' datacon
-- * 'otherwise' Id
--      * Trivial wrappings of these
-- The arguments to Just are any HsTicks that we have found,
-- because we still want to tick them, even if they are always evaluated.
isTrueLHsExpr (L _ (HsVar v)) | v `hasKey` otherwiseIdKey
|| v `hasKey` getUnique trueDataConId
= Just return
-- trueDataConId doesn't have the same unique as trueDataCon
isTrueLHsExpr (L _ (HsTick tickish e))
| Just ticks <- isTrueLHsExpr e
= Just (\x -> ticks x >>= return . (Tick tickish))
-- This encodes that the result is constant True for Hpc tick purposes;
-- which is specifically what isTrueLHsExpr is trying to find out.
isTrueLHsExpr (L _ (HsBinTick ixT _ e))
| Just ticks <- isTrueLHsExpr e
= Just (\x -> do e <- ticks x
this_mod <- getModule
return (Tick (HpcTick this_mod ixT) e))
isTrueLHsExpr (L _ (HsPar e)) = isTrueLHsExpr e
isTrueLHsExpr _ = Nothing
{-
Should {\em fail} if @e@ returns @D@
\begin{verbatim}
f x | p <- e', let C y# = e, f y# = r1
| otherwise = r2
\end{verbatim}
-}
| rahulmutt/ghcvm | compiler/Eta/DeSugar/DsGRHSs.hs | bsd-3-clause | 6,225 | 0 | 15 | 1,662 | 1,235 | 630 | 605 | -1 | -1 |
module Dotnet.System.UInt32 where
import Dotnet
import qualified Dotnet.System.ValueType
data UInt32_ a
type UInt32 a = Dotnet.System.ValueType.ValueType (UInt32_ a)
| alekar/hugs | dotnet/lib/Dotnet/System/UInt32.hs | bsd-3-clause | 168 | 0 | 7 | 20 | 41 | 27 | 14 | -1 | -1 |
-- Time-stamp: <2010-03-31 20:47:48 cklin>
module Monad where
import Control.Monad (liftM, mapM_, when)
import Data.Maybe (isJust)
import Types
import Common
type EndoTi a = a -> Ti a
--------- Type inference monad and its combinators
-- Type inference state consists of a sequence number for generating
-- fresh meta type variables and a list of strings that record diagnosis
-- and error messages in reverse chronological order.
type TiS a = (Int, [String], a)
type TiM a = TiS (Maybe a)
newtype Ti a =
Ti { unTi :: Int -> TiM a }
instance Monad Ti where
fail w = Ti $ \s -> (s, ["ERROR: " ++ w], Nothing)
return a = Ti $ \s -> (s, [], Just a)
m >>= k = Ti $ \s -> mapJust bind (unTi m s) where
bind (s1, w1, v1) = (s2, w2 ++ w1, v2)
where (s2, w2, v2) = unTi (k v1) s1
die :: Ti a
die = Ti $ \s -> (s, [], Nothing)
mapJust :: (TiS a -> TiM b) -> TiM a -> TiM b
mapJust _ (s, w, Nothing) = (s, w, Nothing)
mapJust f (s, w, Just a) = f (s, w, a)
runTi :: Ti a -> Int -> a
runTi m s = a where (_, _, Just a) = unTi m s
-- Arrest any failure in a monadic computation. The arrested
-- computation returns Nothing if a failure occurred.
catchTi :: Ti a -> Ti (Maybe a)
catchTi m = Ti $ m1 where
m1 s = (s1, w, Just x)
where (s1, w, x) = unTi m s
succeedTi :: Ti a -> Ti Bool
succeedTi = liftM isJust . catchTi . catchNotes
-- Unleash the inner Maybe monad. Warning: all messages in the
-- attempted computations, error or otherwise, are discarded. To
-- preserve the messages, set the verbose flag to True.
verbose = False
attemptTi :: [Ti a] -> Endo (Ti a)
attemptTi ax final = attempt ax where
attempt [] = final
attempt (m:mx) =
do (w, result) <- arrestTi m
when verbose (mapM_ (mesg . ("o " ++)) w)
case result of
Just a -> return a
Nothing -> attempt mx
-- Generate fresh meta type variables, or just the serial number.
newMetaTv :: Ti Type
newMetaTv = liftM TyMeta newMetaIndex
newMetaIndex :: Ti Int
newMetaIndex = Ti $ \s -> (s+1, [], Just s)
freshenTv :: [a] -> Ti [Type]
freshenTv = mapM (const newMetaTv)
freshenIndex :: [a] -> Ti [Int]
freshenIndex = mapM (const newMetaIndex)
freshenTyCon :: EndoTi Type
freshenTyCon (TyCon tc ax) = liftM (TyCon tc) (freshenTv ax)
freshenTyCon v = bug ("Non-constructor type " ++ show v)
renameToNew :: [Int] -> Ti Rename
renameToNew xs = liftM (toMap . zip xs) (freshenIndex xs)
-- Write to or read from the internal messages store. Unlike the fail
-- function, the mesg function writes a message without declaring a
-- failure. The catchNotes function erases all messages, even if the
-- transformed computation fails.
mesg :: String -> Ti ()
mesg w = Ti $ \s -> (s, [w], Just ())
replay :: [String] -> Ti ()
replay = mapM_ mesg
arrestTi :: Ti a -> Ti ([String], Maybe a)
arrestTi = catchNotes . catchTi
catchNotes :: Ti a -> Ti ([String], a)
catchNotes m = Ti m1 where
m1 s = (s1, [], liftM attach x)
where (s1, w, x) = unTi m s
attach a = (reverse w, a)
-- To ensure freshness of newly generated type variables in the presence
-- of hard-coded type variables in the Ti computation (for example, in
-- unit tests), we choose 100 as the initial sequence number. The
-- programmer should make sure that all hard-coded meta type variables
-- have index numbers < 100.
initSeq :: Int
initSeq = 100
trapTi :: Ti a -> Maybe a
trapTi m = runTi (catchTi m) initSeq
examineTi :: Ti a -> IO (Maybe a)
examineTi m =
let (w, v) = runTi (arrestTi m) initSeq
in do mapM_ putStrLn w
return v
| cartazio/omega | vendor/algorithm-p/Monad.hs | bsd-3-clause | 3,625 | 0 | 14 | 880 | 1,270 | 679 | 591 | 72 | 3 |
module LiftToToplevel.PatBindIn4 where
-- Issue https://github.com/RefactoringTools/HaRe/issues/42
-- liftToTopLevel of a should fail.
-- Alternatively, it should result in something like
{-
f x = a x
a x = fst (x, x)
b x = snd (x, x)
-}
f x = let (a, b) = (x, x) in a
| RefactoringTools/HaRe | test/testdata/LiftToToplevel/PatBindIn4.hs | bsd-3-clause | 271 | 0 | 9 | 52 | 42 | 25 | 17 | 2 | 1 |
module Distribution.Solver.Types.Variable where
import Distribution.Solver.Types.OptionalStanza
import Distribution.PackageDescription (FlagName)
-- | Variables used by the dependency solver. This type is similar to the
-- internal 'Var' type, except that flags and stanzas are associated with
-- package names instead of package instances.
data Variable qpn =
PackageVar qpn
| FlagVar qpn FlagName
| StanzaVar qpn OptionalStanza
deriving (Eq, Show)
| themoritz/cabal | cabal-install/Distribution/Solver/Types/Variable.hs | bsd-3-clause | 463 | 0 | 6 | 71 | 63 | 40 | 23 | 8 | 0 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sq-AL">
<title>Simple Example Add-On</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/simpleexample/src/main/javahelp/org/zaproxy/addon/simpleexample/resources/help_sq_AL/helpset_sq_AL.hs | apache-2.0 | 966 | 77 | 67 | 157 | 413 | 209 | 204 | -1 | -1 |
-- | Combinators for constructing properties.
{-# LANGUAGE CPP #-}
#ifndef NO_SAFE_HASKELL
{-# LANGUAGE Safe #-}
#endif
module Test.QuickCheck.Property where
--------------------------------------------------------------------------
-- imports
import Test.QuickCheck.Gen
import Test.QuickCheck.Gen.Unsafe
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Text( showErr, isOneLine, putLine )
import Test.QuickCheck.Exception
import Test.QuickCheck.State hiding (labels)
#ifndef NO_TIMEOUT
import System.Timeout(timeout)
#endif
import Data.Maybe
import Control.Applicative
import Control.Monad
import qualified Data.Map as Map
import Data.Map(Map)
import qualified Data.Set as Set
import Data.Set(Set)
--------------------------------------------------------------------------
-- fixities
infixr 0 ==>
infixr 1 .&.
infixr 1 .&&.
infixr 1 .||.
-- The story for exception handling:
--
-- To avoid insanity, we have rules about which terms can throw
-- exceptions when we evaluate them:
-- * A rose tree must evaluate to WHNF without throwing an exception
-- * The 'ok' component of a Result must evaluate to Just True or
-- Just False or Nothing rather than raise an exception
-- * IORose _ must never throw an exception when executed
--
-- Both rose trees and Results may loop when we evaluate them, though,
-- so we have to be careful not to force them unnecessarily.
--
-- We also have to be careful when we use fmap or >>= in the Rose
-- monad that the function we supply is total, or else use
-- protectResults afterwards to install exception handlers. The
-- mapResult function on Properties installs an exception handler for
-- us, though.
--
-- Of course, the user is free to write "error "ha ha" :: Result" if
-- they feel like it. We have to make sure that any user-supplied Rose
-- Results or Results get wrapped in exception handlers, which we do by:
-- * Making the 'property' function install an exception handler
-- round its argument. This function always gets called in the
-- right places, because all our Property-accepting functions are
-- actually polymorphic over the Testable class so they have to
-- call 'property'.
-- * Installing an exception handler round a Result before we put it
-- in a rose tree (the only place Results can end up).
--------------------------------------------------------------------------
-- * Property and Testable types
-- | The type of properties.
--
-- Backwards compatibility note: in older versions of QuickCheck
-- 'Property' was a type synonym for @'Gen' 'Prop'@, so you could mix
-- and match property combinators and 'Gen' monad operations. Code
-- that does this will no longer typecheck.
-- However, it is easy to fix: because of the 'Testable' typeclass, any
-- combinator that expects a 'Property' will also accept a @'Gen' 'Property'@.
-- If you have a 'Property' where you need a @'Gen' 'a'@, simply wrap
-- the property combinator inside a 'return' to get a @'Gen' 'Property'@, and
-- all should be well.
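-- A minimal sketch of that wrapping (the names are made up for illustration):
--
-- > genProp :: Gen Property
-- > genProp = return (counterexample "illustration only" True)
--
-- Any combinator expecting a 'Testable' argument accepts @genProp@ just like a
-- plain 'Property', via the @Testable prop => Testable (Gen prop)@ instance.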
newtype Property = MkProperty { unProperty :: Gen Prop }
-- | The class of things which can be tested, i.e. turned into a property.
class Testable prop where
-- | Convert the thing to a property.
property :: prop -> Property
-- | If true, the property will only be tested once.
-- However, if used inside a quantifier, it will be tested normally.
exhaustive :: prop -> Bool
exhaustive _ = False
-- | If a property returns 'Discard', the current test case is discarded,
-- the same as if a precondition was false.
data Discard = Discard
instance Testable Discard where
property _ = property rejected
exhaustive _ = True
instance Testable Bool where
property = property . liftBool
exhaustive _ = True
instance Testable Result where
property = MkProperty . return . MkProp . protectResults . return
exhaustive _ = True
instance Testable Prop where
property (MkProp r) = MkProperty . return . MkProp . ioRose . return $ r
exhaustive _ = True
instance Testable prop => Testable (Gen prop) where
property mp = MkProperty $ do p <- mp; unProperty (property p)
instance Testable Property where
property = property . unProperty
-- | Do I/O inside a property. This can obviously lead to unrepeatable
-- testcases, so use with care.
{-# DEPRECATED morallyDubiousIOProperty "Use ioProperty instead" #-}
morallyDubiousIOProperty :: Testable prop => IO prop -> Property
morallyDubiousIOProperty = ioProperty -- Silly names aren't all they're cracked up to be :)
-- | Do I/O inside a property. This can obviously lead to unrepeatable
-- testcases, so use with care.
--
-- For more advanced monadic testing you may want to look at
-- "Test.QuickCheck.Monadic".
ioProperty :: Testable prop => IO prop -> Property
ioProperty = MkProperty . fmap (MkProp . ioRose . fmap unProp) . promote . fmap (unProperty . property)
instance (Arbitrary a, Show a, Testable prop) => Testable (a -> prop) where
property f = forAllShrink arbitrary shrink f
-- ** Exception handling
protect :: (AnException -> a) -> IO a -> IO a
protect f x = either f id `fmap` tryEvaluateIO x
--------------------------------------------------------------------------
-- ** Type Prop
newtype Prop = MkProp{ unProp :: Rose Result }
-- ** type Rose
data Rose a = MkRose a [Rose a] | IORose (IO (Rose a))
-- Only use IORose if you know that the argument is not going to throw an exception!
-- Otherwise, try ioRose.
ioRose :: IO (Rose Result) -> Rose Result
ioRose = IORose . protectRose
joinRose :: Rose (Rose a) -> Rose a
joinRose (IORose rs) = IORose (fmap joinRose rs)
joinRose (MkRose (IORose rm) rs) = IORose $ do r <- rm; return (joinRose (MkRose r rs))
joinRose (MkRose (MkRose x ts) tts) =
-- first shrinks outer quantification; makes most sense
MkRose x (map joinRose tts ++ ts)
-- first shrinks inner quantification: terrible
--MkRose x (ts ++ map joinRose tts)
instance Functor Rose where
-- f must be total
fmap f (IORose rs) = IORose (fmap (fmap f) rs)
fmap f (MkRose x rs) = MkRose (f x) [ fmap f r | r <- rs ]
instance Applicative Rose where
pure = return
-- f must be total
(<*>) = liftM2 ($)
instance Monad Rose where
return x = MkRose x []
-- k must be total
m >>= k = joinRose (fmap k m)
-- | Execute the "IORose" bits of a rose tree, returning a tree
-- constructed by MkRose.
reduceRose :: Rose Result -> IO (Rose Result)
reduceRose r@(MkRose _ _) = return r
reduceRose (IORose m) = m >>= reduceRose
-- | Apply a function to the outermost MkRose constructor of a rose tree.
-- The function must be total!
onRose :: (a -> [Rose a] -> Rose a) -> Rose a -> Rose a
onRose f (MkRose x rs) = f x rs
onRose f (IORose m) = IORose (fmap (onRose f) m)
-- | Wrap a rose tree in an exception handler.
protectRose :: IO (Rose Result) -> IO (Rose Result)
protectRose = protect (return . exception "Exception")
-- | Wrap all the Results in a rose tree in exception handlers.
protectResults :: Rose Result -> Rose Result
protectResults = onRose $ \x rs ->
IORose $ do
y <- protectResult (return x)
return (MkRose y (map protectResults rs))
-- ** Result type
-- | Different kinds of callbacks
data Callback
= PostTest CallbackKind (State -> Result -> IO ()) -- ^ Called just after a test
| PostFinalFailure CallbackKind (State -> Result -> IO ()) -- ^ Called with the final failing test-case
data CallbackKind = Counterexample -- ^ Affected by the 'verbose' combinator
| NotCounterexample -- ^ Not affected by the 'verbose' combinator
-- | The result of a single test.
data Result
= MkResult
{ ok :: Maybe Bool -- ^ result of the test case; Nothing = discard
, expect :: Bool -- ^ indicates what the expected result of the property is
, reason :: String -- ^ a message indicating what went wrong
, theException :: Maybe AnException -- ^ the exception thrown, if any
, abort :: Bool -- ^ if True, the test should not be repeated
, labels :: Map String Int -- ^ all labels used by this property
, stamp :: Set String -- ^ the collected values for this test case
, callbacks :: [Callback] -- ^ the callbacks for this test case
}
exception :: String -> AnException -> Result
exception msg err
| isDiscard err = rejected
| otherwise = failed{ reason = formatException msg err,
theException = Just err }
formatException :: String -> AnException -> String
formatException msg err = msg ++ ":" ++ format (show err)
where format xs | isOneLine xs = " '" ++ xs ++ "'"
| otherwise = "\n" ++ unlines [ " " ++ l | l <- lines xs ]
protectResult :: IO Result -> IO Result
protectResult = protect (exception "Exception")
succeeded, failed, rejected :: Result
(succeeded, failed, rejected) =
(result{ ok = Just True },
result{ ok = Just False },
result{ ok = Nothing })
where
result =
MkResult
{ ok = undefined
, expect = True
, reason = ""
, theException = Nothing
, abort = False
, labels = Map.empty
, stamp = Set.empty
, callbacks = []
}
--------------------------------------------------------------------------
-- ** Lifting and mapping functions
liftBool :: Bool -> Result
liftBool True = succeeded
liftBool False = failed { reason = "Falsifiable" }
mapResult :: Testable prop => (Result -> Result) -> prop -> Property
mapResult f = mapRoseResult (protectResults . fmap f)
mapTotalResult :: Testable prop => (Result -> Result) -> prop -> Property
mapTotalResult f = mapRoseResult (fmap f)
-- f here mustn't throw an exception (rose tree invariant).
mapRoseResult :: Testable prop => (Rose Result -> Rose Result) -> prop -> Property
mapRoseResult f = mapProp (\(MkProp t) -> MkProp (f t))
mapProp :: Testable prop => (Prop -> Prop) -> prop -> Property
mapProp f = MkProperty . fmap f . unProperty . property
--------------------------------------------------------------------------
-- ** Property combinators
-- | Changes the maximum test case size for a property.
mapSize :: Testable prop => (Int -> Int) -> prop -> Property
mapSize f p = MkProperty (sized ((`resize` unProperty (property p)) . f))
-- | Shrinks the argument to property if it fails. Shrinking is done
-- automatically for most types. This is only needed when you want to
-- override the default behavior.
shrinking :: Testable prop =>
(a -> [a]) -- ^ 'shrink'-like function.
-> a -- ^ The original argument
-> (a -> prop) -> Property
shrinking shrinker x0 pf = MkProperty (fmap (MkProp . joinRose . fmap unProp) (promote (props x0)))
where
props x =
MkRose (unProperty (property (pf x))) [ props x' | x' <- shrinker x ]
-- | Disables shrinking for a property altogether.
noShrinking :: Testable prop => prop -> Property
noShrinking = mapRoseResult (onRose (\res _ -> MkRose res []))
-- | Adds a callback
callback :: Testable prop => Callback -> prop -> Property
callback cb = mapTotalResult (\res -> res{ callbacks = cb : callbacks res })
-- | Adds the given string to the counterexample.
counterexample :: Testable prop => String -> prop -> Property
counterexample s =
callback $ PostFinalFailure Counterexample $ \st _res -> do
res <- tryEvaluateIO (putLine (terminal st) s)
case res of
Left err ->
putLine (terminal st) (formatException "Exception thrown while printing test case" err)
Right () ->
return ()
-- | Adds the given string to the counterexample.
{-# DEPRECATED printTestCase "Use counterexample instead" #-}
printTestCase :: Testable prop => String -> prop -> Property
printTestCase = counterexample
-- | Performs an 'IO' action after the last failure of a property.
whenFail :: Testable prop => IO () -> prop -> Property
whenFail m =
callback $ PostFinalFailure NotCounterexample $ \_st _res ->
m
-- | Performs an 'IO' action every time a property fails. Thus,
-- if shrinking is done, this can be used to keep track of the
-- failures along the way.
whenFail' :: Testable prop => IO () -> prop -> Property
whenFail' m =
callback $ PostTest NotCounterexample $ \_st res ->
if ok res == Just False
then m
else return ()
-- | Prints out the generated testcase every time the property is tested.
-- Only variables quantified over /inside/ the 'verbose' are printed.
verbose :: Testable prop => prop -> Property
verbose = mapResult (\res -> res { callbacks = newCallbacks (callbacks res) ++ callbacks res })
where newCallbacks cbs =
PostTest Counterexample (\st res -> putLine (terminal st) (status res ++ ":")):
[ PostTest Counterexample f | PostFinalFailure Counterexample f <- cbs ]
status MkResult{ok = Just True} = "Passed"
status MkResult{ok = Just False} = "Failed"
status MkResult{ok = Nothing} = "Skipped (precondition false)"
-- | Indicates that a property is supposed to fail.
-- QuickCheck will report an error if it does not fail.
expectFailure :: Testable prop => prop -> Property
expectFailure = mapTotalResult (\res -> res{ expect = False })
-- | Modifies a property so that it only will be tested once.
once :: Testable prop => prop -> Property
once = mapTotalResult (\res -> res{ abort = True })
-- | Attaches a label to a property. This is used for reporting
-- test case distribution.
label :: Testable prop => String -> prop -> Property
label s = classify True s
-- | Labels a property with a value:
--
-- > collect x = label (show x)
collect :: (Show a, Testable prop) => a -> prop -> Property
collect x = label (show x)
-- | Conditionally labels test case.
classify :: Testable prop =>
Bool -- ^ @True@ if the test case should be labelled.
-> String -- ^ Label.
-> prop -> Property
classify b s = cover b 0 s
-- | Checks that at least the given proportion of /successful/ test
-- cases belong to the given class. Discarded tests (i.e. ones
-- with a false precondition) do not affect coverage.
cover :: Testable prop =>
Bool -- ^ @True@ if the test case belongs to the class.
-> Int -- ^ The required percentage (0-100) of test cases.
-> String -- ^ Label for the test case class.
-> prop -> Property
cover x n s =
x `seq` n `seq` s `listSeq`
mapTotalResult $
\res -> res {
labels = Map.insertWith max s n (labels res),
stamp = if x then Set.insert s (stamp res) else stamp res }
where [] `listSeq` z = z
(x:xs) `listSeq` z = x `seq` xs `listSeq` z
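-- A usage sketch (the property itself is hypothetical): ask for at least 30%
-- of the successful cases to exercise negative inputs.
--
-- > prop_negate :: Int -> Property
-- > prop_negate x = cover (x < 0) 30 "negative" (negate (negate x) == x)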
-- | Implication for properties: The resulting property holds if
-- the first argument is 'False' (in which case the test case is discarded),
-- or if the given property holds.
(==>) :: Testable prop => Bool -> prop -> Property
False ==> _ = property Discard
True ==> p = property p
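-- For example (hypothetical property), discarding the zero-divisor cases:
--
-- > prop_mod :: Int -> Int -> Property
-- > prop_mod n d = d /= 0 ==> n `mod` d < abs d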
-- | Considers a property failed if it does not complete within
-- the given number of microseconds.
within :: Testable prop => Int -> prop -> Property
within n = mapRoseResult f
-- We rely on the fact that the property will catch the timeout
-- exception and turn it into a failed test case.
where
f rose = ioRose $ do
let m `orError` x = fmap (fromMaybe (error x)) m
MkRose res roses <- timeout n (reduceRose rose) `orError`
"within: timeout exception not caught in Rose Result"
res' <- timeout n (protectResult (return res)) `orError`
"within: timeout exception not caught in Result"
return (MkRose res' (map f roses))
#ifdef NO_TIMEOUT
timeout _ = fmap Just
#endif
-- | Explicit universal quantification: uses an explicitly given
-- test case generator.
forAll :: (Show a, Testable prop)
=> Gen a -> (a -> prop) -> Property
forAll gen pf =
MkProperty $
gen >>= \x ->
unProperty (counterexample (show x) (pf x))
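-- A usage sketch (generator and property are only illustrative):
--
-- > prop_reverse :: Property
-- > prop_reverse = forAll (listOf arbitrary) $ \xs ->
-- >   reverse (reverse xs) == (xs :: [Int])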
-- | Like 'forAll', but tries to shrink the argument for failing test cases.
forAllShrink :: (Show a, Testable prop)
=> Gen a -> (a -> [a]) -> (a -> prop) -> Property
forAllShrink gen shrinker pf =
MkProperty $
gen >>= \x ->
unProperty $
shrinking shrinker x $ \x' ->
counterexample (show x') (pf x')
-- | Nondeterministic choice: 'p1' '.&.' 'p2' picks randomly one of
-- 'p1' and 'p2' to test. If you test the property 100 times it
-- makes 100 random choices.
(.&.) :: (Testable prop1, Testable prop2) => prop1 -> prop2 -> Property
p1 .&. p2 =
MkProperty $
arbitrary >>= \b ->
unProperty $
counterexample (if b then "LHS" else "RHS") $
if b then property p1 else property p2
-- | Conjunction: 'p1' '.&&.' 'p2' passes if both 'p1' and 'p2' pass.
(.&&.) :: (Testable prop1, Testable prop2) => prop1 -> prop2 -> Property
p1 .&&. p2 = conjoin [property p1, property p2]
-- | Take the conjunction of several properties.
conjoin :: Testable prop => [prop] -> Property
conjoin ps =
MkProperty $
do roses <- mapM (fmap unProp . unProperty . property) ps
return (MkProp (conj id roses))
where
conj k [] =
MkRose (k succeeded) []
conj k (p : ps) = IORose $ do
rose@(MkRose result _) <- reduceRose p
case ok result of
_ | not (expect result) ->
return (return failed { reason = "expectFailure may not occur inside a conjunction" })
Just True -> return (conj (addLabels result . addCallbacks result . k) ps)
Just False -> return rose
Nothing -> do
rose2@(MkRose result2 _) <- reduceRose (conj (addCallbacks result . k) ps)
return $
-- Nasty work to make sure we use the right callbacks
case ok result2 of
Just True -> MkRose (result2 { ok = Nothing }) []
Just False -> rose2
Nothing -> rose2
addCallbacks result r =
r { callbacks = callbacks result ++ callbacks r }
addLabels result r =
r { labels = Map.unionWith max (labels result) (labels r),
stamp = Set.union (stamp result) (stamp r) }
-- | Disjunction: 'p1' '.||.' 'p2' passes unless 'p1' and 'p2' simultaneously fail.
(.||.) :: (Testable prop1, Testable prop2) => prop1 -> prop2 -> Property
p1 .||. p2 = disjoin [property p1, property p2]
-- | Take the disjunction of several properties.
disjoin :: Testable prop => [prop] -> Property
disjoin ps =
MkProperty $
do roses <- mapM (fmap unProp . unProperty . property) ps
return (MkProp (foldr disj (MkRose failed []) roses))
where
disj :: Rose Result -> Rose Result -> Rose Result
disj p q =
do result1 <- p
case ok result1 of
_ | not (expect result1) -> return expectFailureError
Just True -> return result1
Just False -> do
result2 <- q
return $
case ok result2 of
_ | not (expect result2) -> expectFailureError
Just True -> result2
Just False ->
MkResult {
ok = Just False,
expect = True,
reason = sep (reason result1) (reason result2),
theException = theException result1 `mplus` theException result2,
-- The following three fields are not important because the
-- test case has failed anyway
abort = False,
labels = Map.empty,
stamp = Set.empty,
callbacks =
callbacks result1 ++
[PostFinalFailure Counterexample $ \st _res -> putLine (terminal st) ""] ++
callbacks result2 }
Nothing -> result2
Nothing -> do
result2 <- q
return (case ok result2 of
_ | not (expect result2) -> expectFailureError
Just True -> result2
_ -> result1)
expectFailureError = failed { reason = "expectFailure may not occur inside a disjunction" }
sep [] s = s
sep s [] = s
sep s s' = s ++ ", " ++ s'
-- | Like '==', but prints a counterexample when it fails.
infix 4 ===
(===) :: (Eq a, Show a) => a -> a -> Property
x === y =
counterexample (show x ++ " /= " ++ show y) (x == y)
--------------------------------------------------------------------------
-- the end.
| Warbo/quickcheck | Test/QuickCheck/Property.hs | bsd-3-clause | 20,286 | 0 | 27 | 4,864 | 4,871 | 2,563 | 2,308 | 312 | 11 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
@Uniques@ are used to distinguish entities in the compiler (@Ids@,
@Classes@, etc.) from each other. Thus, @Uniques@ are the basic
comparison key in the compiler.
If there is any single operation that needs to be fast, it is @Unique@
comparison. Unsurprisingly, there is quite a bit of huff-and-puff
directed to that end.
Some of the other hair in this code is to be able to use a
``splittable @UniqueSupply@'' if requested/possible (not standard
Haskell).
-}
{-# LANGUAGE CPP, BangPatterns, MagicHash #-}
module Unique (
-- * Main data types
Unique, Uniquable(..),
        -- ** Constructors, destructors and operations on 'Unique's
hasKey,
pprUnique,
mkUniqueGrimily, -- Used in UniqSupply only!
getKey, getKeyFastInt, -- Used in Var, UniqFM, Name only!
mkUnique, unpkUnique, -- Used in BinIface only
incrUnique, -- Used for renumbering
deriveUnique, -- Ditto
newTagUnique, -- Used in CgCase
initTyVarUnique,
-- ** Making built-in uniques
-- now all the built-in Uniques (and functions to make them)
-- [the Oh-So-Wonderful Haskell module system wins again...]
mkAlphaTyVarUnique,
mkPrimOpIdUnique,
mkTupleTyConUnique, mkTupleDataConUnique,
mkPreludeMiscIdUnique, mkPreludeDataConUnique,
mkPreludeTyConUnique, mkPreludeClassUnique,
mkPArrDataConUnique,
mkVarOccUnique, mkDataOccUnique, mkTvOccUnique, mkTcOccUnique,
mkRegSingleUnique, mkRegPairUnique, mkRegClassUnique, mkRegSubUnique,
mkCostCentreUnique,
mkBuiltinUnique,
mkPseudoUniqueD,
mkPseudoUniqueE,
mkPseudoUniqueH
) where
#include "HsVersions.h"
import BasicTypes
import FastTypes
import FastString
import Outputable
-- import StaticFlags
import Util
--just for implementing a fast [0,61) -> Char function
import GHC.Exts (indexCharOffAddr#, Char(..))
import Data.Char ( chr, ord )
{-
************************************************************************
* *
\subsection[Unique-type]{@Unique@ type and operations}
* *
************************************************************************
The @Chars@ are ``tag letters'' that identify the @UniqueSupply@.
Fast comparison is everything on @Uniques@:
-}
--why not newtype Int?
-- | The type of unique identifiers that are used in many places in GHC
-- for fast ordering and equality tests. You should generate these with
-- the functions from the 'UniqSupply' module
data Unique = MkUnique FastInt
{-
Now come the functions which construct uniques from their pieces, and vice versa.
The stuff about unique *supplies* is handled further down this module.
-}
unpkUnique :: Unique -> (Char, Int) -- The reverse
mkUniqueGrimily :: Int -> Unique -- A trap-door for UniqSupply
getKey :: Unique -> Int -- for Var
getKeyFastInt :: Unique -> FastInt -- for Var
incrUnique :: Unique -> Unique
deriveUnique :: Unique -> Int -> Unique
newTagUnique :: Unique -> Char -> Unique
mkUniqueGrimily x = MkUnique (iUnbox x)
{-# INLINE getKey #-}
getKey (MkUnique x) = iBox x
{-# INLINE getKeyFastInt #-}
getKeyFastInt (MkUnique x) = x
incrUnique (MkUnique i) = MkUnique (i +# _ILIT(1))
-- deriveUnique uses an 'X' tag so that it won't clash with
-- any of the uniques produced any other way
deriveUnique (MkUnique i) delta = mkUnique 'X' (iBox i + delta)
-- newTagUnique changes the "domain" of a unique to a different char
newTagUnique u c = mkUnique c i where (_,i) = unpkUnique u
-- pop the Char in the top 8 bits of the Unique(Supply)
-- No 64-bit bugs here, as long as we have at least 32 bits. --JSM
-- and as long as the Char fits in 8 bits, which we assume anyway!
mkUnique :: Char -> Int -> Unique -- Builds a unique from pieces
-- NOT EXPORTED, so that we can see all the Chars that
-- are used in this one module
mkUnique c i
= MkUnique (tag `bitOrFastInt` bits)
where
!tag = fastOrd (cUnbox c) `shiftLFastInt` _ILIT(24)
!bits = iUnbox i `bitAndFastInt` _ILIT(16777215){-``0x00ffffff''-}
unpkUnique (MkUnique u)
= let
-- as long as the Char may have its eighth bit set, we
-- really do need the logical right-shift here!
tag = cBox (fastChr (u `shiftRLFastInt` _ILIT(24)))
i = iBox (u `bitAndFastInt` _ILIT(16777215){-``0x00ffffff''-})
in
(tag, i)
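-- A worked example of the packing (purely illustrative): mkUnique 'B' 5 stores
-- ord 'B' = 66 in the top byte, i.e. 66 * 2^24 = 1107296256, and keeps 5 in the
-- low 24 bits, so the underlying Int is 1107296256 + 5. unpkUnique recovers
-- ('B', 5) by shifting right 24 bits and masking with 0x00ffffff.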
{-
************************************************************************
* *
\subsection[Uniquable-class]{The @Uniquable@ class}
* *
************************************************************************
-}
-- | Class of things that we can obtain a 'Unique' from
class Uniquable a where
getUnique :: a -> Unique
hasKey :: Uniquable a => a -> Unique -> Bool
x `hasKey` k = getUnique x == k
instance Uniquable FastString where
getUnique fs = mkUniqueGrimily (iBox (uniqueOfFS fs))
instance Uniquable Int where
getUnique i = mkUniqueGrimily i
{-
************************************************************************
* *
\subsection[Unique-instances]{Instance declarations for @Unique@}
* *
************************************************************************
And the whole point (besides uniqueness) is fast equality. We don't
use `deriving' because we want {\em precise} control of ordering
(equality on @Uniques@ is v common).
-}
eqUnique, ltUnique, leUnique :: Unique -> Unique -> Bool
eqUnique (MkUnique u1) (MkUnique u2) = u1 ==# u2
ltUnique (MkUnique u1) (MkUnique u2) = u1 <# u2
leUnique (MkUnique u1) (MkUnique u2) = u1 <=# u2
cmpUnique :: Unique -> Unique -> Ordering
cmpUnique (MkUnique u1) (MkUnique u2)
= if u1 ==# u2 then EQ else if u1 <# u2 then LT else GT
instance Eq Unique where
a == b = eqUnique a b
a /= b = not (eqUnique a b)
instance Ord Unique where
a < b = ltUnique a b
a <= b = leUnique a b
a > b = not (leUnique a b)
a >= b = not (ltUnique a b)
compare a b = cmpUnique a b
-----------------
instance Uniquable Unique where
getUnique u = u
-- We do sometimes make strings with @Uniques@ in them:
showUnique :: Unique -> String
showUnique uniq
= case unpkUnique uniq of
(tag, u) -> finish_show tag u (iToBase62 u)
finish_show :: Char -> Int -> String -> String
finish_show 't' u _pp_u | u < 26
= -- Special case to make v common tyvars, t1, t2, ...
-- come out as a, b, ... (shorter, easier to read)
[chr (ord 'a' + u)]
finish_show tag _ pp_u = tag : pp_u
pprUnique :: Unique -> SDoc
pprUnique u = text (showUnique u)
instance Outputable Unique where
ppr = pprUnique
instance Show Unique where
show uniq = showUnique uniq
{-
************************************************************************
* *
\subsection[Utils-base62]{Base-62 numbers}
* *
************************************************************************
A character-stingy way to read/write numbers (notably Uniques).
The ``62-its'' are \tr{[0-9a-zA-Z]}. We don't handle negative Ints.
Code stolen from Lennart.
-}
iToBase62 :: Int -> String
iToBase62 n_
= ASSERT(n_ >= 0) go (iUnbox n_) ""
where
go n cs | n <# _ILIT(62)
= case chooseChar62 n of { c -> c `seq` (c : cs) }
| otherwise
= case (quotRem (iBox n) 62) of { (q_, r_) ->
case iUnbox q_ of { q -> case iUnbox r_ of { r ->
case (chooseChar62 r) of { c -> c `seq`
(go q (c : cs)) }}}}
chooseChar62 :: FastInt -> Char
{-# INLINE chooseChar62 #-}
chooseChar62 n = C# (indexCharOffAddr# chars62 n)
!chars62 = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"#
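-- A few sample encodings, easy to check against chars62 by hand:
-- iToBase62 0 == "0", iToBase62 35 == "z", iToBase62 61 == "Z",
-- iToBase62 62 == "10" and iToBase62 3844 == "100".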
{-
************************************************************************
* *
\subsection[Uniques-prelude]{@Uniques@ for wired-in Prelude things}
* *
************************************************************************
Allocation of unique supply characters:
v,t,u : for renumbering value-, type- and usage- vars.
B: builtin
C-E: pseudo uniques (used in native-code generator)
X: uniques derived by deriveUnique
_: unifiable tyvars (above)
0-9: prelude things below
(no numbers left any more..)
:: (prelude) parallel array data constructors
other a-z: lower case chars for unique supplies. Used so far:
d desugarer
f AbsC flattener
g SimplStg
n Native codegen
r Hsc name cache
s simplifier
-}
mkAlphaTyVarUnique :: Int -> Unique
mkPreludeClassUnique :: Int -> Unique
mkPreludeTyConUnique :: Int -> Unique
mkTupleTyConUnique :: TupleSort -> Int -> Unique
mkPreludeDataConUnique :: Int -> Unique
mkTupleDataConUnique :: TupleSort -> Int -> Unique
mkPrimOpIdUnique :: Int -> Unique
mkPreludeMiscIdUnique :: Int -> Unique
mkPArrDataConUnique :: Int -> Unique
mkAlphaTyVarUnique i = mkUnique '1' i
mkPreludeClassUnique i = mkUnique '2' i
-- Prelude type constructors occupy *three* slots.
-- The first is for the tycon itself; the latter two
-- are for the generic to/from Ids. See TysWiredIn.mk_tc_gen_info.
mkPreludeTyConUnique i = mkUnique '3' (3*i)
mkTupleTyConUnique BoxedTuple a = mkUnique '4' (3*a)
mkTupleTyConUnique UnboxedTuple a = mkUnique '5' (3*a)
mkTupleTyConUnique ConstraintTuple a = mkUnique 'k' (3*a)
-- Data constructor keys occupy *two* slots. The first is used for the
-- data constructor itself and its wrapper function (the function that
-- evaluates arguments as necessary and calls the worker). The second is
-- used for the worker function (the function that builds the constructor
-- representation).
mkPreludeDataConUnique i = mkUnique '6' (2*i) -- Must be alphabetic
mkTupleDataConUnique BoxedTuple a = mkUnique '7' (2*a) -- ditto (*may* be used in C labels)
mkTupleDataConUnique UnboxedTuple a = mkUnique '8' (2*a)
mkTupleDataConUnique ConstraintTuple a = mkUnique 'h' (2*a)
mkPrimOpIdUnique op = mkUnique '9' op
mkPreludeMiscIdUnique i = mkUnique '0' i
-- No numbers left anymore, so I pick something different for the character tag
mkPArrDataConUnique a = mkUnique ':' (2*a)
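-- Worked example of the slot scheme (indices purely illustrative):
-- mkPreludeTyConUnique 7 takes key 21 under tag '3', leaving 22 and 23 for the
-- generic to/from Ids, while mkPreludeDataConUnique 7 takes key 14 under tag
-- '6', leaving 15 for the worker Id.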
-- The "tyvar uniques" print specially nicely: a, b, c, etc.
-- See pprUnique for details
initTyVarUnique :: Unique
initTyVarUnique = mkUnique 't' 0
mkPseudoUniqueD, mkPseudoUniqueE, mkPseudoUniqueH,
mkBuiltinUnique :: Int -> Unique
mkBuiltinUnique i = mkUnique 'B' i
mkPseudoUniqueD i = mkUnique 'D' i -- used in NCG for getUnique on RealRegs
mkPseudoUniqueE i = mkUnique 'E' i -- used in NCG spiller to create spill VirtualRegs
mkPseudoUniqueH i = mkUnique 'H' i -- used in NCG spiller to create spill VirtualRegs
mkRegSingleUnique, mkRegPairUnique, mkRegSubUnique, mkRegClassUnique :: Int -> Unique
mkRegSingleUnique = mkUnique 'R'
mkRegSubUnique = mkUnique 'S'
mkRegPairUnique = mkUnique 'P'
mkRegClassUnique = mkUnique 'L'
mkCostCentreUnique :: Int -> Unique
mkCostCentreUnique = mkUnique 'C'
mkVarOccUnique, mkDataOccUnique, mkTvOccUnique, mkTcOccUnique :: FastString -> Unique
-- See Note [The Unique of an OccName] in OccName
mkVarOccUnique fs = mkUnique 'i' (iBox (uniqueOfFS fs))
mkDataOccUnique fs = mkUnique 'd' (iBox (uniqueOfFS fs))
mkTvOccUnique fs = mkUnique 'v' (iBox (uniqueOfFS fs))
mkTcOccUnique fs = mkUnique 'c' (iBox (uniqueOfFS fs))
| green-haskell/ghc | compiler/basicTypes/Unique.hs | bsd-3-clause | 12,380 | 0 | 24 | 3,323 | 2,055 | 1,109 | 946 | 154 | 3 |
-- Character classification
{-# LANGUAGE CPP #-}
module Ctype
( is_ident -- Char# -> Bool
, is_symbol -- Char# -> Bool
, is_any -- Char# -> Bool
, is_space -- Char# -> Bool
, is_lower -- Char# -> Bool
, is_upper -- Char# -> Bool
, is_digit -- Char# -> Bool
, is_alphanum -- Char# -> Bool
, is_decdigit, is_hexdigit, is_octdigit, is_bindigit
, hexDigit, octDecDigit
) where
#include "HsVersions.h"
import Data.Int ( Int32 )
import Data.Bits ( Bits((.&.)) )
import Data.Char ( ord, chr )
import Panic
-- Bit masks
cIdent, cSymbol, cAny, cSpace, cLower, cUpper, cDigit :: Int
cIdent = 1
cSymbol = 2
cAny = 4
cSpace = 8
cLower = 16
cUpper = 32
cDigit = 64
-- | The predicates below look costly, but aren't: GHC+GCC do a great job
-- at the big case below.
{-# INLINE is_ctype #-}
is_ctype :: Int -> Char -> Bool
is_ctype mask c = (fromIntegral (charType c) .&. fromIntegral mask) /= (0::Int32)
is_ident, is_symbol, is_any, is_space, is_lower, is_upper, is_digit,
is_alphanum :: Char -> Bool
is_ident = is_ctype cIdent
is_symbol = is_ctype cSymbol
is_any = is_ctype cAny
is_space = is_ctype cSpace
is_lower = is_ctype cLower
is_upper = is_ctype cUpper
is_digit = is_ctype cDigit
is_alphanum = is_ctype (cLower+cUpper+cDigit)
-- Utils
hexDigit :: Char -> Int
hexDigit c | is_decdigit c = ord c - ord '0'
| otherwise = ord (to_lower c) - ord 'a' + 10
octDecDigit :: Char -> Int
octDecDigit c = ord c - ord '0'
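-- Sample values, easy to verify against the ASCII table:
-- hexDigit '7' == 7, hexDigit 'a' == 10, hexDigit 'F' == 15, octDecDigit '5' == 5.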
is_decdigit :: Char -> Bool
is_decdigit c
= c >= '0' && c <= '9'
is_hexdigit :: Char -> Bool
is_hexdigit c
= is_decdigit c
|| (c >= 'a' && c <= 'f')
|| (c >= 'A' && c <= 'F')
is_octdigit :: Char -> Bool
is_octdigit c = c >= '0' && c <= '7'
is_bindigit :: Char -> Bool
is_bindigit c = c == '0' || c == '1'
to_lower :: Char -> Char
to_lower c
| c >= 'A' && c <= 'Z' = chr (ord c - (ord 'A' - ord 'a'))
| otherwise = c
-- | We really mean .|. instead of + below, but GHC currently doesn't do
-- any constant folding with bitops. *sigh*
charType :: Char -> Int
charType c = case c of
'\0' -> 0 -- \000
'\1' -> 0 -- \001
'\2' -> 0 -- \002
'\3' -> 0 -- \003
'\4' -> 0 -- \004
'\5' -> 0 -- \005
'\6' -> 0 -- \006
'\7' -> 0 -- \007
'\8' -> 0 -- \010
'\9' -> cSpace -- \t (not allowed in strings, so !cAny)
'\10' -> cSpace -- \n (ditto)
'\11' -> cSpace -- \v (ditto)
'\12' -> cSpace -- \f (ditto)
'\13' -> cSpace -- ^M (ditto)
'\14' -> 0 -- \016
'\15' -> 0 -- \017
'\16' -> 0 -- \020
'\17' -> 0 -- \021
'\18' -> 0 -- \022
'\19' -> 0 -- \023
'\20' -> 0 -- \024
'\21' -> 0 -- \025
'\22' -> 0 -- \026
'\23' -> 0 -- \027
'\24' -> 0 -- \030
'\25' -> 0 -- \031
'\26' -> 0 -- \032
'\27' -> 0 -- \033
'\28' -> 0 -- \034
'\29' -> 0 -- \035
'\30' -> 0 -- \036
'\31' -> 0 -- \037
'\32' -> cAny + cSpace --
'\33' -> cAny + cSymbol -- !
'\34' -> cAny -- "
'\35' -> cAny + cSymbol -- #
'\36' -> cAny + cSymbol -- $
'\37' -> cAny + cSymbol -- %
'\38' -> cAny + cSymbol -- &
'\39' -> cAny + cIdent -- '
'\40' -> cAny -- (
'\41' -> cAny -- )
'\42' -> cAny + cSymbol -- *
'\43' -> cAny + cSymbol -- +
'\44' -> cAny -- ,
'\45' -> cAny + cSymbol -- -
'\46' -> cAny + cSymbol -- .
'\47' -> cAny + cSymbol -- /
'\48' -> cAny + cIdent + cDigit -- 0
'\49' -> cAny + cIdent + cDigit -- 1
'\50' -> cAny + cIdent + cDigit -- 2
'\51' -> cAny + cIdent + cDigit -- 3
'\52' -> cAny + cIdent + cDigit -- 4
'\53' -> cAny + cIdent + cDigit -- 5
'\54' -> cAny + cIdent + cDigit -- 6
'\55' -> cAny + cIdent + cDigit -- 7
'\56' -> cAny + cIdent + cDigit -- 8
'\57' -> cAny + cIdent + cDigit -- 9
'\58' -> cAny + cSymbol -- :
'\59' -> cAny -- ;
'\60' -> cAny + cSymbol -- <
'\61' -> cAny + cSymbol -- =
'\62' -> cAny + cSymbol -- >
'\63' -> cAny + cSymbol -- ?
'\64' -> cAny + cSymbol -- @
'\65' -> cAny + cIdent + cUpper -- A
'\66' -> cAny + cIdent + cUpper -- B
'\67' -> cAny + cIdent + cUpper -- C
'\68' -> cAny + cIdent + cUpper -- D
'\69' -> cAny + cIdent + cUpper -- E
'\70' -> cAny + cIdent + cUpper -- F
'\71' -> cAny + cIdent + cUpper -- G
'\72' -> cAny + cIdent + cUpper -- H
'\73' -> cAny + cIdent + cUpper -- I
'\74' -> cAny + cIdent + cUpper -- J
'\75' -> cAny + cIdent + cUpper -- K
'\76' -> cAny + cIdent + cUpper -- L
'\77' -> cAny + cIdent + cUpper -- M
'\78' -> cAny + cIdent + cUpper -- N
'\79' -> cAny + cIdent + cUpper -- O
'\80' -> cAny + cIdent + cUpper -- P
'\81' -> cAny + cIdent + cUpper -- Q
'\82' -> cAny + cIdent + cUpper -- R
'\83' -> cAny + cIdent + cUpper -- S
'\84' -> cAny + cIdent + cUpper -- T
'\85' -> cAny + cIdent + cUpper -- U
'\86' -> cAny + cIdent + cUpper -- V
'\87' -> cAny + cIdent + cUpper -- W
'\88' -> cAny + cIdent + cUpper -- X
'\89' -> cAny + cIdent + cUpper -- Y
'\90' -> cAny + cIdent + cUpper -- Z
'\91' -> cAny -- [
'\92' -> cAny + cSymbol -- backslash
'\93' -> cAny -- ]
'\94' -> cAny + cSymbol -- ^
'\95' -> cAny + cIdent + cLower -- _
'\96' -> cAny -- `
'\97' -> cAny + cIdent + cLower -- a
'\98' -> cAny + cIdent + cLower -- b
'\99' -> cAny + cIdent + cLower -- c
'\100' -> cAny + cIdent + cLower -- d
'\101' -> cAny + cIdent + cLower -- e
'\102' -> cAny + cIdent + cLower -- f
'\103' -> cAny + cIdent + cLower -- g
'\104' -> cAny + cIdent + cLower -- h
'\105' -> cAny + cIdent + cLower -- i
'\106' -> cAny + cIdent + cLower -- j
'\107' -> cAny + cIdent + cLower -- k
'\108' -> cAny + cIdent + cLower -- l
'\109' -> cAny + cIdent + cLower -- m
'\110' -> cAny + cIdent + cLower -- n
'\111' -> cAny + cIdent + cLower -- o
'\112' -> cAny + cIdent + cLower -- p
'\113' -> cAny + cIdent + cLower -- q
'\114' -> cAny + cIdent + cLower -- r
'\115' -> cAny + cIdent + cLower -- s
'\116' -> cAny + cIdent + cLower -- t
'\117' -> cAny + cIdent + cLower -- u
'\118' -> cAny + cIdent + cLower -- v
'\119' -> cAny + cIdent + cLower -- w
'\120' -> cAny + cIdent + cLower -- x
'\121' -> cAny + cIdent + cLower -- y
'\122' -> cAny + cIdent + cLower -- z
'\123' -> cAny -- {
'\124' -> cAny + cSymbol -- |
'\125' -> cAny -- }
'\126' -> cAny + cSymbol -- ~
'\127' -> 0 -- \177
_ -> panic ("charType: " ++ show c)
| urbanslug/ghc | compiler/parser/Ctype.hs | bsd-3-clause | 8,072 | 0 | 11 | 3,498 | 2,145 | 1,175 | 970 | 189 | 129 |
module T10495 where
import Data.Coerce
foo = coerce
| urbanslug/ghc | testsuite/tests/typecheck/should_fail/T10495.hs | bsd-3-clause | 54 | 0 | 4 | 10 | 14 | 9 | 5 | 3 | 1 |
{-
From: Jon Hill <[email protected]@[email protected]>
To: glasgow-haskell-bugs
Subject: Unfriendly error message
Date: Thu, 25 Jun 1992 09:22:55 +0100
Hello again,
I came across a rather nasty error message when I gave a function an
incorrect type signature (the context is wrong). I can remember reading
in the source about this problem - I just thought I'd let you know anyway :-)
-}
module ShouldFail where
test::(Num a, Eq a) => a -> Bool
test x = (x `mod` 3) == 0
{-
granite> ndph bug002.ldh
Data Parallel Haskell Compiler, version 0.01 (Glasgow 0.07)
"<unknown>", line <unknown>: Cannot express dicts in terms of dictionaries available:
dicts_encl:
"<built-in>", line : dict.87 :: <Num a>
"<built-in>", line : dict.88 :: <Eq a>
dicts_encl':
"<built-in>", line : dict.87 :: <Num a>
"<built-in>", line : dict.88 :: <Eq a>
dicts:
"<built-in>", line : dict.87 :: <Num a>
"<built-in>", line : dict.88 :: <Eq a>
super_class_dict: "<built-in>", line : dict.80 :: <Integral a>
Fail: Compilation errors found
dph: execution of the Haskell compiler had trouble
-}
| ryantm/ghc | testsuite/tests/typecheck/should_fail/tcfail034.hs | bsd-3-clause | 1,111 | 0 | 7 | 199 | 51 | 30 | 21 | 3 | 1 |
module Commands where
import Control.Monad
import Expense
type Cmd = Expenses -> IO Expenses
execIOCmds :: Expenses -> [Cmd] -> IO Expenses
execIOCmds = foldM (\a f -> f a)
displayExpenses :: Expenses -> IO Expenses
displayExpenses exps = mapM_ print exps >> return exps
selectPerson :: String -> Expenses -> IO Expenses
selectPerson n exps = return $ filter (\e -> person e == n) exps
| fredmorcos/attic | projects/pet/archive/pet_haskell_old_2/Commands.hs | isc | 411 | 0 | 10 | 92 | 147 | 76 | 71 | 10 | 1 |
{-# LANGUAGE CPP #-}
module Application (liquidityCheck) where
import Prelude (show, (.))
import BasicPrelude hiding (show, (.))
import Network.Wai (Request(..), Application)
import Network.HTTP.Types (ok200, badRequest400)
import Network.Wai.Util (stringHeaders, textBuilder)
import Text.Blaze.Html (Html)
import Network.Wai.Digestive (queryFormEnv)
import qualified SimpleForm.Validation as SFV
import qualified SimpleForm as SFW
import SimpleForm.Combined (ShowRead(..), unShowRead, Validation(..), Widget)
import SimpleForm.Digestive.Combined (SimpleForm, getSimpleForm, postSimpleForm, input, input_, fieldset)
import SimpleForm.Render.XHTML5 (render)
import Data.Base58Address (RippleAddress)
import Control.Concurrent.STM.TMVar (TMVar)
import Control.Error (hush, MaybeT(..), maybeT, hoistMaybe)
import Text.Digestive hiding (text)
import Network.URI (URI(..))
import qualified Data.Text as T
import qualified Pipes.Concurrent as PC
import Records
import MustacheTemplates
import PathFind
import Amount
import Websocket
#include "PathHelpers.hs"
s :: (IsString s) => String -> s
s = fromString
htmlEscape :: String -> String
htmlEscape = concatMap escChar
where
	escChar '&' = "&amp;"
	escChar '"' = "&quot;"
	escChar '<' = "&lt;"
	escChar '>' = "&gt;"
	escChar c = [c]
currencyCode :: Currency -> (Char,Char,Char)
currencyCode XRP = ('X','R','P')
currencyCode (Currency code _) = code
codeToText :: (Char,Char,Char) -> Text
codeToText (a,b,c) = T.pack [a,b,c]
textToCode :: Text -> Maybe (Char, Char, Char)
textToCode txt = case T.unpack txt of
[a,b,c] -> Just (a,b,c)
_ -> Nothing
threeLetterCode :: (Widget (Char,Char,Char), Validation (Char,Char,Char))
threeLetterCode = (SFW.text . fmap codeToText, SFV.pmap textToCode SFV.text)
cmap :: (Functor f, Functor g) => (a -> b) -> f (g a) -> f (g b)
cmap = fmap . fmap
pathFindForm :: (Monad m) => SimpleForm PathFindRequest (Form Html m PathFindRequest)
pathFindForm = do
from' <- cmap unShowRead $ input_ (s"from") (Just . ShowRead . source_account)
to' <- cmap unShowRead $ input_ (s"to") (Just . ShowRead . destination_account)
amount' <- fieldset (s"amount") destination_amount $ do
quantity' <- input_ (s"quantity") (\(Amount q _) -> Just q)
currency' <- input (s"currency") (\(Amount _ c) -> Just $ currencyCode c) threeLetterCode mempty
return $ (.) <$> (Amount <$> quantity') <*> (Currency <$> currency')
return $ PathFindRequest <$> from' <*> to' <*> (amount' <*> to')
liquidityCheck :: URI -> (PC.Output (PathFindRequest, TMVar (Either RippleError PathFindResponse))) -> Application
liquidityCheck _ ws req
| null (queryString req) = do
form <- getSimpleForm render Nothing pathFindForm
textBuilder ok200 headers (viewLiquidityCheck htmlEscape $ Liquidity { result = [], pfForm = form })
| otherwise = do
(form, pathfind) <- postSimpleForm render (return $ queryFormEnv $ queryString req) pathFindForm
alts <- liftIO $ maybeT (return []) return $ do
pf <- hoistMaybe pathfind
(PathFindResponse alts _) <- MaybeT $ fmap hush $ syncCall ws ((,) pf)
return $ map (\(Alternative amnt) -> Alt $ show amnt) alts
let result' = case pathfind of
Just (PathFindRequest from to _) -> [Result (show from) (show to) alts]
Nothing -> []
textBuilder (maybe badRequest400 (const ok200) pathfind) headers (viewLiquidityCheck htmlEscape $ Liquidity { result = result', pfForm = form })
where
Just headers = stringHeaders [("Content-Type", "text/html; charset=utf-8")]
| singpolyma/localripple | Application.hs | isc | 3,477 | 0 | 17 | 530 | 1,337 | 730 | 607 | 73 | 5 |
module Y2016.M07.D07.Solution where
import Data.SymbolTable
import Data.SymbolTable.Decompiler
import Y2016.M07.D06.PrideAndPrejudice
import Y2016.M07.D06.Solution
{--
Pride and Prejudice and ... Dracula? Anyone? No?
So, now that we have Pride and Prejudice encoded (see above), let's use it
to encode another work.
Today's exercise.
Dracula, by Bram Stoker, is available on the web here:
http://www.gutenberg.org/cache/epub/345/pg345.txt
Encode it into a symbol table as you did for Pride and Prejudice. Good.
(there's no need to duplicate work already done: that's why we have access
to modules out there already.)
--}
dracula2Syms :: Novel -> SymbolTable
dracula2Syms = wordsOnly
{--
*Y2016.M07.D07.Solution> fetchURL "http://www.gutenberg.org/cache/epub/345/pg345.txt" ~> drac
*Y2016.M07.D07.Solution> dracula2Syms drac ~> dracsyms
*Y2016.M07.D07.Solution> top dracsyms ~> 10724
--}
reinterpretDracula :: SymbolTable -> SymbolTable -> Novel -> Novel
reinterpretDracula pnpsyms draculasyms =
unwords . map (reindex draculasyms pnpsyms . regularize) . words
reindex :: SymbolTable -> SymbolTable -> String -> String
reindex from to word = strVal to (intVal from word * top to `div` top from)
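-- A worked example (the Pride and Prejudice table size is hypothetical): with
-- top from == 10724 (the Dracula table above) and top to == 6000, a word at
-- Dracula index 5362 is re-read as strVal to (5362 * 6000 `div` 10724), i.e.
-- index 3000: roughly the same relative position in the other table.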
{--
*Y2016.M07.D07.Solution> let pnpsyms = snd (decompile "foo" S0)
*Y2016.M07.D07.Solution> let janesdrac = reinterpretDracula pnpsyms dracsyms drac
*Y2016.M07.D07.Solution> unwords . take 10 . drop 1500 $ words janesdrac
"THOSE SWELLED DESERTS WHATSOMETHING SLY FALSEHOOD BRILLIANCY PATRONESS NONE INCLUDING"
And there it is: Bram Stoker Dracula, reenvisioned by Jane Austen.
--}
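{--
A note on reindex (illustrative, no new run data): it maps a word's index
proportionally between the two tables, i.e. a word at index i out of
`top from` symbols lands at index i * top to `div` top from in the target
table, so Dracula's vocabulary is "stretched" onto Pride and Prejudice's.
--}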
| geophf/1HaskellADay | exercises/HAD/Y2016/M07/D07/Solution.hs | mit | 1,591 | 0 | 10 | 223 | 158 | 88 | 70 | 12 | 1 |
module Network.Transport.IVC (
module Network.Transport.IVC,
module Network.Transport.IVC.Util
) where
-- export everything for testing
import Network.Transport
import Network.Transport.IVC.Util
import Data.Word (Word32)
import Data.List (intercalate)
import qualified Data.Map.Strict as M(Map, empty, lookup, insert)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import Control.Monad
import Control.Exception
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Concurrent.Chan
import Data.List.Split (splitOn)
import Hypervisor.XenStore (XenStore, xsGetDomId,
xsRead, xsWrite, xsRemove, xsMakeDirectory,
xsSetPermissions, XSPerm(ReadWritePerm))
import Hypervisor.DomainInfo (DomId)
import Communication.IVC (InChannel, OutChannel, get, put)
import Communication.Rendezvous (peerConnection)
data IVCTransport = IVCTransport {
transportDomId :: DomId,
transportState :: MVar TransportState
}
data TransportState = TransportState {
localEndPoints :: M.Map EndPointAddress LocalEndPoint,
nextEndPointId :: EndPointId
}
data LocalEndPoint = LocalEndPoint {
eventChan :: Chan Event,
localEndPointState :: MVar LocalEndPointState
}
data LocalEndPointState = LocalEndPointState {
nextRemoteConnectionId :: ConnectionId,
nextLocalConnectionId :: ConnectionId
}
type EndPointId = Word32
createTransport :: XenStore -> IO (Either IOException Transport)
createTransport xs = do
me <- xsGetDomId xs
ts <- newMVar (TransportState M.empty 0)
let transport = IVCTransport me ts
rootPath = "/transport/" ++ show me
removePath xs rootPath
xsMakeDirectory xs rootPath
xsSetPermissions xs rootPath [ReadWritePerm me]
forkServer xs me (createHandler xs ts)
return $ Right Transport { newEndPoint = apiNewEndPoint xs transport,
closeTransport = apiCloseTransport xs me }
-- should deal with open connections in the future
apiCloseTransport :: XenStore -> DomId -> IO ()
apiCloseTransport xs domId = do
removePath xs ("/transport/" ++ show domId)
forkServer :: XenStore -> DomId
-> (EndPointAddress -> EndPointAddress -> String -> IO ())
-> IO ()
forkServer xs me handler =
void . forkIO $ do
let rootPath = "/transport/" ++ show me
forever $ do
conns <- listKeys xs rootPath
forM_ conns $ \connName -> do
let from = EndPointAddress . BSC.pack $
intercalate "-" (take 2 (splitOn "-" connName))
val <- xsRead xs (rootPath ++ "/" ++ connName)
let to = EndPointAddress . BSC.pack $ val
xsRemove xs (rootPath ++ "/" ++ connName)
handler from to connName
threadDelay 100000
-- handle incoming connection to a transport (domain)
createHandler :: XenStore -> MVar TransportState
-> EndPointAddress -> EndPointAddress -> String -> IO ()
createHandler xs ts from to connName = do
void . forkIO $ do
state <- readMVar ts
    -- an error occurs if the endpoint does not exist
let Just localendpoint = M.lookup to (localEndPoints state)
es = localEndPointState localendpoint
chan = eventChan localendpoint
leftSide :: XenStore -> IO (OutChannel ByteString)
rightSide :: XenStore -> IO (InChannel ByteString)
(leftSide, rightSide) = peerConnection connName 1
inChan <- rightSide xs
connectId <- modifyMVar es $ \state -> do
let connectId = nextRemoteConnectionId state
return (state { nextRemoteConnectionId = connectId + 1 }, connectId)
writeChan chan (ConnectionOpened connectId ReliableOrdered from)
forever $ do
bs <- get inChan -- expected to block while waiting
writeChan chan (Received connectId [bs])
apiNewEndPoint :: XenStore -> IVCTransport
-> IO (Either (TransportError NewEndPointErrorCode) EndPoint)
apiNewEndPoint xs transport = do
chan <- newChan
es <- newMVar (LocalEndPointState 0 0)
let localendpoint = LocalEndPoint chan es
me = transportDomId transport
ts = transportState transport
addr <- modifyMVar ts $ \state -> do
let addr = encodeEndPointAddress me (nextEndPointId state)
return (state {
localEndPoints = M.insert addr localendpoint (localEndPoints state),
nextEndPointId = nextEndPointId state + 1
}, addr)
return $ Right EndPoint { receive = readChan chan,
address = addr,
connect = apiConnect xs es addr,
newMulticastGroup = undefined,
resolveMulticastGroup = undefined,
closeEndPoint = return () }
-- pass in client address to build unique connection name in xenstore
apiConnect :: XenStore -> MVar LocalEndPointState -> EndPointAddress
-> EndPointAddress -> Reliability -> ConnectHints
           -> IO (Either (TransportError ConnectErrorCode) Connection)
apiConnect xs es from to _ _ = do
connectId <- modifyMVar es $ \state -> do
let connectId = nextLocalConnectionId state
return (state { nextLocalConnectionId = connectId + 1 }, connectId)
let connName = endPointAddressToString from ++ "-" ++ show connectId
Just (other, _) = decodeEndPointAddress to
xsWrite xs ("/transport/" ++ show other ++ "/" ++ connName)
(endPointAddressToString to)
let leftSide :: XenStore -> IO (OutChannel ByteString)
rightSide :: XenStore -> IO (InChannel ByteString)
(leftSide, rightSide) = peerConnection connName 1
outChan <- leftSide xs
return $ Right Connection { send = apiSend outChan,
close = return () }
apiSend :: OutChannel ByteString -> [ByteString]
-> IO (Either (TransportError SendErrorCode) ())
apiSend outChan bss = do
put outChan (BS.concat bss)
return $ Right ()
endPointAddressToString :: EndPointAddress -> String
endPointAddressToString (EndPointAddress bs) =
BSC.unpack bs
-- in the format of domXX-XX
encodeEndPointAddress :: DomId -> EndPointId -> EndPointAddress
encodeEndPointAddress domId ix =
EndPointAddress . BSC.pack $ show domId ++ "-" ++ show ix
decodeEndPointAddress :: EndPointAddress -> Maybe (DomId, EndPointId)
decodeEndPointAddress addr =
case splitOn "-" (endPointAddressToString addr) of
h : t : _ -> Just (read h, read t)
_ -> Nothing
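-- Round trip (illustrative): as long as DomId's Read parses what its Show
-- produces (the "domXX" rendering used above), we expect
--
-- > decodeEndPointAddress (encodeEndPointAddress d 3) == Just (d, 3)
--
-- and an address with fewer than two "-"-separated fields decodes to Nothing.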
| hackern/network-transport-ivc | src/Network/Transport/IVC.hs | mit | 6,584 | 0 | 23 | 1,633 | 1,826 | 929 | 897 | 139 | 2 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.Node
(js_insertBefore, insertBefore, js_replaceChild, replaceChild,
js_removeChild, removeChild, js_appendChild, appendChild,
js_hasChildNodes, hasChildNodes, js_cloneNode, cloneNode,
js_normalize, normalize, js_isSupported, isSupported,
js_isSameNode, isSameNode, js_isEqualNode, isEqualNode,
js_lookupPrefix, lookupPrefix, js_isDefaultNamespace,
isDefaultNamespace, js_lookupNamespaceURI, lookupNamespaceURI,
js_compareDocumentPosition, compareDocumentPosition, js_contains,
contains, pattern ELEMENT_NODE, pattern ATTRIBUTE_NODE,
pattern TEXT_NODE, pattern CDATA_SECTION_NODE,
pattern ENTITY_REFERENCE_NODE, pattern ENTITY_NODE,
pattern PROCESSING_INSTRUCTION_NODE, pattern COMMENT_NODE,
pattern DOCUMENT_NODE, pattern DOCUMENT_TYPE_NODE,
pattern DOCUMENT_FRAGMENT_NODE, pattern NOTATION_NODE,
pattern DOCUMENT_POSITION_DISCONNECTED,
pattern DOCUMENT_POSITION_PRECEDING,
pattern DOCUMENT_POSITION_FOLLOWING,
pattern DOCUMENT_POSITION_CONTAINS,
pattern DOCUMENT_POSITION_CONTAINED_BY,
pattern DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC, js_getNodeName,
getNodeName, js_setNodeValue, setNodeValue, js_getNodeValue,
getNodeValue, js_getNodeType, getNodeType, js_getParentNode,
getParentNode, js_getChildNodes, getChildNodes, js_getFirstChild,
getFirstChild, js_getLastChild, getLastChild,
js_getPreviousSibling, getPreviousSibling, js_getNextSibling,
getNextSibling, js_getOwnerDocument, getOwnerDocument,
js_getNamespaceURI, getNamespaceURI, js_setPrefix, setPrefix,
js_getPrefix, getPrefix, js_getLocalName, getLocalName,
js_getBaseURI, getBaseURI, js_setTextContent, setTextContent,
js_getTextContent, getTextContent, js_getParentElement,
getParentElement, Node, castToNode, gTypeNode, IsNode, toNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"insertBefore\"]($2, $3)"
js_insertBefore ::
JSRef Node -> JSRef Node -> JSRef Node -> IO (JSRef Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.insertBefore Mozilla Node.insertBefore documentation>
insertBefore ::
(MonadIO m, IsNode self, IsNode newChild, IsNode refChild) =>
self -> Maybe newChild -> Maybe refChild -> m (Maybe Node)
insertBefore self newChild refChild
= liftIO
((js_insertBefore (unNode (toNode self))
(maybe jsNull (unNode . toNode) newChild)
(maybe jsNull (unNode . toNode) refChild))
>>= fromJSRef)
foreign import javascript unsafe "$1[\"replaceChild\"]($2, $3)"
js_replaceChild ::
JSRef Node -> JSRef Node -> JSRef Node -> IO (JSRef Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.replaceChild Mozilla Node.replaceChild documentation>
replaceChild ::
(MonadIO m, IsNode self, IsNode newChild, IsNode oldChild) =>
self -> Maybe newChild -> Maybe oldChild -> m (Maybe Node)
replaceChild self newChild oldChild
= liftIO
((js_replaceChild (unNode (toNode self))
(maybe jsNull (unNode . toNode) newChild)
(maybe jsNull (unNode . toNode) oldChild))
>>= fromJSRef)
foreign import javascript unsafe "$1[\"removeChild\"]($2)"
js_removeChild :: JSRef Node -> JSRef Node -> IO (JSRef Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.removeChild Mozilla Node.removeChild documentation>
removeChild ::
(MonadIO m, IsNode self, IsNode oldChild) =>
self -> Maybe oldChild -> m (Maybe Node)
removeChild self oldChild
= liftIO
((js_removeChild (unNode (toNode self))
(maybe jsNull (unNode . toNode) oldChild))
>>= fromJSRef)
foreign import javascript unsafe "$1[\"appendChild\"]($2)"
js_appendChild :: JSRef Node -> JSRef Node -> IO (JSRef Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.appendChild Mozilla Node.appendChild documentation>
appendChild ::
(MonadIO m, IsNode self, IsNode newChild) =>
self -> Maybe newChild -> m (Maybe Node)
appendChild self newChild
= liftIO
((js_appendChild (unNode (toNode self))
(maybe jsNull (unNode . toNode) newChild))
>>= fromJSRef)
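-- Illustrative use (hedged: 'doc', 'body' and 'createElement' come from the
-- Document bindings, not from this module):
--
-- > Just el <- createElement doc (Just "div")
-- > _ <- appendChild body (Just el)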
foreign import javascript unsafe
"($1[\"hasChildNodes\"]() ? 1 : 0)" js_hasChildNodes ::
JSRef Node -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.hasChildNodes Mozilla Node.hasChildNodes documentation>
hasChildNodes :: (MonadIO m, IsNode self) => self -> m Bool
hasChildNodes self
= liftIO (js_hasChildNodes (unNode (toNode self)))
foreign import javascript unsafe "$1[\"cloneNode\"]($2)"
js_cloneNode :: JSRef Node -> Bool -> IO (JSRef Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.cloneNode Mozilla Node.cloneNode documentation>
cloneNode ::
(MonadIO m, IsNode self) => self -> Bool -> m (Maybe Node)
cloneNode self deep
= liftIO ((js_cloneNode (unNode (toNode self)) deep) >>= fromJSRef)
foreign import javascript unsafe "$1[\"normalize\"]()" js_normalize
:: JSRef Node -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.normalize Mozilla Node.normalize documentation>
normalize :: (MonadIO m, IsNode self) => self -> m ()
normalize self = liftIO (js_normalize (unNode (toNode self)))
foreign import javascript unsafe
"($1[\"isSupported\"]($2,\n$3) ? 1 : 0)" js_isSupported ::
JSRef Node -> JSString -> JSRef (Maybe JSString) -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.isSupported Mozilla Node.isSupported documentation>
isSupported ::
(MonadIO m, IsNode self, ToJSString feature, ToJSString version) =>
self -> feature -> Maybe version -> m Bool
isSupported self feature version
= liftIO
(js_isSupported (unNode (toNode self)) (toJSString feature)
(toMaybeJSString version))
foreign import javascript unsafe "($1[\"isSameNode\"]($2) ? 1 : 0)"
js_isSameNode :: JSRef Node -> JSRef Node -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.isSameNode Mozilla Node.isSameNode documentation>
isSameNode ::
(MonadIO m, IsNode self, IsNode other) =>
self -> Maybe other -> m Bool
isSameNode self other
= liftIO
(js_isSameNode (unNode (toNode self))
(maybe jsNull (unNode . toNode) other))
foreign import javascript unsafe
"($1[\"isEqualNode\"]($2) ? 1 : 0)" js_isEqualNode ::
JSRef Node -> JSRef Node -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.isEqualNode Mozilla Node.isEqualNode documentation>
isEqualNode ::
(MonadIO m, IsNode self, IsNode other) =>
self -> Maybe other -> m Bool
isEqualNode self other
= liftIO
(js_isEqualNode (unNode (toNode self))
(maybe jsNull (unNode . toNode) other))
foreign import javascript unsafe "$1[\"lookupPrefix\"]($2)"
js_lookupPrefix ::
JSRef Node -> JSRef (Maybe JSString) -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.lookupPrefix Mozilla Node.lookupPrefix documentation>
lookupPrefix ::
(MonadIO m, IsNode self, ToJSString namespaceURI,
FromJSString result) =>
self -> Maybe namespaceURI -> m (Maybe result)
lookupPrefix self namespaceURI
= liftIO
(fromMaybeJSString <$>
(js_lookupPrefix (unNode (toNode self))
(toMaybeJSString namespaceURI)))
foreign import javascript unsafe
"($1[\"isDefaultNamespace\"]($2) ? 1 : 0)" js_isDefaultNamespace ::
JSRef Node -> JSRef (Maybe JSString) -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.isDefaultNamespace Mozilla Node.isDefaultNamespace documentation>
isDefaultNamespace ::
(MonadIO m, IsNode self, ToJSString namespaceURI) =>
self -> Maybe namespaceURI -> m Bool
isDefaultNamespace self namespaceURI
= liftIO
(js_isDefaultNamespace (unNode (toNode self))
(toMaybeJSString namespaceURI))
foreign import javascript unsafe "$1[\"lookupNamespaceURI\"]($2)"
js_lookupNamespaceURI ::
JSRef Node -> JSRef (Maybe JSString) -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.lookupNamespaceURI Mozilla Node.lookupNamespaceURI documentation>
lookupNamespaceURI ::
(MonadIO m, IsNode self, ToJSString prefix, FromJSString result) =>
self -> Maybe prefix -> m (Maybe result)
lookupNamespaceURI self prefix
= liftIO
(fromMaybeJSString <$>
(js_lookupNamespaceURI (unNode (toNode self))
(toMaybeJSString prefix)))
foreign import javascript unsafe
"$1[\"compareDocumentPosition\"]($2)" js_compareDocumentPosition ::
JSRef Node -> JSRef Node -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.compareDocumentPosition Mozilla Node.compareDocumentPosition documentation>
compareDocumentPosition ::
(MonadIO m, IsNode self, IsNode other) =>
self -> Maybe other -> m Word
compareDocumentPosition self other
= liftIO
(js_compareDocumentPosition (unNode (toNode self))
(maybe jsNull (unNode . toNode) other))
foreign import javascript unsafe "($1[\"contains\"]($2) ? 1 : 0)"
js_contains :: JSRef Node -> JSRef Node -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.contains Mozilla Node.contains documentation>
contains ::
(MonadIO m, IsNode self, IsNode other) =>
self -> Maybe other -> m Bool
contains self other
= liftIO
(js_contains (unNode (toNode self))
(maybe jsNull (unNode . toNode) other))
pattern ELEMENT_NODE = 1
pattern ATTRIBUTE_NODE = 2
pattern TEXT_NODE = 3
pattern CDATA_SECTION_NODE = 4
pattern ENTITY_REFERENCE_NODE = 5
pattern ENTITY_NODE = 6
pattern PROCESSING_INSTRUCTION_NODE = 7
pattern COMMENT_NODE = 8
pattern DOCUMENT_NODE = 9
pattern DOCUMENT_TYPE_NODE = 10
pattern DOCUMENT_FRAGMENT_NODE = 11
pattern NOTATION_NODE = 12
pattern DOCUMENT_POSITION_DISCONNECTED = 1
pattern DOCUMENT_POSITION_PRECEDING = 2
pattern DOCUMENT_POSITION_FOLLOWING = 4
pattern DOCUMENT_POSITION_CONTAINS = 8
pattern DOCUMENT_POSITION_CONTAINED_BY = 16
pattern DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC = 32
foreign import javascript unsafe "$1[\"nodeName\"]" js_getNodeName
:: JSRef Node -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.nodeName Mozilla Node.nodeName documentation>
getNodeName ::
(MonadIO m, IsNode self, FromJSString result) =>
self -> m (Maybe result)
getNodeName self
= liftIO
(fromMaybeJSString <$> (js_getNodeName (unNode (toNode self))))
foreign import javascript unsafe "$1[\"nodeValue\"] = $2;"
js_setNodeValue :: JSRef Node -> JSRef (Maybe JSString) -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.nodeValue Mozilla Node.nodeValue documentation>
setNodeValue ::
(MonadIO m, IsNode self, ToJSString val) =>
self -> Maybe val -> m ()
setNodeValue self val
= liftIO
(js_setNodeValue (unNode (toNode self)) (toMaybeJSString val))
foreign import javascript unsafe "$1[\"nodeValue\"]"
js_getNodeValue :: JSRef Node -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.nodeValue Mozilla Node.nodeValue documentation>
getNodeValue ::
(MonadIO m, IsNode self, FromJSString result) =>
self -> m (Maybe result)
getNodeValue self
= liftIO
(fromMaybeJSString <$> (js_getNodeValue (unNode (toNode self))))
foreign import javascript unsafe "$1[\"nodeType\"]" js_getNodeType
:: JSRef Node -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.nodeType Mozilla Node.nodeType documentation>
getNodeType :: (MonadIO m, IsNode self) => self -> m Word
getNodeType self = liftIO (js_getNodeType (unNode (toNode self)))
foreign import javascript unsafe "$1[\"parentNode\"]"
js_getParentNode :: JSRef Node -> IO (JSRef Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.parentNode Mozilla Node.parentNode documentation>
getParentNode :: (MonadIO m, IsNode self) => self -> m (Maybe Node)
getParentNode self
= liftIO ((js_getParentNode (unNode (toNode self))) >>= fromJSRef)
foreign import javascript unsafe "$1[\"childNodes\"]"
js_getChildNodes :: JSRef Node -> IO (JSRef NodeList)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.childNodes Mozilla Node.childNodes documentation>
getChildNodes ::
(MonadIO m, IsNode self) => self -> m (Maybe NodeList)
getChildNodes self
= liftIO ((js_getChildNodes (unNode (toNode self))) >>= fromJSRef)
foreign import javascript unsafe "$1[\"firstChild\"]"
js_getFirstChild :: JSRef Node -> IO (JSRef Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.firstChild Mozilla Node.firstChild documentation>
getFirstChild :: (MonadIO m, IsNode self) => self -> m (Maybe Node)
getFirstChild self
= liftIO ((js_getFirstChild (unNode (toNode self))) >>= fromJSRef)
foreign import javascript unsafe "$1[\"lastChild\"]"
js_getLastChild :: JSRef Node -> IO (JSRef Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.lastChild Mozilla Node.lastChild documentation>
getLastChild :: (MonadIO m, IsNode self) => self -> m (Maybe Node)
getLastChild self
= liftIO ((js_getLastChild (unNode (toNode self))) >>= fromJSRef)
foreign import javascript unsafe "$1[\"previousSibling\"]"
js_getPreviousSibling :: JSRef Node -> IO (JSRef Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.previousSibling Mozilla Node.previousSibling documentation>
getPreviousSibling ::
(MonadIO m, IsNode self) => self -> m (Maybe Node)
getPreviousSibling self
= liftIO
((js_getPreviousSibling (unNode (toNode self))) >>= fromJSRef)
foreign import javascript unsafe "$1[\"nextSibling\"]"
js_getNextSibling :: JSRef Node -> IO (JSRef Node)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.nextSibling Mozilla Node.nextSibling documentation>
getNextSibling ::
(MonadIO m, IsNode self) => self -> m (Maybe Node)
getNextSibling self
= liftIO ((js_getNextSibling (unNode (toNode self))) >>= fromJSRef)
foreign import javascript unsafe "$1[\"ownerDocument\"]"
js_getOwnerDocument :: JSRef Node -> IO (JSRef Document)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.ownerDocument Mozilla Node.ownerDocument documentation>
getOwnerDocument ::
(MonadIO m, IsNode self) => self -> m (Maybe Document)
getOwnerDocument self
= liftIO
((js_getOwnerDocument (unNode (toNode self))) >>= fromJSRef)
foreign import javascript unsafe "$1[\"namespaceURI\"]"
js_getNamespaceURI :: JSRef Node -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.namespaceURI Mozilla Node.namespaceURI documentation>
getNamespaceURI ::
(MonadIO m, IsNode self, FromJSString result) =>
self -> m (Maybe result)
getNamespaceURI self
= liftIO
(fromMaybeJSString <$> (js_getNamespaceURI (unNode (toNode self))))
foreign import javascript unsafe "$1[\"prefix\"] = $2;"
js_setPrefix :: JSRef Node -> JSRef (Maybe JSString) -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.prefix Mozilla Node.prefix documentation>
setPrefix ::
(MonadIO m, IsNode self, ToJSString val) =>
self -> Maybe val -> m ()
setPrefix self val
= liftIO
(js_setPrefix (unNode (toNode self)) (toMaybeJSString val))
foreign import javascript unsafe "$1[\"prefix\"]" js_getPrefix ::
JSRef Node -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.prefix Mozilla Node.prefix documentation>
getPrefix ::
(MonadIO m, IsNode self, FromJSString result) =>
self -> m (Maybe result)
getPrefix self
= liftIO
(fromMaybeJSString <$> (js_getPrefix (unNode (toNode self))))
foreign import javascript unsafe "$1[\"localName\"]"
js_getLocalName :: JSRef Node -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.localName Mozilla Node.localName documentation>
getLocalName ::
(MonadIO m, IsNode self, FromJSString result) =>
self -> m (Maybe result)
getLocalName self
= liftIO
(fromMaybeJSString <$> (js_getLocalName (unNode (toNode self))))
foreign import javascript unsafe "$1[\"baseURI\"]" js_getBaseURI ::
JSRef Node -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.baseURI Mozilla Node.baseURI documentation>
getBaseURI ::
(MonadIO m, IsNode self, FromJSString result) =>
self -> m (Maybe result)
getBaseURI self
= liftIO
(fromMaybeJSString <$> (js_getBaseURI (unNode (toNode self))))
foreign import javascript unsafe "$1[\"textContent\"] = $2;"
js_setTextContent :: JSRef Node -> JSRef (Maybe JSString) -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.textContent Mozilla Node.textContent documentation>
setTextContent ::
(MonadIO m, IsNode self, ToJSString val) =>
self -> Maybe val -> m ()
setTextContent self val
= liftIO
(js_setTextContent (unNode (toNode self)) (toMaybeJSString val))
foreign import javascript unsafe "$1[\"textContent\"]"
js_getTextContent :: JSRef Node -> IO (JSRef (Maybe JSString))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.textContent Mozilla Node.textContent documentation>
getTextContent ::
(MonadIO m, IsNode self, FromJSString result) =>
self -> m (Maybe result)
getTextContent self
= liftIO
(fromMaybeJSString <$> (js_getTextContent (unNode (toNode self))))
foreign import javascript unsafe "$1[\"parentElement\"]"
js_getParentElement :: JSRef Node -> IO (JSRef Element)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Node.parentElement Mozilla Node.parentElement documentation>
getParentElement ::
(MonadIO m, IsNode self) => self -> m (Maybe Element)
getParentElement self
= liftIO
((js_getParentElement (unNode (toNode self))) >>= fromJSRef) | plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/Node.hs | mit | 19,501 | 242 | 13 | 3,796 | 4,687 | 2,468 | 2,219 | 338 | 1 |
module ProjectEuler.Problem005 (solve) where
import Data.Numbers.Primes
solve :: Integer -> Integer
solve n
= product
$ concatMap (\y -> maximum $ (map . filter) (== y) xs) ys
where
    xs = map primeFactors [2..n]
    ys = takeWhile (<= n) primes
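-- Sanity check (illustrative; 2520 is the answer the Project Euler statement
-- gives for the smallest number evenly divisible by every number from 1 to 10):
--
-- >>> solve 10
-- 2520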
| hachibu/project-euler | src/ProjectEuler/Problem005.hs | mit | 256 | 0 | 12 | 57 | 102 | 57 | 45 | 8 | 1 |
import Data.List
import System.IO
import System.Directory
import System.Environment
type Arguments = [String]
type Action = Arguments -> IO ()
type Command = String
dispatch :: [(Command, Action)]
dispatch = [ ("add", add)
, ("remove", remove)
, ("view", view)
, ("bump", bump)
]
main = do
    (command:args) <- getArgs
    case lookup command dispatch of
        Just action -> action args
        Nothing     -> errorExit
errorExit :: IO ()
errorExit = putStrLn "ERROR: ARGUMENTS"
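-- Illustrative invocations (any todo file name works; the commands are the
-- keys of the dispatch list above):
--
--   runhaskell todo2.hs add    todo.txt "Buy milk"
--   runhaskell todo2.hs view   todo.txt
--   runhaskell todo2.hs bump   todo.txt 2
--   runhaskell todo2.hs remove todo.txt 0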
add :: Action
add [filePath, task] = do
appendFile filePath $ task ++ "\n"
remove :: Action
remove [filePath, taskNumber] = do
handle <- openFile filePath ReadMode
(tempName, tempHandle) <- openTempFile "." "temp"
contents <- hGetContents handle
let tasks = lines contents
let number = read taskNumber
newTasks = delete (tasks !! number) tasks
hPutStr tempHandle $ unlines newTasks
hClose handle
hClose tempHandle
removeFile filePath
renameFile tempName filePath
view :: Action
view [filePath] = do
contents <- readFile filePath
let tasks = lines contents
numberedTasks = zipWith (\n line -> show n ++ " - " ++ line) [0..] tasks
putStrLn $ unlines numberedTasks
bump :: Action
bump [filePath, taskNumber] = do
handle <- openFile filePath ReadMode
(tempName, tempHandle) <- openTempFile "." "temp"
contents <- hGetContents handle
let tasks = lines contents
number = read taskNumber
newTasks = delete (tasks !! number) tasks
newerTasks = (tasks !! number) : newTasks
hPutStr tempHandle $ unlines newerTasks
hClose handle
hClose tempHandle
removeFile filePath
renameFile tempName filePath
| RAFIRAF/HASKELL | IO/todo2.hs | mit | 1,728 | 0 | 15 | 382 | 608 | 303 | 305 | 53 | 2 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
-- | Use an email address as an identifier via Google's OpenID login system.
--
-- This backend will not use the OpenID identifier at all. It only uses OpenID
-- as a login system. By using this plugin, you are trusting Google to validate
-- an email address, and requiring users to have a Google account. On the plus
-- side, you get to use email addresses as the identifier, many users have
-- existing Google accounts, the login system has long been tested (as opposed
-- to BrowserID), and it requires no credential management or setup (as opposed
-- to Email).
module Yesod.Auth.GoogleEmail
( authGoogleEmail
, forwardUrl
) where
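-- Minimal usage sketch (hedged): add the plugin to the site's 'YesodAuth'
-- instance, e.g.
--
-- >    authPlugins = [authGoogleEmail]
--
-- Whether 'authPlugins' takes the foundation value as an argument depends on
-- the yesod-auth version in use; everything else about the instance is the
-- usual yesod-auth boilerplate and is not specific to this module.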
import Yesod.Auth
import qualified Web.Authenticate.OpenId as OpenId
import Yesod.Handler
import Yesod.Widget (whamlet)
import Yesod.Request
#if MIN_VERSION_blaze_html(0, 5, 0)
import Text.Blaze.Html (toHtml)
#else
import Text.Blaze (toHtml)
#endif
import Data.Text (Text)
import qualified Yesod.Auth.Message as Msg
import qualified Data.Text as T
import Control.Exception.Lifted (try, SomeException)
pid :: Text
pid = "googleemail"
forwardUrl :: AuthRoute
forwardUrl = PluginR pid ["forward"]
googleIdent :: Text
googleIdent = "https://www.google.com/accounts/o8/id"
authGoogleEmail :: YesodAuth m => AuthPlugin m
authGoogleEmail =
AuthPlugin pid dispatch login
where
complete = PluginR pid ["complete"]
login tm =
[whamlet|
$newline never
<a href=@{tm forwardUrl}>_{Msg.LoginGoogle}
|]
dispatch "GET" ["forward"] = do
render <- getUrlRender
toMaster <- getRouteToMaster
let complete' = render $ toMaster complete
master <- getYesod
eres <- lift $ try $ OpenId.getForwardUrl googleIdent complete' Nothing
[ ("openid.ax.type.email", "http://schema.openid.net/contact/email")
, ("openid.ns.ax", "http://openid.net/srv/ax/1.0")
, ("openid.ns.ax.required", "email")
, ("openid.ax.mode", "fetch_request")
, ("openid.ax.required", "email")
, ("openid.ui.icon", "true")
] (authHttpManager master)
either
(\err -> do
setMessage $ toHtml $ show (err :: SomeException)
redirect $ toMaster LoginR
)
redirect
eres
dispatch "GET" ["complete", ""] = dispatch "GET" ["complete"] -- compatibility issues
dispatch "GET" ["complete"] = do
rr <- getRequest
completeHelper $ reqGetParams rr
dispatch "POST" ["complete", ""] = dispatch "POST" ["complete"] -- compatibility issues
dispatch "POST" ["complete"] = do
(posts, _) <- runRequestBody
completeHelper posts
dispatch _ _ = notFound
completeHelper :: YesodAuth m => [(Text, Text)] -> GHandler Auth m ()
completeHelper gets' = do
master <- getYesod
eres <- lift $ try $ OpenId.authenticateClaimed gets' (authHttpManager master)
toMaster <- getRouteToMaster
let onFailure err = do
setMessage $ toHtml $ show (err :: SomeException)
redirect $ toMaster LoginR
let onSuccess oir = do
let OpenId.Identifier ident = OpenId.oirOpLocal oir
memail <- lookupGetParam "openid.ext1.value.email"
case (memail, "https://www.google.com/accounts/o8/id" `T.isPrefixOf` ident) of
(Just email, True) -> setCreds True $ Creds pid email []
(_, False) -> do
setMessage "Only Google login is supported"
redirect $ toMaster LoginR
(Nothing, _) -> do
setMessage "No email address provided"
redirect $ toMaster LoginR
either onFailure onSuccess eres
| piyush-kurur/yesod | yesod-auth/Yesod/Auth/GoogleEmail.hs | mit | 3,859 | 0 | 18 | 1,031 | 802 | 428 | 374 | -1 | -1 |
module MppException where
-- Exception monad.
-- Ok a : a is a (partially or wholely complete) AST.
-- Except e : e is a string that records the location and source of
-- a parse error.
data E a = Ok a | Except String
bindE :: E a -> (a -> E b) -> E b
m `bindE` k =
case m of
Ok a -> k a
Except e -> Except e
returnE :: a -> E a
returnE a = Ok a
-- Construct an instance of the Exception monad with the given message.
exceptE :: String -> E a
exceptE err = Except err
isOkay (Ok a) = True
isOkay (Except e) = False
tkVal (Ok a) = a
tkVal (Except e) = error "Can't take a value out of an exception"
err (Except e) = e
allOkay [] = True
allOkay ((Except _):rst) = False
allOkay ((Ok _):rst) = allOkay rst
firstErr :: [E a] -> String
firstErr [] = error "shouldn't happen"
firstErr ((Except e):rst) = e
firstErr ((Ok _):rst) = firstErr rst
allErr [] = ""
allErr ((Except e):rst) = e ++ "\n\n" ++ allErr rst
allErr ((Ok _):rst) = allErr rst
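-- A small illustrative helper, not part of the original interface, showing
-- how bindE/returnE/exceptE chain: a failing step short-circuits the rest.
safeDiv :: Int -> Int -> E Int
safeDiv _ 0 = exceptE "division by zero"
safeDiv x y = returnE (x `div` y)

-- e.g. safeDiv 10 2 `bindE` safeDiv 100 evaluates to Ok 20,
-- while safeDiv 10 0 `bindE` safeDiv 100 is Except "division by zero".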
| JamesSullivan1/Mpp | src/MppException.hs | mit | 994 | 0 | 9 | 257 | 407 | 206 | 201 | 26 | 2 |
module GHCJS.DOM.PositionError (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/PositionError.hs | mit | 43 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.XSLTProcessor
(js_newXSLTProcessor, newXSLTProcessor, js_importStylesheet,
importStylesheet, js_transformToFragment, transformToFragment,
js_transformToDocument, transformToDocument, js_setParameter,
setParameter, js_getParameter, getParameter, js_removeParameter,
removeParameter, js_clearParameters, clearParameters, js_reset,
reset, XSLTProcessor, castToXSLTProcessor, gTypeXSLTProcessor)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "new window[\"XSLTProcessor\"]()"
js_newXSLTProcessor :: IO XSLTProcessor
-- | <https://developer.mozilla.org/en-US/docs/Web/API/XSLTProcessor Mozilla XSLTProcessor documentation>
newXSLTProcessor :: (MonadIO m) => m XSLTProcessor
newXSLTProcessor = liftIO (js_newXSLTProcessor)
foreign import javascript unsafe "$1[\"importStylesheet\"]($2)"
js_importStylesheet :: XSLTProcessor -> Nullable Node -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/XSLTProcessor.importStylesheet Mozilla XSLTProcessor.importStylesheet documentation>
importStylesheet ::
(MonadIO m, IsNode stylesheet) =>
XSLTProcessor -> Maybe stylesheet -> m ()
importStylesheet self stylesheet
= liftIO
(js_importStylesheet (self)
(maybeToNullable (fmap toNode stylesheet)))
foreign import javascript unsafe
"$1[\"transformToFragment\"]($2,\n$3)" js_transformToFragment ::
XSLTProcessor ->
Nullable Node ->
Nullable Document -> IO (Nullable DocumentFragment)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/XSLTProcessor.transformToFragment Mozilla XSLTProcessor.transformToFragment documentation>
transformToFragment ::
(MonadIO m, IsNode source, IsDocument docVal) =>
XSLTProcessor ->
Maybe source -> Maybe docVal -> m (Maybe DocumentFragment)
transformToFragment self source docVal
= liftIO
(nullableToMaybe <$>
(js_transformToFragment (self)
(maybeToNullable (fmap toNode source))
(maybeToNullable (fmap toDocument docVal))))
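-- Typical flow (illustrative; 'styleNode', 'sourceNode' and 'ownerDoc' are
-- assumed to be obtained elsewhere):
--
-- > proc <- newXSLTProcessor
-- > importStylesheet proc (Just styleNode)
-- > mfrag <- transformToFragment proc (Just sourceNode) (Just ownerDoc)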
foreign import javascript unsafe "$1[\"transformToDocument\"]($2)"
js_transformToDocument ::
XSLTProcessor -> Nullable Node -> IO (Nullable Document)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/XSLTProcessor.transformToDocument Mozilla XSLTProcessor.transformToDocument documentation>
transformToDocument ::
(MonadIO m, IsNode source) =>
XSLTProcessor -> Maybe source -> m (Maybe Document)
transformToDocument self source
= liftIO
(nullableToMaybe <$>
(js_transformToDocument (self)
(maybeToNullable (fmap toNode source))))
foreign import javascript unsafe "$1[\"setParameter\"]($2, $3, $4)"
js_setParameter ::
XSLTProcessor -> JSString -> JSString -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/XSLTProcessor.setParameter Mozilla XSLTProcessor.setParameter documentation>
setParameter ::
(MonadIO m, ToJSString namespaceURI, ToJSString localName,
ToJSString value) =>
XSLTProcessor -> namespaceURI -> localName -> value -> m ()
setParameter self namespaceURI localName value
= liftIO
(js_setParameter (self) (toJSString namespaceURI)
(toJSString localName)
(toJSString value))
foreign import javascript unsafe "$1[\"getParameter\"]($2, $3)"
js_getParameter ::
XSLTProcessor -> JSString -> JSString -> IO (Nullable JSString)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/XSLTProcessor.getParameter Mozilla XSLTProcessor.getParameter documentation>
getParameter ::
(MonadIO m, ToJSString namespaceURI, ToJSString localName,
FromJSString result) =>
XSLTProcessor -> namespaceURI -> localName -> m (Maybe result)
getParameter self namespaceURI localName
= liftIO
(fromMaybeJSString <$>
(js_getParameter (self) (toJSString namespaceURI)
(toJSString localName)))
foreign import javascript unsafe "$1[\"removeParameter\"]($2, $3)"
js_removeParameter ::
XSLTProcessor -> JSString -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/XSLTProcessor.removeParameter Mozilla XSLTProcessor.removeParameter documentation>
removeParameter ::
(MonadIO m, ToJSString namespaceURI, ToJSString localName) =>
XSLTProcessor -> namespaceURI -> localName -> m ()
removeParameter self namespaceURI localName
= liftIO
(js_removeParameter (self) (toJSString namespaceURI)
(toJSString localName))
foreign import javascript unsafe "$1[\"clearParameters\"]()"
js_clearParameters :: XSLTProcessor -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/XSLTProcessor.clearParameters Mozilla XSLTProcessor.clearParameters documentation>
clearParameters :: (MonadIO m) => XSLTProcessor -> m ()
clearParameters self = liftIO (js_clearParameters (self))
foreign import javascript unsafe "$1[\"reset\"]()" js_reset ::
XSLTProcessor -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/XSLTProcessor.reset Mozilla XSLTProcessor.reset documentation>
reset :: (MonadIO m) => XSLTProcessor -> m ()
reset self = liftIO (js_reset (self)) | manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/XSLTProcessor.hs | mit | 6,237 | 74 | 13 | 1,158 | 1,291 | 713 | 578 | 103 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-applicationreferencedatasource-mappingparameters.html
module Stratosphere.ResourceProperties.KinesisAnalyticsApplicationReferenceDataSourceMappingParameters where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.KinesisAnalyticsApplicationReferenceDataSourceCSVMappingParameters
import Stratosphere.ResourceProperties.KinesisAnalyticsApplicationReferenceDataSourceJSONMappingParameters
-- | Full data type definition for
-- KinesisAnalyticsApplicationReferenceDataSourceMappingParameters. See
-- 'kinesisAnalyticsApplicationReferenceDataSourceMappingParameters' for a
-- more convenient constructor.
data KinesisAnalyticsApplicationReferenceDataSourceMappingParameters =
KinesisAnalyticsApplicationReferenceDataSourceMappingParameters
{ _kinesisAnalyticsApplicationReferenceDataSourceMappingParametersCSVMappingParameters :: Maybe KinesisAnalyticsApplicationReferenceDataSourceCSVMappingParameters
, _kinesisAnalyticsApplicationReferenceDataSourceMappingParametersJSONMappingParameters :: Maybe KinesisAnalyticsApplicationReferenceDataSourceJSONMappingParameters
} deriving (Show, Eq)
instance ToJSON KinesisAnalyticsApplicationReferenceDataSourceMappingParameters where
toJSON KinesisAnalyticsApplicationReferenceDataSourceMappingParameters{..} =
object $
catMaybes
[ fmap (("CSVMappingParameters",) . toJSON) _kinesisAnalyticsApplicationReferenceDataSourceMappingParametersCSVMappingParameters
, fmap (("JSONMappingParameters",) . toJSON) _kinesisAnalyticsApplicationReferenceDataSourceMappingParametersJSONMappingParameters
]
-- | Constructor for
-- 'KinesisAnalyticsApplicationReferenceDataSourceMappingParameters'
-- containing required fields as arguments.
kinesisAnalyticsApplicationReferenceDataSourceMappingParameters
:: KinesisAnalyticsApplicationReferenceDataSourceMappingParameters
kinesisAnalyticsApplicationReferenceDataSourceMappingParameters =
KinesisAnalyticsApplicationReferenceDataSourceMappingParameters
{ _kinesisAnalyticsApplicationReferenceDataSourceMappingParametersCSVMappingParameters = Nothing
, _kinesisAnalyticsApplicationReferenceDataSourceMappingParametersJSONMappingParameters = Nothing
}
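-- Illustrative use with lens operators ((&) and (?~) are assumed to be in
-- scope, e.g. from Control.Lens; csvParams is an assumed value of type
-- KinesisAnalyticsApplicationReferenceDataSourceCSVMappingParameters):
--
-- > kinesisAnalyticsApplicationReferenceDataSourceMappingParameters
-- >   & kaardsmpCSVMappingParameters ?~ csvParams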
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-applicationreferencedatasource-mappingparameters.html#cfn-kinesisanalytics-applicationreferencedatasource-mappingparameters-csvmappingparameters
kaardsmpCSVMappingParameters :: Lens' KinesisAnalyticsApplicationReferenceDataSourceMappingParameters (Maybe KinesisAnalyticsApplicationReferenceDataSourceCSVMappingParameters)
kaardsmpCSVMappingParameters = lens _kinesisAnalyticsApplicationReferenceDataSourceMappingParametersCSVMappingParameters (\s a -> s { _kinesisAnalyticsApplicationReferenceDataSourceMappingParametersCSVMappingParameters = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalytics-applicationreferencedatasource-mappingparameters.html#cfn-kinesisanalytics-applicationreferencedatasource-mappingparameters-jsonmappingparameters
kaardsmpJSONMappingParameters :: Lens' KinesisAnalyticsApplicationReferenceDataSourceMappingParameters (Maybe KinesisAnalyticsApplicationReferenceDataSourceJSONMappingParameters)
kaardsmpJSONMappingParameters = lens _kinesisAnalyticsApplicationReferenceDataSourceMappingParametersJSONMappingParameters (\s a -> s { _kinesisAnalyticsApplicationReferenceDataSourceMappingParametersJSONMappingParameters = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsApplicationReferenceDataSourceMappingParameters.hs | mit | 3,722 | 0 | 12 | 207 | 255 | 150 | 105 | 29 | 1 |
module Knapp.Conduit
(split,
toLeft,
toRight,
sideStream,
byCharacter)
where
import Control.Monad
import Control.Monad.Trans
import Data.Conduit
import Data.Conduit.Internal
import qualified Data.Conduit.List as C
import Data.Text (Text)
import qualified Data.Text as T
split
:: (Monad m) => Sink a m ra -> Sink b m rb -> Sink (Either a b) m (ra, rb)
split sinkA sinkB = do
let loop as bs = do
item <- await
case item of
Nothing -> do
ra <- lift $ C.sourceList (reverse as) $$ sinkA
rb <- lift $ C.sourceList (reverse bs) $$ sinkB
return (ra, rb)
Just (Left a) -> loop (a : as) bs
Just (Right b) -> loop as (b : bs)
loop [] []
{-
One thing to note is that you have to pass around the most recent finalizer explicitly. Each time you provide a HaveOutput finalizer, it resets the finalizer. In our case, when we yield a Right value, we don't want to adjust the finalizer at all, so we have to keep track of what finalizer was returned by the most recent HaveOutput for left. Initially, we provide a dummy `return ()` finalizer.
-- Michael Snoyman, regarding the implementation of toLeft.
-}
toLeft :: Monad m => Conduit a m a' -> Conduit (Either a b) m (Either a' b)
toLeft =
go (return ())
where
go final (PipeM mp) = PipeM (liftM (go final) mp)
go final (Leftover p a) = Leftover (go final p) (Left a)
go _ (Done ()) = Done ()
go _ (HaveOutput p final a') = HaveOutput (go final p) final (Left a')
go final left@(NeedInput p c) =
NeedInput p' c'
where
p' (Left a) = go final (p a)
p' (Right b) = HaveOutput (go final left) final (Right b)
c' () = go final $ c ()
toRight :: Monad m => Conduit b m b' -> Conduit (Either a b) m (Either a b')
toRight =
go (return ())
where
go final (PipeM mp) = PipeM (liftM (go final) mp)
go final (Leftover p b) = Leftover (go final p) (Right b)
go _ (Done ()) = Done ()
go _ (HaveOutput p final b') = HaveOutput (go final p) final (Right b')
go final right@(NeedInput p c) =
NeedInput p' c'
where
p' (Left a) = HaveOutput (go final right) final (Left a)
p' (Right b) = go final (p b)
c' () = go final $ c ()
sideStream
:: Monad m
=> Conduit b m (Either a b')
-> Conduit (Either a b) m (Either a b')
sideStream = go (return ())
where go final (PipeM mp) = PipeM (liftM (go final) mp)
go final (Leftover p b) = Leftover (go final p) (Right b)
go _ (Done ()) = Done ()
go _ (HaveOutput p final r) = HaveOutput (go final p) final r
go final right@(NeedInput p c) = NeedInput p' c'
where p' (Left a) = HaveOutput (go final right) final (Left a)
p' (Right b) = go final (p b)
c' () = go final (c ())
byCharacter :: (Monad m) => Conduit Text m Char
byCharacter = do
maybeText <- await
case maybeText of
Nothing -> return ()
Just text -> do
mapM_ yield $ T.unpack text
byCharacter
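-- Illustrative wiring of 'split' (uses only combinators imported above):
--
-- > C.sourceList [Left 'a', Right 1, Left 'b', Right 2]
-- >   $$ split C.consume (C.fold (+) (0 :: Int))
-- >
-- > -- expected result: ("ab", 3)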
| IreneKnapp/Eyeshadow | knapp.hs/Haskell/Knapp/Conduit.hs | mit | 3,061 | 0 | 22 | 892 | 1,309 | 644 | 665 | 71 | 6 |
module Faun.Parser.Numbers
( parseDouble
, getDouble
) where
import Text.Parsec
import Text.Parsec.String (Parser)
import Faun.Parser.Core
import qualified Text.Parsec.Token as Tok
parseDouble :: String -> Either ParseError Double
parseDouble = parse (contents getDouble) "<stdin>"
getDouble, ndouble, pdouble, int :: Parser Double
getDouble = try ndouble <|> try pdouble <|> int
int = do
i <- Tok.integer lexer
return $ fromIntegral i
ndouble = do
reservedOp "-"
f <- Tok.float lexer
return (-f)
pdouble = do
optional $ reservedOp "+"
Tok.float lexer
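-- Expected behaviour (illustrative; assumes the usual whitespace/eof handling
-- in Faun.Parser.Core's 'contents'):
--
-- >>> parseDouble "3.14"
-- Right 3.14
-- >>> parseDouble "-2.5"
-- Right (-2.5)
-- >>> parseDouble "42"
-- Right 42.0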
| PhDP/Sphinx-AI | Faun/Parser/Numbers.hs | mit | 573 | 0 | 9 | 101 | 193 | 102 | 91 | 21 | 1 |
{- TextureFonts.hs; Mun Hon Cheong ([email protected]) 2005
This module handles the fonts and crosshairs of the game
   -- credits go to the NeHe tutorials for their texture fonts tutorial
-}
module TextureFonts where
import Graphics.UI.GLUT
import Textures
import Data.Char
import Data.HashTable.IO as H hiding (mapM_)
{-# ANN module "HLint: ignore Reduce duplication" #-}
-- build a display list for the fonts
buildFonts :: IO(Maybe TextureObject,DisplayList)
buildFonts = do
lists <- genObjectNames $ fromIntegral (256 :: Int)
let lists2 = concat $ splitList lists
fontTex <- getAndCreateTexture "font"
textureBinding Texture2D $= fontTex
let cxcys = [(realToFrac(x `mod` 16)/16 ,
realToFrac (x `div` 16)/16)| x<-[0..(255 :: Int)]]
mapM_ genFontList (zip cxcys lists2)
return (fontTex,head lists)
splitList :: [DisplayList] -> [[DisplayList]]
splitList [] = []
splitList list = splitList (drop 16 list)++[take 16 list]
-- the steps needed to display every font
genFontList :: ((Float,Float),DisplayList) -> IO()
genFontList ((cx,cy),list) =
defineList list Compile $ do
unsafeRenderPrimitive Quads $ do
texCoord (TexCoord2 cx (1-cy-0.0625))
vertex (Vertex2 0 (16 :: Float))
texCoord (TexCoord2 (cx+0.0625) (1-cy-0.0625))
vertex (Vertex2 16 (16 :: Float))
texCoord (TexCoord2 (cx+0.0625) (1-cy-0.001))
vertex (Vertex2 16 (0 :: Float))
texCoord (TexCoord2 cx (1-cy-0.001))
vertex (Vertex2 0 (0 :: Float))
translate (Vector3 (14 :: Float) 0 0)
-- generates a displaylist for displaying large digits
buildBigNums :: IO DisplayList
buildBigNums = do
lists <- genObjectNames $ fromIntegral (11 :: Int)
texs <- getAndCreateTextures ["0","1","2","3","4","5","6","7","8","9","hyphen"]
mapM_ genBigNumList (zip texs lists)
return $ head lists
-- steps needed to render a big digit
genBigNumList :: (Maybe TextureObject,DisplayList) -> IO()
genBigNumList (tex,list) =
defineList list Compile $ do
textureBinding Texture2D $= tex
unsafeRenderPrimitive Quads $ do
texCoord (TexCoord2 0 ( 1 :: Float))
vertex (Vertex2 0 ( 0 :: Float))
texCoord (TexCoord2 0 ( 0 :: Float))
vertex (Vertex2 0 (45 :: Float))
texCoord (TexCoord2 1 ( 0 :: Float))
vertex (Vertex2 30 (45 :: Float))
texCoord (TexCoord2 1 ( 1 :: Float))
vertex (Vertex2 30 ( 0 :: Float))
translate (Vector3 (32 :: Float) 0 0)
-- renders a large digit
renderNum :: Float -> Float -> DisplayList -> Int -> IO()
renderNum x y (DisplayList base) n = unsafePreservingMatrix $ do
loadIdentity
texture Texture2D $= Enabled
alphaFunc $= Just (Greater,0.1:: Float)
let list = map toDList (show n)
unsafePreservingMatrix $ do
translate (Vector3 x y (0::Float))
mapM_ callList list
alphaFunc $= Nothing
texture Texture2D $= Disabled
where
toDList c = DisplayList (base +fromIntegral(ord c-48))
-- print a string starting at a 2D screen position
printFonts' :: Float -> Float ->
(Maybe TextureObject,DisplayList)->
Int-> String -> IO()
printFonts' x y (fontTex,DisplayList _) st string =
unsafePreservingMatrix $ do
loadIdentity
texture Texture2D $= Enabled
textureBinding Texture2D $= fontTex
translate (Vector3 x y (0::Float))
let lists = map (toDisplayList (128*fromIntegral st)) string
alphaFunc $= Just (Greater,0.1:: Float)
mapM_ callList lists --(map DisplayList [17..(32:: GLuint)])
alphaFunc $= Nothing
texture Texture2D $= Disabled
-- sets up the orthographic mode so we can
-- draw at 2D screen coordinates
setUpOrtho :: IO a -> IO()
setUpOrtho func = do
matrixMode $= Projection
unsafePreservingMatrix $ do
loadIdentity
ortho 0 640 0 480 (-1) 1
matrixMode $= Modelview 0
_ <- func
matrixMode $= Projection
matrixMode $= Modelview 0
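-- Typical call pattern (illustrative):
--
-- > fonts <- buildFonts
-- > setUpOrtho $ printFonts' 20 440 fonts 0 "Hello, world"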
-- just renders the crosshair
renderCrosshair :: BasicHashTable String (Maybe TextureObject) -> IO()
renderCrosshair texs = do
Just crosshairTex <- H.lookup texs "crosshair"
texture Texture2D $= Enabled
textureBinding Texture2D $= crosshairTex
unsafePreservingMatrix $ do
loadIdentity
translate (Vector3 304 224 (0::Float))
alphaFunc $= Just (Greater,0.1:: Float)
unsafeRenderPrimitive Quads $ do
texCoord (TexCoord2 0 (1 :: Float))
vertex (Vertex2 0 (0 :: Float))
texCoord (TexCoord2 0 (0 :: Float))
vertex (Vertex2 0 (32 :: Float))
texCoord (TexCoord2 1 (0 :: Float))
vertex (Vertex2 32 (32 :: Float))
texCoord (TexCoord2 1 (1 :: Float))
vertex (Vertex2 32 (0 :: Float))
alphaFunc $= Nothing
texture Texture2D $= Disabled
toDisplayList :: GLuint -> Char -> DisplayList
toDisplayList _ c = DisplayList (fromIntegral (ord c) - 31)
| pushkinma/frag | src/TextureFonts.hs | gpl-2.0 | 4,885 | 0 | 16 | 1,121 | 1,737 | 864 | 873 | 110 | 1 |
{- ============================================================================
| Copyright 2010 Matthew D. Steele <[email protected]> |
| |
| This file is part of Pylos. |
| |
| Pylos is free software: you can redistribute it and/or modify it |
| under the terms of the GNU General Public License as published by the Free |
| Software Foundation, either version 3 of the License, or (at your option) |
| any later version. |
| |
| Pylos is distributed in the hope that it will be useful, but |
| WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY |
| or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License along |
| with Pylos. If not, see <http://www.gnu.org/licenses/>. |
============================================================================ -}
module Pylos.Views.Board
(GameplayAction(..), newGameplayView)
where
import Control.Applicative ((<|>))
import Control.Arrow ((&&&))
import Control.Monad (when)
import Data.Ix (range)
import Data.List (find)
import Data.Traversable (traverse)
import Pylos.Constants (screenWidth)
import Pylos.Data.Color
import Pylos.Data.Point
import Pylos.Data.TotalMap (makeTotalMap, tmGet)
import Pylos.Draw
import Pylos.Event
import Pylos.Game
import Pylos.State
import Pylos.Utility (maybeM)
import Pylos.Views
import Pylos.Views.Widgets (drawBevelRect, newTextButton)
-------------------------------------------------------------------------------
data GameplayAction = PlacePiece Coords
| JumpPiece Coords
| LandPiece Coords
| RemovePiece Coords
| UndoPiece
| EndTurnEarly
| GameOver
newGameplayView :: (MonadDraw m) => m (View GameState GameplayAction)
newGameplayView = do
background <- loadSprite "board.png"
let backgroundView =
inertView $ const $ blitTopleft background (pZero :: IPoint)
-- Board:
let boardSide = 332
let boardTop = 52
boardView <- fmap (subView_ (Rect ((screenWidth - boardSide) `div` 2)
boardTop boardSide boardSide)) newBoardView
-- Player views:
let pvWidth = 120
let pvMargin = 15
let pvHeight = 105
whiteView <- fmap (subView_ (Rect pvMargin 130 pvWidth pvHeight)) $
newPlayerView WhiteTeam
blackView <- fmap (subView_ (Rect (screenWidth - pvMargin - pvWidth) 130
pvWidth pvHeight)) $
newPlayerView BlackTeam
-- Buttons:
let buttonTop = 415
let canUndo state =
case gsPhase state of
LandPhase _ -> True
RemovePhase _ -> True
AnimatePhase (Animation { animNextPhase = RemovePhase move }) ->
not $ null $ getRemovals move
_ -> False
undoButton <- fmap (subView_ (Rect 375 buttonTop 90 30) .
viewMap (const "Undo" &&& canUndo) id) $
newTextButton Nothing UndoPiece
let canEndEarly state = case gsPhase state of
RemovePhase move -> not $ null $ getRemovals move
_ -> False
doneButton <- fmap (subView_ (Rect 275 buttonTop 90 30) .
viewMap (const "Done" &&& canEndEarly) id) $
newTextButton Nothing EndTurnEarly
let endGameText state = case gsPhase state of
VictoryPhase -> "Game Over"
_ -> "Resign"
endGameButton <- fmap (subView_ (Rect 175 buttonTop 90 30) .
viewMap (endGameText &&& const True) id) $
newTextButton Nothing GameOver
return $ compoundView [backgroundView, boardView, whiteView, blackView,
undoButton, doneButton, endGameButton]
-------------------------------------------------------------------------------
newPlayerView :: (MonadDraw m) => Team -> m (View GameState a)
newPlayerView team = do
font1 <- loadFont "caligula.ttf" 36
font2 <- loadFont "caligula.ttf" 24
let
paint :: GameState -> Paint ()
paint state = do
cx <- fmap (`div` 2) canvasWidth
let kind = case tmGet team $ gsPlayers state of
HumanPlayer -> "Human"
CpuPlayer -> "CPU"
drawText font2 whiteColor (LocCenter $ Point cx 20) kind
let name = case team of { BlackTeam -> "Black"; WhiteTeam -> "White" }
drawText font1 whiteColor (LocCenter $ Point cx 50) name
let remain = "Pieces: " ++ show (remainingPieces team $ gsBoard state)
drawText font2 whiteColor (LocCenter $ Point cx 85) remain
rect <- canvasRect
when (gsTurn state == team) $ do
drawBevelRect (Tint 255 230 100 255) 5 rect
return $ inertView paint
-------------------------------------------------------------------------------
newBoardView :: (MonadDraw m) => m (View GameState GameplayAction)
newBoardView = do
pieceSprites <- let piecePath BlackTeam = "black-piece.png"
piecePath WhiteTeam = "white-piece.png"
in traverse loadSprite $ makeTotalMap piecePath
let
paint :: GameState -> Paint ()
paint state = do
-- Draw the base of the board.
size <- canvasSize
let drawBase = tintOval (Tint 255 255 255 10)
let shrink rect = let w = rectW rect / 5
h = rectH rect / 5
in adjustRect w h w h rect
mapM_ drawBase $ map shrink $ baseRects size (gsBoard state)
-- Draw pieces, including hilights (if any):
let getHilights phase =
case phase of
LandPhase fromCoords -> [(fromCoords, jumpTint)]
RemovePhase move ->
case move of
PlaceMove coords removals ->
(coords, placeTint) : map (flip (,) removeTint) removals
JumpMove fromCoords toCoords removals ->
(fromCoords, jumpTint) : (toCoords, placeTint) :
map (flip (,) removeTint) removals
AnimatePhase anim -> getHilights (animNextPhase anim)
_ -> []
where placeTint = Tint 0 255 0 128
jumpTint = Tint 255 0 0 128
removeTint = Tint 0 0 255 128
let hilights = getHilights (gsPhase state)
let board' = case gsPhase state of
RemovePhase move ->
applyMove (gsTurn state) move (gsBoard state)
_ -> gsBoard state
let drawPiece rect team = blitStretch (tmGet team pieceSprites) rect
let drawHilight rect tint = drawOval tint rect
let drawCell (coords, rect) = do
maybeM (pyramidGet coords board') (drawPiece rect)
maybeM (lookup coords hilights) (drawHilight rect)
mapM_ drawCell $ coordsAndRects size board'
-- Draw the current animation, if any:
case gsPhase state of
AnimatePhase anim -> do
let mkCenter (Rect x y w h) = Rect (x + w/2) (y + h/2) 0 0
let transition (Rect x1 y1 w1 h1) (Rect x2 y2 w2 h2) = do
let tau = animCurrent anim / animMaximum anim
let linear start end = start + tau * (end - start)
let w' = linear w1 w2
h' = linear h1 h2
let x' = linear (x1 + w1/2) (x2 + w2/2) - w'/2
y' = linear (y1 + h1/2) (y2 + h2/2) - h'/2
let rect' = Rect x' y' w' h'
drawPiece rect' (gsTurn state)
case animKind anim of
AnimPlace coords -> do
let rect = coordsRect size board' coords
transition (mkCenter rect) rect
AnimJump fromCoords toCoords -> do
transition (coordsRect size board' fromCoords)
(coordsRect size board' toCoords)
AnimRemove coords -> do
let rect = coordsRect size board' coords
transition rect (mkCenter rect)
_ -> return ()
handler :: GameState -> Event -> Handler (Maybe GameplayAction)
handler state (EvMouseDown pt) = do
rect <- canvasRect
let ptCoords :: (Coords -> Bool) -> Maybe Coords
ptCoords fn = find fn $ reverse $ map fst $
filter (ovalContains (fmap fromIntegral $
pt `pSub` rectTopleft rect) .
snd) $
coordsAndRects (rectSize rect) board
case gsPhase state of
CpuTurnPhase -> ignore
CpuRunningPhase -> ignore
BeginPhase ->
return $ (fmap PlacePiece $ ptCoords $ canPlacePiece board) <|>
(fmap JumpPiece $ ptCoords $
ownedAnd $ canRemovePiece board)
LandPhase fromCoords ->
return $ fmap LandPiece $ ptCoords $ canJumpPiece board fromCoords
RemovePhase move ->
return $ fmap RemovePiece $ ptCoords $ ownedAnd $ canRemovePiece $
applyMove (gsTurn state) move board
AnimatePhase _ -> ignore
EndPhase -> ignore
VictoryPhase -> do
if rectContains rect pt then return (Just GameOver) else ignore
where
board = gsBoard state
team = gsTurn state
ownedAnd :: (Coords -> Bool) -> (Coords -> Bool)
ownedAnd fn coords = pyramidGet coords board == Just team && fn coords
handler _ (EvKeyDown KeyZ [KeyModCmd] _) = return (Just UndoPiece)
handler state (EvKeyDown KeySpace _ _) =
case gsPhase state of
VictoryPhase -> return (Just GameOver)
_ -> return (Just EndTurnEarly)
handler _ _ = ignore
ignore = return Nothing
ovalContains :: DPoint -> DRect -> Bool
ovalContains (Point px py) (Rect x y w h) =
let hRad = w / 2
vRad = h / 2
dx = (px - (x + hRad)) / hRad
dy = (py - (y + vRad)) / vRad
in dx*dx + dy*dy <= 1
coordsRect :: (Int, Int) -> Board -> Coords -> DRect
coordsRect size board coords = withMkRect size board ($ coords)
coordsAndRects :: (Int, Int) -> Board -> [(Coords, DRect)]
coordsAndRects size board =
withMkRect size board (\mkRect -> map (id &&& mkRect) $ allCoords board)
baseRects :: (Int, Int) -> Board -> [DRect]
baseRects size board = withMkRect size board $ flip map cs where
cs = [Coords lev row col | (row, col) <- range ((1, 1), (lev, lev))]
lev = pyramidLevels board
withMkRect :: (Int, Int) -> Board -> ((Coords -> DRect) -> a) -> a
withMkRect (width, height) board fn =
let gap = 5 :: Double
fullSide = fromIntegral (min width height)
levels = fromIntegral (pyramidLevels board)
baseSide = (fullSide - gap * (levels - 1)) / levels
stride = baseSide + gap
mkRect (Coords level row col) =
let level' = fromIntegral level
side = baseSide * (level' + 4 * levels) / (5 * levels)
offset = (fullSide - side - stride * (level' - 1)) / 2
in Rect (offset + stride * fromIntegral (col - 1))
(offset + stride * fromIntegral (row - 1)) side side
in fn mkRect
return $ View paint handler
-------------------------------------------------------------------------------
| mdsteele/pylos | src/Pylos/Views/Board.hs | gpl-3.0 | 11,925 | 21 | 24 | 4,188 | 3,293 | 1,654 | 1,639 | 218 | 22 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Run
(
module Game.Run.RunData,
module Game.Run.RunWorld,
module Game.Run.Helpers,
) where
import Game.Run.RunData
import Game.Run.RunWorld
import Game.Run.Helpers
| karamellpelle/grid | source/Game/Run.hs | gpl-3.0 | 943 | 0 | 5 | 172 | 66 | 51 | 15 | 8 | 0 |
module Main where
-- These imports are copied from simormar's test-async.hs and
-- twittner's ZMQ3 Properties.hs. I have to find out which
-- ones I actually need.
import Test.QuickCheck
import Test.Framework (defaultMain, testGroup)
import Test.Framework.Providers.QuickCheck2
import Control.Monad
import Prelude as P
import Data.Monoid
import Data.ScrollBuffer as S
import Data.Sequence as Seq
import Data.Foldable as Fold
import Prelude hiding (catch)
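-- Buffers are generated from an arbitrary element list, with the cursor index
-- clamped into range via `mod` (an empty buffer always gets index 0).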
instance (Arbitrary a) => Arbitrary (ScrollBuffer a) where
arbitrary = do
elems <- listOf arbitrary
wInd <- arbitrary
if P.null elems
then return $ ScrollBuffer (Seq.fromList elems) 0
else
return $ ScrollBuffer (Seq.fromList elems) (wInd `mod` P.length elems)
main :: IO ()
main = defaultMain tests
tests = [
testGroup "Monoid laws" [
testProperty "mempty `mappend` ScrollBuffer" prop_mempty_x
, testProperty "ScrollBuffer `mappend` mempty" prop_x_mempty
, testProperty "Buffer 1 `mappend` Buffer 2" prop_monoid_x_y
]
, testGroup "Cursor movement" [
testProperty "Non-zero cursor moves back for 1 element" prop_nz_add_one
,testProperty "Zero-cursor to end for 1 element advance" prop_z_add_one
,testProperty "Advance by self-copy" prop_self_copy
]
, testGroup "Splitting" [
testProperty "Split parts sum to whole" prop_split_sum_correct
, testProperty "Get last n while n <= length" prop_last_n
, testProperty "Get last n with n too big" prop_last_n_big
]
]
prop_mempty_x :: ScrollBuffer Double -> Bool
prop_mempty_x sb = mempty `mappend` sb == sb
where _ = sb :: ScrollBuffer Double
prop_x_mempty :: ScrollBuffer Int -> Bool
prop_x_mempty sb = sb `mappend` mempty == sb
where _ = sb :: ScrollBuffer Int
prop_monoid_x_y :: ScrollBuffer Int ->
ScrollBuffer Int ->
ScrollBuffer Int -> Bool
prop_monoid_x_y sb1 sb2 sb3 =
(sb1 `mappend` sb2) `mappend` sb3 ==
sb1 `mappend` (sb2 `mappend` sb3)
where _ = sb1 :: ScrollBuffer Int
prop_nz_add_one :: ScrollBuffer Char -> Char -> Property
prop_nz_add_one sb@(ScrollBuffer _ inInd) e =
inInd /= 0 ==> outInd == inInd - 1
where
(ScrollBuffer _ outInd) = sb S.|> e
_ = sb :: ScrollBuffer Char
prop_z_add_one :: ScrollBuffer Double -> Double -> Property
prop_z_add_one sb@(ScrollBuffer s inInd) e =
inInd == 0 && (not $ S.null sb) ==>
outInd == Seq.length s - 1
where
(ScrollBuffer _ outInd) = sb S.|> e
_ = sb :: ScrollBuffer Double
prop_self_copy :: ScrollBuffer Double -> Bool
prop_self_copy sb@(ScrollBuffer s _) =
sb == sb `advanceList` Fold.toList s
where _ = sb :: ScrollBuffer Double
prop_split_sum_correct :: ScrollBuffer String -> Bool
prop_split_sum_correct sb@(ScrollBuffer s _) =
P.length a + P.length b == Seq.length s
where
(a,b) = toScrollParts sb
_ = sb :: ScrollBuffer String
prop_last_n :: ScrollBuffer (ScrollBuffer Double) -> Int -> Property
prop_last_n sb@(ScrollBuffer s _) n =
n <= Seq.length s ==>
n ~~| sb == Seq.drop (Seq.length s - n) s
where
_ = sb :: ScrollBuffer (ScrollBuffer Double)
prop_last_n_big :: ScrollBuffer Double -> Int -> Property
prop_last_n_big sb@(ScrollBuffer s _) n =
n > Seq.length s ==>
n ~~| sb == s
where _ = sb :: ScrollBuffer Double | imalsogreg/scrollbuffer | tests/test.hs | gpl-3.0 | 3,385 | 0 | 14 | 780 | 992 | 517 | 475 | -1 | -1 |
module ASTtoQuery where
--import QueryParser
import Control.Applicative ((<$>))
import Data.Char (toUpper)
import Data.List
import Data.Function
import QueryAST
import Maybe (isJust)
import Printers
import Stats
import System.FilePath
import Text.Regex
import qualified Data.Map as D
-- * Queries after AST processing.
--
type Commands = (Queries, Env)
type ExecResult = ([StatsTree], Env)
type Queries = [Query]
type Env = D.Map String Query
-- | Each query is represented by sub queries,
-- each of which adds a new level to the tree.
--
type Query = ([SubQuery], Maybe String)
type SubQuery = (Header, View, Constraint, Group)
type Constraint = Stats -> (Stats, Stats)
type View = EditStats -> String
type Group = Stats -> [Stats]
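-- For instance (shape illustration only, using helpers defined further down
-- in this module), a report grouped per language and then per month is a
-- Query with two SubQuery levels and no custom title:
--
-- > ( [ ("Language", fromQIndex Lang,  makeConstraint (const True), addGrouping True (fromQIndex Lang))
-- >   , ("Month",    fromQIndex Month, makeConstraint (const True), addGrouping True (fromQIndex Month)) ]
-- > , Nothing )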
-- Convert AST to Query
-- We have to transform the parsing result (QCommands) into
-- SubQueries (ie. a view function, a constraint and a
-- group function) and an Environment.
--
fromQCommands :: Env -> QCommands -> E Commands
fromQCommands env qs = fromQCommands' env [] qs
where fromQCommands' env res [] = Ok (res, env)
fromQCommands' env res (q:qs) = fromQCommand env q >>= f
where f (Left qr) = fromQCommands' env (qr : res) qs
f (Right e) = fromQCommands' newEnv res qs
where newEnv = D.union e env
fromQCommand :: Env -> QCommand -> E (Either Query Env)
fromQCommand env (Left q) = Left <$> fromQQuery env q
fromQCommand env (Right a) = Right <$> i a
where i (QAssign s q) = flip (D.insert s) env <$> fromQQuery env q
fromQQuery :: Env -> QQuery -> E Query
fromQQuery env ([], s) = Ok ([], s)
fromQQuery env ((c:cs),s) = fromQSubQuery env c >>=
(\a -> flip (,) s . (a++) . fst <$> fromQQuery env (cs,s))
-- Sub queries
--
fromQSubQuery :: Env -> QSubQuery -> E [SubQuery]
fromQSubQuery _ q@(QSubQuery _ Ext _ _ _ _) = makeQuery q (takeExtension . fileName)
fromQSubQuery _ q@(QSubQuery _ Lang _ _ _ _) = makeQuery q language
fromQSubQuery _ q@(QSubQuery _ Proj _ _ _ _) = makeQuery q project
fromQSubQuery _ q@(QSubQuery _ Path _ _ _ _) = makeQuery q fileName
fromQSubQuery _ q@(QSubQuery _ File _ _ _ _) = makeQuery q (takeFileName . fileName)
fromQSubQuery _ q@(QSubQuery _ Dir _ _ _ _) = makeQuery q (takeDirectory . fileName)
fromQSubQuery _ q@(QSubQuery _ Year _ _ _ _) = makeQuery q (year . edit)
fromQSubQuery _ q@(QSubQuery _ Month _ _ _ _) = makeQuery q (month . edit)
fromQSubQuery _ q@(QSubQuery _ Week _ _ _ _) = makeQuery q (week . edit)
fromQSubQuery _ q@(QSubQuery _ Day _ _ _ _) = makeQuery q (day . edit)
fromQSubQuery _ q@(QSubQuery _ Dow _ _ _ _) = makeQuery q (dow . edit)
fromQSubQuery _ q@(QSubQuery _ Doy _ _ _ _) = makeQuery q (doy . edit)
fromQSubQuery env (QCall s) = case D.lookup s env of
Just (q,s) -> Ok q
Nothing -> Failed $ "Unknown definition `" ++ s ++ "'"
makeQuery (QSubQuery gr t c h o l) f = Ok [(fromQAs h t, view, constraints, grouping)]
where view = fromQIndex t
constraints = fromQConstraints t c
grouping = fromQLimit l . fromQOrder o . addGrouping gr f
fromQAs (As s) _ = s
fromQAs _ Ext = "Extension"
fromQAs _ Lang = "Language"
fromQAs _ Proj = "Project"
fromQAs _ Path = "File Path"
fromQAs _ File = "File Name"
fromQAs _ Dir = "Directory"
fromQAs _ Year = "Year"
fromQAs _ Month = "Month"
fromQAs _ Week = "Week"
fromQAs _ Day = "Day"
fromQAs _ Dow = "Day of the Week"
fromQAs _ Doy = "Day of the Year"
-- Limiting and ordering is done before passing
-- the grouped Stats on to the constraint function
--
fromQOrder :: QOrder -> [Stats] -> [Stats]
fromQOrder NoOrder = id
fromQOrder Asc = sortBy (compare `on` sumTime)
fromQOrder Desc = sortBy (flip compare `on` sumTime)
fromQLimit :: QLimit -> [Stats] -> [Stats]
fromQLimit NoLimit = id
fromQLimit (Limit i) = take i
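-- Small illustration (not part of the original pipeline): ordering and
-- limiting compose into a single post-processing step, e.g. keeping only the
-- five groups with the largest summed edit time.
topGroupsByTime :: [Stats] -> [Stats]
topGroupsByTime = fromQLimit (Limit 5) . fromQOrder Desc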
-- The view function
--
fromQIndex :: QIndex -> (EditStats -> String)
fromQIndex Ext = takeExtension . fileName
fromQIndex Lang = maybe "Unknown language" snd . language
fromQIndex Proj = maybe "Unknown project" snd . project
fromQIndex Path = fileName
fromQIndex File = takeFileName . fileName
fromQIndex Dir = takeDirectory . fileName
fromQIndex Year = show . year . edit
fromQIndex Month = getMonth . month . edit
fromQIndex Week = show . week . edit
fromQIndex Day = show . day . edit
fromQIndex Dow = getDow . dow . edit
fromQIndex Doy = show . doy . edit
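-- Example helper (not used elsewhere): views are plain functions and can be
-- combined, e.g. rendering an edit as "<project> (<language>)".
projectAndLanguage :: EditStats -> String
projectAndLanguage e = fromQIndex Proj e ++ " (" ++ fromQIndex Lang e ++ ")"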
-- The grouping function
--
addGrouping :: Ord a => Bool -> (EditStats -> a) -> Group
addGrouping False _ = map (:[]) -- dontGroup
addGrouping True f = groupWith f . sortBy (compare `on` f)
addToGrouping [] _ = error "Empty query, can't add to grouping"
addToGrouping cs f = init cs ++ [add (last cs)]
where add (v, cs, gr) = (v, cs, f . gr)
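-- Illustration only: a grouping that buckets the statistics per language;
-- passing False instead of True would leave every EditStats in its own
-- singleton group.
groupByLanguage :: Group
groupByLanguage = addGrouping True (fromQIndex Lang)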
-- A Constraint separates Stats matching
-- a predicate from the ones that don't
-- (P(s), ¬P(s))
--
makeConstraint :: Pred EditStats -> Constraint
makeConstraint p = con ([], [])
where con (yes, no) [] = (yes, no)
con (yes, no) (s:st) = con n st
where n = if p s then (s : yes, no) else (yes, s:no)
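-- Illustration only (a Pred is a plain predicate, as makePred below shows):
-- a constraint separating Haskell edits from everything else.
haskellOnly :: Constraint
haskellOnly = makeConstraint ((== ".hs") . fromQIndex Ext)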
-- QConstraints to Constraints
-- Individual QConstraints are first converted to predicates
-- and get turned into one Constraint using makeConstraint.
--
fromQConstraints :: QIndex -> [QConstraint] -> Constraint
fromQConstraints i qc = makeConstraint $ foldr f (const True) preds
where f a b p = a p && b p
preds = map (makePred i) qc
makePred :: QIndex -> QConstraint -> Pred EditStats
makePred i (QC Year op e) = numericalC op year e
makePred i (QC Month op e) = numStringC op e month getMonth
makePred i (QC Week op e) = numericalC op week e
makePred i (QC Day op e) = numericalC op day e
makePred i (QC Doy op e) = numericalC op doy e
makePred i (QC Dow op e) = numStringC op e dow getDow
makePred i (QC ind op e) = fromQOper op (fromQExpr e) . fromQIndex ind
makePred i (QCOE op e) = makePred i (QC i op e)
makePred i (QCE e) = makePred i (QC i QE e)
-- helper functions; to do the conversion
stringC op s f = fromQOper op (map toUpper s) . map toUpper . f . edit
numericalC op g e = fromQOper op (fromQExpr e) . show . g . edit
maybeC op g h e = maybe False (fromQOper op (fromQExpr e) . h) . g
numStringC op (QInt i) num _ = fromQOper op (show i) . show . num . edit
numStringC op (QString s) num str = stringC op s (str . num)
-- Expressions
--
fromQExpr :: QExpr -> String
fromQExpr (QInt i) = show i
fromQExpr (QString s) = s
-- Operators are written flipped around to make it easier
-- to write in a point-free style (see above)
--
fromQOper :: QOper -> (String -> String -> Bool)
fromQOper QL = (>)
fromQOper QG = (<)
fromQOper QLE = (>=)
fromQOper QGE = (<=)
fromQOper QE = (==)
fromQOper QNE = (/=)
fromQOper QREG = \m -> isJust . matchRegex (mkRegex m)
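-- Because the expression value is the first argument, a comparison such as
-- "year >= 2010" becomes a section that composes point-free with the field
-- accessor, exactly as numericalC does above (illustrative helper only):
yearAtLeast2010 :: EditStats -> Bool
yearAtLeast2010 = fromQOper QGE "2010" . show . year . edit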
| bspaans/EditTimeReport | src/ASTtoQuery.hs | gpl-3.0 | 7,082 | 0 | 12 | 1,784 | 2,683 | 1,406 | 1,277 | 130 | 3 |
class X a where
f :: a -> Int
instance Num a => X (Maybe a) where
f Nothing = 7
f (Just x) = f x
main = f (Just 3) | Helium4Haskell/helium | test/typeClasses/ClassInstaneError10.hs | gpl-3.0 | 128 | 0 | 8 | 45 | 79 | 38 | 41 | 6 | 1 |
{-# LANGUAGE RecordWildCards, GADTs #-}
import Codec.Compression.GZip
{- import Control.Monad
import qualified Data.ByteString.Lazy.Char8 as LB
import Data.Maybe
import qualified Data.Text.Lazy as T
import Data.Text.Lazy.Builder
import Data.Text.Format
import System.Environment -}
import System.FilePath
import System.IO
-- import Text.Hastache
-- import Text.Hastache.Context
--
import HEP.Parser.LHCOAnalysis.Parse
--
import HEP.Physics.Analysis.ATLAS.SUSY
import HEP.Physics.Analysis.ATLAS.SUSY.Format
--
import Debug.Trace
{-
log10 x = log x / log 10
getExponent10 x = floor (log10 x)
getBody10 x = 10**(log10 x - fromIntegral (getExponent10 x))
sciformat (Just x) =
let e = getExponent10 x
b = getBody10 x
-- trunced = (fromIntegral (floor (b*100)) / 100.0) * (10.0**fromIntegral e)
in if e `elem` [-2,-1,0,1,2]
then (T.unpack . toLazyText . fixed (2+(-e))) x
else "$" ++ ((T.unpack . toLazyText . fixed 2 . getBody10) x) ++ "\\times 10^{"
++ (show e) ++ "}$"
sciformat (Nothing) = "0"
-}
filelist1000 =
[ ( 1000, 300, 3.67e-3
, "data20130219/ADMXQLD311MST300.0MG1000.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 1000, 400, 3.67e-3
, "data20130219/ADMXQLD311MST400.0MG1000.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 1000, 500, 3.67e-3
, "data20130219/ADMXQLD311MST500.0MG1000.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 1000, 600, 3.67e-3
, "data20130219/ADMXQLD311MST600.0MG1000.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 1000, 700, 3.67e-3
, "data20130219/ADMXQLD311MST700.0MG1000.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 1000, 800, 3.67e-3
, "data20130219/ADMXQLD311MST800.0MG1000.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz") ]
filelist900 =
[ ( 900, 100, 0.0101
, "data20130221/ADMXQLD311MST100.0MG900.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 200, 0.0101
, "data20130221/ADMXQLD311MST200.0MG900.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 300, 0.0101
, "data20130219/ADMXQLD311MST300.0MG900.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 400, 0.0101
, "data20130219/ADMXQLD311MST400.0MG900.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 500, 0.0101
, "data20130219/ADMXQLD311MST500.0MG900.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 600, 0.0101
, "data20130219/ADMXQLD311MST600.0MG900.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 700, 0.0101
, "data20130219/ADMXQLD311MST700.0MG900.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
]
filelist800 =
[ ( 800, 100, 0.0290
, "data20130221/ADMXQLD311MST100.0MG800.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 800, 200, 0.0290
, "data20130221/ADMXQLD311MST200.0MG800.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 800, 300, 0.0290
, "data20130221/ADMXQLD311MST300.0MG800.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 800, 400, 0.0290
, "data20130221/ADMXQLD311MST400.0MG800.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 800, 500, 0.0290
, "data20130221/ADMXQLD311MST500.0MG800.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 800, 600, 0.0290
, "data20130221/ADMXQLD311MST600.0MG800.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
]
filelist700 =
[ ( 700, 100, 0.0885
, "data20130221/ADMXQLD311MST100.0MG700.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 700, 200, 0.0885
, "data20130221/ADMXQLD311MST200.0MG700.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 700, 300, 0.0885
, "data20130221/ADMXQLD311MST300.0MG700.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 700, 400, 0.0885
, "data20130221/ADMXQLD311MST400.0MG700.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 700, 500, 0.0885
, "data20130221/ADMXQLD311MST500.0MG700.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
]
filelist600 =
[ ( 600, 100, 0.295
, "data20130221/ADMXQLD311MST100.0MG600.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 600, 200, 0.295
, "data20130221/ADMXQLD311MST200.0MG600.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 600, 300, 0.295
, "data20130221/ADMXQLD311MST300.0MG600.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 600, 400, 0.295
, "data20130221/ADMXQLD311MST400.0MG600.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
]
filelist300400500 =
[ ( 300, 100, 32.0
, "data20130219/ADMXQLD311MST100.0MG300.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 400, 100, 5.14
, "data20130219/ADMXQLD311MST100.0MG400.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 400, 200, 5.14
, "data20130219/ADMXQLD311MST200.0MG400.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 500, 100, 1.12
, "data20130219/ADMXQLD311MST100.0MG500.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 500, 200, 1.12
, "data20130219/ADMXQLD311MST200.0MG500.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 500, 300, 1.12
, "data20130219/ADMXQLD311MST300.0MG500.0MSQ50000.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
]
scharm300400500 =
[ ( 300, 100, 32.0
, "data20130222/ADMXQLD211MST50000.0MG300.0MSQ100.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 400, 100, 5.14
, "data20130222/ADMXQLD211MST50000.0MG400.0MSQ100.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 400, 200, 5.14
, "data20130222/ADMXQLD211MST50000.0MG400.0MSQ200.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 500, 100, 1.12
, "data20130222/ADMXQLD211MST50000.0MG500.0MSQ100.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 500, 200, 1.12
, "data20130222/ADMXQLD211MST50000.0MG500.0MSQ200.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 500, 300, 1.12
, "data20130222/ADMXQLD211MST50000.0MG500.0MSQ300.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
]
scharm800 =
[ ( 800, 200, 0.0290
, "data20130222/ADMXQLD211MST50000.0MG800.0MSQ200.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 800, 300, 0.0290
, "data20130222/ADMXQLD211MST50000.0MG800.0MSQ300.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 800, 400, 0.0290
, "data20130222/ADMXQLD211MST50000.0MG800.0MSQ400.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 800, 500, 0.0290
, "data20130222/ADMXQLD211MST50000.0MG800.0MSQ500.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 800, 600, 0.0290
, "data20130222/ADMXQLD211MST50000.0MG800.0MSQ600.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
]
scharm900 =
[ ( 900, 200, 0.0101
, "data20130222/ADMXQLD211MST50000.0MG900.0MSQ200.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 300, 0.0101
, "data20130222/ADMXQLD211MST50000.0MG900.0MSQ300.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 400, 0.0101
, "data20130222/ADMXQLD211MST50000.0MG900.0MSQ400.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 500, 0.0101
, "data20130222/ADMXQLD211MST50000.0MG900.0MSQ500.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 600, 0.0101
, "data20130222/ADMXQLD211MST50000.0MG900.0MSQ600.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 900, 700, 0.0101
, "data20130222/ADMXQLD211MST50000.0MG900.0MSQ700.0_gluinopair_scharmdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
]
scharm1000 =
[ ( 1000, 400, 3.67e-3
, "data20130222/ADMXQLD211MST50000.0MG1000.0MSQ400.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 1000, 500, 3.67e-3
, "data20130222/ADMXQLD211MST50000.0MG1000.0MSQ500.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 1000, 600, 3.67e-3
, "data20130222/ADMXQLD211MST50000.0MG1000.0MSQ600.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 1000, 700, 3.67e-3
, "data20130222/ADMXQLD211MST50000.0MG1000.0MSQ700.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz")
, ( 1000, 800, 3.67e-3
, "data20130222/ADMXQLD211MST50000.0MG1000.0MSQ800.0_gluinopair_stopdecayfull_LHC7ATLAS_NoMatch_NoCut_Cone0.4_Set1_pgs_events.lhco.gz") ]
main = do
h <- openFile "output.dat" WriteMode
hPutStr h header
mapM_ (analysis h) scharm1000
hPutStr h footer
hClose h
| wavewave/lhc-analysis-collection | exe/ATLASMultiLeptonJetMETAnalysis.hs | gpl-3.0 | 10,655 | 0 | 9 | 1,177 | 997 | 651 | 346 | 136 | 1 |
module GtkBlast.GtkUtils
(windowPopup
,windowToggle
,onFileChooserEntryButton
,tvarCheck
,tvarSpinCheck
,editorWidget, Apply(..), Ok(..), Cancel(..), Spawn(..)
,postAsyncWhenPathModified, CloseWatcher, closeWatcher
) where
import Import hiding (on)
import Graphics.UI.Gtk
import Control.Concurrent.STM hiding (check)
import System.Directory
import qualified Filesystem.Path.CurrentOS as F
import qualified System.FSNotify as Notify
newtype Apply = Apply Button
newtype Ok = Ok Button
newtype Cancel = Cancel Button
newtype Spawn = Spawn Button
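-- | Wire a text-editor popup together: the Spawn button toggles the window
-- (reloading the initial text on show), Apply commits the buffer contents via
-- 'commit', Ok applies and hides, Cancel hides without committing, and
-- closing the window merely hides it.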
editorWidget :: IO Text -> (Text -> IO ()) -> Window -> TextView -> Apply -> Ok -> Cancel -> Spawn -> IO ()
editorWidget getInit commit window textView
(Apply buttonApply) (Ok buttonOk) (Cancel buttonCancel) (Spawn buttonSpawn) = do
void $ onDelete window $ const $
True <$ widgetHide window
void $ on buttonSpawn buttonActivated $ do
visible <- get window widgetVisible
if visible
then
widgetHide window
else do
initialText <- getInit
buf <- textViewGetBuffer textView
textBufferSetByteString buf $ encodeUtf8 initialText
widgetShowAll window
void $ on buttonCancel buttonActivated $ do
widgetHide window
void $ on buttonOk buttonActivated $ do
buttonClicked buttonApply
widgetHide window
void $ on buttonApply buttonActivated $ do
buf <- textViewGetBuffer textView
start <- textBufferGetStartIter buf
end <- textBufferGetEndIter buf
text <- decodeUtf8 <$> textBufferGetByteString buf start end True
commit text
windowPopup :: Window -> IO ()
windowPopup window = do
whenM (not <$> get window widgetVisible) $ do
windowPresent window
windowToggle :: Window -> IO ()
windowToggle window =
ifM (get window widgetVisible)
(widgetHide window)
(windowPopup window)
-- if only FileChooserButton worked...
onFileChooserEntryButton :: Bool -> Button -> Entry -> (Text -> IO ()) -> IO () -> IO ()
onFileChooserEntryButton b wfbutton wfentry putErr fin = void $ do
if b
then
aux FileChooserActionSelectFolder $
\fc -> fileChooserSetCurrentFolder fc =<< entryGetText wfentry
else
aux FileChooserActionOpen $
\fc -> fileChooserSetCurrentFolder fc "."
where
aux m fd = do
void $ onEntryActivate wfentry $ do
buttonClicked wfbutton
void $ on wfbutton buttonActivated $ do
d <- fileChooserDialogNew Nothing Nothing m
[("gtk-cancel", ResponseCancel)
,("gtk-open", ResponseAccept)]
void $ fd d
widgetShow d
r <- dialogRun d
case r of
ResponseAccept -> do
mfname <- fileChooserGetFilename d
case mfname of
Nothing ->
putErr "Impossible happened: ResponseAccept with Nothing."
Just f -> do
entrySetText wfentry f
fin
_ -> return ()
widgetHide d
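-- | Mirror a CheckButton's state into a freshly created TVar, updating the
-- TVar on every toggle.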
tvarCheck :: (CheckButton -> IO a) -> CheckButton -> IO (TVar a)
tvarCheck getval check = do
tvar <- atomically . newTVar =<< getval check
void $ on check toggled $
atomically . writeTVar tvar =<< getval check
return tvar
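-- | Mirror a SpinButton guarded by a CheckButton into a TVar: Nothing while
-- the check is off, otherwise Just the current spin value.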
tvarSpinCheck :: (SpinButton -> IO a) -> CheckButton -> SpinButton -> IO (TVar (Maybe a))
tvarSpinCheck getspin check spin = do
tvar <- atomically . newTVar =<<
ifM (toggleButtonGetActive check)
(Just <$> getspin spin)
(return Nothing)
void $ on check buttonActivated $ do
ifM (toggleButtonGetActive check)
(atomically . writeTVar tvar . Just =<< getspin spin)
(atomically $ writeTVar tvar Nothing)
void $ onValueSpinned spin $ do
whenM (toggleButtonGetActive check) $
atomically . writeTVar tvar . Just =<< getspin spin
return tvar
newtype CloseWatcher = CloseWatcher (IO ())
closeWatcher :: MonadIO m => CloseWatcher -> m ()
closeWatcher (CloseWatcher m) = liftIO m
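-- | Watch a file or directory and, on the first modification, stop watching
-- and post the action onto the GTK main loop; the returned CloseWatcher can
-- stop the watch early. Nonexistent paths (or IO errors) give a no-op
-- watcher.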
postAsyncWhenPathModified :: MonadIO m => String -> IO () -> m CloseWatcher
postAsyncWhenPathModified _fileOrDir action =
liftIO $ fromIOException (return $ CloseWatcher $ return ()) $ do
fileOrDir <- canonicalizePath _fileOrDir
watchman <- Notify.startManager
let post = \ _ -> do
Notify.stopManager watchman
postGUIAsync action
isDir <- doesDirectoryExist fileOrDir
if isDir
then do
let dname = F.fromText (fromString fileOrDir)
Notify.watchDir watchman dname (const True) post
return $ CloseWatcher $ Notify.stopManager watchman
else do
isFile <- doesFileExist fileOrDir
if isFile
then do
let fname = F.fromText $ fromString fileOrDir
dname = F.directory fname
Notify.watchDir watchman dname (\e -> getEventFname e == fname) post
return $ CloseWatcher $ Notify.stopManager watchman
else do
return $ CloseWatcher $ return () -- silently fail
where
getEventFname (Notify.Added f _) = f
getEventFname (Notify.Modified f _) = f
getEventFname (Notify.Removed f _) = f
| exbb2/BlastItWithPiss | src/GtkBlast/GtkUtils.hs | gpl-3.0 | 5,383 | 0 | 22 | 1,596 | 1,596 | 767 | 829 | 130 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.CloudAsset
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- The cloud asset API manages the history and inventory of cloud
-- resources.
--
-- /See:/ <https://cloud.google.com/asset-inventory/docs/quickstart Cloud Asset API Reference>
module Network.Google.CloudAsset
(
-- * Service Configuration
cloudAssetService
-- * OAuth Scopes
, cloudPlatformScope
-- * API Declaration
, CloudAssetAPI
-- * Resources
-- ** cloudasset.exportAssets
, module Network.Google.Resource.CloudAsset.ExportAssets
-- ** cloudasset.operations.get
, module Network.Google.Resource.CloudAsset.Operations.Get
-- * Types
-- ** GoogleIdentityAccesscontextManagerV1BasicLevel
, GoogleIdentityAccesscontextManagerV1BasicLevel
, googleIdentityAccesscontextManagerV1BasicLevel
, giamvblConditions
, giamvblCombiningFunction
-- ** Status
, Status
, status
, sDetails
, sCode
, sMessage
-- ** GoogleCloudAssetV1p7beta1ExportAssetsRequestContentType
, GoogleCloudAssetV1p7beta1ExportAssetsRequestContentType (..)
-- ** AuditConfig
, AuditConfig
, auditConfig
, acService
, acAuditLogConfigs
-- ** AnalyzeIAMPolicyLongrunningResponse
, AnalyzeIAMPolicyLongrunningResponse
, analyzeIAMPolicyLongrunningResponse
-- ** GoogleCloudAssetV1p7beta1RelatedAssets
, GoogleCloudAssetV1p7beta1RelatedAssets
, googleCloudAssetV1p7beta1RelatedAssets
, gcavraRelationshipAttributes
, gcavraAssets
-- ** GoogleIdentityAccesscontextManagerV1IngressPolicy
, GoogleIdentityAccesscontextManagerV1IngressPolicy
, googleIdentityAccesscontextManagerV1IngressPolicy
, giamvipIngressFrom
, giamvipIngressTo
-- ** Expr
, Expr
, expr
, eLocation
, eExpression
, eTitle
, eDescription
-- ** GoogleIdentityAccesscontextManagerV1ServicePerimeterConfig
, GoogleIdentityAccesscontextManagerV1ServicePerimeterConfig
, googleIdentityAccesscontextManagerV1ServicePerimeterConfig
, giamvspcResources
, giamvspcVPCAccessibleServices
, giamvspcRestrictedServices
, giamvspcEgressPolicies
, giamvspcAccessLevels
, giamvspcIngressPolicies
-- ** GoogleIdentityAccesscontextManagerV1BasicLevelCombiningFunction
, GoogleIdentityAccesscontextManagerV1BasicLevelCombiningFunction (..)
-- ** GoogleIdentityAccesscontextManagerV1IngressSource
, GoogleIdentityAccesscontextManagerV1IngressSource
, googleIdentityAccesscontextManagerV1IngressSource
, giamvisAccessLevel
, giamvisResource
-- ** GoogleIdentityAccesscontextManagerV1AccessLevel
, GoogleIdentityAccesscontextManagerV1AccessLevel
, googleIdentityAccesscontextManagerV1AccessLevel
, giamvalBasic
, giamvalCustom
, giamvalName
, giamvalTitle
, giamvalDescription
-- ** Operation
, Operation
, operation
, oDone
, oError
, oResponse
, oName
, oMetadata
-- ** GoogleIdentityAccesscontextManagerV1EgressPolicy
, GoogleIdentityAccesscontextManagerV1EgressPolicy
, googleIdentityAccesscontextManagerV1EgressPolicy
, giamvepEgressFrom
, giamvepEgressTo
-- ** GoogleCloudAssetV1p7beta1ExportAssetsRequest
, GoogleCloudAssetV1p7beta1ExportAssetsRequest
, googleCloudAssetV1p7beta1ExportAssetsRequest
, gcavearReadTime
, gcavearRelationshipTypes
, gcavearAssetTypes
, gcavearOutputConfig
, gcavearContentType
-- ** GoogleIdentityAccesscontextManagerV1ServicePerimeterPerimeterType
, GoogleIdentityAccesscontextManagerV1ServicePerimeterPerimeterType (..)
-- ** GoogleIdentityAccesscontextManagerV1OSConstraintOSType
, GoogleIdentityAccesscontextManagerV1OSConstraintOSType (..)
-- ** GoogleCloudAssetV1p7beta1GcsDestination
, GoogleCloudAssetV1p7beta1GcsDestination
, googleCloudAssetV1p7beta1GcsDestination
, gcavgdURIPrefix
, gcavgdURI
-- ** GoogleIdentityAccesscontextManagerV1ServicePerimeter
, GoogleIdentityAccesscontextManagerV1ServicePerimeter
, googleIdentityAccesscontextManagerV1ServicePerimeter
, giamvspStatus
, giamvspPerimeterType
, giamvspName
, giamvspSpec
, giamvspTitle
, giamvspUseExplicitDryRunSpec
, giamvspDescription
-- ** GoogleIdentityAccesscontextManagerV1EgressFrom
, GoogleIdentityAccesscontextManagerV1EgressFrom
, googleIdentityAccesscontextManagerV1EgressFrom
, giamvefIdentityType
, giamvefIdentities
-- ** GoogleIdentityAccesscontextManagerV1DevicePolicyAllowedDeviceManagementLevelsItem
, GoogleIdentityAccesscontextManagerV1DevicePolicyAllowedDeviceManagementLevelsItem (..)
-- ** StatusDetailsItem
, StatusDetailsItem
, statusDetailsItem
, sdiAddtional
-- ** GoogleCloudAssetV1p7beta1OutputConfig
, GoogleCloudAssetV1p7beta1OutputConfig
, googleCloudAssetV1p7beta1OutputConfig
, gcavocBigQueryDestination
, gcavocGcsDestination
-- ** GoogleIdentityAccesscontextManagerV1EgressTo
, GoogleIdentityAccesscontextManagerV1EgressTo
, googleIdentityAccesscontextManagerV1EgressTo
, giamvetResources
, giamvetOperations
-- ** GoogleIdentityAccesscontextManagerV1IngressFrom
, GoogleIdentityAccesscontextManagerV1IngressFrom
, googleIdentityAccesscontextManagerV1IngressFrom
, giamvifIdentityType
, giamvifSources
, giamvifIdentities
-- ** GoogleIdentityAccesscontextManagerV1OSConstraint
, GoogleIdentityAccesscontextManagerV1OSConstraint
, googleIdentityAccesscontextManagerV1OSConstraint
, giamvocOSType
, giamvocMinimumVersion
, giamvocRequireVerifiedChromeOS
-- ** GoogleCloudAssetV1p7beta1RelatedAsset
, GoogleCloudAssetV1p7beta1RelatedAsset
, googleCloudAssetV1p7beta1RelatedAsset
, gcavraAsset
, gcavraAssetType
, gcavraAncestors
-- ** GoogleIdentityAccesscontextManagerV1IngressTo
, GoogleIdentityAccesscontextManagerV1IngressTo
, googleIdentityAccesscontextManagerV1IngressTo
, giamvitResources
, giamvitOperations
-- ** GoogleCloudAssetV1p7beta1PartitionSpec
, GoogleCloudAssetV1p7beta1PartitionSpec
, googleCloudAssetV1p7beta1PartitionSpec
, gcavpsPartitionKey
-- ** GoogleIdentityAccesscontextManagerV1AccessPolicy
, GoogleIdentityAccesscontextManagerV1AccessPolicy
, googleIdentityAccesscontextManagerV1AccessPolicy
, giamvapParent
, giamvapEtag
, giamvapName
, giamvapTitle
-- ** GoogleCloudOrgpolicyV1ListPolicy
, GoogleCloudOrgpolicyV1ListPolicy
, googleCloudOrgpolicyV1ListPolicy
, gcovlpInheritFromParent
, gcovlpAllValues
, gcovlpDeniedValues
, gcovlpAllowedValues
, gcovlpSuggestedValue
-- ** GoogleIdentityAccesscontextManagerV1IngressFromIdentityType
, GoogleIdentityAccesscontextManagerV1IngressFromIdentityType (..)
-- ** AuditLogConfigLogType
, AuditLogConfigLogType (..)
-- ** GoogleIdentityAccesscontextManagerV1MethodSelector
, GoogleIdentityAccesscontextManagerV1MethodSelector
, googleIdentityAccesscontextManagerV1MethodSelector
, giamvmsMethod
, giamvmsPermission
-- ** Xgafv
, Xgafv (..)
-- ** GoogleIdentityAccesscontextManagerV1APIOperation
, GoogleIdentityAccesscontextManagerV1APIOperation
, googleIdentityAccesscontextManagerV1APIOperation
, giamvaoMethodSelectors
, giamvaoServiceName
-- ** GoogleIdentityAccesscontextManagerV1CustomLevel
, GoogleIdentityAccesscontextManagerV1CustomLevel
, googleIdentityAccesscontextManagerV1CustomLevel
, giamvclExpr
-- ** GoogleIdentityAccesscontextManagerV1VPCAccessibleServices
, GoogleIdentityAccesscontextManagerV1VPCAccessibleServices
, googleIdentityAccesscontextManagerV1VPCAccessibleServices
, giamvvasAllowedServices
, giamvvasEnableRestriction
-- ** GoogleCloudOrgpolicyV1Policy
, GoogleCloudOrgpolicyV1Policy
, googleCloudOrgpolicyV1Policy
, gcovpBooleanPolicy
, gcovpEtag
, gcovpRestoreDefault
, gcovpUpdateTime
, gcovpVersion
, gcovpListPolicy
, gcovpConstraint
-- ** GoogleIdentityAccesscontextManagerV1EgressFromIdentityType
, GoogleIdentityAccesscontextManagerV1EgressFromIdentityType (..)
-- ** GoogleCloudAssetV1p7beta1RelationshipAttributes
, GoogleCloudAssetV1p7beta1RelationshipAttributes
, googleCloudAssetV1p7beta1RelationshipAttributes
, gcavraAction
, gcavraSourceResourceType
, gcavraType
, gcavraTargetResourceType
-- ** GoogleIdentityAccesscontextManagerV1DevicePolicyAllowedEncryptionStatusesItem
, GoogleIdentityAccesscontextManagerV1DevicePolicyAllowedEncryptionStatusesItem (..)
-- ** Policy
, Policy
, policy
, pAuditConfigs
, pEtag
, pVersion
, pBindings
-- ** GoogleIdentityAccesscontextManagerV1DevicePolicy
, GoogleIdentityAccesscontextManagerV1DevicePolicy
, googleIdentityAccesscontextManagerV1DevicePolicy
, giamvdpOSConstraints
, giamvdpRequireAdminApproval
, giamvdpRequireCorpOwned
, giamvdpRequireScreenlock
, giamvdpAllowedEncryptionStatuses
, giamvdpAllowedDeviceManagementLevels
-- ** OperationMetadata
, OperationMetadata
, operationMetadata
, omAddtional
-- ** AuditLogConfig
, AuditLogConfig
, auditLogConfig
, alcLogType
, alcExemptedMembers
-- ** GoogleCloudAssetV1p7beta1Asset
, GoogleCloudAssetV1p7beta1Asset
, googleCloudAssetV1p7beta1Asset
, gcavaAccessLevel
, gcavaServicePerimeter
, gcavaRelatedAssets
, gcavaUpdateTime
, gcavaAccessPolicy
, gcavaName
, gcavaResource
, gcavaOrgPolicy
, gcavaIAMPolicy
, gcavaAssetType
, gcavaAncestors
-- ** GoogleIdentityAccesscontextManagerV1Condition
, GoogleIdentityAccesscontextManagerV1Condition
, googleIdentityAccesscontextManagerV1Condition
, giamvcMembers
, giamvcRegions
, giamvcNegate
, giamvcIPSubnetworks
, giamvcDevicePolicy
, giamvcRequiredAccessLevels
-- ** GoogleCloudOrgpolicyV1RestoreDefault
, GoogleCloudOrgpolicyV1RestoreDefault
, googleCloudOrgpolicyV1RestoreDefault
-- ** OperationResponse
, OperationResponse
, operationResponse
, orAddtional
-- ** GoogleCloudOrgpolicyV1ListPolicyAllValues
, GoogleCloudOrgpolicyV1ListPolicyAllValues (..)
-- ** GoogleCloudOrgpolicyV1BooleanPolicy
, GoogleCloudOrgpolicyV1BooleanPolicy
, googleCloudOrgpolicyV1BooleanPolicy
, gcovbpEnforced
-- ** GoogleCloudAssetV1p7beta1PartitionSpecPartitionKey
, GoogleCloudAssetV1p7beta1PartitionSpecPartitionKey (..)
-- ** Binding
, Binding
, binding
, bMembers
, bRole
, bCondition
-- ** GoogleCloudAssetV1p7beta1ResourceData
, GoogleCloudAssetV1p7beta1ResourceData
, googleCloudAssetV1p7beta1ResourceData
, gcavrdAddtional
-- ** GoogleCloudAssetV1p7beta1BigQueryDestination
, GoogleCloudAssetV1p7beta1BigQueryDestination
, googleCloudAssetV1p7beta1BigQueryDestination
, gcavbqdPartitionSpec
, gcavbqdSeparateTablesPerAssetType
, gcavbqdDataSet
, gcavbqdForce
, gcavbqdTable
-- ** GoogleCloudAssetV1p7beta1Resource
, GoogleCloudAssetV1p7beta1Resource
, googleCloudAssetV1p7beta1Resource
, gcavrParent
, gcavrLocation
, gcavrData
, gcavrVersion
, gcavrDiscoveryName
, gcavrDiscoveryDocumentURI
, gcavrResourceURL
) where
import Network.Google.Prelude
import Network.Google.CloudAsset.Types
import Network.Google.Resource.CloudAsset.ExportAssets
import Network.Google.Resource.CloudAsset.Operations.Get
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Cloud Asset API service.
type CloudAssetAPI =
ExportAssetsResource :<|> OperationsGetResource
| brendanhay/gogol | gogol-cloudasset/gen/Network/Google/CloudAsset.hs | mpl-2.0 | 12,316 | 0 | 5 | 2,196 | 943 | 664 | 279 | 263 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.DescribeInstanceStatus
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes the status of one or more instances.
--
-- Instance status includes the following components:
--
-- Status checks - Amazon EC2 performs status checks on running EC2 instances
-- to identify hardware and software issues. For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/monitoring-system-instance-status-check.html Status Checks for Your Instances> and <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/TroubleshootingInstances.html Troubleshooting Instances with Failed Status Checks> in the /Amazon Elastic Compute Cloud User Guide/.
--
-- Scheduled events - Amazon EC2 can schedule events (such as reboot, stop,
-- or terminate) for your instances related to hardware issues, software
-- updates, or system maintenance. For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/monitoring-instances-status-check_sched.html Scheduled Events for Your Instances> in the /Amazon Elastic Compute Cloud User Guide/.
--
-- Instance state - You can manage your instances from the moment you launch
-- them through their termination. For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-lifecycle.html Instance Lifecycle>
-- in the /Amazon Elastic Compute Cloud User Guide/.
--
--
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeInstanceStatus.html>
module Network.AWS.EC2.DescribeInstanceStatus
(
-- * Request
DescribeInstanceStatus
-- ** Request constructor
, describeInstanceStatus
-- ** Request lenses
, disDryRun
, disFilters
, disIncludeAllInstances
, disInstanceIds
, disMaxResults
, disNextToken
-- * Response
, DescribeInstanceStatusResponse
-- ** Response constructor
, describeInstanceStatusResponse
-- ** Response lenses
, disrInstanceStatuses
, disrNextToken
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data DescribeInstanceStatus = DescribeInstanceStatus
{ _disDryRun :: Maybe Bool
, _disFilters :: List "Filter" Filter
, _disIncludeAllInstances :: Maybe Bool
, _disInstanceIds :: List "InstanceId" Text
, _disMaxResults :: Maybe Int
, _disNextToken :: Maybe Text
} deriving (Eq, Read, Show)
-- | 'DescribeInstanceStatus' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'disDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'disFilters' @::@ ['Filter']
--
-- * 'disIncludeAllInstances' @::@ 'Maybe' 'Bool'
--
-- * 'disInstanceIds' @::@ ['Text']
--
-- * 'disMaxResults' @::@ 'Maybe' 'Int'
--
-- * 'disNextToken' @::@ 'Maybe' 'Text'
--
describeInstanceStatus :: DescribeInstanceStatus
describeInstanceStatus = DescribeInstanceStatus
{ _disDryRun = Nothing
, _disInstanceIds = mempty
, _disFilters = mempty
, _disNextToken = Nothing
, _disMaxResults = Nothing
, _disIncludeAllInstances = Nothing
}
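-- A request is typically built from this defaulted constructor and the lenses
-- below (illustrative values only; the lens operators are re-exported by
-- 'Network.AWS.Prelude'):
--
-- > describeInstanceStatus
-- >     & disInstanceIds .~ ["i-1234567890abcdef0"]
-- >     & disIncludeAllInstances ?~ True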
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have the
-- required permissions, the error response is 'DryRunOperation'. Otherwise, it is 'UnauthorizedOperation'.
disDryRun :: Lens' DescribeInstanceStatus (Maybe Bool)
disDryRun = lens _disDryRun (\s a -> s { _disDryRun = a })
-- | One or more filters.
--
-- 'availability-zone' - The Availability Zone of the instance.
--
-- 'event.code' - The code for the scheduled event ('instance-reboot' | 'system-reboot' | 'system-maintenance' | 'instance-retirement' | 'instance-stop').
--
-- 'event.description' - A description of the event.
--
-- 'event.not-after' - The latest end time for the scheduled event (for
-- example, '2014-09-15T17:15:20.000Z').
--
-- 'event.not-before' - The earliest start time for the scheduled event (for
-- example, '2014-09-15T17:15:20.000Z').
--
-- 'instance-state-code' - The code for the instance state, as a 16-bit
-- unsigned integer. The high byte is an opaque internal value and should be
-- ignored. The low byte is set based on the state represented. The valid values
-- are 0 (pending), 16 (running), 32 (shutting-down), 48 (terminated), 64
-- (stopping), and 80 (stopped).
--
-- 'instance-state-name' - The state of the instance ('pending' | 'running' | 'shutting-down' | 'terminated' | 'stopping' | 'stopped').
--
-- 'instance-status.reachability' - Filters on instance status where the name
-- is 'reachability' ('passed' | 'failed' | 'initializing' | 'insufficient-data').
--
-- 'instance-status.status' - The status of the instance ('ok' | 'impaired' | 'initializing' | 'insufficient-data' | 'not-applicable').
--
-- 'system-status.reachability' - Filters on system status where the name is 'reachability' ('passed' | 'failed' | 'initializing' | 'insufficient-data').
--
-- 'system-status.status' - The system status of the instance ('ok' | 'impaired' | 'initializing' | 'insufficient-data' | 'not-applicable').
--
--
disFilters :: Lens' DescribeInstanceStatus [Filter]
disFilters = lens _disFilters (\s a -> s { _disFilters = a }) . _List
-- | When 'true', includes the health status for all instances. When 'false', includes
-- the health status for running instances only.
--
-- Default: 'false'
disIncludeAllInstances :: Lens' DescribeInstanceStatus (Maybe Bool)
disIncludeAllInstances =
lens _disIncludeAllInstances (\s a -> s { _disIncludeAllInstances = a })
-- | One or more instance IDs.
--
-- Default: Describes all your instances.
--
-- Constraints: Maximum 100 explicitly specified instance IDs.
disInstanceIds :: Lens' DescribeInstanceStatus [Text]
disInstanceIds = lens _disInstanceIds (\s a -> s { _disInstanceIds = a }) . _List
-- | The maximum number of results to return for the request in a single page. The
-- remaining results of the initial request can be seen by sending another
-- request with the returned 'NextToken' value. This value can be between 5 and
-- 1000; if 'MaxResults' is given a value larger than 1000, only 1000 results are
-- returned. You cannot specify this parameter and the instance IDs parameter in
-- the same request.
disMaxResults :: Lens' DescribeInstanceStatus (Maybe Int)
disMaxResults = lens _disMaxResults (\s a -> s { _disMaxResults = a })
-- | The token to retrieve the next page of results.
disNextToken :: Lens' DescribeInstanceStatus (Maybe Text)
disNextToken = lens _disNextToken (\s a -> s { _disNextToken = a })
data DescribeInstanceStatusResponse = DescribeInstanceStatusResponse
{ _disrInstanceStatuses :: List "item" InstanceStatus
, _disrNextToken :: Maybe Text
} deriving (Eq, Read, Show)
-- | 'DescribeInstanceStatusResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'disrInstanceStatuses' @::@ ['InstanceStatus']
--
-- * 'disrNextToken' @::@ 'Maybe' 'Text'
--
describeInstanceStatusResponse :: DescribeInstanceStatusResponse
describeInstanceStatusResponse = DescribeInstanceStatusResponse
{ _disrInstanceStatuses = mempty
, _disrNextToken = Nothing
}
-- | One or more instance status descriptions.
disrInstanceStatuses :: Lens' DescribeInstanceStatusResponse [InstanceStatus]
disrInstanceStatuses =
lens _disrInstanceStatuses (\s a -> s { _disrInstanceStatuses = a })
. _List
-- | The token to use to retrieve the next page of results. This value is 'null'
-- when there are no more results to return.
disrNextToken :: Lens' DescribeInstanceStatusResponse (Maybe Text)
disrNextToken = lens _disrNextToken (\s a -> s { _disrNextToken = a })
instance ToPath DescribeInstanceStatus where
toPath = const "/"
instance ToQuery DescribeInstanceStatus where
toQuery DescribeInstanceStatus{..} = mconcat
[ "DryRun" =? _disDryRun
, "Filter" `toQueryList` _disFilters
, "IncludeAllInstances" =? _disIncludeAllInstances
, "InstanceId" `toQueryList` _disInstanceIds
, "MaxResults" =? _disMaxResults
, "NextToken" =? _disNextToken
]
instance ToHeaders DescribeInstanceStatus
instance AWSRequest DescribeInstanceStatus where
type Sv DescribeInstanceStatus = EC2
type Rs DescribeInstanceStatus = DescribeInstanceStatusResponse
request = post "DescribeInstanceStatus"
response = xmlResponse
instance FromXML DescribeInstanceStatusResponse where
parseXML x = DescribeInstanceStatusResponse
<$> x .@? "instanceStatusSet" .!@ mempty
<*> x .@? "nextToken"
instance AWSPager DescribeInstanceStatus where
page rq rs
| stop (rs ^. disrNextToken) = Nothing
| otherwise = (\x -> rq & disNextToken ?~ x)
<$> (rs ^. disrNextToken)
| romanb/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeInstanceStatus.hs | mpl-2.0 | 9,894 | 0 | 11 | 1,897 | 987 | 607 | 380 | 96 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceManagement.Services.Enable
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Enables a service for a project, so it can be used for the project. See
-- [Cloud Auth Guide](https:\/\/cloud.google.com\/docs\/authentication) for
-- more information. Operation
--
-- /See:/ <https://cloud.google.com/service-management/ Service Management API Reference> for @servicemanagement.services.enable@.
module Network.Google.Resource.ServiceManagement.Services.Enable
(
-- * REST Resource
ServicesEnableResource
-- * Creating a Request
, servicesEnable
, ServicesEnable
-- * Request Lenses
, seXgafv
, seUploadProtocol
, seAccessToken
, seUploadType
, sePayload
, seServiceName
, seCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceManagement.Types
-- | A resource alias for @servicemanagement.services.enable@ method which the
-- 'ServicesEnable' request conforms to.
type ServicesEnableResource =
"v1" :>
"services" :>
CaptureMode "serviceName" "enable" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] EnableServiceRequest :>
Post '[JSON] Operation
-- | Enables a service for a project, so it can be used for the project. See
-- [Cloud Auth Guide](https:\/\/cloud.google.com\/docs\/authentication) for
-- more information. Operation
--
-- /See:/ 'servicesEnable' smart constructor.
data ServicesEnable =
ServicesEnable'
{ _seXgafv :: !(Maybe Xgafv)
, _seUploadProtocol :: !(Maybe Text)
, _seAccessToken :: !(Maybe Text)
, _seUploadType :: !(Maybe Text)
, _sePayload :: !EnableServiceRequest
, _seServiceName :: !Text
, _seCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ServicesEnable' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'seXgafv'
--
-- * 'seUploadProtocol'
--
-- * 'seAccessToken'
--
-- * 'seUploadType'
--
-- * 'sePayload'
--
-- * 'seServiceName'
--
-- * 'seCallback'
servicesEnable
:: EnableServiceRequest -- ^ 'sePayload'
-> Text -- ^ 'seServiceName'
-> ServicesEnable
servicesEnable pSePayload_ pSeServiceName_ =
ServicesEnable'
{ _seXgafv = Nothing
, _seUploadProtocol = Nothing
, _seAccessToken = Nothing
, _seUploadType = Nothing
, _sePayload = pSePayload_
, _seServiceName = pSeServiceName_
, _seCallback = Nothing
}
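-- Illustrative only: given an 'EnableServiceRequest' value @req@ built from
-- the Types module, a request for a (hypothetical) service name is
--
-- > servicesEnable req "my-service.example.googleapis.com"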
-- | V1 error format.
seXgafv :: Lens' ServicesEnable (Maybe Xgafv)
seXgafv = lens _seXgafv (\ s a -> s{_seXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
seUploadProtocol :: Lens' ServicesEnable (Maybe Text)
seUploadProtocol
= lens _seUploadProtocol
(\ s a -> s{_seUploadProtocol = a})
-- | OAuth access token.
seAccessToken :: Lens' ServicesEnable (Maybe Text)
seAccessToken
= lens _seAccessToken
(\ s a -> s{_seAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
seUploadType :: Lens' ServicesEnable (Maybe Text)
seUploadType
= lens _seUploadType (\ s a -> s{_seUploadType = a})
-- | Multipart request metadata.
sePayload :: Lens' ServicesEnable EnableServiceRequest
sePayload
= lens _sePayload (\ s a -> s{_sePayload = a})
-- | Required. Name of the service to enable. Specifying an unknown service
-- name will cause the request to fail.
seServiceName :: Lens' ServicesEnable Text
seServiceName
= lens _seServiceName
(\ s a -> s{_seServiceName = a})
-- | JSONP
seCallback :: Lens' ServicesEnable (Maybe Text)
seCallback
= lens _seCallback (\ s a -> s{_seCallback = a})
instance GoogleRequest ServicesEnable where
type Rs ServicesEnable = Operation
type Scopes ServicesEnable =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/service.management"]
requestClient ServicesEnable'{..}
= go _seServiceName _seXgafv _seUploadProtocol
_seAccessToken
_seUploadType
_seCallback
(Just AltJSON)
_sePayload
serviceManagementService
where go
= buildClient (Proxy :: Proxy ServicesEnableResource)
mempty
| brendanhay/gogol | gogol-servicemanagement/gen/Network/Google/Resource/ServiceManagement/Services/Enable.hs | mpl-2.0 | 5,306 | 0 | 17 | 1,220 | 788 | 461 | 327 | 114 | 1 |
{-# LANGUAGE OverloadedStrings, RecordWildCards, TupleSections #-}
module Resources where
import Blaze.ByteString.Builder
import Control.Concurrent.STM.TMVar
import Control.Concurrent.STM.TVar
import Control.Monad.STM
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Functor
import Data.Monoid
import Data.Text (Text,unpack)
import Data.Text.Encoding (encodeUtf8)
import Data.Time.Clock.POSIX (getPOSIXTime)
import Data.Word
import GHC.Exts (sortWith)
type Content = BL.ByteString
type RawResource = TMVar Content -> Resource
data Resource = Resource { rid :: Word8
, priority :: Word8
, zlibGroup :: Word8
, sign :: Bool
, name :: Text
, desc :: Text
, var :: TMVar Content
}
data Conf = Conf { resources :: [Resource]
}
rawResources :: [RawResource]
rawResources =
[Resource 0 0 0 False "control" "Kryptoradio control channel"
,Resource 1 2 0 False "bitcoin" "Bitoin packet (transactions and blocks)"
,Resource 2 4 0 False "exchange" "Currency exchange data (Bitstamp and Bitpay) including order book"
,Resource 3 1 0 False "fimk" "FIMKrypto block explorer dump (transactions and blocks)"
,Resource 4 5 0 False "qsl" "QSL verification codes"
,Resource 5 6 0 False "irc" "Internet Relay Chat (subscribed channels only)"
]
-- |Create transactional variables from raw resource text.
newConf :: [RawResource] -> IO Conf
newConf raws = Conf <$> (mapM (<$> newEmptyTMVarIO) raws)
-- |Get resource id and new message using correct priority.
priorityTake :: [Resource] -> STM (Resource,Content)
priorityTake res = foldr1 orElse $ map f $ sortWith priority res
where f r = (r,) <$> takeTMVar (var r)
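-- The fold above chains the resource variables in ascending 'priority' order
-- with 'orElse': the transaction takes the first non-empty TMVar and retries
-- only when every resource is empty. The same idea with two variables
-- (sketch only, not used elsewhere in this module):
--
-- > pickOne :: TMVar a -> TMVar a -> STM a
-- > pickOne high low = takeTMVar high `orElse` takeTMVar low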
buildResource :: Resource -> Builder
buildResource Resource{..} = fromWord8 rid <>
fromWord8 zlibGroup <>
fromWord8 (if sign then 1 else 0) <>
cString name <>
cString desc <>
fromByteString "TODO: SHA256 HASH OF LAST PACKET" -- Dummy str of 256 bits
-- |Outputs Kryptoradio sync packet
syncPacket :: Conf -> IO BL.ByteString
syncPacket Conf{..} = do
timestamp <- getTimestamp
return $ toLazyByteString $
binaryBlob "TODO PUBLIC KEY" <>
binaryBlob "TODO SIGNATURE" <>
fromByteString "TODO: SHA256 HASH OF LAST SYNC " <> -- Dummy str of 256 bits
timestamp <>
foldr (mappend.buildResource) mempty resources
-- |C style string: encoded in UTF-8 and terminated by null byte (\0)
cString :: Text -> Builder
cString x = (fromByteString $ encodeUtf8 x) <> nul
nul :: Builder
nul = fromWord8 0
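-- For example, toLazyByteString (cString "abc") produces the bytes of "abc"
-- followed by a single terminating 0 byte.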
showText :: Text -> ShowS
showText = showString . unpack
-- |ByteString of given length. Maximum string length is 255 bytes.
binaryBlob :: BL.ByteString -> Builder
binaryBlob x = fromWord8 (fromIntegral $ BL.length x) <>
fromLazyByteString x
-- |Current time with microsecond precision
getTimestamp :: IO Builder
getTimestamp = do
ts <- getPOSIXTime
return $ fromWord64be $ floor $ ts * 1e6
| koodilehto/kryptoradio | encoder/Resources.hs | agpl-3.0 | 3,269 | 0 | 12 | 865 | 732 | 401 | 331 | 67 | 2 |
{-# LANGUAGE FlexibleInstances, FlexibleContexts, TypeFamilies,
MultiParamTypeClasses, OverlappingInstances, IncoherentInstances #-}
module HEP.Jet.FastJet.Class.TObject.Cast where
import Foreign.Ptr
import Foreign.ForeignPtr
import HEP.Jet.FastJet.TypeCast
import System.IO.Unsafe
import HEP.Jet.FastJet.Class.TObject.RawType
import HEP.Jet.FastJet.Class.TObject.Interface
instance (ITObject a, FPtr a) => Castable a (Ptr RawTObject) where
cast = unsafeForeignPtrToPtr . castForeignPtr . get_fptr
uncast = cast_fptr_to_obj . castForeignPtr . unsafePerformIO . newForeignPtr_
instance Castable TObject (Ptr RawTObject) where
cast = unsafeForeignPtrToPtr . castForeignPtr . get_fptr
uncast = cast_fptr_to_obj . castForeignPtr . unsafePerformIO . newForeignPtr_
| wavewave/HFastJet | oldsrc/HEP/Jet/FastJet/Class/TObject/Cast.hs | lgpl-2.1 | 790 | 0 | 8 | 103 | 159 | 94 | 65 | 15 | 0 |
module Game.Board.Run
(CommFuncs(..), Connector, Dealer, GameM,
runGame)
where
import qualified Data.Map as M
import Control.Monad.State
import Control.Monad.Reader
import Control.Monad.Random
import Game.Board.Internal.Types
-- ugly =(
-- this is very bad
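-- | Run a complete game: 'setup' connects the players and their comm
-- functions, 'initial' randomly deals the shared state plus a per-player
-- state generator, then 'turn' is repeated round-robin via 'takeTurns' until
-- it reports a win, after which every player is told the winner through
-- 'vic'.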
runGame ::
Connector sh pl pr ->
Dealer s sh pl ->
GameM s sh pl pr Bool ->
(Player -> pr ()) ->
IO ()
runGame setup initial turn vic = do
gen <- newStdGen
(ps, cf) <- setup
let ((s, sh, plg), gen') = runRand initial gen
pls = sequence (plg <$ ps)
(pl, gen'') = flip runRand gen' $ M.fromList . zip ps <$> pls
game = takeTurns turn
comm =
flip evalRandT gen'' .
flip evalStateT s .
flip runReaderT (PlayerList (tail ps) (head ps)) $ -- TODO give partiality descriptive error instead
game
io =
flip runReaderT cf .
flip evalStateT (GameState sh pl) .
unwrapComm $
comm
PlayerList ps' w <- io
_ <- doQueries cf (map (\p -> (p, Query (vic w) return)) (w:ps'))
return ()
takeTurns :: GameM s sh pl pr Bool -> GameM s sh pl pr PlayerList
takeTurns g = ReaderT go
where go pls@(PlayerList (p:ps) c) = do
w <- runReaderT g pls
if w then return pls
else go (PlayerList (ps ++ [c]) p)
go _ = error "No players?!"
| benzrf/board | src/Game/Board/Run.hs | lgpl-3.0 | 1,331 | 0 | 16 | 382 | 535 | 277 | 258 | 41 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeFamilyDependencies #-}
module XMonad where
import XActionOutput
import XConfig
import XEventInput
import XLogging (LogMsg, XLogger, XLoggerT (..), runXLoggerT)
import qualified XLogging as Logging
import XNodeState
import XPersistent
import XTypes
------------------------------------------------------------------------------
import Control.Arrow ((&&&))
import Control.Monad.RWS
import Protolude hiding (pass)
--------------------------------------------------------------------------------
-- State Machine
--------------------------------------------------------------------------------
-- | Interface to handle commands in the underlying state machine.
-- Relates a state machine type (`sm`) to a command type (`v`).
-- Provides a pure function for applying a command to a state machine.
--
-- The functional dependency ensures that each state machine type determines
-- a single command type used to update it.
class XSMP sm v | sm -> v where
data XSMPError sm v
type XSMPCtx sm v = ctx | ctx -> sm v
applyCmdXSMP :: XSMPCtx sm v -> sm -> v -> Either (XSMPError sm v) sm
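-- An illustrative (hypothetical) instance for a trivial counter machine, not
-- part of this module:
--
-- > data Counter = Counter Int
-- > data Incr = Incr
-- > data CounterCtx = CounterCtx
-- >
-- > instance XSMP Counter Incr where
-- >   data XSMPError Counter Incr = CounterError
-- >   type XSMPCtx Counter Incr = CounterCtx
-- >   applyCmdXSMP _ (Counter n) Incr = Right (Counter (n + 1))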
class (Monad m, XSMP sm v) => XSM sm v m | m sm -> v where
validateCmd :: v -> m (Either (XSMPError sm v) ())
askXSMPCtx :: m (XSMPCtx sm v)
-- TODO : use this
applyCmdXSM :: XSM sm v m => sm -> v -> m (Either (XSMPError sm v) sm)
applyCmdXSM sm v = do
res <- validateCmd v
case res of
Left err -> pure (Left err)
Right () -> do
ctx <- askXSMPCtx
pure (applyCmdXSMP ctx sm v)
--------------------------------------------------------------------------------
-- X Monad
--------------------------------------------------------------------------------
tellActions :: [Action sm v] -> TransitionM sm v ()
tellActions = tell
data TransitionEnv sm v = TransitionEnv
{ nodeConfig :: NodeConfig
, stateMachine :: sm
, nodeState :: XNodeState v
}
newtype TransitionM sm v a = TransitionM
{ unTransitionM :: XLoggerT v (RWS (TransitionEnv sm v) [Action sm v] PersistentState) a
} deriving (Functor, Applicative, Monad)
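-- Concretely, a logging layer over RWS: the reader environment is the
-- 'TransitionEnv', the writer accumulates 'Action's, and the state is the
-- 'PersistentState' (see the instances below).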
instance MonadWriter [Action sm v] (TransitionM sm v) where
tell = TransitionM . XLoggerT . tell
listen = TransitionM . XLoggerT . listen . unXLoggerT . unTransitionM
pass = TransitionM . XLoggerT . pass . unXLoggerT . unTransitionM
instance MonadReader (TransitionEnv sm v) (TransitionM sm v) where
ask = TransitionM . XLoggerT $ ask
local f = TransitionM . XLoggerT . local f . unXLoggerT . unTransitionM
instance MonadState PersistentState (TransitionM sm v) where
get = TransitionM . XLoggerT $ lift get
put = TransitionM . XLoggerT . lift . put
instance XLogger v (RWS (TransitionEnv sm v) [Action sm v] PersistentState) where
loggerCtx = asks ((configNodeId . nodeConfig) &&& nodeState)
runTransitionM
:: TransitionEnv sm v
-> PersistentState
-> TransitionM sm v a
-> ((a, [LogMsg]), PersistentState, [Action sm v])
runTransitionM transEnv persistentState transitionM =
runRWS (runXLoggerT (unTransitionM transitionM)) transEnv persistentState
--------------------------------------------------------------------------------
-- Handlers
--------------------------------------------------------------------------------
type ClientInputHandler ns sm r v
= Show v
=> NodeState ns v
-> ClientId
-> r
-> TransitionM sm v (ResultState ns v)
type TimeoutHandler ns sm v
= Show v
=> NodeState ns v
-> Timeout
-> TransitionM sm v (ResultState ns v)
--------------------------------------------------------------------------------
-- Logging
--------------------------------------------------------------------------------
logCritical,logDebug,logInfo :: [Text] -> TransitionM sm v ()
logDebug = TransitionM . Logging.logDebug
logInfo = TransitionM . Logging.logInfo
logCritical = TransitionM . Logging.logCritical
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/program-structure/2019-01-hc-example-based-on-adjointio-raft/src/XMonad.hs | unlicense | 4,345 | 3 | 15 | 906 | 1,011 | 548 | 463 | 79 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-|
Contains utilities for manipulating the core K3 data structures.
-}
module Language.K3.Core.Utils
( check0Children
, check1Child
, check2Children
, check3Children
, check4Children
, check5Children
, check6Children
, check7Children
, check8Children
, prependToRole
, mapTree
, modifyTree
, foldMapTree
, foldTree
, biFoldTree
, biFoldMapTree
, foldRebuildTree
, foldMapRebuildTree
, biFoldRebuildTree
, biFoldMapRebuildTree
, mapIn1RebuildTree
, foldIn1RebuildTree
, foldMapIn1RebuildTree
, foldProgramWithDecl
, foldProgram
, mapProgramWithDecl
, mapProgram
, foldExpression
, mapExpression
, mapNamedDeclaration
, foldNamedDeclaration
, mapNamedDeclExpression
, foldNamedDeclExpression
, mapMaybeAnnotation
, mapMaybeExprAnnotation
, mapAnnotation
, mapExprAnnotation
, foldReturnExpression
, foldMapReturnExpression
, mapReturnExpression
, defaultExpression
, freeVariables
, bindingVariables
, modifiedVariables
, lambdaClosures
, lambdaClosuresDecl
, compareDAST
, compareEAST
, compareTAST
, compareDStrictAST
, compareEStrictAST
, compareTStrictAST
, stripDeclAnnotations
, stripNamedDeclAnnotations
, stripExprAnnotations
, stripTypeAnnotations
, stripAllDeclAnnotations
, stripAllExprAnnotations
, stripAllTypeAnnotations
, repairProgram
, foldProgramUID
, maxProgramUID
, minProgramUID
, collectProgramUIDs
, duplicateProgramUIDs
, stripDCompare
, stripECompare
, stripComments
, stripDUIDSpan
, stripEUIDSpan
, stripTUIDSpan
, stripProperties
, stripDeclProperties
, stripAllProperties
, stripTypeAnns
, stripDeclTypeAnns
, stripEffectAnns
, stripDeclEffectAnns
, stripAllEffectAnns
, stripTypeAndEffectAnns
, stripDeclTypeAndEffectAnns
, stripAllTypeAndEffectAnns
) where
import Control.Applicative
import Control.Arrow
import Control.Monad
import Data.Functor.Identity
import Data.List
import Data.Maybe
import Data.Tree
import Data.IntMap ( IntMap )
import qualified Data.IntMap as IntMap
import Debug.Trace
import Language.K3.Core.Annotation
import Language.K3.Core.Common
import Language.K3.Core.Declaration
import Language.K3.Core.Expression
import Language.K3.Core.Type
import qualified Language.K3.Core.Constructor.Expression as EC
import Language.K3.Utils.Pretty hiding ( wrap )
import Language.Haskell.TH hiding ( Type )
-- * Generated routines
-- Defines check0Children through check8Children. These routines accept a
-- K3 tree and verify that it has a given number of children. When it does, the
-- result is Just a tuple with that many elements. When it does not, the result
-- is Nothing.
$(
let mkCheckChildren :: Int -> Q [Dec]
mkCheckChildren n = do
let fname = mkName $ "check" ++ show n ++ "Child" ++
(if n /= 1 then "ren" else "")
ename <- newName "tree"
elnames <- mapM (newName . ("el" ++) . show) [1::Int .. n]
typN <- newName "a"
let typ = varT typN
let tupTyp = foldl appT (tupleT n) $ replicate n $ [t|K3 $(typ)|]
ftype <- [t| K3 $(typ) -> Maybe $(tupTyp) |]
let ftype' = ForallT [PlainTV typN] [] ftype
let signature = sigD fname $ return ftype'
let badMatch = match wildP (normalB [| Nothing |]) []
let goodMatch = match (listP $ map varP elnames) (normalB $
appE ([|return|]) $ tupE $ map varE elnames) []
let bodyExp = caseE ([|subForest $(varE ename)|]) [goodMatch, badMatch]
let cl = clause [varP ename] (normalB bodyExp) []
let impl = funD fname [cl]
sequence [signature,impl]
in
concat <$> mapM mkCheckChildren [0::Int .. 8]
)
-- Prepend declarations to the beginning of a role
prependToRole :: K3 Declaration -> [K3 Declaration] -> K3 Declaration
prependToRole (Node r@(DRole _ :@: _) sub) ds = Node r (ds++sub)
prependToRole _ _ = error "Expected a role"
-- | Transform a tree by mapping a function over every tree node. The function
-- is provided transformed children for every new node built.
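-- For example (illustrative), doubling every label of a @Tree Int@:
--
-- > doubleLabels :: Tree Int -> Tree Int
-- > doubleLabels = runIdentity . mapTree (\ch (Node i _) -> return (Node (2 * i) ch))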
mapTree :: (Monad m) => ([Tree b] -> Tree a -> m (Tree b)) -> Tree a -> m (Tree b)
mapTree f n@(Node _ []) = f [] n
mapTree f n@(Node _ ch) = mapM (mapTree f) ch >>= flip f n
-- | Transform a tree by mapping a function over every tree node.
-- The children of a node are pre-transformed recursively
modifyTree :: (Monad m) => (Tree a -> m (Tree a)) -> Tree a -> m (Tree a)
modifyTree f n@(Node _ []) = f n
modifyTree f (Node x ch) = do
ch' <- mapM (modifyTree f) ch
f (Node x ch')
-- | Map an accumulator over a tree, recurring independently over each child.
-- The result is produced by transforming independent subresults in bottom-up fashion.
foldMapTree :: (Monad m) => ([b] -> Tree a -> m b) -> b -> Tree a -> m b
foldMapTree f x n@(Node _ []) = f [x] n
foldMapTree f x n@(Node _ ch) = mapM (foldMapTree f x) ch >>= flip f n
-- | Fold over a tree, threading the accumulator between children.
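-- For example (illustrative), summing every label of a @Tree Int@:
--
-- > sumLabels :: Tree Int -> Int
-- > sumLabels = runIdentity . foldTree (\acc (Node i _) -> return (acc + i)) 0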
foldTree :: (Monad m) => (b -> Tree a -> m b) -> b -> Tree a -> m b
foldTree f x n@(Node _ []) = f x n
foldTree f x n@(Node _ ch) = foldM (foldTree f) x ch >>= flip f n
-- | Joint top-down and bottom-up traversal of a tree.
-- This variant threads an accumulator across all siblings, and thus carries
-- information from all nodes in the prefix of the tree to every node.
biFoldTree :: (Monad m)
=> (td -> Tree a -> m (td, [td]))
-> (td -> bu -> Tree a -> m bu)
-> td -> bu -> Tree a -> m bu
biFoldTree tdF buF tdAcc buAcc n@(Node _ []) = tdF tdAcc n >>= \(td,_) -> buF td buAcc n
biFoldTree tdF buF tdAcc buAcc n@(Node _ ch) = do
(ntd, cntd) <- tdF tdAcc n
if (length cntd) /= (length ch)
then fail "Invalid top-down accumulation in biFoldTree"
else do
nbu <- foldM (\nbuAcc (ctd, c) -> biFoldTree tdF buF ctd nbuAcc c) buAcc $ zip cntd ch
buF ntd nbu n
-- | Joint top-down and bottom-up traversal of a tree.
-- This variant threads a bottom-up accumulator independently between siblings.
-- Thus there is no sideways information passing (except for top-down accumulation).
-- tdF: takes the top-down accumulator and node. Returns a top-down value for post-processing
-- at the same node, and messages for each child
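-- A worked example is 'lambdaClosuresExpr' later in this module, which threads
-- the identifiers in scope top-down while collecting the variables used in each
-- subtree bottom-up.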
biFoldMapTree :: (Monad m)
=> (td -> Tree a -> m (td, [td]))
-> (td -> [bu] -> Tree a -> m bu)
-> td -> bu -> Tree a -> m bu
biFoldMapTree tdF buF tdAcc buAcc n@(Node _ []) = tdF tdAcc n >>= \(td,_) -> buF td [buAcc] n
biFoldMapTree tdF buF tdAcc buAcc n@(Node _ ch) = do
(ntd, cntd) <- tdF tdAcc n
if (length cntd) /= (length ch)
then fail "Invalid top-down accumulation in biFoldMapTree"
else do
nbu <- mapM (\(ctd,c) -> biFoldMapTree tdF buF ctd buAcc c) $ zip cntd ch
buF ntd nbu n
-- | Rebuild a tree with an accumulator and transformed children at every node.
foldRebuildTree :: (Monad m)
=> (b -> [Tree a] -> Tree a -> m (b, Tree a))
-> b -> Tree a -> m (b, Tree a)
foldRebuildTree f x n@(Node _ []) = f x [] n
foldRebuildTree f x n@(Node _ ch) = foldM rebuild (x,[]) ch >>= uncurry (\a b -> f a b n)
where rebuild (acc, chAcc) c =
foldRebuildTree f acc c >>= (\(nAcc, nc) -> return (nAcc, chAcc++[nc]))
-- | Rebuild a tree with independent accumulators and transformed children at every node.
foldMapRebuildTree :: (Monad m)
=> ([b] -> [Tree a] -> Tree a -> m (b, Tree a))
-> b -> Tree a -> m (b, Tree a)
foldMapRebuildTree f x n@(Node _ []) = f [x] [] n
foldMapRebuildTree f x n@(Node _ ch) =
mapM (foldMapRebuildTree f x) ch >>= (\(a, b) -> f a b n) . unzip
-- | Joint top-down and bottom-up traversal of a tree with both
-- threaded accumulation and tree reconstruction.
biFoldRebuildTree :: (Monad m)
=> (td -> Tree a -> m (td, [td]))
-> (td -> bu -> [Tree a] -> Tree a -> m (bu, Tree a))
-> td -> bu -> Tree a -> m (bu, Tree a)
biFoldRebuildTree tdF buF tdAcc buAcc n@(Node _ []) = tdF tdAcc n >>= \(td,_) -> buF td buAcc [] n
biFoldRebuildTree tdF buF tdAcc buAcc n@(Node _ ch) = do
(ntd, cntd) <- tdF tdAcc n
if (length cntd) /= (length ch)
then fail "Invalid top-down accumulation in biFoldRebuildTree"
else do
(nbu, nch) <- foldM rcrWAccumCh (buAcc, []) $ zip cntd ch
buF ntd nbu nch n
where
rcrWAccumCh (nbuAcc, chAcc) (ctd, c) = do
(rAcc, nc) <- biFoldRebuildTree tdF buF ctd nbuAcc c
return (rAcc, chAcc ++ [nc])
-- | Joint top-down and bottom-up traversal of a tree with both
-- independent accumulation and tree reconstruction.
biFoldMapRebuildTree :: (Monad m)
=> (td -> Tree a -> m (td, [td]))
-> (td -> [bu] -> [Tree a] -> Tree a -> m (bu, Tree a))
-> td -> bu -> Tree a -> m (bu, Tree a)
biFoldMapRebuildTree tdF buF tdAcc buAcc n@(Node _ []) = tdF tdAcc n >>= \(td,_) -> buF td [buAcc] [] n
biFoldMapRebuildTree tdF buF tdAcc buAcc n@(Node _ ch) = do
(ntd, cntd) <- tdF tdAcc n
if (length cntd) /= (length ch)
then fail "Invalid top-down accumulation in biFoldMapRebuildTree"
else do
let rcr (ctd, c) = biFoldMapRebuildTree tdF buF ctd buAcc c
(nbu, nch) <- mapM rcr (zip cntd ch) >>= return . unzip
buF ntd nbu nch n
-- | Rebuild a tree with explicit pre and post transformers applied to the first
-- child of every tree node. This is useful for stateful monads that modify
-- environments based on the type of the first child.
mapIn1RebuildTree :: (Monad m)
=> (Tree a -> Tree a -> m ())
-> (Tree b -> Tree a -> m [m ()])
-> ([Tree b] -> Tree a -> m (Tree b))
-> Tree a -> m (Tree b)
mapIn1RebuildTree _ _ allChF n@(Node _ []) = allChF [] n
mapIn1RebuildTree preCh1F postCh1F allChF n@(Node _ ch) = do
preCh1F (head ch) n
nc1 <- rcr $ head ch
restm <- postCh1F nc1 n
-- Allow for a final action before the call to allChF
let len = length $ tail ch
goodLengths = [len, len + 1]
if length restm `notElem` goodLengths
then fail "Invalid mapIn1RebuildTree sequencing"
else do
nRestCh <- mapM (\(m, c) -> m >> rcr c) $ zip restm $ tail ch
case drop len restm of
[] -> allChF (nc1:nRestCh) n
[m] -> m >> allChF (nc1:nRestCh) n
_ -> error "unexpected"
where rcr = mapIn1RebuildTree preCh1F postCh1F allChF
-- | Tree accumulation and reconstruction, with a privileged first child accumulation.
-- This function is useful for manipulating ASTs subject to bindings introduced by the first
-- child, for example with let-ins, bind-as and case-of.
-- This function takes a pre- and post-first child traversal accumulator transformation function.
-- The post-first-child transformation additionally returns siblings to which the accumulator
-- should be propagated, for example case-of should not propagate bindings to the None branch.
-- The traversal also takes a merge function to combine the running accumulator
-- passed through siblings with those that skip accumulator propagation.
-- Finally, the traversal takes a post-order accumulator and children transformation function.
foldIn1RebuildTree :: (Monad m)
=> (c -> Tree a -> Tree a -> m c)
-> (c -> Tree b -> Tree a -> m (c, [Bool]))
-> (c -> c -> m c)
-> (c -> [Tree b] -> Tree a -> m (c, Tree b))
-> c -> Tree a -> m (c, Tree b)
foldIn1RebuildTree _ _ _ allChF acc n@(Node _ []) = allChF acc [] n
foldIn1RebuildTree preCh1F postCh1F mergeF allChF acc n@(Node _ ch) = do
nAcc <- preCh1F acc (head ch) n
(nAcc2, nc1) <- rcr nAcc $ head ch
(nAcc3, useInitAccs) <- postCh1F nAcc2 nc1 n
if (length useInitAccs) /= (length $ tail ch)
then fail "Invalid foldIn1RebuildTree accumulation"
else do
(nAcc4, nch) <- foldM rebuild (nAcc3, [nc1]) $ zip useInitAccs $ tail ch
allChF nAcc4 nch n
where rcr = foldIn1RebuildTree preCh1F postCh1F mergeF allChF
rebuild (rAcc, chAcc) (True, c) = do
(nrAcc, nc) <- rcr rAcc c
return (nrAcc, chAcc++[nc])
rebuild (rAcc, chAcc) (False, c) = do
(cAcc, nc) <- rcr acc c
nrAcc <- mergeF rAcc cAcc
return (nrAcc, chAcc++[nc])
-- | A mapping variant of foldIn1RebuildTree that threads a top-down accumulator
-- while reconstructing the tree.
-- preCh1F: The pre-child function takes the top-down accumulator, the first child, and the node.
-- It returns a new accumulator
-- postCh1F: The post-child function takes the pre's accumulator, the processed first child, and the node.
-- It returns an accumulator, and a list of accumulators to be sent down while recursing
-- over the other children.
-- allChF: The all-child function takes the post child's single accumulator, the processed children,
-- and the node, and returns a new tree.
foldMapIn1RebuildTree :: (Monad m)
=> (c -> Tree a -> Tree a -> m c)
-> (c -> d -> Tree b -> Tree a -> m (c, [c]))
-> (c -> [d] -> [Tree b] -> Tree a -> m (d, Tree b))
-> c -> d -> Tree a -> m (d, Tree b)
foldMapIn1RebuildTree _ _ allChF tdAcc buAcc n@(Node _ []) = allChF tdAcc [buAcc] [] n
foldMapIn1RebuildTree preCh1F postCh1F allChF tdAcc buAcc n@(Node _ ch) = do
nCh1Acc <- preCh1F tdAcc (head ch) n
(nCh1BuAcc, nc1) <- rcr nCh1Acc $ head ch
(nAcc, chAccs) <- postCh1F nCh1Acc nCh1BuAcc nc1 n
if length chAccs /= (length $ tail ch)
then fail "Invalid foldMapIn1RebuildTree accumulation"
else do
(chBuAcc, nRestCh) <- zipWithM rcr chAccs (tail ch) >>= return . unzip
allChF nAcc (nCh1BuAcc:chBuAcc) (nc1:nRestCh) n
where rcr a b = foldMapIn1RebuildTree preCh1F postCh1F allChF a buAcc b
-- | Fold a declaration and expression reducer and accumulator over the given program.
foldProgramWithDecl :: (Monad m)
=> (a -> K3 Declaration -> m (a, K3 Declaration))
-> (a -> K3 Declaration -> AnnMemDecl -> m (a, AnnMemDecl))
-> (a -> K3 Declaration -> K3 Expression -> m (a, K3 Expression))
-> Maybe (a -> K3 Declaration -> K3 Type -> m (a, K3 Type))
-> a -> K3 Declaration
-> m (a, K3 Declaration)
foldProgramWithDecl declF annMemF exprF typeFOpt a prog = foldRebuildTree rebuildDecl a prog
where
rebuildDecl acc ch d@(tag &&& annotations -> (DGlobal i t eOpt, anns)) = do
(acc2, nt) <- onType acc d t
(acc3, neOpt) <- rebuildInitializer acc2 d eOpt
declF acc3 $ Node (DGlobal i nt neOpt :@: anns) ch
rebuildDecl acc ch d@(tag &&& annotations -> (DTrigger i t e, anns)) = do
(acc2, nt) <- onType acc d t
(acc3, ne) <- exprF acc2 d e
declF acc3 $ Node (DTrigger i nt ne :@: anns) ch
rebuildDecl acc ch d@(tag &&& annotations -> (DDataAnnotation i tVars mems, anns)) = do
(acc2, nMems) <- foldM (rebuildAnnMem d) (acc, []) mems
declF acc2 $ Node (DDataAnnotation i tVars nMems :@: anns) ch
rebuildDecl acc ch (Node t _) = declF acc $ Node t ch
rebuildAnnMem d (acc, memAcc) (Lifted p n t eOpt anns) =
rebuildMem d acc memAcc t eOpt $ \(nt, neOpt) -> Lifted p n nt neOpt anns
rebuildAnnMem d (acc, memAcc) (Attribute p n t eOpt anns) =
rebuildMem d acc memAcc t eOpt $ \(nt, neOpt) -> Attribute p n nt neOpt anns
rebuildAnnMem d (acc, memAcc) (MAnnotation p n anns) = do
(acc2, nMem) <- annMemF acc d $ MAnnotation p n anns
return (acc2, memAcc ++ [nMem])
rebuildMem d acc memAcc t eOpt rebuildF = do
(acc2, nt) <- onType acc d t
(acc3, neOpt) <- rebuildInitializer acc2 d eOpt
(acc4, nMem) <- annMemF acc3 d $ rebuildF (nt, neOpt)
return (acc4, memAcc ++ [nMem])
rebuildInitializer acc d eOpt =
maybe (return (acc, Nothing)) (\e -> exprF acc d e >>= return . fmap Just) eOpt
onType acc d t = maybe (return (acc,t)) (\f -> f acc d t) typeFOpt
-- | Variant of the foldProgramWithDecl function, ignoring the parent declaration.
foldProgram :: (Monad m)
=> (a -> K3 Declaration -> m (a, K3 Declaration))
-> (a -> AnnMemDecl -> m (a, AnnMemDecl))
-> (a -> K3 Expression -> m (a, K3 Expression))
-> Maybe (a -> K3 Type -> m (a, K3 Type))
-> a -> K3 Declaration
-> m (a, K3 Declaration)
foldProgram declF annMemF exprF typeFOpt a prog =
foldProgramWithDecl declF (ignore2 annMemF) (ignore2 exprF) (ignore2Opt typeFOpt) a prog
where ignore2 f = \x _ y -> f x y
ignore2Opt fOpt = maybe Nothing (\f -> Just $ \x _ y -> f x y) fOpt
-- | Map a declaration, expression and annotation member transformer over the given program.
-- This variant uses transformer functions that require the containing declaration.
mapProgramWithDecl :: (Monad m)
=> (K3 Declaration -> m (K3 Declaration))
-> (K3 Declaration -> AnnMemDecl -> m AnnMemDecl)
-> (K3 Declaration -> K3 Expression -> m (K3 Expression))
-> Maybe (K3 Declaration -> K3 Type -> m (K3 Type))
-> K3 Declaration
-> m (K3 Declaration)
mapProgramWithDecl declF annMemF exprF typeFOpt prog = do
(_, r) <- foldProgramWithDecl (wrap declF) (wrap2 annMemF) (wrap2 exprF) (maybe Nothing (Just . wrap2) $ typeFOpt) () prog
return r
where wrap f _ x = f x >>= return . ((), )
wrap2 f _ d x = f d x >>= return . ((), )
-- | Map a declaration, expression and annotation member transformer over the given program.
mapProgram :: (Monad m)
=> (K3 Declaration -> m (K3 Declaration))
-> (AnnMemDecl -> m AnnMemDecl)
-> (K3 Expression -> m (K3 Expression))
-> Maybe (K3 Type -> m (K3 Type))
-> K3 Declaration
-> m (K3 Declaration)
mapProgram declF annMemF exprF typeFOpt prog = do
(_, r) <- foldProgram (wrap declF) (wrap annMemF) (wrap exprF) (maybe Nothing (Just . wrap) $ typeFOpt) () prog
return r
where wrap f _ x = f x >>= return . ((), )
-- | Fold a function and accumulator over all expressions in the given program.
foldExpression :: (Monad m)
=> (a -> K3 Expression -> m (a, K3 Expression)) -> a -> K3 Declaration
-> m (a, K3 Declaration)
foldExpression exprF a prog = foldProgram returnPair returnPair exprF Nothing a prog
where returnPair x y = return (x,y)
-- | Map a function over all expressions in the program tree.
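-- For example (illustrative), @mapExpression (return . stripEUIDSpan)@ strips
-- UIDs and spans from every expression in a program.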
mapExpression :: (Monad m)
=> (K3 Expression -> m (K3 Expression)) -> K3 Declaration -> m (K3 Declaration)
mapExpression exprF prog = foldProgram returnPair returnPair wrapExprF Nothing () prog >>= return . snd
where returnPair a b = return (a,b)
wrapExprF a e = exprF e >>= return . (a,)
-- | Apply a function to a specific named declaration
mapNamedDeclaration :: (Monad m) => Identifier -> (K3 Declaration -> m (K3 Declaration))
-> K3 Declaration -> m (K3 Declaration)
mapNamedDeclaration i declF prog = mapProgram namedDeclF return return Nothing prog
where namedDeclF d@(tag -> DGlobal n _ _) | i == n = declF d
namedDeclF d@(tag -> DTrigger n _ _) | i == n = declF d
namedDeclF d = return d
foldNamedDeclaration :: (Monad m) => Identifier -> (a -> K3 Declaration -> m (a, K3 Declaration))
-> a -> K3 Declaration -> m (a, K3 Declaration)
foldNamedDeclaration i declF acc prog = foldProgram namedDeclF mkP mkP Nothing acc prog
where namedDeclF nacc d@(tag -> DGlobal n _ _) | i == n = declF nacc d
namedDeclF nacc d@(tag -> DTrigger n _ _) | i == n = declF nacc d
namedDeclF nacc d = return (nacc, d)
mkP a b = return (a,b)
-- | Apply a function to a specific declaration's initializer.
mapNamedDeclExpression :: (Monad m) => Identifier -> (K3 Expression -> m (K3 Expression))
-> K3 Declaration -> m (K3 Declaration)
mapNamedDeclExpression i exprF prog = mapProgram namedDeclF return return Nothing prog
where namedDeclF d@(tag -> DGlobal n t eOpt) | i == n = do
neOpt <- maybe (return eOpt) (\e -> exprF e >>= return . Just) eOpt
return $ replaceTag d $ DGlobal n t neOpt
namedDeclF d@(tag -> DTrigger n t e) | i == n = do
ne <- exprF e
return $ replaceTag d $ DTrigger n t ne
namedDeclF d = return d
foldNamedDeclExpression :: (Monad m) => Identifier -> (a -> K3 Expression -> m (a, K3 Expression))
-> a -> K3 Declaration -> m (a, K3 Declaration)
foldNamedDeclExpression i exprF acc prog = foldProgram namedDeclF mkP mkP Nothing acc prog
where namedDeclF nacc d@(tag -> DGlobal n t eOpt) | i == n = do
(a, neOpt) <- case eOpt of
Nothing -> return (nacc, Nothing)
Just e -> exprF nacc e >>= \(a,ne) -> return (a, Just ne)
return . (a,) $ replaceTag d $ DGlobal n t neOpt
namedDeclF nacc d@(tag -> DTrigger n t e) | i == n = do
(a, ne) <- exprF nacc e
return . (a,) $ replaceTag d $ DTrigger n t ne
namedDeclF nacc d = return (nacc, d)
mkP a b = return (a,b)
-- | Map a function over all program annotations, filtering null returns.
mapMaybeAnnotation :: (Applicative m, Monad m)
=> (Annotation Declaration -> m (Maybe (Annotation Declaration)))
-> (Annotation Expression -> m (Maybe (Annotation Expression)))
-> (Annotation Type -> m (Maybe (Annotation Type)))
-> K3 Declaration
-> m (K3 Declaration)
mapMaybeAnnotation declF exprF typeF = mapProgram onDecl onMem onExpr (Just onType)
where onDecl d = nodeF declF d
onMem (Lifted p n t eOpt anns) = memF (Lifted p n) t eOpt anns
onMem (Attribute p n t eOpt anns) = memF (Attribute p n) t eOpt anns
onMem (MAnnotation p n anns) = mapM declF anns >>= \nanns -> return $ MAnnotation p n $ catMaybes nanns
memF ctor t eOpt anns = ctor <$> onType t
<*> maybe (return Nothing) (\e -> onExpr e >>= return . Just) eOpt
<*> (mapM declF anns >>= return . catMaybes)
onExpr e = modifyTree (nodeF exprF) e
onType t = modifyTree (nodeF typeF) t
nodeF f (Node (tg :@: anns) ch) = mapM f anns >>= \nanns -> return $ Node (tg :@: catMaybes nanns) ch
-- | Map a function over all expression annotations, filtering null returns.
mapMaybeExprAnnotation :: (Monad m)
=> (Annotation Expression -> m (Maybe (Annotation Expression)))
-> (Annotation Type -> m (Maybe (Annotation Type)))
-> K3 Expression
-> m (K3 Expression)
mapMaybeExprAnnotation exprF typeF = modifyTree (onNode chainType)
where chainType (EType t) = modifyTree (onNode typeF) t >>= exprF . EType
chainType a = exprF a
onNode f (Node (tg :@: anns) ch) = mapM f anns >>= \nanns -> return $ Node (tg :@: catMaybes nanns) ch
-- | Transform all annotations on a program.
mapAnnotation :: (Applicative m, Monad m)
=> (Annotation Declaration -> m (Annotation Declaration))
-> (Annotation Expression -> m (Annotation Expression))
-> (Annotation Type -> m (Annotation Type))
-> K3 Declaration
-> m (K3 Declaration)
mapAnnotation declF exprF typeF = mapMaybeAnnotation (wrap declF) (wrap exprF) (wrap typeF)
where wrap f a = f a >>= return . Just
-- | Transform all annotations on an expression.
mapExprAnnotation :: (Monad m)
=> (Annotation Expression -> m (Annotation Expression))
-> (Annotation Type -> m (Annotation Type))
-> K3 Expression
-> m (K3 Expression)
mapExprAnnotation exprF typeF = modifyTree (onNode chainType)
where chainType (EType t) = modifyTree (onNode typeF) t >>= exprF . EType
chainType a = exprF a
onNode f (Node (tg :@: anns) ch) = mapM f anns >>= \nanns -> return $ Node (tg :@: nanns) ch
-- | Fold a function and accumulator over all return expressions in the program.
--
-- This function accepts a top-down aggregator, a bottom-up aggregator for return expressions
-- and a bottom-up aggregator for non-return expressions.
-- The top-down aggregator is applied to all expressions (e.g., to track lambda shadowing).
--
-- Return expressions are those expressions defining the return value of an arbitrary expression.
-- For example, the body of a let-in is the return expression, not the binding expression.
-- Each return expression is visited: consider an expression returning a tuple. Both the tuple
-- constructor and individual tuple fields are return expressions, and they will all be visited.
foldReturnExpression :: (Monad m)
=> (a -> K3 Expression -> m (a, [a]))
-> (a -> b -> K3 Expression -> m (b, K3 Expression))
-> (a -> b -> K3 Expression -> m (b, K3 Expression))
-> a -> b -> K3 Expression
-> m (b, K3 Expression)
foldReturnExpression tdF onReturnF onNonReturnF tdAcc buAcc expr =
biFoldRebuildTree (skipChildrenForReturns tdF) skipOrApply (False, tdAcc) buAcc expr
where skipOrApply (skip, tdAcc') buAcc' ch e =
(if skip then onNonReturnF else onReturnF) tdAcc' buAcc' (replaceCh e ch)
-- | Variant of the above with independent rather than serial bottom-up accumulations.
foldMapReturnExpression :: (Monad m)
=> (a -> K3 Expression -> m (a, [a]))
-> (a -> [b] -> K3 Expression -> m (b, K3 Expression))
-> (a -> [b] -> K3 Expression -> m (b, K3 Expression))
-> a -> b -> K3 Expression
-> m (b, K3 Expression)
foldMapReturnExpression tdF onReturnF onNonReturnF tdAcc buAcc expr =
biFoldMapRebuildTree (skipChildrenForReturns tdF) skipOrApply (False, tdAcc) buAcc expr
where skipOrApply (skip, tdAcc') buAcc' ch e =
(if skip then onNonReturnF else onReturnF) tdAcc' buAcc' (replaceCh e ch)
skipChildrenForReturns :: (Monad m)
=> (a -> K3 Expression -> m (a, [a])) -> (Bool, a) -> K3 Expression
-> m ((Bool, a), [(Bool, a)])
skipChildrenForReturns tdF (skip, tdAcc) e =
let chSkip = replicate (length $ children e) True
in do { (nTd, chTd) <- tdF tdAcc e;
if skip then return ((True, nTd), zip chSkip chTd)
else skipChildren e >>= return . ((False, nTd),) . flip zip chTd }
where
skipChildren :: (Monad m) => K3 Expression -> m [Bool]
skipChildren (tag -> EOperate OApp) = return [False, True]
skipChildren (tag -> EOperate OSnd) = return [True, False]
skipChildren (tag -> EOperate OSeq) = return [True, False]
skipChildren (tag -> ELetIn _) = return [True, False]
skipChildren (tag -> EBindAs _) = return [True, False]
skipChildren (tag -> ECaseOf _) = return [True, False, False]
skipChildren (tag -> EIfThenElse) = return [True, False, False]
skipChildren e' = return $ replicate (length $ children e') False
-- | Map a function over all return expressions.
-- See definition of foldReturnExpression for more information.
mapReturnExpression :: (Monad m)
=> (K3 Expression -> m (K3 Expression))
-> (K3 Expression -> m (K3 Expression))
-> K3 Expression -> m (K3 Expression)
mapReturnExpression onReturnF nonReturnF expr =
foldReturnExpression tdF wrapRetF wrapNonRetF () () expr >>= return . snd
where tdF tdAcc e = return (tdAcc, replicate (length $ children e) tdAcc)
wrapRetF _ a e = onReturnF e >>= return . (a,)
wrapNonRetF _ a e = nonReturnF e >>= return . (a,)
{- Expression utilities -}
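-- | Construct a default expression for a given type (e.g. @0@ for ints, a
-- @None@ for options, an empty collection for collections); function types
-- have no default and produce a 'Left'.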
defaultExpression :: K3 Type -> Either String (K3 Expression)
defaultExpression typ = mapTree mkExpr typ
where mkExpr _ t@(tag -> TBool) = withQualifier t $ EC.constant $ CBool False
mkExpr _ t@(tag -> TByte) = withQualifier t $ EC.constant $ CByte 0
mkExpr _ t@(tag -> TInt) = withQualifier t $ EC.constant $ CInt 0
mkExpr _ t@(tag -> TReal) = withQualifier t $ EC.constant $ CReal 0.0
mkExpr _ t@(tag -> TNumber) = withQualifier t $ EC.constant $ CInt 0
mkExpr _ t@(tag -> TString) = withQualifier t $ EC.constant $ CString ""
mkExpr [e] t@(tag -> TOption) = let nm = case e @~ isEQualified of
Just EMutable -> NoneMut
_ -> NoneImmut
in withQualifier t $ EC.constant $ CNone nm
mkExpr [e] t@(tag -> TIndirection) = withQualifier t $ EC.indirect e
mkExpr ch t@(tag -> TTuple) = withQualifier t $ EC.tuple ch
mkExpr ch t@(tag -> TRecord ids) = withQualifier t $ EC.record $ zip ids ch
mkExpr _ t@(tag -> TCollection) = withQualifier t $
foldl (@+) (EC.empty $ head $ children t) $ extractTCAnns $ annotations t
mkExpr _ (tag -> TFunction) = Left "Cannot create a default expression for a function"
mkExpr _ t@(tag -> TAddress) = withQualifier t $
EC.address (EC.constant $ CString "127.0.0.1") (EC.constant $ CInt 40000)
mkExpr _ t = Left $ boxToString $ ["Cannot create a default expression for: "] %+ prettyLines t
extractTCAnns as = concatMap extract as
where extract (TAnnotation i) = [EAnnotation i]
extract _ = []
withQualifier t e = case t @~ isTQualified of
Just TMutable -> return $ EC.mut e
Just TImmutable -> return $ EC.immut e
_ -> return $ e
-- | Retrieves all free variables in an expression.
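-- For example, in @\x -> x + y@ only @y@ is reported as free, since @x@ is
-- bound by the lambda.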
freeVariables :: K3 Expression -> [Identifier]
freeVariables expr = either (const []) id $ foldMapTree extractVariable [] expr
where
extractVariable chAcc (tag -> EVariable n) = return $ concat chAcc ++ [n]
extractVariable chAcc (tag -> EAssign i) = return $ concat chAcc ++ [i]
extractVariable chAcc (tag -> ELambda n) = return $ filter (/= n) $ concat chAcc
extractVariable chAcc (tag -> EBindAs b) = return $ (chAcc !! 0) ++ (filter (`notElem` bindingVariables b) $ chAcc !! 1)
extractVariable chAcc (tag -> ELetIn i) = return $ (chAcc !! 0) ++ (filter (/= i) $ chAcc !! 1)
extractVariable chAcc (tag -> ECaseOf i) = return $ let [e, s, n] = chAcc in e ++ filter (/= i) s ++ n
extractVariable chAcc _ = return $ concat chAcc
-- | Retrieves all variables introduced by a binder
bindingVariables :: Binder -> [Identifier]
bindingVariables (BIndirection i) = [i]
bindingVariables (BTuple is) = is
bindingVariables (BRecord ivs) = snd (unzip ivs)
-- | Retrieves all variables modified in an expression.
modifiedVariables :: K3 Expression -> [Identifier]
modifiedVariables expr = either (const []) id $ foldMapTree extractVariable [] expr
where
extractVariable chAcc (tag -> EAssign n) = return $ concat chAcc ++ [n]
extractVariable chAcc (tag -> ELambda n) = return $ filter (/= n) $ concat chAcc
extractVariable chAcc (tag -> EBindAs b) = return $ (chAcc !! 0) ++ (filter (`notElem` bindingVariables b) $ chAcc !! 1)
extractVariable chAcc (tag -> ELetIn i) = return $ (chAcc !! 0) ++ (filter (/= i) $ chAcc !! 1)
extractVariable chAcc (tag -> ECaseOf i) = return $ let [e, s, n] = chAcc in e ++ filter (/= i) s ++ n
extractVariable chAcc _ = return $ concat chAcc
-- | Computes the closure variables captured at lambda expressions.
-- This is a single pass that threads the identifiers in scope top-down while
-- accumulating closure variables bottom-up.
type ClosureEnv = IntMap [Identifier]
lambdaClosures :: K3 Declaration -> Either String ClosureEnv
lambdaClosures p = foldExpression lambdaClosuresExpr IntMap.empty p >>= return . fst
lambdaClosuresDecl :: Identifier -> ClosureEnv -> K3 Declaration -> Either String (ClosureEnv, K3 Declaration)
lambdaClosuresDecl n lc p = foldNamedDeclExpression n lambdaClosuresExpr lc p
lambdaClosuresExpr :: ClosureEnv -> K3 Expression -> Either String (ClosureEnv, K3 Expression)
lambdaClosuresExpr lc expr = do
(lcenv,_) <- biFoldMapTree bind extract [] (IntMap.empty, []) expr
return $ (IntMap.union lcenv lc, expr)
where
bind :: [Identifier] -> K3 Expression -> Either String ([Identifier], [[Identifier]])
bind l (tag -> ELambda i) = return (l, [i:l])
bind l (tag -> ELetIn i) = return (l, [l, i:l])
bind l (tag -> EBindAs b) = return (l, [l, bindingVariables b ++ l])
bind l (tag -> ECaseOf i) = return (l, [l, i:l, l])
bind l (children -> ch) = return (l, replicate (length ch) l)
extract :: [Identifier] -> [(ClosureEnv, [Identifier])] -> K3 Expression -> Either String (ClosureEnv, [Identifier])
extract _ chAcc (tag -> EVariable i) = rt chAcc (++[i])
extract _ chAcc (tag -> EAssign i) = rt chAcc (++[i])
extract l (concatLc -> (lcAcc,chAcc)) e@(tag -> ELambda n) = extendLc lcAcc e $ filter (onlyLocals n l) $ concat chAcc
extract _ (concatLc -> (lcAcc,chAcc)) (tag -> EBindAs b) = return . (lcAcc,) $ (chAcc !! 0) ++ (filter (`notElem` bindingVariables b) $ chAcc !! 1)
extract _ (concatLc -> (lcAcc,chAcc)) (tag -> ELetIn i) = return . (lcAcc,) $ (chAcc !! 0) ++ (filter (/= i) $ chAcc !! 1)
extract _ (concatLc -> (lcAcc,chAcc)) (tag -> ECaseOf i) = return . (lcAcc,) $ let [e, s, n] = chAcc in e ++ filter (/= i) s ++ n
extract _ chAcc _ = rt chAcc id
onlyLocals n l i = i /= n && i `elem` l
concatLc :: [(ClosureEnv, [Identifier])] -> (ClosureEnv, [[Identifier]])
concatLc subAcc = let (x,y) = unzip subAcc in (IntMap.unions x, y)
extendLc :: ClosureEnv -> K3 Expression -> [Identifier] -> Either String (ClosureEnv, [Identifier])
extendLc lcenv e ids = case e @~ isEUID of
Just (EUID (UID i)) -> return $ (IntMap.insert i (nub ids) lcenv, ids)
_ -> Left $ boxToString $ ["No UID found on lambda"] %$ prettyLines e
rt subAcc f = return $ second (f . concat) $ concatLc subAcc
-- | Compares declarations and expressions for identical AST structures
-- while ignoring annotations and properties (such as UIDs, spans, etc.)
compareDAST :: K3 Declaration -> K3 Declaration -> Bool
compareDAST d1 d2 = stripAllDeclAnnotations d1 == stripAllDeclAnnotations d2
compareEAST :: K3 Expression -> K3 Expression -> Bool
compareEAST e1 e2 = stripAllExprAnnotations e1 == stripAllExprAnnotations e2
compareTAST :: K3 Type -> K3 Type -> Bool
compareTAST t1 t2 = stripAllTypeAnnotations t1 == stripAllTypeAnnotations t2
compareDStrictAST :: K3 Declaration -> K3 Declaration -> Bool
compareDStrictAST d1 d2 = stripDCompare d1 == stripDCompare d2
compareEStrictAST :: K3 Expression -> K3 Expression -> Bool
compareEStrictAST e1 e2 = stripECompare e1 == stripECompare e2
compareTStrictAST :: K3 Type -> K3 Type -> Bool
compareTStrictAST t1 t2 = stripTCompare t1 == stripTCompare t2
{- Annotation cleaning -}
-- | Strips all annotations from a declaration (including in any contained types and expressions)
stripDeclAnnotations :: (Annotation Declaration -> Bool)
-> (Annotation Expression -> Bool)
-> (Annotation Type -> Bool)
-> K3 Declaration -> K3 Declaration
stripDeclAnnotations dStripF eStripF tStripF d =
runIdentity $ mapProgram stripDeclF stripMemF stripExprF (Just stripTypeF) d
where
stripDeclF (Node (tg :@: anns) ch) = return $ Node (tg :@: stripDAnns anns) ch
stripMemF (Lifted p n t eOpt anns) = return $ Lifted p n t eOpt $ stripDAnns anns
stripMemF (Attribute p n t eOpt anns) = return $ Attribute p n t eOpt $ stripDAnns anns
stripMemF (MAnnotation p n anns) = return $ MAnnotation p n $ stripDAnns anns
stripExprF e = return $ stripExprAnnotations eStripF tStripF e
stripTypeF t = return $ stripTypeAnnotations tStripF t
stripDAnns anns = filter (not . dStripF) anns
-- | Strip annotations from a specific named declaration, using the given filter predicates.
stripNamedDeclAnnotations :: Identifier
-> (Annotation Declaration -> Bool)
-> (Annotation Expression -> Bool)
-> (Annotation Type -> Bool)
-> K3 Declaration -> K3 Declaration
stripNamedDeclAnnotations i dStripF eStripF tStripF p =
runIdentity $ mapProgram stripDeclF return return Nothing p
where
stripDeclF (Node (DGlobal n t eOpt :@: anns) ch) | i == n =
return $ Node (DGlobal n (stripTypeF t) (maybe Nothing (Just . stripExprF) eOpt)
:@: stripDAnns anns) ch
stripDeclF (Node (DTrigger n t e :@: anns) ch) | i == n =
return $ Node (DTrigger n (stripTypeF t) (stripExprF e) :@: stripDAnns anns) ch
stripDeclF d = return d
stripExprF e = stripExprAnnotations eStripF tStripF e
stripTypeF t = stripTypeAnnotations tStripF t
stripDAnns anns = filter (not . dStripF) anns
-- | Strips all annotations from an expression given expression and type annotation filtering functions.
stripExprAnnotations :: (Annotation Expression -> Bool) -> (Annotation Type -> Bool)
-> K3 Expression -> K3 Expression
stripExprAnnotations eStripF tStripF e = runIdentity $ mapTree strip e
where
strip ch n@(tag -> EConstant (CEmpty t)) =
let nct = stripTypeAnnotations tStripF t
in return $ Node (EConstant (CEmpty nct) :@: stripEAnns n) ch
strip ch n = return $ Node (tag n :@: stripEAnns n) ch
stripEAnns n = filter (not . eStripF) $ annotations n
-- | Strips all annotations from a type given a type annotation filtering function.
stripTypeAnnotations :: (Annotation Type -> Bool) -> K3 Type -> K3 Type
stripTypeAnnotations tStripF t = runIdentity $ mapTree strip t
where strip ch n = return $ Node (tag n :@: (filter (not . tStripF) $ annotations n)) ch
-- | Strips all annotations from a declaration deeply.
stripAllDeclAnnotations :: K3 Declaration -> K3 Declaration
stripAllDeclAnnotations = stripDeclAnnotations (const True) (const True) (const True)
-- | Strips all annotations from an expression deeply.
stripAllExprAnnotations :: K3 Expression -> K3 Expression
stripAllExprAnnotations = stripExprAnnotations (const True) (const True)
-- | Strips all annotations from a type deeply.
stripAllTypeAnnotations :: K3 Type -> K3 Type
stripAllTypeAnnotations = stripTypeAnnotations (const True)
{- Annotation removal -}
stripDCompare :: K3 Declaration -> K3 Declaration
stripDCompare = stripDeclAnnotations cleanDecl cleanExpr cleanType
where cleanDecl a = not $ isDUserProperty a
cleanExpr a = not (isEQualified a || isEUserProperty a || isEAnnotation a)
cleanType a = not (isTAnnotation a || isTUserProperty a)
stripECompare :: K3 Expression -> K3 Expression
stripECompare = stripExprAnnotations cleanExpr cleanType
where cleanExpr a = not (isEQualified a || isEUserProperty a || isEAnnotation a)
cleanType a = not (isTAnnotation a || isTUserProperty a)
stripTCompare :: K3 Type -> K3 Type
stripTCompare = stripTypeAnnotations (not . isTAnnotation)
stripComments :: K3 Declaration -> K3 Declaration
stripComments = stripDeclAnnotations isDSyntax isESyntax (const False)
stripDUIDSpan :: K3 Declaration -> K3 Declaration
stripDUIDSpan = stripDeclAnnotations isDUIDSpan isEUIDSpan isTUIDSpan
stripEUIDSpan :: K3 Expression -> K3 Expression
stripEUIDSpan = stripExprAnnotations isEUIDSpan isTUIDSpan
stripTUIDSpan :: K3 Type -> K3 Type
stripTUIDSpan = stripTypeAnnotations isTUIDSpan
stripTypeAnns :: K3 Declaration -> K3 Declaration
stripTypeAnns = stripDeclAnnotations (const False) isAnyETypeAnn (const False)
stripDeclTypeAnns :: Identifier -> K3 Declaration -> K3 Declaration
stripDeclTypeAnns i = stripNamedDeclAnnotations i (const False) isAnyETypeAnn (const False)
-- | Strip all inferred properties from a program
stripProperties :: K3 Declaration -> K3 Declaration
stripProperties = stripDeclAnnotations isDInferredProperty isEInferredProperty isTInferredProperty
-- | Strip all inferred properties from a specific declaration
stripDeclProperties :: Identifier -> K3 Declaration -> K3 Declaration
stripDeclProperties i = stripNamedDeclAnnotations i isDInferredProperty isEInferredProperty isTInferredProperty
-- | Removes all properties from a program.
stripAllProperties :: K3 Declaration -> K3 Declaration
stripAllProperties = stripDeclAnnotations isDProperty isEProperty isTProperty
-- | Strip all inferred effect annotations from a program
stripEffectAnns :: K3 Declaration -> K3 Declaration
stripEffectAnns p = stripDeclAnnotations isAnyDInferredEffectAnn isAnyEEffectAnn (const False) p
-- | Strip all inferred effect annotations from a specific declaration
stripDeclEffectAnns :: Identifier -> K3 Declaration -> K3 Declaration
stripDeclEffectAnns i p = stripNamedDeclAnnotations i isAnyDInferredEffectAnn isAnyEEffectAnn (const False) p
-- | Strip all effects annotations, including user-specified effect signatures.
stripAllEffectAnns :: K3 Declaration -> K3 Declaration
stripAllEffectAnns = stripDeclAnnotations isAnyDEffectAnn isAnyEEffectAnn (const False)
-- | Single-pass composition of type and effect removal.
stripTypeAndEffectAnns :: K3 Declaration -> K3 Declaration
stripTypeAndEffectAnns p =
stripDeclAnnotations isAnyDInferredEffectAnn isAnyETypeOrEffectAnn (const False) p
-- | Single-pass composition of type and effect removal on a specific named declaration.
stripDeclTypeAndEffectAnns :: Identifier -> K3 Declaration -> K3 Declaration
stripDeclTypeAndEffectAnns i p =
stripNamedDeclAnnotations i isAnyDInferredEffectAnn isAnyETypeOrEffectAnn (const False) p
-- | Single-pass composition of type removal with removal of all effect annotations, including user-specified effect signatures.
stripAllTypeAndEffectAnns :: K3 Declaration -> K3 Declaration
stripAllTypeAndEffectAnns = stripDeclAnnotations isAnyDEffectAnn isAnyETypeOrEffectAnn (const False)
{-| Tree repair utilities -}
-- | Ensures every node has a valid UID and Span.
-- This currently does not handle literals.
repairProgram :: String -> Maybe Int -> K3 Declaration -> (Int, K3 Declaration)
repairProgram repairMsg nextUIDOpt p =
let nextUID = maybe (let UID maxUID = maxProgramUID p in maxUID + 1) id nextUIDOpt
in runIdentity $ foldProgram repairDecl repairMem repairExpr (Just repairType) nextUID p
where
repairDecl :: Int -> K3 Declaration -> Identity (Int, K3 Declaration)
repairDecl uid n = validateD uid (children n) n
repairExpr :: Int -> K3 Expression -> Identity (Int, K3 Expression)
repairExpr uid n = foldRebuildTree validateE uid n
repairType :: Int -> K3 Type -> Identity (Int, K3 Type)
repairType uid n = foldRebuildTree validateT uid n
repairMem uid (Lifted pol n t eOpt anns) = rebuildMem uid anns $ Lifted pol n (repairTQualifier t) eOpt
repairMem uid (Attribute pol n t eOpt anns) = rebuildMem uid anns $ Attribute pol n (repairTQualifier t) eOpt
repairMem uid (MAnnotation pol n anns) = rebuildMem uid anns $ MAnnotation pol n
validateD :: Int -> [K3 Declaration] -> K3 Declaration -> Identity (Int, K3 Declaration)
validateD uid ch n = ensureUIDSpan uid DUID isDUID DSpan isDSpan ch n >>= return . second repairDQualifier
validateE :: Int -> [K3 Expression] -> K3 Expression -> Identity (Int, K3 Expression)
validateE uid ch n = ensureUIDSpan uid EUID isEUID ESpan isESpan ch n >>= return . second repairEQualifier
validateT :: Int -> [K3 Type] -> K3 Type -> Identity (Int, K3 Type)
validateT uid ch n = ensureUIDSpan uid TUID isTUID TSpan isTSpan ch n >>= return . second repairTQualifier
repairDQualifier d = case tag d of
DGlobal n t eOpt -> replaceTag d (DGlobal n (repairTQualifier t) eOpt)
DTrigger n t e -> replaceTag d (DTrigger n (repairTQualifier t) e)
_ -> d
repairEQualifier :: K3 Expression -> K3 Expression
repairEQualifier n = case tnc n of
(EConstant (CEmpty t), _) -> let nt = runIdentity $ modifyTree (return . repairTQualifier) t
in replaceTag n $ EConstant $ CEmpty nt
(ELetIn _, [t, b]) -> replaceCh n [repairEQAnn t, b]
(ESome, ch) -> replaceCh n $ map repairEQAnn ch
(EIndirect, ch) -> replaceCh n $ map repairEQAnn ch
(ETuple, ch) -> replaceCh n $ map repairEQAnn ch
(ERecord _, ch) -> replaceCh n $ map repairEQAnn ch
_ -> n
repairEQAnn n@((@~ isEQualified) -> Nothing) = n @+ EImmutable
repairEQAnn n = n
repairTQualifier n = case tnc n of
(TOption, ch) -> replaceCh n $ map repairTQAnn ch
(TIndirection, ch) -> replaceCh n $ map repairTQAnn ch
(TTuple, ch) -> replaceCh n $ map repairTQAnn ch
(TRecord _, ch) -> replaceCh n $ map repairTQAnn ch
_ -> n
repairTQAnn n@((@~ isTQualified) -> Nothing) = n @+ TImmutable
repairTQAnn n = n
rebuildMem uid anns ctor = return $ (\(nuid, nanns) -> (nuid, ctor nanns)) $ validateMem uid anns
validateMem uid anns =
let (nuid, extraAnns) =
(\spa -> maybe (uid+1, [DUID $ UID uid]++spa) (const (uid, spa)) $ find isDUID anns)
$ maybe ([DSpan $ GeneratedSpan repairMsg]) (const []) $ find isDSpan anns
in (nuid, anns ++ extraAnns)
ensureUIDSpan uid uCtor uT sCtor sT ch (Node tg _) =
return $ ensureUID uid uCtor uT $ snd $ ensureSpan sCtor sT $ Node tg ch
ensureSpan :: (Eq (Annotation a)) => (Span -> Annotation a) -> (Annotation a -> Bool) -> K3 a -> ((), K3 a)
ensureSpan ctor t n = addAnn () () (ctor $ GeneratedSpan repairMsg) t n
ensureUID :: (Eq (Annotation a)) => Int -> (UID -> Annotation a) -> (Annotation a -> Bool) -> (K3 a) -> (Int, K3 a)
ensureUID uid ctor t n = addAnn (uid+1) uid (ctor $ UID uid) t n
addAnn rUsed rNotUsed a t n = maybe (rUsed, n @+ a) (const (rNotUsed, n)) (n @~ t)
-- | Fold an accumulator over all program UIDs.
foldProgramUID :: (a -> UID -> a) -> a -> K3 Declaration -> (a, K3 Declaration)
foldProgramUID uidF z d = runIdentity $ foldProgram onDecl onMem onExpr (Just onType) z d
where onDecl a n = return $ (dUID a n, n)
onExpr a n = foldTree (\a' n' -> return $ eUID a' n') a n >>= return . (,n)
onType a n = foldTree (\a' n' -> return $ tUID a' n') a n >>= return . (,n)
onMem a mem@(Lifted _ _ _ _ anns) = return $ (dMemUID a anns, mem)
onMem a mem@(Attribute _ _ _ _ anns) = return $ (dMemUID a anns, mem)
onMem a mem@(MAnnotation _ _ anns) = return $ (dMemUID a anns, mem)
dUID a n = maybe a (\case {DUID b -> uidF a b; _ -> a}) $ n @~ isDUID
eUID a n = maybe a (\case {EUID b -> uidF a b; _ -> a}) $ n @~ isEUID
tUID a n = maybe a (\case {TUID b -> uidF a b; _ -> a}) $ n @~ isTUID
dMemUID a anns = maybe a (\case {DUID b -> uidF a b; _ -> a}) $ find isDUID anns
maxProgramUID :: K3 Declaration -> UID
maxProgramUID d = fst $ foldProgramUID maxUID (UID (minBound :: Int)) d
where maxUID (UID a) (UID b) = UID $ max a b
minProgramUID :: K3 Declaration -> UID
minProgramUID d = fst $ foldProgramUID minUID (UID (maxBound :: Int)) d
where minUID (UID a) (UID b) = UID $ min a b
collectProgramUIDs :: K3 Declaration -> [UID]
collectProgramUIDs d = fst $ foldProgramUID (flip (:)) [] d
duplicateProgramUIDs :: K3 Declaration -> [UID]
duplicateProgramUIDs d = let uids = collectProgramUIDs d in uids \\ nub uids
| yliu120/K3 | src/Language/K3/Core/Utils.hs | apache-2.0 | 48,409 | 0 | 22 | 12,451 | 16,672 | 8,467 | 8,205 | 736 | 18 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
module HERMIT.Kernel
( -- * The HERMIT Kernel
AST
, firstAST
, ASTMap
, Kernel
, KernelEnv(..)
, hermitKernel
, CommitMsg(..)
-- ** Kernel Interface
, resumeK
, abortK
, applyK
, queryK
, deleteK
, listK
, tellK
) where
import Prelude hiding (lookup, null)
import Control.Concurrent
import Control.Monad
import Control.Monad.IO.Class
import Data.IORef
import Data.Map
import Data.Typeable
import HERMIT.Context
import HERMIT.External
import HERMIT.GHC hiding (singleton, empty)
import HERMIT.Kure
import HERMIT.Lemma
import HERMIT.Monad
-- | A 'Kernel' is a repository for complete Core syntax trees ('ModGuts') and Lemmas.
data Kernel = Kernel
{ -- | Halt the 'Kernel' and return control to GHC, which compiles the specified 'AST'.
resumeK :: forall m. MonadIO m => AST -> m ()
-- | Halt the 'Kernel' and abort GHC without compiling.
, abortK :: forall m. MonadIO m => m ()
-- | Apply a 'Rewrite' to the specified 'AST' and return a handle to the resulting 'AST'.
, applyK :: forall m. (MonadIO m, MonadCatch m)
=> RewriteH ModGuts -> CommitMsg -> KernelEnv -> AST -> m AST
-- | Apply a 'TransformH' to the 'AST', return the resulting value, and potentially a new 'AST'.
, queryK :: forall m a. (MonadIO m, MonadCatch m)
=> TransformH ModGuts a -> CommitMsg -> KernelEnv -> AST -> m (AST,a)
-- | Delete the internal record of the specified 'AST'.
, deleteK :: forall m. MonadIO m => AST -> m ()
-- | List all the 'AST's tracked by the 'Kernel', including version data.
, listK :: forall m. MonadIO m => m [(AST,Maybe String, Maybe AST)]
-- | Log a new AST with same Lemmas/ModGuts as given AST.
, tellK :: forall m. (MonadIO m, MonadCatch m) => String -> AST -> m AST
} deriving Typeable
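-- An illustrative client callback ('someRewrite' and 'kEnv' below are
-- placeholders, not part of this module):
--
-- > client :: Kernel -> AST -> IO ()
-- > client k ast0 = do
-- >   ast1 <- applyK k someRewrite (Changed "simplify") kEnv ast0
-- >   resumeK k ast1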
data CommitMsg = Always String | Changed String | Never deriving Typeable
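-- Commit policy when recording a new 'AST': for queries, 'Changed' records a
-- new 'AST' only when the transform reports a change, 'Always' records one
-- regardless, and 'Never' attaches no commit message.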
msg :: CommitMsg -> Maybe String
msg Never = Nothing
msg (Always s) = Just s
msg (Changed s) = Just s
-- | A /handle/ for a specific version of the 'ModGuts'.
newtype AST = AST Int -- ^ Currently 'AST's are identified by an 'Int' label.
deriving (Eq, Ord, Typeable)
firstAST :: AST
firstAST = AST 0
-- for succ
instance Enum AST where
toEnum = AST
fromEnum (AST i) = i
instance Show AST where
show (AST i) = show i
instance Read AST where
readsPrec p s = [ (AST i,s') | (i,s') <- readsPrec p s ]
instance Extern AST where
type Box AST = AST
box i = i
unbox i = i
data ASTMap = ASTMap { astNext :: AST
, astMap :: Map AST KernelState
} deriving Typeable
emptyASTMap :: ASTMap
emptyASTMap = ASTMap firstAST empty
data KernelState = KernelState { ksLemmas :: Lemmas
, ksGuts :: ModGuts
, _ksParent :: Maybe AST
, _ksCommit :: Maybe String
}
data KernelEnv = KernelEnv { kEnvChan :: KEnvMessage -> HermitM () } deriving Typeable
-- | Internal API. The 'Kernel' object wraps these calls.
data Msg where
Apply :: AST -> (KernelState -> CoreM (KureM (Maybe KernelState, a)))
-> (MVar (KureM (AST, a))) -> Msg
Read :: (Map AST KernelState -> IO ()) -> Msg
Delete :: AST -> Msg
Done :: Maybe AST -> Msg
-- | Put a 'KernelState' in the 'ASTMap', returning
-- the 'AST' to which it was assigned.
insertAST :: KernelState -> ASTMap -> (AST, ASTMap)
insertAST ks (ASTMap k m) = (k, ASTMap (succ k) (insert k ks m))
findAST :: AST -> Map AST KernelState -> (String -> b) -> (KernelState -> b) -> b
findAST ast m f = find ast m (f $ "Cannot find syntax tree: " ++ show ast)
-- | Start a HERMIT client by providing an IO callback that takes the
-- initial 'Kernel' and initial 'AST' handle. The callback is only
-- ever called once. The 'ModGuts -> CoreM ModGuts' function
-- required by GHC Plugins is returned.
hermitKernel :: IORef (Maybe (AST, ASTMap)) -- ^ Global (across passes) AST store.
-> String -- ^ Last GHC pass name
-> (Kernel -> AST -> IO ()) -- ^ Callback
-> ModGuts -> CoreM ModGuts
hermitKernel store lastPass callback modGuts = do
msgMV :: MVar Msg <- liftIO newEmptyMVar
let withAST :: (MonadIO m, MonadCatch m)
=> AST -> (KernelState -> CoreM (KureM (Maybe KernelState, a))) -> m (AST, a)
withAST ast k = do
r <- liftIO $ do
resVar <- newEmptyMVar
putMVar msgMV $ Apply ast k resVar
takeMVar resVar
runKureM return fail r
readOnly :: MonadIO m => (Map AST KernelState -> KureM a) -> m (KureM a)
readOnly f = liftIO $ do
resVar <- newEmptyMVar
putMVar msgMV (Read (runKureM (putMVar resVar . return)
(putMVar resVar . fail) . f))
takeMVar resVar
let kernel :: Kernel
kernel = Kernel
{ resumeK = liftIO . putMVar msgMV . Done . Just
, abortK = liftIO $ putMVar msgMV (Done Nothing)
, applyK = \ rr cm kEnv ast -> liftM fst $
withAST ast $ \ (KernelState lemmas guts _ _) -> do
let handleS hRes = return $ return
(Just (KernelState (hResLemmas hRes) (hResult hRes) (Just ast) (msg cm)), ())
runHM (mkEnv (kEnvChan kEnv) guts lemmas)
handleS
(return . fail)
(applyT rr (topLevelHermitC guts) guts)
, queryK = \ t cm kEnv ast ->
withAST ast $ \ (KernelState lemmas guts _ _) -> do
let handleS hRes
| hResChanged hRes = f (Just (KernelState (hResLemmas hRes) guts (Just ast) (msg cm)), r)
| Always s <- cm = f (Just (KernelState lemmas guts (Just ast) (Just s)), r)
| otherwise = f (Nothing, r) -- pure query, not recorded in AST store
where r = hResult hRes
f = return . return
runHM (mkEnv (kEnvChan kEnv) guts lemmas)
handleS
(return . fail)
(applyT t (topLevelHermitC guts) guts)
, deleteK = liftIO . putMVar msgMV . Delete
, listK = readOnly (\m -> return [ (ast,cm,p) | (ast,KernelState _ _ p cm) <- toList m ])
>>= runKureM return fail
, tellK = \ str ast -> liftM fst $
withAST ast $ \ (KernelState lemmas guts _ _) ->
return $ return (Just $ KernelState lemmas guts (Just ast) (Just str), ())
}
let loop :: ASTMap -> CoreM ModGuts
loop m = do
cmd <- liftIO $ takeMVar msgMV
case cmd of
Apply ast f resVar -> do
kr <- findAST ast (astMap m) (return . fail) f
let handleS (mbKS, r) =
case mbKS of
Nothing -> liftIO (putMVar resVar $ return (ast,r)) >> loop m
Just ks -> let (ast', m') = insertAST ks m in
liftIO (putMVar resVar (return (ast',r))) >> loop m'
handleF str = liftIO (putMVar resVar $ fail str) >> loop m
runKureM handleS handleF kr
Read fn -> liftIO (fn (astMap m)) >> loop m
Delete ast -> loop $ ASTMap (astNext m) $ delete ast (astMap m)
Done mbAST ->
case mbAST of
Nothing ->
abortKernel "Exiting HERMIT and aborting GHC compilation."
Just ast -> do
findAST ast (astMap m)
(\str -> abortKernel $ str ++ ", exiting HERMIT and aborting GHC compilation.")
(\ks -> liftIO (writeIORef store (Just (ast, m))) >> return (ksGuts ks))
-- Get the most recent AST and ASTMap the last HERMIT pass resumed with.
mbS <- liftIO $ readIORef store
(ast0,m) <- case mbS of
Nothing -> return $ insertAST (KernelState empty modGuts Nothing Nothing) emptyASTMap
Just (ast,m) -> do
ls <- findAST ast (astMap m)
(\str -> abortKernel $ str ++ ", exiting HERMIT and aborting GHC compilation.")
(return . ksLemmas)
return $ insertAST (KernelState ls modGuts (Just ast) (Just lastPass)) m
void $ liftIO $ forkIO $ callback kernel ast0
loop m
abortKernel :: String -> CoreM a
abortKernel = throwGhcException . ProgramError
find :: Ord k => k -> Map k v -> b -> (v -> b) -> b
find k m f s = maybe f s (lookup k m)
| beni55/hermit | src/HERMIT/Kernel.hs | bsd-2-clause | 9,634 | 0 | 31 | 3,696 | 2,698 | 1,394 | 1,304 | 174 | 7 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QIODevice_h.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:31
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Core.QIODevice_h where
import Foreign.C.Types
import Qtc.Enums.Base
import Qtc.Enums.Core.QIODevice
import Qtc.Classes.Base
import Qtc.Classes.Qccs_h
import Qtc.Classes.Core_h
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Foreign.Marshal.Array
instance QunSetUserMethod (QIODevice ()) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QIODevice_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
foreign import ccall "qtc_QIODevice_unSetUserMethod" qtc_QIODevice_unSetUserMethod :: Ptr (TQIODevice a) -> CInt -> CInt -> IO (CBool)
instance QunSetUserMethod (QIODeviceSc a) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QIODevice_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
instance QunSetUserMethodVariant (QIODevice ()) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QIODevice_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariant (QIODeviceSc a) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QIODevice_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariantList (QIODevice ()) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QIODevice_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QunSetUserMethodVariantList (QIODeviceSc a) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QIODevice_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QsetUserMethod (QIODevice ()) (QIODevice x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QIODevice setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QIODevice_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QIODevice_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QIODevice_setUserMethod" qtc_QIODevice_setUserMethod :: Ptr (TQIODevice a) -> CInt -> Ptr (Ptr (TQIODevice x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethod_QIODevice :: (Ptr (TQIODevice x0) -> IO ()) -> IO (FunPtr (Ptr (TQIODevice x0) -> IO ()))
foreign import ccall "wrapper" wrapSetUserMethod_QIODevice_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QIODeviceSc a) (QIODevice x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QIODevice setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QIODevice_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QIODevice_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QsetUserMethod (QIODevice ()) (QIODevice x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QIODevice setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QIODevice_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QIODevice_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QIODevice_setUserMethodVariant" qtc_QIODevice_setUserMethodVariant :: Ptr (TQIODevice a) -> CInt -> Ptr (Ptr (TQIODevice x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethodVariant_QIODevice :: (Ptr (TQIODevice x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> IO (FunPtr (Ptr (TQIODevice x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))))
foreign import ccall "wrapper" wrapSetUserMethodVariant_QIODevice_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QIODeviceSc a) (QIODevice x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QIODevice setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QIODevice_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QIODevice_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QunSetHandler (QIODevice ()) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QIODevice_unSetHandler cobj_qobj cstr_evid
foreign import ccall "qtc_QIODevice_unSetHandler" qtc_QIODevice_unSetHandler :: Ptr (TQIODevice a) -> CWString -> IO (CBool)
instance QunSetHandler (QIODeviceSc a) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QIODevice_unSetHandler cobj_qobj cstr_evid
instance QsetHandler (QIODevice ()) (QIODevice x0 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> IO (CBool)
setHandlerWrapper x0
= do x0obj <- qIODeviceFromPtr x0
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QIODevice_setHandler1" qtc_QIODevice_setHandler1 :: Ptr (TQIODevice a) -> CWString -> Ptr (Ptr (TQIODevice x0) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIODevice1 :: (Ptr (TQIODevice x0) -> IO (CBool)) -> IO (FunPtr (Ptr (TQIODevice x0) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QIODevice1_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QIODeviceSc a) (QIODevice x0 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> IO (CBool)
setHandlerWrapper x0
= do x0obj <- qIODeviceFromPtr x0
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QatEnd_h (QIODevice ()) (()) where
atEnd_h x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_atEnd cobj_x0
foreign import ccall "qtc_QIODevice_atEnd" qtc_QIODevice_atEnd :: Ptr (TQIODevice a) -> IO CBool
instance QatEnd_h (QIODeviceSc a) (()) where
atEnd_h x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_atEnd cobj_x0
instance QsetHandler (QIODevice ()) (QIODevice x0 -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> IO (CLLong)
setHandlerWrapper x0
= do x0obj <- qIODeviceFromPtr x0
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj
rvf <- rv
return (toCLLong rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QIODevice_setHandler2" qtc_QIODevice_setHandler2 :: Ptr (TQIODevice a) -> CWString -> Ptr (Ptr (TQIODevice x0) -> IO (CLLong)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIODevice2 :: (Ptr (TQIODevice x0) -> IO (CLLong)) -> IO (FunPtr (Ptr (TQIODevice x0) -> IO (CLLong)))
foreign import ccall "wrapper" wrapSetHandler_QIODevice2_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QIODeviceSc a) (QIODevice x0 -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> IO (CLLong)
setHandlerWrapper x0
= do x0obj <- qIODeviceFromPtr x0
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj
rvf <- rv
return (toCLLong rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QbytesAvailable_h (QIODevice ()) (()) where
bytesAvailable_h x0 ()
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_bytesAvailable cobj_x0
foreign import ccall "qtc_QIODevice_bytesAvailable" qtc_QIODevice_bytesAvailable :: Ptr (TQIODevice a) -> IO CLLong
instance QbytesAvailable_h (QIODeviceSc a) (()) where
bytesAvailable_h x0 ()
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_bytesAvailable cobj_x0
instance QbytesToWrite_h (QIODevice ()) (()) where
bytesToWrite_h x0 ()
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_bytesToWrite cobj_x0
foreign import ccall "qtc_QIODevice_bytesToWrite" qtc_QIODevice_bytesToWrite :: Ptr (TQIODevice a) -> IO CLLong
instance QbytesToWrite_h (QIODeviceSc a) (()) where
bytesToWrite_h x0 ()
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_bytesToWrite cobj_x0
instance QcanReadLine_h (QIODevice ()) (()) where
canReadLine_h x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_canReadLine cobj_x0
foreign import ccall "qtc_QIODevice_canReadLine" qtc_QIODevice_canReadLine :: Ptr (TQIODevice a) -> IO CBool
instance QcanReadLine_h (QIODeviceSc a) (()) where
canReadLine_h x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_canReadLine cobj_x0
instance QsetHandler (QIODevice ()) (QIODevice x0 -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> IO ()
setHandlerWrapper x0
= do x0obj <- qIODeviceFromPtr x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QIODevice_setHandler3" qtc_QIODevice_setHandler3 :: Ptr (TQIODevice a) -> CWString -> Ptr (Ptr (TQIODevice x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIODevice3 :: (Ptr (TQIODevice x0) -> IO ()) -> IO (FunPtr (Ptr (TQIODevice x0) -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QIODevice3_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QIODeviceSc a) (QIODevice x0 -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> IO ()
setHandlerWrapper x0
= do x0obj <- qIODeviceFromPtr x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance Qclose_h (QIODevice ()) (()) where
close_h x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_close cobj_x0
foreign import ccall "qtc_QIODevice_close" qtc_QIODevice_close :: Ptr (TQIODevice a) -> IO ()
instance Qclose_h (QIODeviceSc a) (()) where
close_h x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_close cobj_x0
instance QisSequential_h (QIODevice ()) (()) where
isSequential_h x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_isSequential cobj_x0
foreign import ccall "qtc_QIODevice_isSequential" qtc_QIODevice_isSequential :: Ptr (TQIODevice a) -> IO CBool
instance QisSequential_h (QIODeviceSc a) (()) where
isSequential_h x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_isSequential cobj_x0
instance QsetHandler (QIODevice ()) (QIODevice x0 -> OpenMode -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice4 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice4_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler4 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> CLong -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qIODeviceFromPtr x0
let x1flags = qFlags_fromInt $ fromCLong x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1flags
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QIODevice_setHandler4" qtc_QIODevice_setHandler4 :: Ptr (TQIODevice a) -> CWString -> Ptr (Ptr (TQIODevice x0) -> CLong -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIODevice4 :: (Ptr (TQIODevice x0) -> CLong -> IO (CBool)) -> IO (FunPtr (Ptr (TQIODevice x0) -> CLong -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QIODevice4_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QIODeviceSc a) (QIODevice x0 -> OpenMode -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice4 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice4_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler4 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> CLong -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qIODeviceFromPtr x0
let x1flags = qFlags_fromInt $ fromCLong x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1flags
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance Qopen_h (QIODevice ()) ((OpenMode)) where
open_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_open cobj_x0 (toCLong $ qFlags_toInt x1)
foreign import ccall "qtc_QIODevice_open" qtc_QIODevice_open :: Ptr (TQIODevice a) -> CLong -> IO CBool
instance Qopen_h (QIODeviceSc a) ((OpenMode)) where
open_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_open cobj_x0 (toCLong $ qFlags_toInt x1)
instance Qpos_h (QIODevice ()) (()) where
pos_h x0 ()
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_pos cobj_x0
foreign import ccall "qtc_QIODevice_pos" qtc_QIODevice_pos :: Ptr (TQIODevice a) -> IO CLLong
instance Qpos_h (QIODeviceSc a) (()) where
pos_h x0 ()
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_pos cobj_x0
instance Qreset_h (QIODevice ()) (()) (IO (Bool)) where
reset_h x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_reset cobj_x0
foreign import ccall "qtc_QIODevice_reset" qtc_QIODevice_reset :: Ptr (TQIODevice a) -> IO CBool
instance Qreset_h (QIODeviceSc a) (()) (IO (Bool)) where
reset_h x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_reset cobj_x0
instance QsetHandler (QIODevice ()) (QIODevice x0 -> Int -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice5 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice5_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler5 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> CLLong -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qIODeviceFromPtr x0
let x1int = fromCLLong x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1int
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QIODevice_setHandler5" qtc_QIODevice_setHandler5 :: Ptr (TQIODevice a) -> CWString -> Ptr (Ptr (TQIODevice x0) -> CLLong -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIODevice5 :: (Ptr (TQIODevice x0) -> CLLong -> IO (CBool)) -> IO (FunPtr (Ptr (TQIODevice x0) -> CLLong -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QIODevice5_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QIODeviceSc a) (QIODevice x0 -> Int -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice5 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice5_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler5 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> CLLong -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qIODeviceFromPtr x0
let x1int = fromCLLong x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1int
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance Qseek_h (QIODevice ()) ((Int)) where
seek_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_seek cobj_x0 (toCLLong x1)
foreign import ccall "qtc_QIODevice_seek" qtc_QIODevice_seek :: Ptr (TQIODevice a) -> CLLong -> IO CBool
instance Qseek_h (QIODeviceSc a) ((Int)) where
seek_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_seek cobj_x0 (toCLLong x1)
instance Qqsize_h (QIODevice ()) (()) where
qsize_h x0 ()
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_size cobj_x0
foreign import ccall "qtc_QIODevice_size" qtc_QIODevice_size :: Ptr (TQIODevice a) -> IO CLLong
instance Qqsize_h (QIODeviceSc a) (()) where
qsize_h x0 ()
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_size cobj_x0
instance QwaitForBytesWritten_h (QIODevice ()) ((Int)) where
waitForBytesWritten_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_waitForBytesWritten cobj_x0 (toCInt x1)
foreign import ccall "qtc_QIODevice_waitForBytesWritten" qtc_QIODevice_waitForBytesWritten :: Ptr (TQIODevice a) -> CInt -> IO CBool
instance QwaitForBytesWritten_h (QIODeviceSc a) ((Int)) where
waitForBytesWritten_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_waitForBytesWritten cobj_x0 (toCInt x1)
instance QwaitForReadyRead_h (QIODevice ()) ((Int)) where
waitForReadyRead_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_waitForReadyRead cobj_x0 (toCInt x1)
foreign import ccall "qtc_QIODevice_waitForReadyRead" qtc_QIODevice_waitForReadyRead :: Ptr (TQIODevice a) -> CInt -> IO CBool
instance QwaitForReadyRead_h (QIODeviceSc a) ((Int)) where
waitForReadyRead_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QIODevice_waitForReadyRead cobj_x0 (toCInt x1)
instance QsetHandler (QIODevice ()) (QIODevice x0 -> String -> Int -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice6 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice6_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler6 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> Ptr (TQString ()) -> CLLong -> IO (CLLong)
setHandlerWrapper x0 x1 x2
= do x0obj <- qIODeviceFromPtr x0
x1str <- stringFromPtr x1
let x2int = fromCLLong x2
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj x1str x2int
rvf <- rv
return (toCLLong rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QIODevice_setHandler6" qtc_QIODevice_setHandler6 :: Ptr (TQIODevice a) -> CWString -> Ptr (Ptr (TQIODevice x0) -> Ptr (TQString ()) -> CLLong -> IO (CLLong)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIODevice6 :: (Ptr (TQIODevice x0) -> Ptr (TQString ()) -> CLLong -> IO (CLLong)) -> IO (FunPtr (Ptr (TQIODevice x0) -> Ptr (TQString ()) -> CLLong -> IO (CLLong)))
foreign import ccall "wrapper" wrapSetHandler_QIODevice6_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QIODeviceSc a) (QIODevice x0 -> String -> Int -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice6 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice6_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler6 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> Ptr (TQString ()) -> CLLong -> IO (CLLong)
setHandlerWrapper x0 x1 x2
= do x0obj <- qIODeviceFromPtr x0
x1str <- stringFromPtr x1
let x2int = fromCLLong x2
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj x1str x2int
rvf <- rv
return (toCLLong rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QwriteData_h (QIODevice ()) ((String, Int)) where
writeData_h x0 (x1, x2)
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QIODevice_writeData cobj_x0 cstr_x1 (toCLLong x2)
foreign import ccall "qtc_QIODevice_writeData" qtc_QIODevice_writeData :: Ptr (TQIODevice a) -> CWString -> CLLong -> IO CLLong
instance QwriteData_h (QIODeviceSc a) ((String, Int)) where
writeData_h x0 (x1, x2)
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QIODevice_writeData cobj_x0 cstr_x1 (toCLLong x2)
instance QsetHandler (QIODevice ()) (QIODevice x0 -> QEvent t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice7 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice7_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler7 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> Ptr (TQEvent t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qIODeviceFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QIODevice_setHandler7" qtc_QIODevice_setHandler7 :: Ptr (TQIODevice a) -> CWString -> Ptr (Ptr (TQIODevice x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIODevice7 :: (Ptr (TQIODevice x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> IO (FunPtr (Ptr (TQIODevice x0) -> Ptr (TQEvent t1) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QIODevice7_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QIODeviceSc a) (QIODevice x0 -> QEvent t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice7 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice7_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler7 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> Ptr (TQEvent t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qIODeviceFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance Qevent_h (QIODevice ()) ((QEvent t1)) where
event_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QIODevice_event cobj_x0 cobj_x1
foreign import ccall "qtc_QIODevice_event" qtc_QIODevice_event :: Ptr (TQIODevice a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent_h (QIODeviceSc a) ((QEvent t1)) where
event_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QIODevice_event cobj_x0 cobj_x1
instance QsetHandler (QIODevice ()) (QIODevice x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice8 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice8_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler8 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
setHandlerWrapper x0 x1 x2
= do x0obj <- qIODeviceFromPtr x0
x1obj <- qObjectFromPtr x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj x2obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QIODevice_setHandler8" qtc_QIODevice_setHandler8 :: Ptr (TQIODevice a) -> CWString -> Ptr (Ptr (TQIODevice x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QIODevice8 :: (Ptr (TQIODevice x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> IO (FunPtr (Ptr (TQIODevice x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QIODevice8_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QIODeviceSc a) (QIODevice x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QIODevice8 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QIODevice8_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QIODevice_setHandler8 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQIODevice x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
setHandlerWrapper x0 x1 x2
= do x0obj <- qIODeviceFromPtr x0
x1obj <- qObjectFromPtr x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj x2obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QeventFilter_h (QIODevice ()) ((QObject t1, QEvent t2)) where
eventFilter_h x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QIODevice_eventFilter cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QIODevice_eventFilter" qtc_QIODevice_eventFilter :: Ptr (TQIODevice a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter_h (QIODeviceSc a) ((QObject t1, QEvent t2)) where
eventFilter_h x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QIODevice_eventFilter cobj_x0 cobj_x1 cobj_x2
| keera-studios/hsQt | Qtc/Core/QIODevice_h.hs | bsd-2-clause | 41,241 | 0 | 18 | 9,678 | 13,807 | 6,602 | 7,205 | -1 | -1 |
{-# LANGUAGE
MultiParamTypeClasses
, TemplateHaskell
, ScopedTypeVariables
, FlexibleInstances
, FlexibleContexts
, UndecidableInstances
, GeneralizedNewtypeDeriving
, GADTs
#-}
module Spire.Expression.Types where
import Data.Monoid
import Unbound.LocallyNameless
import Spire.Canonical.Types
import qualified Spire.Canonical.Builtins as B
----------------------------------------------------------------------
data Check =
CLam (Bind Nom Check)
| CPair Check Check
| CRefl | CHere
| CThere Check | CEnd Check
| CRec Check Check | CInit Check
| CArg Check (Bind Nom Check)
| Infer Infer
deriving Show
data Infer =
IQuotes String
| IPi Check (Bind Nom Check)
| ISg Check (Bind Nom Check)
| IEq Infer Infer
| IVar Nom
| IIf Check Infer Infer
| IApp Infer Check
| IAnn Check Check
deriving Show
$(derive [''Check , ''Infer])
instance Alpha Check
instance Alpha Infer
----------------------------------------------------------------------
-- Here 'CDef f e' evs T' Tvs' corresponds to source program
--
-- f : T
-- f = e
--
-- where 'e' elaborates to 'e'' producing mvar bindings 'evs' and 'T'
-- elaborates to 'T'' producing mvar bindings 'Tvs'.
data CDef = CDef Nom Check Check
deriving Show
type CProg = [CDef]
----------------------------------------------------------------------
cVar :: String -> Check
cVar = Infer . iVar
iVar :: String -> Infer
iVar = IVar . s2n
iApps :: Infer -> [Check] -> Infer
iApps = foldl IApp
----------------------------------------------------------------------
| spire/spire | src/Spire/Expression/Types.hs | bsd-3-clause | 1,571 | 0 | 9 | 289 | 315 | 184 | 131 | 45 | 1 |
module RandomStream where
import RandomValue
data RStream a b = Out (RStream a b) b | In (a -> RStream a b)
instance Functor (RStream a) where
fmap f (Out rs b) = Out (fmap f rs) (f b)
fmap f (In g) = In $ \a -> fmap f (g a)
getOne :: RStream a b -> RValue a (RStream a b, b)
getOne (Out rs b) = Done (rs, b)
getOne (In f) = NotDone $ \a -> getOne (f a)
pipeList :: RStream a b -> [a] -> [b]
pipeList (Out rs b) as = b : pipeList rs as
pipeList (In f) (a:as) = pipeList (f a) as
pipeList (In _) [] = error "Reached end of list."
takeRS :: Int -> RStream a b -> RValue a [b]
takeRS 0 _ = Done []
takeRS n (Out rs b) = fmap (b :) (takeRS (n - 1) rs)
takeRS n (In f) = NotDone $ \a -> takeRS n (f a)
markov :: (b -> RValue a b) -> RValue a b -> RStream a b
markov trans (Done x) = Out (markov trans (trans x)) x
markov trans (NotDone f) = In $ \a -> markov trans (f a)
repeatRV :: RValue a b -> RStream a b
repeatRV rV = rRV rV rV
where rRV rV (Done x) = Out (repeatRV rV) x
rRV rV (NotDone f) = In $ \a -> rRV rV (f a) | cullina/Extractor | src/RandomStream.hs | bsd-3-clause | 1,094 | 0 | 11 | 320 | 647 | 325 | 322 | 24 | 2 |
module Control.Lens.Create ( create, Default(..), ASetter ) where
import Control.Lens.Setter ( ASetter, set )
import Data.Default.Class ( Default(..) )
-- | A convenient helper function for using Lenses as constructors.
create :: Default s => ASetter s t a b -> b -> t
create f x = set f x def
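-- A minimal usage sketch; the 'Config', '_port', and 'port' names below are
-- illustrative assumptions, not part of this module:
--
-- > data Config = Config { _port :: Int }
-- > instance Default Config where def = Config 80
-- >
-- > port :: Functor f => (Int -> f Int) -> Config -> f Config
-- > port f (Config p) = Config <$> f p
-- >
-- > create port 8080   -- equivalent to: set port 8080 def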
| gridaphobe/cabal2nix | lens-construction-helper/src/Control/Lens/Create.hs | bsd-3-clause | 297 | 0 | 7 | 55 | 98 | 57 | 41 | 5 | 1 |
module Crypto.Blockchain.Message where
import qualified Crypto.Blockchain.Block as Blockchain
import Data.Binary (Binary)
import GHC.Generics (Generic)
data Message a =
Tx a
| Block (Blockchain.Block a)
| Ping
deriving (Show, Generic)
instance Binary a => Binary (Message a)
deriving instance Eq a => Eq (Message a)
| cloudhead/cryptocurrency | src/Crypto/Blockchain/Message.hs | bsd-3-clause | 358 | 0 | 9 | 86 | 114 | 64 | 50 | -1 | -1 |
module Chapter5 where
-- Multiple choice
-- 1. c
-- 2. a
-- 3. b
-- 4. c
-- Determine the type
-- 1.
-- a) Num a => a
-- b) Num a => (a, [Char])
-- c) (Integer, [Char])
-- d) Bool
-- e) Int
-- f) Bool
-- 2. Num a => a
-- 3. Num a => a -> a
-- 4. Fractional a => a
-- 5. [Char]
-- Does it compile
-- 1. no. bigNum = (^) 5
-- 2. yes
-- 3. no. c = a 10
-- 4. no. c = 1
-- Type var or type constructor
-- 2.
-- [0] fully polymorphic
-- [1] concrete
-- [2] concrete
-- 3.
-- [0] fully polymorphic
-- [1] constrained Enum b
-- [2] concrete
-- 4.
-- [0] fully poly
-- [1] fully poly
-- [2] concrete
-- Write a type signature
-- 1. [a] -> a
-- 2. Ord a => a -> a -> Bool
-- 3. (a, b) -> b
-- Given a type, write the function
i :: a -> a
i = id
c :: a -> b -> a
c = const
c'' :: b -> a -> b
c'' = const
c' :: a -> b -> b
c' _ y = y
r :: [a] -> [a]
r = reverse
co :: (b -> c) -> (a -> b) -> (a -> c)
co = (.)
a :: (a -> c) -> a -> a
a _ x = x
a' :: (a -> b) -> a -> b
a' = ($)
-- Type-Kwon-Do
f :: Int -> String
f = undefined
g :: String -> Char
g = undefined
h :: Int -> Char
h = g . f
data A
data B
data C
q :: A -> B
q = undefined
w :: B -> C
w = undefined
e :: A -> C
e = w . q
data X
data Y
data Z
xz :: X -> Z
xz = undefined
yz :: Y -> Z
yz = undefined
xform :: (X, Y) -> (Z, Z)
xform (x, y) = (xz x, yz y)
munge :: (x -> y) -> (y -> (w, z)) -> x -> w
munge f' g' = fst . g' . f'
| taojang/haskell-programming-book-exercise | src/ch05/chapter5.hs | bsd-3-clause | 1,399 | 0 | 9 | 416 | 476 | 289 | 187 | -1 | -1 |
module Main where
import Diagrams.Backend.Rasterific (renderRasterific, Rasterific)
import Diagrams.Prelude (
P2, p2, r2,
fc, red, lw, none, center,
mkWidth, Diagram, mconcat,
strokeLoop, glueLine, lineFromSegments, straight,
arrowV, arrowAt', arrowHead, spike,
hcat, vcat, hcat', pad, catMethod, CatMethod(Distrib), sep,
def, set)
import Diagrams.TwoD.Layout.Grid (
gridCat')
import Linear (
V2(V2), (^+^), zero, negated, (*^))
vector2Diagram :: V2 Double -> Diagram Rasterific
vector2Diagram (V2 a1 a2) = arrowV (V2 a1 a2)
bivector2Diagram :: V2 Double -> V2 Double -> Diagram Rasterific
bivector2Diagram a b = mconcat [arrow1, arrow2, arrow3, arrow4, area] where
area = lw none (fc red (
strokeLoop (glueLine (lineFromSegments segments))))
segments = map straight [a, b, negated a, negated b]
invisibleArrow p v = lw none (arrowAt' arrowOpts (vectorP2 p) v)
arrow1 = invisibleArrow zero (0.5 *^ a)
arrow2 = invisibleArrow a (0.5 *^ b)
arrow3 = invisibleArrow (a ^+^ b) (0.5 *^ negated a)
arrow4 = invisibleArrow b (0.5 *^ negated b)
arrowOpts = set arrowHead spike def
drawOuterVectorVector2 :: V2 Double -> V2 Double -> Diagram Rasterific
drawOuterVectorVector2 a b = center (mconcat [
vector2Diagram a, vector2Diagram b, bivector2Diagram a b])
vectorP2 :: V2 r -> P2 r
vectorP2 (V2 a1 a2) = p2 (a1,a2)
figure1 :: Diagram Rasterific
figure1 = gridCat' 3 (concat (zipWith rowDiagram vectors1 vectors2)) where
rowDiagram vector1 vector2 = map center [
vector2Diagram vector1,
vector2Diagram vector2,
bivector2Diagram vector1 vector2]
vectors1 = [V2 9 1, V2 5 5, V2 9 1]
vectors2 = [V2 2 8, V2 (-5) 5, V2 (-9) 0]
figure2 :: Diagram Rasterific
figure2 = pad 1.1 (gridCat' 4 (map center (zipWith bivector2Diagram vectors1 vectors2))) where
vectors1 = [V2 5 0, V2 2.5 0, V2 10 0, V2 5 0]
vectors2 = [V2 0 5, V2 0 10, V2 0 2.5, V2 3 5]
figure3 :: Diagram Rasterific
figure3 = gridCat' 3 (concat (zipWith rowDiagram vectors1 vectors2)) where
rowDiagram vector1 vector2 = map (center . pad 1.1) [
vector2Diagram vector1,
vector2Diagram vector2,
bivector2Diagram vector1 vector2]
vectors1 = [V2 5 (-1), V2 3 5]
vectors2 = [V2 3 5, V2 5 (-1)]
main :: IO ()
main = do
renderRasterific "out/figure1.png" (mkWidth 500) figure1
renderRasterific "out/figure2.png" (mkWidth 500) figure2
renderRasterific "out/figure3.png" (mkWidth 500) figure3
| phischu/geometric-algebra | examples/BivectorDiagrams.hs | bsd-3-clause | 2,437 | 0 | 15 | 465 | 991 | 524 | 467 | 57 | 1 |
{-#
LANGUAGE
RecordWildCards
#-}
module ParserCreator where
import Data.Char
import Data.List
import GrammarParser
createHaskellParser::Grammar->String
createHaskellParser g =
"module Parser where\n\nimport Text.Parsec\nimport Data.Ix\n\n"
++ "wrap x y = \"<\" ++ x ++ \">\" ++ y ++ \"</\" ++ x ++ \">\"\n"
++ intercalate "\n\n" (map createRuleParser $ rules g)
++ "\n"
createRuleParser r =
"parse" ++ toUpper (head (ruleName r)):tail (ruleName r) ++ "::Parsec String () String\n"
++ "parse" ++ toUpper (head (ruleName r)):tail (ruleName r) ++ " =\n "
++ createOrParser (options r)
createOrParser (Or [tl]) = createTermListParser tl
createOrParser options =
"choice [" ++ intercalate ", " (map createTermListParser $ orTerm options) ++ "]"
createTermListParser (TermList [mt]) = createModifiedParser mt
createTermListParser termList =
"fmap concat (sequence [" ++ intercalate ", " (map createModifiedParser $ listTerms termList) ++ "])"
createModifiedParser::ModifiedTerm->String
createModifiedParser (ModifiedTerm negated t (Many greedy)) = "fmap concat (many (" ++ createTermParser t ++ "))"
createModifiedParser (ModifiedTerm negated t Option) = "option \"\" (" ++ createTermParser t ++ ")"
createModifiedParser (ModifiedTerm negated t Plus) = "fmap concat (many1 (" ++ createTermParser t ++ "))"
createModifiedParser (ModifiedTerm negated t None) = createTermParser t
createTermParser AnyChar = "anyChar"
createTermParser RuleTerm{..} = "fmap (wrap \"" ++ termName ++ "\") parse" ++ toUpper (head termName):tail termName
createTermParser ParenTerm{..} = "choice [" ++ intercalate ", " (map createTermListParser $ orTerm parenTermOptions) ++ "]"
createTermParser CharTerm{..} = "fmap show (char " ++ show theChar ++ ")"
createTermParser CharRangeTerm{range=CharRange from to} =
"fmap (:[]) (satisfy (inRange (" ++ show from ++ ", " ++ show to ++ ")))"
createTermParser CharSetTerm{charset=CharSet cs} = "fmap show (oneOf " ++ show cs ++ ")"
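-- A sketch of the generated text (illustrative; the rule name "digit" and a
-- single character-range alternative '0'..'9' are assumed): for such a rule
-- 'createRuleParser' emits roughly
--
-- > parseDigit::Parsec String () String
-- > parseDigit =
-- >   fmap (:[]) (satisfy (inRange ('0', '9')))
--
-- References to other rules are additionally wrapped with 'wrap', so the
-- combined parser returns the matched text as simple XML-like markup.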
| jamshidh/antlrToH | src/ParserCreator.hs | bsd-3-clause | 1,984 | 0 | 17 | 305 | 583 | 284 | 299 | 34 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, ConstraintKinds, PatternGuards #-}
{-# OPTIONS_GHC -O0 #-}
module Idris.Parser(module Idris.Parser,
module Idris.ParseExpr,
module Idris.ParseData,
module Idris.ParseHelpers,
module Idris.ParseOps) where
import Prelude hiding (pi)
import Text.Trifecta.Delta
import Text.Trifecta hiding (span, stringLiteral, charLiteral, natural, symbol, char, string, whiteSpace)
import Text.Parser.LookAhead
import Text.Parser.Expression
import qualified Text.Parser.Token as Tok
import qualified Text.Parser.Char as Chr
import qualified Text.Parser.Token.Highlight as Hi
import Text.PrettyPrint.ANSI.Leijen (Doc, plain)
import qualified Text.PrettyPrint.ANSI.Leijen as ANSI
import Idris.AbsSyntax hiding (namespace, params)
import Idris.DSL
import Idris.Imports
import Idris.Delaborate
import Idris.Error
import Idris.ElabDecls
import Idris.ElabTerm
import Idris.Coverage
import Idris.IBC
import Idris.Unlit
import Idris.Providers
import Idris.Output
import Idris.ParseHelpers
import Idris.ParseOps
import Idris.ParseExpr
import Idris.ParseData
import Idris.Docstrings
import Paths_idris
import Util.DynamicLinker
import qualified Util.Pretty as P
import Idris.Core.TT
import Idris.Core.Evaluate
import Control.Applicative hiding (Const)
import Control.Monad
import Control.Monad.Error (throwError, catchError)
import Control.Monad.State.Strict
import Data.Function
import Data.Maybe
import qualified Data.List.Split as Spl
import Data.List
import Data.Monoid
import Data.Char
import Data.Ord
import qualified Data.Map as M
import qualified Data.HashSet as HS
import qualified Data.Text as T
import qualified Data.ByteString.UTF8 as UTF8
import qualified Data.Set as S
import Debug.Trace
import System.FilePath
import System.IO
{-
@
grammar shortcut notation:
~CHARSEQ = complement of char sequence (i.e. any character except CHARSEQ)
RULE? = optional rule (i.e. RULE or nothing)
RULE* = repeated rule (i.e. RULE zero or more times)
RULE+ = repeated rule with at least one match (i.e. RULE one or more times)
  RULE! = invalid rule (i.e. a rule that is not valid in this context; report a meaningful error in that case)
RULE{n} = rule repeated n times
@
-}
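{- As an illustration of the notation, the rule
   @
   Import ::= 'import' Identifier_t ';'?;
   @
   below reads: the keyword 'import', followed by an identifier token,
   followed by an optional ';'. -}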
{- * Main grammar -}
{- | Parses module definition
@
ModuleHeader ::= 'module' Identifier_t ';'?;
@
-}
moduleHeader :: IdrisParser [String]
moduleHeader = try (do noDocCommentHere "Modules cannot have documentation comments"
reserved "module"
i <- identifier
option ';' (lchar ';')
return (moduleName i))
<|> try (do lchar '%'; reserved "unqualified"
return [])
<|> return (moduleName "Main")
where moduleName x = case span (/='.') x of
(x, "") -> [x]
(x, '.':y) -> x : moduleName y
{- | Parses an import statement
@
Import ::= 'import' Identifier_t ';'?;
@
-}
import_ :: IdrisParser (String, Maybe String, FC)
import_ = do fc <- getFC
reserved "import"
id <- identifier
newName <- optional (reserved "as" *> identifier)
option ';' (lchar ';')
return (toPath id, toPath <$> newName, fc)
<?> "import statement"
where toPath = foldl1' (</>) . Spl.splitOn "."
{- | Parses program source
@
Prog ::= Decl* EOF;
@
-}
prog :: SyntaxInfo -> IdrisParser [PDecl]
prog syn = do whiteSpace
decls <- many (decl syn)
notOpenBraces
eof
let c = (concat decls)
return c
{-| Parses a top-level declaration
@
Decl ::=
Decl'
| Using
| Params
| Mutual
| Namespace
| Class
| Instance
| DSL
| Directive
| Provider
| Transform
| Import!
;
@
-}
decl :: SyntaxInfo -> IdrisParser [PDecl]
decl syn = do notEndBlock
declBody
where declBody :: IdrisParser [PDecl]
declBody = declBody'
<|> using_ syn
<|> params syn
<|> mutual syn
<|> namespace syn
<|> class_ syn
<|> instance_ syn
<|> do d <- dsl syn; return [d]
<|> directive syn
<|> provider syn
<|> transform syn
<|> do import_; fail "imports must be at top of file"
<?> "declaration"
declBody' :: IdrisParser [PDecl]
declBody' = do d <- decl' syn
i <- get
let d' = fmap (debindApp syn . (desugar syn i)) d
return [d']
{- | Parses a top-level declaration with possible syntax sugar
@
Decl' ::=
Fixity
| FunDecl'
| Data
| Record
| SyntaxDecl
;
@
-}
decl' :: SyntaxInfo -> IdrisParser PDecl
decl' syn = fixity
<|> syntaxDecl syn
<|> fnDecl' syn
<|> data_ syn
<|> record syn
<?> "declaration"
{- | Parses a syntax extension declaration (and adds the rule to parser state)
@
SyntaxDecl ::= SyntaxRule;
@
-}
syntaxDecl :: SyntaxInfo -> IdrisParser PDecl
syntaxDecl syn = do s <- syntaxRule syn
i <- get
let rs = syntax_rules i
let ns = syntax_keywords i
let ibc = ibc_write i
let ks = map show (names s)
put (i { syntax_rules = s : rs,
syntax_keywords = ks ++ ns,
ibc_write = IBCSyntax s : map IBCKeyword ks ++ ibc })
fc <- getFC
return (PSyntax fc s)
where names (Rule syms _ _) = mapMaybe ename syms
ename (Keyword n) = Just n
ename _ = Nothing
{- | Parses a syntax extension declaration
@
SyntaxRuleOpts ::= 'term' | 'pattern';
@
@
SyntaxRule ::=
SyntaxRuleOpts? 'syntax' SyntaxSym+ '=' TypeExpr Terminator;
@
@
SyntaxSym ::= '[' Name_t ']'
| '{' Name_t '}'
| Name_t
| StringLiteral_t
;
@
-}
syntaxRule :: SyntaxInfo -> IdrisParser Syntax
syntaxRule syn
= do sty <- try (do
pushIndent
sty <- option AnySyntax (do reserved "term"; return TermSyntax
<|> do reserved "pattern"; return PatternSyntax)
reserved "syntax"
return sty)
syms <- some syntaxSym
when (all isExpr syms) $ unexpected "missing keywords in syntax rule"
let ns = mapMaybe getName syms
when (length ns /= length (nub ns))
$ unexpected "repeated variable in syntax rule"
lchar '='
tm <- typeExpr (allowImp syn)
terminator
return (Rule (mkSimple syms) tm sty)
where
isExpr (Expr _) = True
isExpr _ = False
getName (Expr n) = Just n
getName _ = Nothing
-- Can't parse two full expressions (i.e. expressions with application) in a row
-- so change them both to a simple expression
mkSimple (Expr e : es) = SimpleExpr e : mkSimple' es
mkSimple xs = mkSimple' xs
mkSimple' (Expr e : Expr e1 : es) = SimpleExpr e : SimpleExpr e1 :
mkSimple es
mkSimple' (e : es) = e : mkSimple' es
mkSimple' [] = []
{- | Parses a syntax symbol (either binding variable, keyword or expression)
@
SyntaxSym ::= '[' Name_t ']'
| '{' Name_t '}'
| Name_t
| StringLiteral_t
;
@
-}
syntaxSym :: IdrisParser SSymbol
syntaxSym = try (do lchar '['; n <- name; lchar ']'
return (Expr n))
<|> try (do lchar '{'; n <- name; lchar '}'
return (Binding n))
<|> do n <- iName []
return (Keyword n)
<|> do sym <- stringLiteral
return (Symbol sym)
<?> "syntax symbol"
{- | Parses a function declaration with possible syntax sugar
@
FunDecl ::= FunDecl';
@
-}
fnDecl :: SyntaxInfo -> IdrisParser [PDecl]
fnDecl syn = try (do notEndBlock
d <- fnDecl' syn
i <- get
let d' = fmap (desugar syn i) d
return [d']) <?> "function declaration"
{-| Parses a function declaration
@
FunDecl' ::=
DocComment_t? FnOpts* Accessibility? FnOpts* FnName TypeSig Terminator
| Postulate
| Pattern
| CAF
;
@
-}
fnDecl' :: SyntaxInfo -> IdrisParser PDecl
fnDecl' syn = checkFixity $
do (doc, fc, opts', n, acc) <- try (do
pushIndent
ist <- get
doc <- option noDocs docComment
ist <- get
let initOpts = if default_total ist
then [TotalFn]
else []
opts <- fnOpts initOpts
acc <- optional accessibility
opts' <- fnOpts opts
n_in <- fnName
let n = expandNS syn n_in
fc <- getFC
lchar ':'
return (doc, fc, opts', n, acc))
ty <- typeExpr (allowImp syn)
terminator
addAcc n acc
return (PTy (fst doc) (snd doc) syn fc opts' n ty)
<|> postulate syn
<|> caf syn
<|> pattern syn
<?> "function declaration"
where checkFixity :: IdrisParser PDecl -> IdrisParser PDecl
checkFixity p = do decl <- p
case getName decl of
Nothing -> return decl
Just n -> do fOk <- fixityOK n
unless fOk . fail $
"Missing fixity declaration for " ++ show n
return decl
getName (PTy _ _ _ _ _ n _) = Just n
getName _ = Nothing
fixityOK (NS n _) = fixityOK n
fixityOK (UN n) | all (flip elem opChars) (str n) =
do fixities <- fmap idris_infixes get
return . elem (str n) . map (\ (Fix _ op) -> op) $ fixities
| otherwise = return True
fixityOK _ = return True
{-| Parses function options given initial options
@
FnOpts ::= 'total'
| 'partial'
| 'implicit'
| '%' 'assert_total'
| '%' 'error_handler'
| '%' 'reflection'
| '%' 'specialise' '[' NameTimesList? ']'
;
@
@
NameTimes ::= FnName Natural?;
@
@
NameTimesList ::=
NameTimes
| NameTimes ',' NameTimesList
;
@
-}
-- FIXME: Check compatibility for function options (i.e. partial/total)
fnOpts :: [FnOpt] -> IdrisParser [FnOpt]
fnOpts opts
= do reserved "total"; fnOpts (TotalFn : opts)
<|> do reserved "partial"; fnOpts (PartialFn : (opts \\ [TotalFn]))
<|> do reserved "covering"; fnOpts (CoveringFn : (opts \\ [TotalFn]))
<|> do try (lchar '%' *> reserved "export"); c <- stringLiteral;
fnOpts (CExport c : opts)
<|> do try (lchar '%' *> reserved "assert_total");
fnOpts (AssertTotal : opts)
<|> do try (lchar '%' *> reserved "error_handler");
fnOpts (ErrorHandler : opts)
<|> do try (lchar '%' *> reserved "error_reverse");
fnOpts (ErrorReverse : opts)
<|> do try (lchar '%' *> reserved "reflection");
fnOpts (Reflection : opts)
<|> do lchar '%'; reserved "specialise";
lchar '['; ns <- sepBy nameTimes (lchar ','); lchar ']'
fnOpts (Specialise ns : opts)
<|> do reserved "implicit"; fnOpts (Implicit : opts)
<|> return opts
<?> "function modifier"
where nameTimes :: IdrisParser (Name, Maybe Int)
nameTimes = do n <- fnName
t <- option Nothing (do reds <- natural
return (Just (fromInteger reds)))
return (n, t)
{- | Parses a postulate
@
Postulate ::=
  DocComment_t? 'postulate' FnOpts* Accessibility? FnOpts* FnName TypeSig Terminator
;
@
-}
postulate :: SyntaxInfo -> IdrisParser PDecl
postulate syn = do doc <- try $ do doc <- option noDocs docComment
pushIndent
reserved "postulate"
return doc
ist <- get
let initOpts = if default_total ist
then [TotalFn]
else []
opts <- fnOpts initOpts
acc <- optional accessibility
opts' <- fnOpts opts
n_in <- fnName
let n = expandNS syn n_in
lchar ':'
ty <- typeExpr (allowImp syn)
fc <- getFC
terminator
addAcc n acc
return (PPostulate (fst doc) syn fc opts' n ty)
<?> "postulate"
{- | Parses a using declaration
@
Using ::=
'using' '(' UsingDeclList ')' OpenBlock Decl* CloseBlock
;
@
-}
using_ :: SyntaxInfo -> IdrisParser [PDecl]
using_ syn =
do reserved "using"; lchar '('; ns <- usingDeclList syn; lchar ')'
openBlock
let uvars = using syn
ds <- many (decl (syn { using = uvars ++ ns }))
closeBlock
return (concat ds)
<?> "using declaration"
{- | Parses a parameters declaration
@
Params ::=
'parameters' '(' TypeDeclList ')' OpenBlock Decl* CloseBlock
;
@
-}
params :: SyntaxInfo -> IdrisParser [PDecl]
params syn =
do reserved "parameters"; lchar '('; ns <- typeDeclList syn; lchar ')'
openBlock
let pvars = syn_params syn
ds <- many (decl syn { syn_params = pvars ++ ns })
closeBlock
fc <- getFC
return [PParams fc ns (concat ds)]
<?> "parameters declaration"
{- | Parses a mutual declaration (for mutually recursive functions)
@
Mutual ::=
'mutual' OpenBlock Decl* CloseBlock
;
@
-}
mutual :: SyntaxInfo -> IdrisParser [PDecl]
mutual syn =
do reserved "mutual"
openBlock
let pvars = syn_params syn
ds <- many (decl syn)
closeBlock
fc <- getFC
return [PMutual fc (concat ds)]
<?> "mutual block"
{-| Parses a namespace declaration
@
Namespace ::=
'namespace' identifier OpenBlock Decl+ CloseBlock
;
@
-}
namespace :: SyntaxInfo -> IdrisParser [PDecl]
namespace syn =
do reserved "namespace"; n <- identifier;
openBlock
ds <- some (decl syn { syn_namespace = n : syn_namespace syn })
closeBlock
return [PNamespace n (concat ds)]
<?> "namespace declaration"
{- | Parses a methods block (for instances)
@
InstanceBlock ::= 'where' OpenBlock FnDecl* CloseBlock
@
-}
instanceBlock :: SyntaxInfo -> IdrisParser [PDecl]
instanceBlock syn = do reserved "where"
openBlock
ds <- many (fnDecl syn)
closeBlock
return (concat ds)
<?> "instance block"
{- | Parses a methods and instances block (for type classes)
@
MethodOrInstance ::=
FnDecl
| Instance
;
@
@
ClassBlock ::=
'where' OpenBlock MethodOrInstance* CloseBlock
;
@
-}
classBlock :: SyntaxInfo -> IdrisParser [PDecl]
classBlock syn = do reserved "where"
openBlock
ds <- many ((notEndBlock >> instance_ syn) <|> fnDecl syn)
closeBlock
return (concat ds)
<?> "class block"
{-| Parses a type class declaration
@
ClassArgument ::=
Name
| '(' Name ':' Expr ')'
;
@
@
Class ::=
DocComment_t? Accessibility? 'class' ConstraintList? Name ClassArgument* ClassBlock?
;
@
-}
class_ :: SyntaxInfo -> IdrisParser [PDecl]
class_ syn = do (doc, acc) <- try (do
doc <- option noDocs docComment
acc <- optional accessibility
return (doc, acc))
reserved "class"; fc <- getFC; cons <- constraintList syn; n_in <- fnName
let n = expandNS syn n_in
cs <- many carg
ds <- option [] (classBlock syn)
accData acc n (concatMap declared ds)
return [PClass (fst doc) syn fc cons n cs (snd doc) ds]
<?> "type-class declaration"
where
carg :: IdrisParser (Name, PTerm)
carg = do lchar '('; i <- name; lchar ':'; ty <- expr syn; lchar ')'
return (i, ty)
<|> do i <- name;
return (i, PType)
{- | Parses a type class instance declaration
@
Instance ::=
'instance' InstanceName? ConstraintList? Name SimpleExpr* InstanceBlock?
;
@
@
InstanceName ::= '[' Name ']';
@
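
For instance (illustrative):

> instance Pretty Nat where
>     pretty n = show n
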
-}
instance_ :: SyntaxInfo -> IdrisParser [PDecl]
instance_ syn = do reserved "instance"; fc <- getFC
en <- optional instanceName
cs <- constraintList syn
cn <- fnName
args <- many (simpleExpr syn)
let sc = PApp fc (PRef fc cn) (map pexp args)
let t = bindList (PPi constraint) (map (\x -> (sMN 0 "constraint", x)) cs) sc
ds <- option [] (instanceBlock syn)
return [PInstance syn fc cs cn args t en ds]
<?> "instance declaration"
where instanceName :: IdrisParser Name
instanceName = do lchar '['; n_in <- fnName; lchar ']'
let n = expandNS syn n_in
return n
<?> "instance name"
{- | Parses a using declaration list
@
UsingDeclList ::=
UsingDeclList'
| NameList TypeSig
;
@
@
UsingDeclList' ::=
UsingDecl
| UsingDecl ',' UsingDeclList'
;
@
@
NameList ::=
Name
| Name ',' NameList
;
@
-}
usingDeclList :: SyntaxInfo -> IdrisParser [Using]
usingDeclList syn
= try (sepBy1 (usingDecl syn) (lchar ','))
<|> do ns <- sepBy1 name (lchar ',')
lchar ':'
t <- typeExpr (disallowImp syn)
return (map (\x -> UImplicit x t) ns)
<?> "using declaration list"
{- |Parses a using declaration
@
UsingDecl ::=
FnName TypeSig
| FnName FnName+
;
@
-}
usingDecl :: SyntaxInfo -> IdrisParser Using
usingDecl syn = try (do x <- fnName
lchar ':'
t <- typeExpr (disallowImp syn)
return (UImplicit x t))
<|> do c <- fnName
xs <- some fnName
return (UConstraint c xs)
<?> "using declaration"
{- | Parse a clause with patterns
@
Pattern ::= Clause;
@
-}
pattern :: SyntaxInfo -> IdrisParser PDecl
pattern syn = do fc <- getFC
clause <- clause syn
return (PClauses fc [] (sMN 2 "_") [clause]) -- collect together later
<?> "pattern"
{- | Parse a constant applicative form declaration
@
CAF ::= 'let' FnName '=' Expr Terminator;
@
-}
caf :: SyntaxInfo -> IdrisParser PDecl
caf syn = do reserved "let"
n_in <- fnName; let n = expandNS syn n_in
lchar '='
t <- expr syn
terminator
fc <- getFC
return (PCAF fc n t)
<?> "constant applicative form declaration"
{- | Parse an argument expression
@
ArgExpr ::= HSimpleExpr | {- In Pattern External (User-defined) Expression -};
@
-}
argExpr :: SyntaxInfo -> IdrisParser PTerm
argExpr syn = let syn' = syn { inPattern = True } in
try (hsimpleExpr syn') <|> simpleExternalExpr syn'
<?> "argument expression"
{- | Parse a right hand side of a function
@
RHS ::= '=' Expr
| '?=' RHSName? Expr
| 'impossible'
;
@
@
RHSName ::= '{' FnName '}';
@
-}
rhs :: SyntaxInfo -> Name -> IdrisParser PTerm
rhs syn n = do lchar '='; expr syn
<|> do symbol "?=";
name <- option n' (do symbol "{"; n <- fnName; symbol "}";
return n)
r <- expr syn
return (addLet name r)
<|> do reserved "impossible"; return PImpossible
<?> "function right hand side"
where mkN :: Name -> Name
mkN (UN x) = sUN (str x++"_lemma_1")
mkN (NS x n) = NS (mkN x) n
n' :: Name
n' = mkN n
addLet :: Name -> PTerm -> PTerm
addLet nm (PLet n ty val r) = PLet n ty val (addLet nm r)
addLet nm (PCase fc t cs) = PCase fc t (map addLetC cs)
where addLetC (l, r) = (l, addLet nm r)
addLet nm r = (PLet (sUN "value") Placeholder r (PMetavar nm))
{- |Parses a function clause
@
RHSOrWithBlock ::= RHS WhereOrTerminator
| 'with' SimpleExpr OpenBlock FnDecl+ CloseBlock
;
@
@
Clause ::= WExpr+ RHSOrWithBlock
| SimpleExpr '<==' FnName RHS WhereOrTerminator
| ArgExpr Operator ArgExpr WExpr* RHSOrWithBlock {- Except "=" and "?=" operators to avoid ambiguity -}
| FnName ConstraintArg* ImplicitOrArgExpr* WExpr* RHSOrWithBlock
;
@
@
ImplicitOrArgExpr ::= ImplicitArg | ArgExpr;
@
@
WhereOrTerminator ::= WhereBlock | Terminator;
@
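
For instance, a plain clause and a @with@ block like the following are both
parsed by this rule (illustrative):

> filter p []        = []
> filter p (x :: xs) with (p x)
>   filter p (x :: xs) | True  = x :: filter p xs
>   filter p (x :: xs) | False = filter p xs
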
-}
clause :: SyntaxInfo -> IdrisParser PClause
clause syn
= do wargs <- try (do pushIndent; some (wExpr syn))
fc <- getFC
ist <- get
n <- case lastParse ist of
Just t -> return t
Nothing -> fail "Invalid clause"
(do r <- rhs syn n
let ctxt = tt_ctxt ist
let wsyn = syn { syn_namespace = [] }
(wheres, nmap) <- choice [do x <- whereBlock n wsyn
popIndent
return x,
do terminator
return ([], [])]
return $ PClauseR fc wargs r wheres) <|> (do
popIndent
reserved "with"
wval <- simpleExpr syn
openBlock
ds <- some $ fnDecl syn
let withs = concat ds
closeBlock
return $ PWithR fc wargs wval withs)
<|> do ty <- try (do pushIndent
ty <- simpleExpr syn
symbol "<=="
return ty)
fc <- getFC
n_in <- fnName; let n = expandNS syn n_in
r <- rhs syn n
ist <- get
let ctxt = tt_ctxt ist
let wsyn = syn { syn_namespace = [] }
(wheres, nmap) <- choice [do x <- whereBlock n wsyn
popIndent
return x,
do terminator
return ([], [])]
let capp = PLet (sMN 0 "match")
ty
(PMatchApp fc n)
(PRef fc (sMN 0 "match"))
ist <- get
put (ist { lastParse = Just n })
return $ PClause fc n capp [] r wheres
<|> do (l, op) <- try (do
pushIndent
l <- argExpr syn
op <- operator
when (op == "=" || op == "?=" ) $
fail "infix clause definition with \"=\" and \"?=\" not supported "
return (l, op))
let n = expandNS syn (sUN op)
r <- argExpr syn
fc <- getFC
wargs <- many (wExpr syn)
(do rs <- rhs syn n
let wsyn = syn { syn_namespace = [] }
(wheres, nmap) <- choice [do x <- whereBlock n wsyn
popIndent
return x,
do terminator
return ([], [])]
ist <- get
let capp = PApp fc (PRef fc n) [pexp l, pexp r]
put (ist { lastParse = Just n })
return $ PClause fc n capp wargs rs wheres) <|> (do
popIndent
reserved "with"
wval <- bracketed syn
openBlock
ds <- some $ fnDecl syn
closeBlock
ist <- get
let capp = PApp fc (PRef fc n) [pexp l, pexp r]
let withs = map (fillLHSD n capp wargs) $ concat ds
put (ist { lastParse = Just n })
return $ PWith fc n capp wargs wval withs)
<|> do pushIndent
n_in <- fnName; let n = expandNS syn n_in
cargs <- many (constraintArg syn)
fc <- getFC
args <- many (try (implicitArg (syn { inPattern = True } ))
<|> (fmap pexp (argExpr syn)))
wargs <- many (wExpr syn)
let capp = PApp fc (PRef fc n)
(cargs ++ args)
(do r <- rhs syn n
ist <- get
let ctxt = tt_ctxt ist
let wsyn = syn { syn_namespace = [] }
(wheres, nmap) <- choice [do x <- whereBlock n wsyn
popIndent
return x,
do terminator
return ([], [])]
ist <- get
put (ist { lastParse = Just n })
return $ PClause fc n capp wargs r wheres) <|> (do
reserved "with"
ist <- get
put (ist { lastParse = Just n })
wval <- bracketed syn
openBlock
ds <- some $ fnDecl syn
let withs = map (fillLHSD n capp wargs) $ concat ds
closeBlock
popIndent
return $ PWith fc n capp wargs wval withs)
<?> "function clause"
where
fillLHS :: Name -> PTerm -> [PTerm] -> PClause -> PClause
fillLHS n capp owargs (PClauseR fc wargs v ws)
= PClause fc n capp (owargs ++ wargs) v ws
fillLHS n capp owargs (PWithR fc wargs v ws)
= PWith fc n capp (owargs ++ wargs) v
(map (fillLHSD n capp (owargs ++ wargs)) ws)
fillLHS _ _ _ c = c
fillLHSD :: Name -> PTerm -> [PTerm] -> PDecl -> PDecl
fillLHSD n c a (PClauses fc o fn cs) = PClauses fc o fn (map (fillLHS n c a) cs)
fillLHSD n c a x = x
{-| Parses with pattern
@
WExpr ::= '|' Expr';
@
-}
wExpr :: SyntaxInfo -> IdrisParser PTerm
wExpr syn = do lchar '|'
expr' syn
<?> "with pattern"
{- | Parses a where block
@
WhereBlock ::= 'where' OpenBlock Decl+ CloseBlock;
@
-}
whereBlock :: Name -> SyntaxInfo -> IdrisParser ([PDecl], [(Name, Name)])
whereBlock n syn
= do reserved "where"
ds <- indentedBlock1 (decl syn)
let dns = concatMap (concatMap declared) ds
return (concat ds, map (\x -> (x, decoration syn x)) dns)
<?> "where block"
{- |Parses a code generation target language name
@
Codegen ::= 'C'
| 'Java'
| 'JavaScript'
| 'Node'
| 'LLVM'
| 'Bytecode'
;
@
-}
codegen_ :: IdrisParser Codegen
codegen_ = do reserved "C"; return ViaC
<|> do reserved "Java"; return ViaJava
<|> do reserved "JavaScript"; return ViaJavaScript
<|> do reserved "Node"; return ViaNode
<|> do reserved "LLVM"; return ViaLLVM
<|> do reserved "Bytecode"; return Bytecode
<?> "code generation language"
{- |Parses a compiler directive
@
StringList ::=
String
| String ',' StringList
;
@
@
Directive ::= '%' Directive';
@
@
Directive' ::= 'lib' CodeGen String_t
| 'link' CodeGen String_t
| 'flag' CodeGen String_t
| 'include' CodeGen String_t
| 'hide' Name
| 'freeze' Name
| 'access' Accessibility
| 'default' Totality
| 'logging' Natural
| 'dynamic' StringList
| 'name' Name NameList
| 'error_handlers' Name NameList
| 'language' 'TypeProviders'
| 'language' 'ErrorReflection'
;
@
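
For instance, directives such as the following are accepted (illustrative):

> %access public
> %default total
> %lib C "m"
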
-}
directive :: SyntaxInfo -> IdrisParser [PDecl]
directive syn = do try (lchar '%' *> reserved "lib"); cgn <- codegen_; lib <- stringLiteral;
return [PDirective (do addLib cgn lib
addIBC (IBCLib cgn lib))]
<|> do try (lchar '%' *> reserved "link"); cgn <- codegen_; obj <- stringLiteral;
return [PDirective (do dirs <- allImportDirs
o <- liftIO $ findInPath dirs obj
addIBC (IBCObj cgn obj) -- just name, search on loading ibc
addObjectFile cgn o)]
<|> do try (lchar '%' *> reserved "flag"); cgn <- codegen_;
flag <- stringLiteral
return [PDirective (do addIBC (IBCCGFlag cgn flag)
addFlag cgn flag)]
<|> do try (lchar '%' *> reserved "include"); cgn <- codegen_; hdr <- stringLiteral;
return [PDirective (do addHdr cgn hdr
addIBC (IBCHeader cgn hdr))]
<|> do try (lchar '%' *> reserved "hide"); n <- fnName
return [PDirective (do setAccessibility n Hidden
addIBC (IBCAccess n Hidden))]
<|> do try (lchar '%' *> reserved "freeze"); n <- iName []
return [PDirective (do setAccessibility n Frozen
addIBC (IBCAccess n Frozen))]
<|> do try (lchar '%' *> reserved "access"); acc <- accessibility
return [PDirective (do i <- get
put(i { default_access = acc }))]
<|> do try (lchar '%' *> reserved "default"); tot <- totality
i <- get
put (i { default_total = tot } )
return [PDirective (do i <- get
put(i { default_total = tot }))]
<|> do try (lchar '%' *> reserved "logging"); i <- natural;
return [PDirective (setLogLevel (fromInteger i))]
<|> do try (lchar '%' *> reserved "dynamic"); libs <- sepBy1 stringLiteral (lchar ',');
return [PDirective (do added <- addDyLib libs
case added of
Left lib -> addIBC (IBCDyLib (lib_name lib))
Right msg ->
fail $ msg)]
<|> do try (lchar '%' *> reserved "name")
ty <- fnName
ns <- sepBy1 name (lchar ',')
return [PDirective (do ty' <- disambiguate ty
mapM_ (addNameHint ty') ns
mapM_ (\n -> addIBC (IBCNameHint (ty', n))) ns)]
<|> do try (lchar '%' *> reserved "error_handlers")
fn <- fnName
arg <- fnName
ns <- sepBy1 name (lchar ',')
return [PDirective (do fn' <- disambiguate fn
ns' <- mapM disambiguate ns
addFunctionErrorHandlers fn' arg ns'
mapM_ (addIBC . IBCFunctionErrorHandler fn' arg) ns')]
<|> do try (lchar '%' *> reserved "language"); ext <- pLangExt;
return [PDirective (addLangExt ext)]
<?> "directive"
where disambiguate :: Name -> Idris Name
disambiguate n = do i <- getIState
case lookupCtxtName n (idris_implicits i) of
[(n', _)] -> return n'
[] -> throwError (NoSuchVariable n)
more -> throwError (CantResolveAlts (map (show . fst) more))
pLangExt :: IdrisParser LanguageExt
pLangExt = (reserved "TypeProviders" >> return TypeProviders)
<|> (reserved "ErrorReflection" >> return ErrorReflection)
{- | Parses a totality
@
Totality ::= 'partial' | 'total'
@
-}
totality :: IdrisParser Bool
totality
= do reserved "total"; return True
<|> do reserved "partial"; return False
{- | Parses a type provider
@
Provider ::= '%' 'provide' ProviderWhat? '(' FnName TypeSig ')' 'with' Expr;
ProviderWhat ::= 'proof' | 'term' | 'type' | 'postulate'
@
-}
provider :: SyntaxInfo -> IdrisParser [PDecl]
provider syn = do try (lchar '%' *> reserved "provide");
what <- provideWhat
lchar '('; n <- fnName; lchar ':'; t <- typeExpr syn; lchar ')'
fc <- getFC
reserved "with"
e <- expr syn
return [PProvider syn fc what n t e]
<?> "type provider"
where provideWhat :: IdrisParser ProvideWhat
provideWhat = option ProvAny
( ((reserved "proof" <|> reserved "term" <|> reserved "type") *>
pure ProvTerm)
<|> (reserved "postulate" *> pure ProvPostulate)
<?> "provider variety")
{- | Parses a transform
@
Transform ::= '%' 'transform' Expr '==>' Expr
@
-}
transform :: SyntaxInfo -> IdrisParser [PDecl]
transform syn = do try (lchar '%' *> reserved "transform")
-- leave it unchecked, until we work out what this should
-- actually mean...
-- safety <- option True (do reserved "unsafe"
-- return False)
l <- expr syn
fc <- getFC
symbol "==>"
r <- expr syn
return [PTransform fc False l r]
<?> "transform"
{- * Loading and parsing -}
{- | Parses an expression from input -}
parseExpr :: IState -> String -> Result PTerm
parseExpr st = runparser (fullExpr defaultSyntax) st "(input)"
{- | Parses a constant from input -}
parseConst :: IState -> String -> Result Const
parseConst st = runparser constant st "(input)"
{- | Parses a tactic from input -}
parseTactic :: IState -> String -> Result PTactic
parseTactic st = runparser (fullTactic defaultSyntax) st "(input)"
-- | Parse module header and imports
parseImports :: FilePath -> String -> Idris ([String], [(String, Maybe String, FC)], Maybe Delta)
parseImports fname input
= do i <- getIState
case parseString (runInnerParser (evalStateT imports i)) (Directed (UTF8.fromString fname) 0 0 0 0) input of
Failure err -> fail (show err)
Success (x, i) -> do putIState i
return x
where imports :: IdrisParser (([String], [(String, Maybe String, FC)], Maybe Delta), IState)
imports = do whiteSpace
mname <- moduleHeader
ps <- many import_
mrk <- mark
isEof <- lookAheadMatches eof
let mrk' = if isEof
then Nothing
else Just mrk
i <- get
return ((mname, ps, mrk'), i)
-- | There should be a better way of doing this...
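-- Recovers the location by parsing the @file:line:column:message@ prefix of
-- the pretty-printed parse error.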
findFC :: Doc -> (FC, String)
findFC x = let s = show (plain x) in
case span (/= ':') s of
(failname, ':':rest) -> case span isDigit rest of
(line, ':':rest') -> case span isDigit rest' of
(col, ':':msg) -> let pos = (read line, read col) in
(FC failname pos pos, msg)
-- | Check if the coloring matches the options and corrects if necessary
fixColour :: Bool -> ANSI.Doc -> ANSI.Doc
fixColour False doc = ANSI.plain doc
fixColour True doc = doc
-- | A program is a list of declarations, possibly with associated
-- documentation strings.
parseProg :: SyntaxInfo -> FilePath -> String -> Maybe Delta ->
Idris [PDecl]
parseProg syn fname input mrk
= do i <- getIState
case runparser mainProg i fname input of
Failure doc -> do -- FIXME: Get error location from trifecta
-- this can't be the solution!
let (fc, msg) = findFC doc
i <- getIState
case idris_outputmode i of
RawOutput -> ihputStrLn (idris_outh i) (show $ fixColour (idris_colourRepl i) doc)
IdeSlave n -> ihWarn (idris_outh i) fc (P.text msg)
putIState (i { errSpan = Just fc })
return []
Success (x, i) -> do putIState i
reportParserWarnings
return $ collect x
where mainProg :: IdrisParser ([PDecl], IState)
mainProg = case mrk of
Nothing -> do i <- get; return ([], i)
Just mrk -> do
release mrk
ds <- prog syn
i' <- get
return (ds, i')
{- | Load idris module and show error if something wrong happens -}
loadModule :: Handle -> FilePath -> Idris String
loadModule outh f
= idrisCatch (loadModule' outh f)
(\e -> do setErrSpan (getErrSpan e)
ist <- getIState
ihWarn outh (getErrSpan e) $ pprintErr ist e
return "")
{- | Load idris module -}
loadModule' :: Handle -> FilePath -> Idris String
loadModule' outh f
= do i <- getIState
let file = takeWhile (/= ' ') f
ibcsd <- valIBCSubDir i
ids <- allImportDirs
fp <- findImport ids ibcsd file
if file `elem` imported i
then iLOG $ "Already read " ++ file
else do putIState (i { imported = file : imported i })
case fp of
IDR fn -> loadSource outh False fn
LIDR fn -> loadSource outh True fn
IBC fn src ->
idrisCatch (loadIBC fn)
(\c -> do iLOG $ fn ++ " failed " ++ pshow i c
case src of
IDR sfn -> loadSource outh False sfn
LIDR sfn -> loadSource outh True sfn)
let (dir, fh) = splitFileName file
return (dropExtension fh)
{- | Load idris code from file -}
loadFromIFile :: Handle -> IFileType -> Idris ()
loadFromIFile h i@(IBC fn src)
= do iLOG $ "Skipping " ++ getSrcFile i
idrisCatch (loadIBC fn)
(\err -> ierror $ LoadingFailed fn err)
where
getSrcFile (IDR fn) = fn
getSrcFile (LIDR fn) = fn
getSrcFile (IBC f src) = getSrcFile src
loadFromIFile h (IDR fn) = loadSource' h False fn
loadFromIFile h (LIDR fn) = loadSource' h True fn
{-| Load idris source code and show error if something wrong happens -}
loadSource' :: Handle -> Bool -> FilePath -> Idris ()
loadSource' h lidr r
= idrisCatch (loadSource h lidr r)
(\e -> do setErrSpan (getErrSpan e)
ist <- getIState
case e of
At f e' -> ihWarn h f (pprintErr ist e')
_ -> ihWarn h (getErrSpan e) (pprintErr ist e))
{- | Load Idris source code-}
loadSource :: Handle -> Bool -> FilePath -> Idris ()
loadSource h lidr f
= do iLOG ("Reading " ++ f)
i <- getIState
let def_total = default_total i
file_in <- runIO $ readFile f
file <- if lidr then tclift $ unlit f file_in else return file_in
(mname, imports, pos) <- parseImports f file
ids <- allImportDirs
ibcsd <- valIBCSubDir i
mapM_ (\f -> do fp <- findImport ids ibcsd f
case fp of
LIDR fn -> ifail $ "No ibc for " ++ f
IDR fn -> ifail $ "No ibc for " ++ f
IBC fn src -> loadIBC fn)
[fn | (fn, _, _) <- imports]
reportParserWarnings
-- process and check module aliases
let modAliases = M.fromList
[(prep alias, prep realName) | (realName, Just alias, fc) <- imports]
prep = map T.pack . reverse . Spl.splitOn "/"
aliasNames = [(alias, fc) | (_, Just alias, fc) <- imports]
histogram = groupBy ((==) `on` fst) . sortBy (comparing fst) $ aliasNames
case map head . filter ((/= 1) . length) $ histogram of
[] -> logLvl 3 $ "Module aliases: " ++ show (M.toList modAliases)
(n,fc):_ -> throwError . At fc . Msg $ "import alias not unique: " ++ show n
i <- getIState
putIState (i { default_access = Hidden, module_aliases = modAliases })
clearIBC -- start a new .ibc file
mapM_ (addIBC . IBCImport) [realName | (realName, alias, fc) <- imports]
let syntax = defaultSyntax{ syn_namespace = reverse mname }
ds' <- parseProg syntax f file pos
-- Parsing done, now process declarations
let ds = namespaces mname ds'
logLvl 3 (show $ showDecls verbosePPOption ds)
i <- getIState
logLvl 10 (show (toAlist (idris_implicits i)))
logLvl 3 (show (idris_infixes i))
-- Now add all the declarations to the context
v <- verbose
when v $ ihputStrLn h $ "Type checking " ++ f
-- we totality check after every Mutual block, so if
-- anything is a single definition, wrap it in a
-- mutual block on its own
elabDecls toplevel (map toMutual ds)
i <- getIState
         -- simplify every definition to give the totality checker
-- a better chance
mapM_ (\n -> do logLvl 5 $ "Simplifying " ++ show n
updateContext (simplifyCasedef n))
(map snd (idris_totcheck i))
-- build size change graph from simplified definitions
iLOG "Totality checking"
i <- getIState
mapM_ buildSCG (idris_totcheck i)
mapM_ checkDeclTotality (idris_totcheck i)
-- Redo totality check for deferred names
let deftots = idris_defertotcheck i
iLOG $ "Totality checking " ++ show deftots
mapM_ (\x -> do tot <- getTotality x
case tot of
Total _ -> setTotality x Unchecked
_ -> return ()) (map snd deftots)
mapM_ buildSCG deftots
mapM_ checkDeclTotality deftots
iLOG ("Finished " ++ f)
ibcsd <- valIBCSubDir i
iLOG "Universe checking"
iucheck
let ibc = ibcPathNoFallback ibcsd f
i <- getIState
addHides (hide_list i)
-- Finally, write an ibc if checking was successful
ok <- noErrors
when ok $
idrisCatch (do writeIBC f ibc; clearIBC)
(\c -> return ()) -- failure is harmless
i <- getIState
putIState (i { default_total = def_total,
hide_list = [] })
return ()
where
namespaces :: [String] -> [PDecl] -> [PDecl]
namespaces [] ds = ds
namespaces (x:xs) ds = [PNamespace x (namespaces xs ds)]
toMutual :: PDecl -> PDecl
toMutual m@(PMutual _ d) = m
toMutual (PNamespace x ds) = PNamespace x (map toMutual ds)
toMutual x = let r = PMutual (fileFC "single mutual") [x] in
case x of
PClauses _ _ _ _ -> r
PClass _ _ _ _ _ _ _ _ -> r
PInstance _ _ _ _ _ _ _ _ -> r
_ -> x
{- | Adds names to hide list -}
addHides :: [(Name, Maybe Accessibility)] -> Idris ()
addHides xs = do i <- getIState
let defh = default_access i
let (hs, as) = partition isNothing xs
unless (null as) $
mapM_ doHide
(map (\ (n, _) -> (n, defh)) hs ++
map (\ (n, Just a) -> (n, a)) as)
where isNothing (_, Nothing) = True
isNothing _ = False
doHide (n, a) = do setAccessibility n a
addIBC (IBCAccess n a)
| DanielWaterworth/Idris-dev | src/Idris/Parser.hs | bsd-3-clause | 46,800 | 438 | 23 | 19,574 | 9,522 | 5,153 | 4,369 | 830 | 12 |
module IdMT where
import MT
import Control_Monad_Fix
import Monad(liftM)
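-- | The identity monad transformer: 'With_' adds no effects of its own and
-- simply lifts every feature of the underlying monad through.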
newtype With_ m a = I { removeId :: m a }
instance MT With_ where
lift = I
instance Monad m => Monad (With_ m) where
return = lift . return
I m >>= f = I (m >>= removeId . f)
instance Monad m => Functor (With_ m) where
fmap = liftM
instance HasBaseMonad m n => HasBaseMonad (With_ m) n where
inBase = I . inBase
instance HasEnv m ix e => HasEnv (With_ m) ix e where
getEnv = I . getEnv
instance HasState m ix e => HasState (With_ m) ix e where
updSt ix = I . updSt ix
instance HasOutput m ix o => HasOutput (With_ m) ix o where
outputTree ix = I . outputTree ix
instance HasExcept m x => HasExcept (With_ m) x where
raise = I . raise
handle h = I . handle (removeId . h) . removeId
instance HasCont m => HasCont (With_ m) where
callcc f = I (callcc f')
where f' k = removeId (f (I . k))
instance HasRefs m r => HasRefs (With_ m) r where
newRef = I . newRef
readRef = I . readRef
writeRef r = I . writeRef r
instance MonadFix m => MonadFix (With_ m) where
mfix f = I (mfix (removeId . f))
| forste/haReFork | tools/base/lib/Monads/IdMT.hs | bsd-3-clause | 1,274 | 0 | 12 | 438 | 520 | 262 | 258 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
{- |
Module : $Header$
Description : Abstract syntax for SELinux policies
Copyright : (c) Galois, Inc.
License : see the file LICENSE
Maintainer : Joe Hurd
Stability : provisional
Portability : portable
Abstract syntax for SELinux policies
-}
module SCD.SELinux.Syntax(
Policy(..)
, CommonPerm(..)
, AvPerm(..)
, TeRbac(..)
, Stmt(..)
, AvRuleBlock(..)
, AvRule(..)
, RequireStmt(..)
, Require(..)
, CondExpr(..)
, Op(..)
, Constraint(..)
, ConstraintExpr(..)
, ConstraintExprPrim(..)
, ContextIndex(..)
, COp(..)
, CEqOp(..)
, RoleMlsOp(..)
, SidContext(..)
, PortContext(..)
, NetInterfaceContext(..)
, NodeContext(..)
, Protocol(..)
, FileSystemUse(..)
, GenFileSystemContext(..)
, FilePath(..)
, FileType(..)
, IPV4Address(..)
, IPV6Address(..)
, IPAddressMask(..)
, SecurityContext(..)
, Transition(..)
, SourceTarget(..)
, AllowDeny(..)
, Self(..)
, NeverAllow(..)
, SignedId(..)
, signedId2Id
, Sign(..)
, Permissions(..)
, StarTilde(..)
, User(..)
, Identifier(..)
, IsIdentifier(..)
, mkId
, withIdString
, ClassId(..)
, CommonId(..)
, PermissionId(..)
, TypeId(..)
, AttributeId(..)
, TypeOrAttributeId(..)
, UserId(..)
, RoleId(..)
, NetInterfaceId(..)
, FileSystemId(..)
, Sid(..)
, BoolId(..)
) where
import Data.Tree(Tree)
import Data.Word(Word8, Word16)
import Data.Array.IArray(listArray, elems)
import Data.Array.Unboxed(UArray)
import Data.NonEmptyList(NonEmptyList)
import Data.Generics(Typeable, Data(..))
import Prelude hiding (FilePath)
import Text.Happy.ParserMonad(Pos, noPos)
data Identifier = I !Pos (UArray Int Char)
deriving Typeable
instance Data Identifier where
gfoldl = undefined
gunfold = undefined
toConstr = undefined
dataTypeOf = undefined
-- | Ignore the position information when comparing Identifiers.
instance Eq Identifier where
I _ s1 == I _ s2 = s1 == s2
instance Ord Identifier where
I _ s1 `compare` I _ s2 = s1 `compare` s2
instance Show Identifier where
show = show . idString
instance Read Identifier where
readsPrec i s = [(mkId a,b) | (a,b) <- readsPrec i s]
class (Show a, Ord a) => IsIdentifier a where
idString :: a -> String
mkId' :: Pos -> String -> a
fromId :: Identifier -> a
toId :: a -> Identifier
pos :: a -> Pos
mkId :: IsIdentifier i => String -> i
mkId = mkId' noPos
withIdString :: IsIdentifier i => (String -> String) -> i -> i
withIdString f i = mkId' (pos i) (f (idString i))
instance IsIdentifier Identifier where
idString (I _ s) = elems s
mkId' p s = I p $ listArray (1,length s) s
fromId i = i
toId i = i
pos (I p _) = p
newtype ClassId = ClassId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype CommonId = CommonId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype PermissionId = PermissionId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype TypeId = TypeId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype AttributeId = AttributeId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype TypeOrAttributeId = TypeOrAttributeId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype Sid = Sid Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype BoolId = BoolId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype UserId = UserId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype RoleId = RoleId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype NetInterfaceId = NetInterfaceId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
newtype FileSystemId = FileSystemId Identifier
deriving (Typeable, Data, Eq, Read, Show, Ord, IsIdentifier)
data Policy = Policy{ classes :: NonEmptyList ClassId
, initialSids :: NonEmptyList Sid
, commonPerms :: [CommonPerm]
, avPerms :: NonEmptyList AvPerm
, teRbacs :: [TeRbac]
, users :: NonEmptyList User
, constraints :: [Constraint]
, sidContexts :: NonEmptyList (SidContext SecurityContext)
, fileSystemUses :: [FileSystemUse SecurityContext]
, genFileSystemContexts :: [GenFileSystemContext SecurityContext]
, portContexts :: [PortContext SecurityContext]
, netInterfaceContexts :: [NetInterfaceContext SecurityContext]
, nodeContexts :: [NodeContext SecurityContext]
}
deriving (Typeable, Data, Eq, Read, Show)
data CommonPerm = CommonPerm CommonId (NonEmptyList PermissionId)
deriving (Typeable, Data, Eq, Read, Show)
data AvPerm = AvPermClass ClassId (Either (NonEmptyList PermissionId) (CommonId, [PermissionId]))
deriving (Typeable, Data, Eq, Read, Show)
data TeRbac =
Attribute AttributeId
| Type TypeId [TypeId] [AttributeId]
| TypeAlias TypeId (NonEmptyList TypeId)
| TypeAttribute TypeId (NonEmptyList AttributeId)
| BoolDef BoolId Bool
-- -- | RangeTrans SourceTarget MlsRange
| TeNeverAllow (SourceTarget (NeverAllow TypeOrAttributeId)
(NeverAllow Self))
Permissions
| Role RoleId [SignedId TypeOrAttributeId]
| Dominance (NonEmptyList (Tree RoleId))
| RoleTransition (NonEmptyList RoleId) (NonEmptyList (SignedId TypeOrAttributeId)) RoleId
| RoleAllow (NonEmptyList RoleId) (NonEmptyList RoleId)
| CondStmt CondExpr [RequireStmt] [RequireStmt]
| Stmt Stmt
| Optional AvRuleBlock (Maybe AvRuleBlock)
deriving (Typeable, Data, Eq, Read, Show)
data Stmt =
Transition Transition (SourceTarget (NonEmptyList (SignedId TypeOrAttributeId))
(NonEmptyList (SignedId TypeOrAttributeId)))
TypeId
| TeAvTab AllowDeny (SourceTarget (NonEmptyList (SignedId TypeOrAttributeId))
(NonEmptyList (SignedId Self)))
Permissions
deriving (Typeable, Data, Eq, Read, Show)
data AvRuleBlock =
AvRuleBlock [AvRule] [User]
deriving (Typeable, Data, Eq, Read, Show)
data AvRule =
TeRbac TeRbac
| AvRuleRequire (NonEmptyList Require)
deriving (Typeable, Data, Eq, Read, Show)
data RequireStmt =
RequireStmt Stmt
| Require (NonEmptyList Require)
deriving (Typeable, Data, Eq, Read, Show)
data Require =
RequireClass ClassId (NonEmptyList PermissionId)
| RequireRole (NonEmptyList RoleId)
| RequireType (NonEmptyList TypeId)
| RequireAttribute (NonEmptyList AttributeId)
| RequireUser (NonEmptyList UserId)
| RequireBool (NonEmptyList BoolId)
deriving (Typeable, Data, Eq, Read, Show)
data CondExpr =
Not CondExpr
| Op CondExpr Op CondExpr
| Var BoolId
deriving (Typeable, Data, Eq, Ord, Read, Show)
data Op = And | Or | Xor | Equals | Notequal
deriving (Typeable, Data, Eq, Ord, Read, Show, Enum)
data Constraint =
Constrain (NonEmptyList ClassId) (NonEmptyList PermissionId) ConstraintExpr
| ValidateTrans (NonEmptyList ClassId) ConstraintExpr
deriving (Typeable, Data, Eq, Read, Show)
data ConstraintExpr =
ConstraintExprPrim ConstraintExprPrim
| CNot ConstraintExpr
| COp ConstraintExpr COp ConstraintExpr
deriving (Typeable, Data, Eq, Read, Show)
data ConstraintExprPrim =
CUsers CEqOp
| CRoles RoleMlsOp
| CTypes CEqOp
| CUserSet ContextIndex CEqOp (NonEmptyList UserId)
| CRoleSet ContextIndex CEqOp (NonEmptyList RoleId)
| CTypeSet ContextIndex CEqOp (NonEmptyList TypeOrAttributeId)
deriving (Typeable, Data, Eq, Read, Show)
data ContextIndex = C1 | C2 | C3
deriving (Typeable, Data, Eq, Read, Show, Enum)
data COp = CAnd | COr
deriving (Typeable, Data, Eq, Read, Show, Enum)
data CEqOp = CEquals | CNotequal
deriving (Typeable, Data, Eq, Read, Show, Enum)
data RoleMlsOp =
CEqOp CEqOp
| Dom
| DomBy
| InComp
deriving (Typeable, Data, Eq, Read, Show)
data SidContext s = SidContext Sid s
deriving (Typeable, Data, Eq, Read, Show)
data PortContext s = PortContext Protocol Word16 Word16 s
deriving (Typeable, Data, Eq, Read, Show)
data NetInterfaceContext s = NetInterfaceContext NetInterfaceId s s
deriving (Typeable, Data, Eq, Read, Show)
-- TODO Capture type relationship between address and mask
data NodeContext s = NodeContext IPAddressMask s
deriving (Typeable, Data, Eq, Read, Show)
data SecurityContext = SecurityContext UserId RoleId TypeId
deriving (Typeable, Data, Eq, Read, Show)
data Protocol = Tcp | Udp
deriving (Typeable, Data, Eq, Read, Show, Enum)
data FileSystemUse s =
FSUseXattr FileSystemId s
| FSUseTask FileSystemId s
| FSUseTrans FileSystemId s
deriving (Typeable, Data, Eq, Read, Show)
data GenFileSystemContext s = GenFSCon FileSystemId FilePath (Maybe FileType) s
deriving (Typeable, Data, Eq, Read, Show)
data FilePath = FilePath String
deriving (Typeable, Data, Eq, Read, Show)
data FileType = BlockFile | CharacterFile | DirectoryFile | FifoFile | LinkFile | SocketFile | PlainFile
deriving (Typeable, Data, Eq, Read, Show, Enum)
data IPV4Address = IPV4Address Word8 Word8 Word8 Word8
deriving (Typeable, Data, Eq, Read, Show)
data IPV6Address = IPV6Address Word16 Word16 Word16 Word16 Word16 Word16 Word16 Word16
deriving (Typeable, Data, Eq, Read, Show)
data IPAddressMask =
IPV4AddrMask IPV4Address IPV4Address
| IPV6AddrMask IPV6Address IPV6Address
deriving (Typeable, Data, Eq, Read, Show)
data Transition =
TypeTransition
| TypeMember
| TypeChange
deriving (Typeable, Data, Eq, Read, Show, Enum)
data SourceTarget st tt = SourceTarget{ sourceTypes :: st
, targetTypes :: tt
, targetClasses :: (NonEmptyList ClassId)
}
deriving (Typeable, Data, Eq, Read, Show)
data AllowDeny =
Allow
| AuditAllow
| AuditDeny
| DontAudit
deriving (Typeable, Data, Eq, Read, Show, Enum)
{-
data MlsRange = MlsRange Level Level
deriving (Typeable, Data, Eq, Read, Show)
newtype Level = Level Integer
deriving (Typeable, Data, Eq, Read, Show)
-}
data Self =
NotSelf TypeOrAttributeId
| Self
deriving (Typeable, Data, Eq, Read, Show)
data NeverAllow t =
NeverAllow (NonEmptyList (SignedId t))
| NAStarTilde (StarTilde (SignedId t))
deriving (Typeable, Data, Eq, Read, Show)
instance Functor NeverAllow where
fmap f (NeverAllow l) = NeverAllow (fmap (fmap f) l)
fmap f (NAStarTilde i) = NAStarTilde (fmap (fmap f) i)
data SignedId t =
SignedId Sign t
deriving (Typeable, Data, Eq, Ord, Read, Show)
signedId2Id :: SignedId i -> i
signedId2Id (SignedId _ i) = i
instance Functor SignedId where
fmap f (SignedId s t) = SignedId s (f t)
data Sign = Negative | Positive
deriving (Typeable, Data, Eq, Ord, Read, Show, Enum)
data Permissions =
Permissions (NonEmptyList PermissionId)
| PStarTilde (StarTilde PermissionId)
deriving (Typeable, Data, Eq, Read, Show)
data StarTilde i =
Star
| Tilde (NonEmptyList i)
deriving (Typeable, Data, Eq, Read, Show)
instance Functor StarTilde where
fmap _ Star = Star
fmap f (Tilde l) = Tilde (fmap f l)
data User = User UserId (NonEmptyList RoleId)
deriving (Typeable, Data, Eq, Read, Show)
| GaloisInc/sk-dev-platform | libs/SCD/src/SCD/SELinux/Syntax.hs | bsd-3-clause | 11,557 | 0 | 12 | 2,508 | 3,826 | 2,132 | 1,694 | 302 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
-----------------------------------------------------------------------------
-- |
-- Module : Geometry.CubicSpline
-- Copyright : (c) 2011-2017 diagrams team (see LICENSE)
-- License : BSD-style (see LICENSE)
-- Maintainer : [email protected]
--
-- A /cubic spline/ is a smooth, connected sequence of cubic curves.
-- This module provides two methods for constructing splines.
--
-- The 'cubicSpline' method can be used to create closed or open cubic
-- splines from a list of points. The resulting splines /pass through/
-- all the control points, but depend on the control points in a
-- "global" way (that is, changing one control point may alter the
-- entire curve). For access to the internals of the spline
-- generation algorithm, see "Diagrams.CubicSpline.Internal".
--
-- 'bspline' creates a cubic B-spline, which starts and ends at the
-- first and last control points, but does not necessarily pass
-- through any of the other control points. It depends on the control
-- points in a "local" way, that is, changing one control point will
-- only affect a local portion of the curve near that control point.
--
-----------------------------------------------------------------------------
module Geometry.CubicSpline
(
-- * Cubic splines
cubicSpline
, cubicSplineLine
, cubicSplineLoop
, cubicSplineLineVec
, cubicSplineLoopVec
-- * B-splines
, bspline
) where
import Geometry.CubicSpline.Boehm
import Geometry.CubicSpline.Internal
import Geometry.Located
import Geometry.Segment
import Geometry.Space
import Geometry.Trail
import Linear
import Linear.Affine
import qualified Data.Vector as B
import qualified Data.Vector.Generic as V
import qualified Data.Vector.Unboxed as U
-- | Construct a spline path-like thing of cubic segments from a list of
-- vertices, with the first vertex as the starting point. The first
-- argument specifies whether the path should be closed.
--
-- <<diagrams/src_Geometry_CubicSpline_cubicSplineEx.svg#diagram=cubicSplineEx&width=600>>
--
-- > import Geometry.CubicSpline
-- > pts = map p2 [(0,0), (2,3), (5,-2), (-4,1), (0,3)]
-- > spot = circle 0.2 # fc blue # lw none
-- > mkPath closed = position (zip pts (repeat spot))
-- > <> cubicSpline closed pts
-- > cubicSplineEx = (mkPath False ||| strutX 2 ||| mkPath True)
-- > # centerXY # pad 1.1
--
-- For more information, see <http://mathworld.wolfram.com/CubicSpline.html>.
cubicSpline
:: (InSpace v n t, FromTrail t, Additive v, Fractional n)
=> Bool -> [Point v n] -> t
cubicSpline _ [] = fromLocTrail $ mempty `at` origin
cubicSpline closed pps@(p:ps)
| closed = fromLocLoop $ cubicSplineLoop offsets `at` p
| otherwise = fromLocLine $ cubicSplineLine offsets `at` p
where offsets = zipWith (flip (.-.)) pps ps
-- $cubic-spline
-- A cubic spline is a smooth curve made up of cubic bezier segments
-- whose offsets match the input offsets.
--
-- - For lines the curvatures at the start of the first segment and
-- end of the last segment are both zero (a "natural" cubic spline).
--   - For loops the tangent at the end of the last segment matches the
--     tangent at the beginning of the first segment.
--
-- These requirements uniquely define the cubic spline. In the case that
-- only one offset is given, a linear segment is returned.
-- Lines ---------------------------------------------------------------
-- | See 'cubicSpline'.
cubicSplineLineVec
:: (V.Vector vec (v n), V.Vector vec n, Additive v, Fractional n)
=> vec (v n)
-> Line v n
cubicSplineLineVec vs
| n <= 1 = lineFromSegments $ map Linear (V.toList vs)
| otherwise = cubicSplineLineFromTangents vs off dv
where
n = V.length vs
off = V.foldl' (^+^) zero vs
dv = cubicSplineLineTangents vs
{-# INLINE cubicSplineLineVec #-}
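-- | 'cubicSplineLine' specialised to unboxed @V2 Double@ vectors; the RULES
-- pragma below rewrites 'cubicSplineLine' to this version.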
cubicSplineLineV2D
:: [V2 Double]
-> Line V2 Double
cubicSplineLineV2D = cubicSplineLineVec . U.fromList
-- | See 'cubicSpline'.
cubicSplineLine
:: (Additive v, Fractional n)
=> [v n] -> Line v n
cubicSplineLine = cubicSplineLineVec . B.fromList
{-# INLINE [0] cubicSplineLine #-}
-- Loops ---------------------------------------------------------------
cubicSplineLoopVec
:: (V.Vector vec (v n), V.Vector vec n, Additive v, Fractional n)
=> vec (v n) -> Loop v n
cubicSplineLoopVec vs
| n <= 1 = loopFromSegments (map Linear (V.toList vs)) linearClosing
| otherwise = cubicSplineLoopFromTangents vs off dv
where
n = V.length vs
off = V.foldl' (^+^) zero vs
dv = cubicSplineLoopTangents vs (negated off)
{-# INLINE cubicSplineLoopVec #-}
-- | See 'cubicSpline'.
cubicSplineLoopV2D
:: [V2 Double] -> Loop V2 Double
cubicSplineLoopV2D = cubicSplineLoopVec . U.fromList
-- | See 'cubicSpline'.
cubicSplineLoop
:: (Additive v, Fractional n)
=> [v n] -> Loop v n
cubicSplineLoop = cubicSplineLoopVec . B.fromList
{-# INLINE [0] cubicSplineLoop #-}
{-# RULES
"cubicSplineLine/V2 Double" cubicSplineLine = cubicSplineLineV2D;
"cubicSplineLoop/V2 Double" cubicSplineLoop = cubicSplineLoopV2D
#-}
| cchalmers/geometry | src/Geometry/CubicSpline.hs | bsd-3-clause | 5,323 | 0 | 11 | 1,071 | 783 | 444 | 339 | 71 | 1 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}
module Text.Blaze.Amf.Elements where
import Text.Blaze.Amf.Template
import Text.Blaze.Internal
import Language.Haskell.TH
import Text.Blaze.Amf.Attributes
import Data.Monoid
$(elements ["x","y","z"])
$(elements ["nx","ny","nz"])
$(elements ["r","g","b","a"])
$(elements ["v1","v2","v3"])
$(elements ["rx","ry","rz"])
$(elements ["dx1","dy1","dz1"])
$(elements ["dx2","dy2","dz2"])
$(elements ["deltax","deltay","deltaz"])
$(elements ["texmap","utex","utex2","utex3"])
$(elements ["object","color","mesh","vertices","vertex"])
$(elements ["coordinates","normal","edge","volume","triangle","metadata"])
$(elements ["texture","material","composite","constellation","instance"])
$("amf" `elementAs` "raw_amf")
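-- | Build a complete AMF document: the given markup wrapped in a root @amf@
-- element carrying the unit attribute, preceded by an XML declaration.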
amf :: Unit -> Markup -> Markup
amf u cont = unsafeByteString "<?xml version=\"1.0\"?>" `mappend`
(raw_amf ! unit u $ cont) | matthewSorensen/blaze-amf | src/Text/Blaze/Amf/Elements.hs | bsd-3-clause | 899 | 0 | 9 | 83 | 354 | 197 | 157 | 23 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE PolyKinds #-}
module Math.Monad.Atkey
( At(..)
, Atkey
, Coat(..)
, Coatkey
) where
import Data.Type.Equality
import Math.Category
import Math.Functor
import Prelude (($))
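-- | 'At' pins a value of type @a@ to a single index: an @At a i j@ can only be
-- built (and taken apart) when @i@ and @j@ coincide. 'Coat' is the dual: a
-- value of @a@ that only becomes available once the two indices are known to
-- be equal. 'Atkey' and 'Coatkey' use them to view an indexed monad @m@ as a
-- parameterised monad.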
data At a i j where
At :: a -> At a i i
instance Functor (At a i) where
type Dom (At a i) = (:~:)
type Cod (At a i) = (->)
fmap Refl a = a
instance Functor (At a) where
type Dom (At a) = (:~:)
type Cod (At a) = Nat (:~:) (->)
fmap Refl = Nat id
instance Functor At where
type Dom At = (->)
type Cod At = Nat (:~:) (Nat (:~:) (->))
fmap f = Nat $ Nat $ \(At a) -> At (f a)
type Atkey m i j a = m (At a j) i
newtype Coat a i j = Coat { runCoat :: (i ~ j) => a }
instance Functor (Coat a i) where
type Dom (Coat a i) = (:~:)
type Cod (Coat a i) = (->)
fmap Refl a = a
instance Functor (Coat a) where
type Dom (Coat a) = (:~:)
type Cod (Coat a) = Nat (:~:) (->)
fmap Refl = Nat id
instance Functor Coat where
type Dom Coat = (->)
type Cod Coat = Nat (:~:) (Nat (:~:) (->))
fmap f = Nat $ Nat $ \ g -> Coat (f (runCoat g))
type Coatkey m i j a = m (Coat a j) i
| ekmett/categories | src/Math/Monad/Atkey.hs | bsd-3-clause | 1,183 | 0 | 12 | 313 | 589 | 335 | 254 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module FPNLA.Operations.BLAS.Strategies.GEMM.Accelerate.DefPar () where
import FPNLA.Matrix.Instances.AccMatrix (AccMatrix (..),
AccVector (..))
import FPNLA.Operations.BLAS (GEMM (gemm))
import FPNLA.Operations.BLAS.Strategies.DataTypes (DefPar_Acc)
import FPNLA.Operations.Parameters (Elt,
TransType (..),
blasResultM)
import Data.Array.Accelerate ((:.) (..), Acc,
All (All), Array,
DIM2, Exp, IsNum,
Z (Z),
constant,
shape)
import qualified Data.Array.Accelerate as A (Elt, fold,
lift, map,
replicate,
transpose,
unlift,
zipWith)
instance (Elt e, A.Elt e, IsNum e) => GEMM DefPar_Acc AccMatrix AccVector e where
gemm _ pmA pmB alpha beta (AccMatrix mC) =
blasResultM (AccMatrix mC')
where
alpha_expr = constant alpha
beta_expr = constant beta
mC' = A.map (beta_expr*) . A.zipWith (+) mC $ matMul mA mB
mA = A.map (alpha_expr*) $ unAccTrans pmA
mB = unAccTrans pmB
            unAccTrans pm = case pm of (NoTrans (AccMatrix m)) -> m
                                       (Trans (AccMatrix m)) -> A.transpose m
                                       (ConjTrans (AccMatrix m)) -> A.transpose m -- no complex numbers in A.Elt, so conjugate transpose is plain transpose
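-- Dense matrix product via the usual Accelerate replicate/fold idiom: 'arr' is
-- replicated along the columns of 'brr', the transpose of 'brr' along the rows
-- of 'arr', and the shared inner dimension is contracted with a fold over (+).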
matMul :: (A.Elt e, IsNum e) => Acc (Array DIM2 e) -> Acc (Array DIM2 e) -> Acc (Array DIM2 e)
matMul arr brr
= A.fold (+) 0 $ A.zipWith (*) arrRepl brrRepl
where
Z :. rowsA :. _ = A.unlift (shape arr) :: Z :. Exp Int :. Exp Int
Z :. _ :. colsB = A.unlift (shape brr) :: Z :. Exp Int :. Exp Int
arrRepl = A.replicate (A.lift $ Z :. All :. colsB :. All) arr
brrRepl = A.replicate (A.lift $ Z :. rowsA :. All :. All) (A.transpose brr)
{-
matMul2 :: (A.Elt e, IsNum e) => Acc (Array DIM2 e) -> Acc (Array DIM2 e) -> Acc (Array DIM2 e)
matMul2 arr brr =
let
(Z:.rowsA:.colsA) = A.unlift (shape arr) :: Z :. Exp Int :. Exp Int
(Z:.rowsB:.colsB) = A.unlift (shape brr) :: Z :. Exp Int :. Exp Int
-- Transpongo mB:
mB'_acc = backpermute (A.lift $ Z:.colsB:.rowsB) (\e -> A.uncurry index2 $ A.lift (A.snd $ unindex2 e, A.fst $ unindex2 e)) brr
repB = A.replicate (A.lift $ Z:.rowsA:.All:.All) mB'_acc
repA = A.replicate (A.lift $ Z:.All:.colsB:.All) arr
in A.fold1 (+) $ A.zipWith (*) repA repB
-} | mauroblanco/fpnla-examples | src/FPNLA/Operations/BLAS/Strategies/GEMM/Accelerate/DefPar.hs | bsd-3-clause | 3,424 | 0 | 14 | 1,607 | 665 | 369 | 296 | 42 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Volume.HR.Rules
( rules ) where
import Data.String
import Data.Text (Text)
import Prelude
import Duckling.Dimensions.Types
import Duckling.Types
import Duckling.Regex.Types
import Duckling.Volume.Helpers
import Duckling.Numeral.Helpers (isPositive)
import qualified Duckling.Volume.Types as TVolume
import qualified Duckling.Numeral.Types as TNumeral
volumes :: [(Text, String, TVolume.Unit)]
volumes = [ ("<latent vol> ml" , "m(l|ililita?ra?)" , TVolume.Millilitre)
, ("<vol> hectoliters" , "(hektolita?ra?)" , TVolume.Hectolitre)
, ("<vol> liters" , "l(it(a)?r(a|e)?)?" , TVolume.Litre)
, ("<latent vol> gallon", "gal(ona?)?" , TVolume.Gallon)
]
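-- The patterns above match Croatian surface forms such as "ml", "mililitra",
-- "litra", "hektolitar" and "galon".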
rulesVolumes :: [Rule]
rulesVolumes = map go volumes
where
go :: (Text, String, TVolume.Unit) -> Rule
go (name, regexPattern, u) = Rule
{ name = name
, pattern =
[ regex regexPattern
]
, prod = \_ -> Just . Token Volume $ unitOnly u
}
fractions :: [(Text, String, Double)]
fractions = [ ("half", "pola", 1/2)
]
rulesFractionalVolume :: [Rule]
rulesFractionalVolume = map go fractions
where
go :: (Text, String, Double) -> Rule
go (name, regexPattern, f) = Rule
{ name = name
, pattern =
[ regex regexPattern
, Predicate isUnitOnly
]
, prod = \case
(_:
Token Volume TVolume.VolumeData{TVolume.unit = Just u}:
_) ->
Just . Token Volume $ volume u f
_ -> Nothing
}
rules :: [Rule]
rules =
[
]
++ rulesVolumes
++ rulesFractionalVolume
| facebookincubator/duckling | Duckling/Volume/HR/Rules.hs | bsd-3-clause | 1,928 | 0 | 19 | 490 | 483 | 291 | 192 | 49 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnboxedTuples #-}
{-# OPTIONS_GHC -O2 #-}
module Haskell.Language.LexerSimple.Types
( countInputSpace
, AlexInput(..)
, aiLineL
, byteStringPos
, Context(..)
, LiterateLocation(..)
, isLiterateEnabled
, isLiterateBirdOrOutside
, isLiterateLatexOrOutside
, AlexState(..)
, mkAlexState
, alexEnterBirdLiterateEnv
, alexEnterLiterateLatexEnv
, alexExitLiterateEnv
, pushContext
, modifyCommentDepth
, modifyQuasiquoterDepth
, modifyPreprocessorDepth
, retrieveToken
, addIndentationSize
, calculateQuasiQuoteEnds
, AlexM
, runAlexM
, alexSetInput
, alexSetNextCode
, alexInputPrevChar
, dropUntilNL
, dropUntil
, dropUntil2
, alexGetByte
, unsafeTextHeadAscii
, unsafeTextHead
, utf8BS
, asCodeL
, asCommentDepthL
, asQuasiquoterDepthL
, asIndentationSizeL
, asPreprocessorDepthL
, asLiterateLocL
, asHaveQQEndL
) where
import Control.Monad.State.Strict
import Control.Monad.Writer.Strict
import Data.Char
import Data.Int
import Data.Maybe
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.Void (Void, vacuous)
import Data.Word (Word8)
import Haskell.Language.Lexer.FastTags
import Haskell.Language.Lexer.Types (LiterateStyle(..), Context(..), AlexCode(..))
import Haskell.Language.LexerSimple.LensBlaze
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString.Internal as BSI
import Foreign.ForeignPtr
import GHC.Base
import GHC.IO (IO(..))
import GHC.Ptr
import GHC.Word
{-# INLINE advanceLine #-}
advanceLine :: Char# -> Line -> Line
advanceLine '\n'# = increaseLine
advanceLine _ = id
countInputSpace :: AlexInput -> Int -> Int
countInputSpace AlexInput{aiInput} len =
utf8FoldlBounded len inc 0 aiInput
where
inc acc ' '# = acc + 1
inc acc '\t'# = acc + 8
inc acc c# = case fixChar c# of 1## -> acc + 1; _ -> acc
data AlexInput = AlexInput
{ aiInput :: {-# UNPACK #-} !(Ptr Word8)
, aiLine :: {-# UNPACK #-} !Line
} deriving (Show, Eq, Ord)
{-# INLINE aiLineL #-}
aiLineL :: Lens' AlexInput Line
aiLineL = lens aiLine (\b s -> s { aiLine = b })
{-# INLINE byteStringPos #-}
byteStringPos :: C8.ByteString -> Int
byteStringPos (BSI.PS _payload offset _len) = offset
{-# INLINE withAlexInput #-}
withAlexInput :: C8.ByteString -> (AlexInput -> a) -> a
withAlexInput s f =
case s' of
BSI.PS ptr offset _len ->
inlinePerformIO $ withForeignPtr ptr $ \ptr' -> do
let !input = set aiLineL initLine AlexInput
{ aiInput = ptr' `plusPtr` offset
, aiLine = Line 0
}
!res = f input
touchForeignPtr ptr
pure res
where
    -- Line numbering starts from 0 because we're adding an additional newline
-- at the beginning to simplify processing. Thus, line numbers in the
-- result are 1-based.
initLine = Line 0
-- Add '\0' at the end so that we'll find the end of stream (just
-- as in the old C days...)
s' = C8.cons '\n' $ C8.snoc (C8.snoc (stripBOM s) '\n') '\0'
stripBOM :: C8.ByteString -> C8.ByteString
stripBOM xs = fromMaybe xs $ C8.stripPrefix "\xEF\xBB\xBF" xs
data LiterateLocation a = LiterateInside a | LiterateOutside | Vanilla
deriving (Eq, Ord, Show, Functor)
{-# INLINE litLocToInt #-}
litLocToInt :: LiterateLocation LiterateStyle -> Int
litLocToInt = \case
Vanilla -> 0
LiterateOutside -> 1
LiterateInside Bird -> 2
LiterateInside Latex -> 3
{-# INLINE intToLitLoc #-}
intToLitLoc :: Int -> LiterateLocation LiterateStyle
intToLitLoc = \case
0 -> Vanilla
1 -> LiterateOutside
2 -> LiterateInside Bird
3 -> LiterateInside Latex
x -> error $ "Invalid literate location representation: " ++ show x
{-# INLINE isLiterateEnabled #-}
isLiterateEnabled :: LiterateLocation a -> Bool
isLiterateEnabled = \case
LiterateInside _ -> True
LiterateOutside -> True
Vanilla -> False
{-# INLINE isLiterateBirdOrOutside #-}
isLiterateBirdOrOutside :: LiterateLocation LiterateStyle -> Bool
isLiterateBirdOrOutside = \case
LiterateInside Bird -> True
LiterateInside Latex -> False
LiterateOutside -> True
Vanilla -> False
{-# INLINE isLiterateLatexOrOutside #-}
isLiterateLatexOrOutside :: LiterateLocation LiterateStyle -> Bool
isLiterateLatexOrOutside = \case
LiterateInside Bird -> False
LiterateInside Latex -> True
LiterateOutside -> True
Vanilla -> False
data AlexState = AlexState
{ asInput :: {-# UNPACK #-} !AlexInput
, asIntStore :: {-# UNPACK #-} !Word64
, asContextStack :: [Context]
} deriving (Show, Eq, Ord)
{-# INLINE asIntStoreL #-}
asIntStoreL :: Lens' AlexState Word64
asIntStoreL = lens asIntStore (\b s -> s { asIntStore = b })
{-# INLINE maybeBoolToInt #-}
maybeBoolToInt :: Maybe Bool -> Int
maybeBoolToInt = \case
Nothing -> 0
Just False -> 1
Just True -> 2
{-# INLINE intToMaybeBool #-}
intToMaybeBool :: Int -> Maybe Bool
intToMaybeBool = \case
0 -> Nothing
1 -> Just False
2 -> Just True
x -> error $ "Invalid integer representation of 'Maybe Bool': " ++ show x
{-# INLINE asCodeL #-}
{-# INLINE asCommentDepthL #-}
{-# INLINE asQuasiquoterDepthL #-}
{-# INLINE asIndentationSizeL #-}
{-# INLINE asPreprocessorDepthL #-}
{-# INLINE asLiterateLocL #-}
{-# INLINE asHaveQQEndL #-}
-- | Current Alex state the lexer is in. E.g. comments, string, TH quasiquoter
-- or vanilla toplevel mode.
asCodeL :: Lens' AlexState AlexCode
asCommentDepthL, asQuasiquoterDepthL, asIndentationSizeL :: Lens' AlexState Int16
-- | How many directives deep are we.
asPreprocessorDepthL :: Lens' AlexState Int16
-- | Whether we're in bird-style or latex-style literate environment
asLiterateLocL :: Lens' AlexState (LiterateLocation LiterateStyle)
asHaveQQEndL :: Lens' AlexState (Maybe Bool)
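-- All of these fields are packed into the single 'Word64' of 'asIntStore':
-- bits 0-3 hold the Alex code, 4-13 the comment depth, 14-23 the quasiquoter
-- depth, 24-39 the indentation size, 40-55 the preprocessor depth, 56-57 the
-- literate location and 58-59 the quasiquote-end flag.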
asCodeL = asIntStoreL . int16L' 0 0x000f
asCommentDepthL = asIntStoreL . int16L' 4 0x03ff
asQuasiquoterDepthL = asIntStoreL . int16L' 14 0x03ff
asIndentationSizeL = asIntStoreL . int16L 24
asPreprocessorDepthL = asIntStoreL . int16L 40
asLiterateLocL = \f -> asIntStoreL (int16L' 56 0x0003 (fmap litLocToInt . f . intToLitLoc))
asHaveQQEndL = \f -> asIntStoreL (int16L' 58 0x0003 (fmap maybeBoolToInt . f . intToMaybeBool))
mkAlexState :: LiterateLocation Void -> AlexCode -> AlexInput -> AlexState
mkAlexState litLoc startCode input =
set asCodeL startCode $
set asLiterateLocL (vacuous litLoc) AlexState
{ asInput = input
, asIntStore = 0
, asContextStack = []
}
{-# INLINE alexEnterBirdLiterateEnv #-}
alexEnterBirdLiterateEnv :: MonadState AlexState m => m ()
alexEnterBirdLiterateEnv =
modify $ set asLiterateLocL (LiterateInside Bird)
{-# INLINE alexEnterLiterateLatexEnv #-}
alexEnterLiterateLatexEnv :: MonadState AlexState m => m ()
alexEnterLiterateLatexEnv =
modify $ set asLiterateLocL (LiterateInside Latex)
{-# INLINE alexExitLiterateEnv #-}
alexExitLiterateEnv :: MonadState AlexState m => m ()
alexExitLiterateEnv =
modify $ set asLiterateLocL LiterateOutside
{-# INLINE pushContext #-}
pushContext :: MonadState AlexState m => Context -> m ()
pushContext ctx = modify (\s -> s { asContextStack = ctx : asContextStack s })
{-# INLINE modifyCommentDepth #-}
modifyCommentDepth :: MonadState AlexState m => (Int16 -> Int16) -> m Int16
modifyCommentDepth f = do
depth <- gets (view asCommentDepthL)
let depth' = f depth
modify $ \s -> set asCommentDepthL depth' s
pure depth'
{-# INLINE modifyQuasiquoterDepth #-}
modifyQuasiquoterDepth :: MonadState AlexState m => (Int16 -> Int16) -> m Int16
modifyQuasiquoterDepth f = do
depth <- gets (view asQuasiquoterDepthL)
let depth' = f depth
modify $ \s -> set asQuasiquoterDepthL depth' s
pure depth'
{-# INLINE modifyPreprocessorDepth #-}
modifyPreprocessorDepth :: MonadState AlexState m => (Int16 -> Int16) -> m Int16
modifyPreprocessorDepth f = do
depth <- gets (view asPreprocessorDepthL)
let depth' = f depth
modify $ \s -> set asPreprocessorDepthL depth' s
pure depth'
{-# INLINE retrieveToken #-}
retrieveToken :: AlexInput -> Int -> T.Text
retrieveToken AlexInput{aiInput} len =
TE.decodeUtf8 $ utf8BS len aiInput
{-# INLINE addIndentationSize #-}
addIndentationSize :: MonadState AlexState m => Int16 -> m ()
addIndentationSize x =
modify (over asIndentationSizeL (+ x))
data QQEndsState = QQEndsState
{ qqessPresent :: !Bool
, qqessPrevChar :: !Char#
}
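-- | Scan the remaining input for a quasiquote terminator, i.e. the two
-- character sequence "|]" or the unicode bracket '⟧'.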
calculateQuasiQuoteEnds :: Ptr Word8 -> Bool
calculateQuasiQuoteEnds =
qqessPresent . utf8Foldl' combine (QQEndsState False '\n'#)
where
combine :: QQEndsState -> Char# -> QQEndsState
combine QQEndsState{qqessPresent, qqessPrevChar} c# = QQEndsState
{ qqessPresent =
qqessPresent ||
case (# qqessPrevChar, c# #) of
(# '|'#, ']'# #) -> True
(# _, '⟧'# #) -> True
_ -> False
, qqessPrevChar = c#
}
type AlexM = WriterT [Pos ServerToken] (State AlexState)
{-# INLINE runAlexM #-}
runAlexM
:: LiterateLocation Void
-> AlexCode
-> C8.ByteString
-> AlexM ()
-> [Pos ServerToken]
runAlexM litLoc startCode input action =
withAlexInput input $ \input' ->
evalState (execWriterT action) $ mkAlexState litLoc startCode input'
{-# INLINE alexSetInput #-}
alexSetInput :: MonadState AlexState m => AlexInput -> m ()
alexSetInput input = modify $ \s -> s { asInput = input }
{-# INLINE alexSetNextCode #-}
alexSetNextCode :: MonadState AlexState m => AlexCode -> m ()
alexSetNextCode code = modify $ set asCodeL code
-- Alex interface
{-# INLINE alexInputPrevChar #-}
alexInputPrevChar :: AlexInput -> Char
alexInputPrevChar = const '\0'
{-# INLINE dropUntilNL #-}
dropUntilNL :: AlexInput -> AlexInput
dropUntilNL input@AlexInput{aiInput} =
input { aiInput = dropUntilNL# aiInput }
{-# INLINE dropUntil #-}
dropUntil :: Word8 -> AlexInput -> AlexInput
dropUntil w input@AlexInput{aiInput} =
input { aiInput = dropUntil# w aiInput }
{-# INLINE dropUntil2 #-}
dropUntil2 :: Word8 -> Word8 -> AlexInput -> AlexInput
dropUntil2 w1 w2 input@AlexInput{aiInput} =
input { aiInput = dropUntil2# w1 w2 aiInput }
{-# INLINE alexGetByte #-}
alexGetByte :: AlexInput -> Maybe (Word8, AlexInput)
alexGetByte input@AlexInput{aiInput} =
case nextChar aiInput of
(C# '\0'#, _) -> Nothing
(C# c#, cs) -> Just (b, input')
where
!b = W8# (fixChar c#)
input' = over aiLineL (advanceLine c#) $ input { aiInput = cs }
-- Translate a unicode character into the special symbol we taught Alex to recognize.
{-# INLINE fixChar #-}
fixChar :: Char# -> Word#
fixChar = \case
-- These should not be translated since Alex knows about them
'→'# -> reservedSym
'∷'# -> reservedSym
'⇒'# -> reservedSym
'∀'# -> reservedSym
'⦇'# -> reservedSym
'⦈'# -> reservedSym
'⟦'# -> reservedSym
'⟧'# -> reservedSym
'\x01'# -> other
'\x02'# -> other
'\x03'# -> other
'\x04'# -> other
'\x05'# -> other
'\x06'# -> other
'\x07'# -> other
c# -> case ord# c# of
c2# | isTrue# (c2# <=# 0x7f#) ->
int2Word# c2# -- Plain ascii needs no fixing.
| otherwise ->
case generalCategory (C# c#) of
UppercaseLetter -> upper
LowercaseLetter -> lower
TitlecaseLetter -> upper
ModifierLetter -> suffix
NonSpacingMark -> suffix
OtherLetter -> lower
DecimalNumber -> digit
OtherNumber -> digit
Space -> space
ConnectorPunctuation -> symbol
DashPunctuation -> symbol
OtherPunctuation -> symbol
MathSymbol -> symbol
CurrencySymbol -> symbol
ModifierSymbol -> symbol
OtherSymbol -> symbol
_ -> other
where
other, space, upper, lower, symbol, digit, suffix, reservedSym :: Word#
other = 0x00## -- Don't care about these
space = 0x01##
upper = 0x02##
lower = 0x03##
symbol = 0x04##
digit = 0x05##
suffix = 0x06##
reservedSym = 0x07##
{-# INLINE unsafeTextHeadAscii #-}
unsafeTextHeadAscii :: Ptr Word8 -> Word8
unsafeTextHeadAscii (Ptr ptr#) = W8# (indexWord8OffAddr# ptr# 0#)
{-# INLINE unsafeTextHead #-}
unsafeTextHead :: Ptr Word8 -> Char
unsafeTextHead = fst . nextChar
{-# INLINE nextChar #-}
nextChar :: Ptr Word8 -> (Char, Ptr Word8)
nextChar (Ptr ptr#) =
case utf8DecodeChar# ptr# of
(# c#, nBytes# #) -> (C# c#, Ptr (ptr# `plusAddr#` nBytes#))
{-# INLINE dropUntilNL# #-}
dropUntilNL# :: Ptr Word8 -> Ptr Word8
dropUntilNL# (Ptr start#) = Ptr (go start#)
where
go :: Addr# -> Addr#
go ptr# = case indexWord8OffAddr# ptr# 0# of
0## -> ptr#
10## -> ptr# -- '\n'
_ -> go (ptr# `plusAddr#` 1#)
{-# INLINE dropUntil# #-}
dropUntil# :: Word8 -> Ptr Word8 -> Ptr Word8
dropUntil# (W8# w#) (Ptr start#) = Ptr (go start#)
where
go :: Addr# -> Addr#
go ptr# = case indexWord8OffAddr# ptr# 0# of
0## -> ptr#
10## -> ptr# -- '\n'
c# | isTrue# (c# `eqWord#` w#) -> ptr#
| otherwise -> go (ptr# `plusAddr#` 1#)
{-# INLINE dropUntil2# #-}
dropUntil2# :: Word8 -> Word8 -> Ptr Word8 -> Ptr Word8
dropUntil2# (W8# w1#) (W8# w2#) (Ptr start#) = Ptr (go start#)
where
go :: Addr# -> Addr#
go ptr# = case indexWord8OffAddr# ptr# 0# of
0## -> ptr#
10## -> ptr# -- '\n'
c# | isTrue# ((c# `eqWord#` w1#) `orI#` (c# `eqWord#` w2#)) -> ptr#
| otherwise -> go (ptr# `plusAddr#` 1#)
{-# INLINE utf8Foldl' #-}
utf8Foldl' :: forall a. (a -> Char# -> a) -> a -> Ptr Word8 -> a
utf8Foldl' f x0 (Ptr ptr#) =
go x0 ptr#
where
go :: a -> Addr# -> a
go !acc addr# =
case utf8DecodeChar# addr# of
(# _, 0# #) -> acc
(# c#, nBytes# #) -> go (acc `f` c#) (addr# `plusAddr#` nBytes#)
{-# INLINE utf8FoldlBounded #-}
utf8FoldlBounded :: forall a. Int -> (a -> Char# -> a) -> a -> Ptr Word8 -> a
utf8FoldlBounded (I# len#) f x0 (Ptr ptr#) =
go len# x0 ptr#
where
go :: Int#-> a -> Addr# -> a
go 0# !acc _ = acc
go n# !acc addr# =
case utf8DecodeChar# addr# of
(# _, 0# #) -> acc
(# c#, nBytes# #) -> go (n# -# 1#) (acc `f` c#) (addr# `plusAddr#` nBytes#)
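-- View the first n characters of the buffer as a ByteString without
-- copying; the byte length is measured with utf8SizeChar#.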
{-# INLINE utf8BS #-}
utf8BS :: Int -> Ptr Word8 -> BS.ByteString
utf8BS (I# n#) (Ptr start#) =
BSI.PS (inlinePerformIO (newForeignPtr_ (Ptr start#))) 0 (I# (go n# start# 0#))
where
go :: Int# -> Addr# -> Int# -> Int#
go 0# _ m# = m#
go k# ptr# m# =
case utf8SizeChar# ptr# of
0# -> m#
nBytes# -> go (k# -# 1#) (ptr# `plusAddr#` nBytes#) (m# +# nBytes#)
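-- Run an IO action without sequencing it against other IO; used by utf8BS
-- above to wrap a pointer into a ForeignPtr without entering IO.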
{-# INLINE inlinePerformIO #-}
inlinePerformIO :: IO a -> a
inlinePerformIO (IO m) = case m realWorld# of (# _, r #) -> r
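-- Decode a single UTF-8 code point: returns the character together with
-- the number of bytes it occupies; a NUL byte decodes to ('\0', 0) and
-- malformed sequences yield the error character produced by err.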
{-# INLINE utf8DecodeChar# #-}
utf8DecodeChar# :: Addr# -> (# Char#, Int# #)
utf8DecodeChar# a# =
case indexWord8OffAddr# a# 0# of
0## -> (# '\0'#, 0# #)
!x# ->
let !ch0 = word2Int# x# in
case () of
() | isTrue# (ch0 <=# 0x7F#) -> (# chr# ch0, 1# #)
| isTrue# ((ch0 >=# 0xC0#) `andI#` (ch0 <=# 0xDF#)) ->
let !ch1 = word2Int# (indexWord8OffAddr# a# 1#) in
if isTrue# ((ch1 <# 0x80#) `orI#` (ch1 >=# 0xC0#)) then err 1# else
(# chr# (((ch0 -# 0xC0#) `uncheckedIShiftL#` 6#) +#
(ch1 -# 0x80#)),
2# #)
| isTrue# ((ch0 >=# 0xE0#) `andI#` (ch0 <=# 0xEF#)) ->
let !ch1 = word2Int# (indexWord8OffAddr# a# 1#) in
if isTrue# ((ch1 <# 0x80#) `orI#` (ch1 >=# 0xC0#)) then err 1# else
let !ch2 = word2Int# (indexWord8OffAddr# a# 2#) in
if isTrue# ((ch2 <# 0x80#) `orI#` (ch2 >=# 0xC0#)) then err 2# else
(# chr# (((ch0 -# 0xE0#) `uncheckedIShiftL#` 12#) +#
((ch1 -# 0x80#) `uncheckedIShiftL#` 6#) +#
(ch2 -# 0x80#)),
3# #)
| isTrue# ((ch0 >=# 0xF0#) `andI#` (ch0 <=# 0xF8#)) ->
let !ch1 = word2Int# (indexWord8OffAddr# a# 1#) in
if isTrue# ((ch1 <# 0x80#) `orI#` (ch1 >=# 0xC0#)) then err 1# else
let !ch2 = word2Int# (indexWord8OffAddr# a# 2#) in
if isTrue# ((ch2 <# 0x80#) `orI#` (ch2 >=# 0xC0#)) then err 2# else
let !ch3 = word2Int# (indexWord8OffAddr# a# 3#) in
if isTrue# ((ch3 <# 0x80#) `orI#` (ch3 >=# 0xC0#)) then err 3# else
(# chr# (((ch0 -# 0xF0#) `uncheckedIShiftL#` 18#) +#
((ch1 -# 0x80#) `uncheckedIShiftL#` 12#) +#
((ch2 -# 0x80#) `uncheckedIShiftL#` 6#) +#
(ch3 -# 0x80#)),
4# #)
| otherwise -> err 1#
where
-- all invalid sequences end up here:
err :: Int# -> (# Char#, Int# #)
err nBytes# = (# '\8'#, nBytes# #)
    -- TODO: check whether the following note from GHC applies to the server's lexer:
-- '\xFFFD' would be the usual replacement character, but
-- that's a valid symbol in Haskell, so will result in a
-- confusing parse error later on. Instead we use '\0' which
-- will signal a lexer error immediately.
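-- Byte length of the UTF-8 character at the pointer, computed without
-- decoding it; 0 at the terminating NUL, and for malformed input the
-- number of bytes scanned before the offending continuation byte.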
{-# INLINE utf8SizeChar# #-}
utf8SizeChar# :: Addr# -> Int#
utf8SizeChar# a# =
case indexWord8OffAddr# a# 0# of
0## -> 0#
!x# ->
let !ch0 = word2Int# x# in
case () of
_ | isTrue# (ch0 <=# 0x7F#) -> 1#
| isTrue# ((ch0 >=# 0xC0#) `andI#` (ch0 <=# 0xDF#)) ->
let !ch1 = word2Int# (indexWord8OffAddr# a# 1#) in
if isTrue# ((ch1 <# 0x80#) `orI#` (ch1 >=# 0xC0#)) then 1# else
2#
| isTrue# ((ch0 >=# 0xE0#) `andI#` (ch0 <=# 0xEF#)) ->
let !ch1 = word2Int# (indexWord8OffAddr# a# 1#) in
if isTrue# ((ch1 <# 0x80#) `orI#` (ch1 >=# 0xC0#)) then 1# else
let !ch2 = word2Int# (indexWord8OffAddr# a# 2#) in
if isTrue# ((ch2 <# 0x80#) `orI#` (ch2 >=# 0xC0#)) then 2# else
3#
| isTrue# ((ch0 >=# 0xF0#) `andI#` (ch0 <=# 0xF8#)) ->
let !ch1 = word2Int# (indexWord8OffAddr# a# 1#) in
if isTrue# ((ch1 <# 0x80#) `orI#` (ch1 >=# 0xC0#)) then 1# else
let !ch2 = word2Int# (indexWord8OffAddr# a# 2#) in
if isTrue# ((ch2 <# 0x80#) `orI#` (ch2 >=# 0xC0#)) then 2# else
let !ch3 = word2Int# (indexWord8OffAddr# a# 3#) in
if isTrue# ((ch3 <# 0x80#) `orI#` (ch3 >=# 0xC0#)) then 3# else
4#
| otherwise -> 1#
| sergv/tags-server | src/Haskell/Language/LexerSimple/Types.hs | bsd-3-clause | 19,337 | 0 | 32 | 5,072 | 5,489 | 2,907 | 2,582 | 481 | 32 |
module Generate.JavaScript.BuiltIn where
import qualified Language.ECMAScript3.Syntax as JS
import Generate.JavaScript.Helpers
import qualified Reporting.Region as R
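-- Reference a function of the generated _utils object and apply it to the
-- given arguments.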
utils :: String -> [JS.Expression ()] -> JS.Expression ()
utils func args =
obj ["_utils", func] `call` args
-- LITERALS
character :: Char -> JS.Expression ()
character char =
utils "chr" [ JS.StringLit () [char] ]
string :: String -> JS.Expression ()
string str =
JS.StringLit () str
-- LISTS
list :: [JS.Expression ()] -> JS.Expression ()
list elements =
utils "list" [ JS.ArrayLit () elements ]
range :: JS.Expression () -> JS.Expression () -> JS.Expression ()
range low high =
utils "range" [ low, high ]
-- RECORDS
recordUpdate :: JS.Expression () -> [(String, JS.Expression ())] -> JS.Expression ()
recordUpdate record fields =
utils "update" [ record, JS.ArrayLit () (map toKeyValue fields) ]
toKeyValue :: (String, JS.Expression ()) -> JS.Expression ()
toKeyValue (key, value) =
JS.ArrayLit () [ JS.StringLit () key, value ]
-- COMPARISONS
eq :: JS.Expression () -> JS.Expression () -> JS.Expression ()
eq left right =
utils "eq" [ left, right ]
compare :: JS.Expression () -> JS.Expression () -> JS.Expression ()
compare left right =
utils "compare" [ left, right ]
-- CRASH
crash :: R.Region -> Maybe String -> JS.Expression ()
crash region maybeCaseCrashValue =
case maybeCaseCrashValue of
Nothing ->
utils "crash" [ regionToJs region ]
Just crashValue ->
utils "caseCrash" [ regionToJs region, ref crashValue ]
regionToJs :: R.Region -> JS.Expression ()
regionToJs (R.Region start end) =
JS.ObjectLit ()
[ ( prop "start", positionToJs start )
, ( prop "end", positionToJs end )
]
positionToJs :: R.Position -> JS.Expression ()
positionToJs (R.Position line column) =
JS.ObjectLit ()
[ ( prop "line", JS.IntLit () line )
, ( prop "column", JS.IntLit () column )
] | Axure/elm-compiler | src/Generate/JavaScript/BuiltIn.hs | bsd-3-clause | 1,961 | 0 | 10 | 405 | 775 | 397 | 378 | 48 | 2 |
{-# LANGUAGE QuasiQuotes #-}
module Output.Player where
import Control.Lens
import NewTTRS.Law
import Text.Hamlet
import qualified Data.Map as Map
import Event
import Output.Formatting
import Output.Common
import DataStore
import Player
import Snap.Snaplet.SqliteSimple
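-- Render the report page for a single player: one table row per event,
-- showing the rating law (mean and standard deviation) after that event
-- plus a placeholder div for its graph.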
playerPage :: (Functor m, HasSqlite m) => PlayerId -> m Html
playerPage playerId = do
Just player <- getPlayerById playerId
events <- getLawsForPlayer playerId
let rows = reverse $ Map.toList events
let laws = map (snd.snd) rows
return $ [shamlet|
$doctype 5
<html>
<head>
^{metaTags}
<title>Player
<link rel=stylesheet type=text/css href=/static/common.css>
<link rel=stylesheet type=text/css href=/static/ratings.css>
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.5/jquery.min.js">
<script language=javascript src=/static/jquery.flot.js>
^{graphInclude laws}
<body>
^{navigationLinks}
<h1>Report for #{view playerName player}
<table .data>
<tr>
<th>Event
<th colspan=3>Rating
<tr>
<th>Date
<th>μ
<th>σ
<th>Graph
$forall (i,(eventId,(event,law))) <- itoList rows
<tr :odd i:.alt>
<td>
<a href=#{mkEventUrl eventId}>
#{formatShortDay $ view eventDay event}
<td .num>#{showRound $ lawMean law}
<td .num>#{showRound $ lawStddev law}
<td>
<div #graph#{i} .bargraph>
|]
| glguy/tt-ratings | Output/Player.hs | bsd-3-clause | 1,485 | 0 | 12 | 375 | 161 | 87 | 74 | 19 | 1 |
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE RecordWildCards #-}
import Control.Monad
import Control.Monad.IO.Class
import Data.List
import qualified Data.Map as Map
import Text.Groom
import Text.PrettyPrint.ANSI.Leijen
import Language.Haskell.Exts.Pretty (prettyPrint)
import System.Directory
import System.Environment
import System.Console.GetOpt
import System.Exit
----------------------------------------------------------------
import Data.Protobuf.FileIO
import Data.Protobuf.AST hiding (Option)
import Data.Protobuf.Types
import Data.Protobuf.DataTree
import Data.Protobuf.Transform
import Data.Protobuf.CodeGen
main :: IO ()
main = do
pars <- parseArgs
let cxt = PbContext { includePaths = "." : includes pars
}
res <- runPbMonad cxt $ do
-- Read protobuf files
ast <- readProtobuf (fileList pars)
when (dumpAST pars) $
liftIO $ dumpBundle ast
-- Check AST
applyBundleM_ checkLabels ast
-- Name mangling etc...
let ast1 = applyBundle mangleNames
$ applyBundle sortLabels ast
ast2 <- applyBundleM removePackage ast1
ast3 <- applyBundleM buildNamespace ast2
pb1 <- resolveImports ast3
pb2 <- mapM resolveTypeNames pb1
-- Convert to haskell
DataTree hask <- toHaskellTree pb2
-- Pretty print haskell code
liftIO $ do
setCurrentDirectory (outputDir pars)
mapM_ dump (Map.toList hask)
-- Check for errors
case res of
Right _ -> return ()
Left err -> putStrLn err >> exitFailure
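-- Write one generated module to disk, creating the directory structure
-- implied by its qualified module name.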
dump :: ([Identifier TagType], HsModule) -> IO ()
dump m@(map identifier -> qs, _) = do
let dir = intercalate "/" (init qs)
file = last qs ++ ".hs"
createDirectoryIfMissing True dir
writeFile ("./" ++ dir ++ "/" ++ file) ((prettyPrint $ convert m) ++ "\n")
dumpPB :: Show a => ProtobufFile a -> IO ()
dumpPB pb = do
putStrLn (groom pb)
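-- Debugging aid: print the import map and the parsed AST of every package
-- in the bundle.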
dumpBundle :: Show a => Bundle a -> IO ()
dumpBundle (Bundle{..}) = do
putStrLn "\n\n\n\n"
let ln = putDoc $ red $ text "================================================================\n"
ln
mapM_ print $ Map.toList importMap
ln
forM_ (Map.toList packageMap) $ \(path, pb) -> do
putDoc $
blue (text "==== <<< ") <>
green (text path) <>
blue (text " >>> ================\n")
dumpPB pb
----------------------------------------------------------------
-- Command line parameters
----------------------------------------------------------------
data Parameters = Parameters
{ fileList :: [FilePath]
, includes :: [FilePath]
, outputDir :: FilePath
, dumpAST :: Bool
}
defaultParameters :: [FilePath] -> Parameters
defaultParameters fs = Parameters
{ fileList = fs
, includes = []
, outputDir = "."
, dumpAST = False
}
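-- Parse command line flags; non-option arguments become the input file
-- list, and unrecognized options are reported before exiting with failure.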
parseArgs :: IO Parameters
parseArgs = do
args <- getArgs
case getOpt Permute opts args of
(xs,files,[]) -> return $ foldl (flip ($)) (defaultParameters files) xs
(_,_,errs) -> do mapM_ putStrLn errs
exitFailure
where
opts =
[ Option [] ["out_hs"]
(ReqArg (\arg p -> p { outputDir = arg }) "DIR")
"Directory to put generate files to"
, Option ['I'] []
(ReqArg (\arg p -> p { includes = includes p ++ [arg] }) "DIR")
"Include directory"
-- Dump flags
, Option [] ["dump-ast"]
(NoArg $ \p -> p { dumpAST = True })
"Dump AST"
]
| Shimuuar/protobuf | protobuf-grammar/driver.hs | bsd-3-clause | 3,458 | 0 | 17 | 850 | 1,053 | 544 | 509 | 91 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module OpenRTB.Types.Enum.ExpandableDirectionSpec where
import Control.Applicative
import Data.Aeson
import Data.Aeson.TH
import Test.Hspec
import Test.QuickCheck
import Test.Instances
import OpenRTB.Types.Enum.ExpandableDirection as ED
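-- Wrapper record so the enum round-trips through a JSON object field.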
data Mock = Mock { ed :: ExpandableDirection } deriving (Eq, Show)
$(deriveJSON defaultOptions ''Mock)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "ExpandableDirection" $ do
context "JSON" $ do
it "should convert back and forth" $ property $ do
\m -> (decode . encode) m == Just (m :: Mock)
instance Arbitrary Mock where
arbitrary = Mock <$> arbitrary
| ankhers/openRTB-hs | spec/OpenRTB/Types/Enum/ExpandableDirectionSpec.hs | bsd-3-clause | 694 | 0 | 18 | 113 | 197 | 108 | 89 | 21 | 1 |