| code (string, 5 to 1.03M chars) | repo_name (string, 5 to 90 chars) | path (string, 4 to 158 chars) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
import Text.MoodleMD.Types
import Text.MoodleMD.Reader
import Text.MoodleMD.WriterXML
import System.Environment
import Control.Applicative
import System.Console.ArgParser
data Config = Config {input :: FilePath} deriving Show
cliParser :: ParserSpec Config
cliParser = Config
`parsedBy` optPos "" "input-file" `Descr` "input file name; leave blank for reading from stdin"
main :: IO ()
main = withParseResult cliParser run
run :: Config -> IO ()
run (Config inFile) = do
input <- if inFile == "" then getContents else readFile inFile
let result = either (error.show) id . parseMoodleMD $ input
rendered <- renderQs result
putStrLn rendered
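-- Example invocations (illustrative only; the actual executable name depends
-- on how the cabal project names it):
--
-- > moodle-md questions.md
-- > cat questions.md | moodle-md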
| uulm-ai/moodle-md | src/Main.hs | gpl-3.0 | 666 | 0 | 14 | 115 | 196 | 104 | 92 | 18 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Storage.DefaultObjectAccessControls.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a new default object ACL entry on the specified bucket.
--
-- /See:/ <https://developers.google.com/storage/docs/json_api/ Cloud Storage JSON API Reference> for @storage.defaultObjectAccessControls.insert@.
module Network.Google.Resource.Storage.DefaultObjectAccessControls.Insert
(
-- * REST Resource
DefaultObjectAccessControlsInsertResource
-- * Creating a Request
, defaultObjectAccessControlsInsert
, DefaultObjectAccessControlsInsert
-- * Request Lenses
, doaciBucket
, doaciPayload
, doaciUserProject
, doaciProvisionalUserProject
) where
import Network.Google.Prelude
import Network.Google.Storage.Types
-- | A resource alias for @storage.defaultObjectAccessControls.insert@ method which the
-- 'DefaultObjectAccessControlsInsert' request conforms to.
type DefaultObjectAccessControlsInsertResource =
"storage" :>
"v1" :>
"b" :>
Capture "bucket" Text :>
"defaultObjectAcl" :>
QueryParam "userProject" Text :>
QueryParam "provisionalUserProject" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] ObjectAccessControl :>
Post '[JSON] ObjectAccessControl
-- | Creates a new default object ACL entry on the specified bucket.
--
-- /See:/ 'defaultObjectAccessControlsInsert' smart constructor.
data DefaultObjectAccessControlsInsert =
DefaultObjectAccessControlsInsert'
{ _doaciBucket :: !Text
, _doaciPayload :: !ObjectAccessControl
, _doaciUserProject :: !(Maybe Text)
, _doaciProvisionalUserProject :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DefaultObjectAccessControlsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'doaciBucket'
--
-- * 'doaciPayload'
--
-- * 'doaciUserProject'
--
-- * 'doaciProvisionalUserProject'
defaultObjectAccessControlsInsert
:: Text -- ^ 'doaciBucket'
-> ObjectAccessControl -- ^ 'doaciPayload'
-> DefaultObjectAccessControlsInsert
defaultObjectAccessControlsInsert pDoaciBucket_ pDoaciPayload_ =
DefaultObjectAccessControlsInsert'
{ _doaciBucket = pDoaciBucket_
, _doaciPayload = pDoaciPayload_
, _doaciUserProject = Nothing
, _doaciProvisionalUserProject = Nothing
}
-- | Name of a bucket.
doaciBucket :: Lens' DefaultObjectAccessControlsInsert Text
doaciBucket
= lens _doaciBucket (\ s a -> s{_doaciBucket = a})
-- | Multipart request metadata.
doaciPayload :: Lens' DefaultObjectAccessControlsInsert ObjectAccessControl
doaciPayload
= lens _doaciPayload (\ s a -> s{_doaciPayload = a})
-- | The project to be billed for this request. Required for Requester Pays
-- buckets.
doaciUserProject :: Lens' DefaultObjectAccessControlsInsert (Maybe Text)
doaciUserProject
= lens _doaciUserProject
(\ s a -> s{_doaciUserProject = a})
-- | The project to be billed for this request if the target bucket is
-- a requester-pays bucket.
doaciProvisionalUserProject :: Lens' DefaultObjectAccessControlsInsert (Maybe Text)
doaciProvisionalUserProject
= lens _doaciProvisionalUserProject
(\ s a -> s{_doaciProvisionalUserProject = a})
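-- A minimal usage sketch (added for illustration; not part of the generated
-- module). It assumes a hypothetical bucket name, an 'ObjectAccessControl'
-- value @acl@, and the lens operators '(&)' and '(.~)' in scope (e.g. from
-- Control.Lens):
--
-- > defaultObjectAccessControlsInsert "my-bucket" acl
-- >   & doaciUserProject .~ Just "my-billing-project"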
instance GoogleRequest
DefaultObjectAccessControlsInsert
where
type Rs DefaultObjectAccessControlsInsert =
ObjectAccessControl
type Scopes DefaultObjectAccessControlsInsert =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/devstorage.full_control"]
requestClient DefaultObjectAccessControlsInsert'{..}
= go _doaciBucket _doaciUserProject
_doaciProvisionalUserProject
(Just AltJSON)
_doaciPayload
storageService
where go
= buildClient
(Proxy ::
Proxy DefaultObjectAccessControlsInsertResource)
mempty
| brendanhay/gogol | gogol-storage/gen/Network/Google/Resource/Storage/DefaultObjectAccessControls/Insert.hs | mpl-2.0 | 4,860 | 0 | 16 | 1,052 | 551 | 326 | 225 | 91 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudResourceManager.Projects.TestIAMPermissions
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns permissions that a caller has on the specified Project.
--
-- /See:/ <https://cloud.google.com/resource-manager Google Cloud Resource Manager API Reference> for @cloudresourcemanager.projects.testIamPermissions@.
module Network.Google.Resource.CloudResourceManager.Projects.TestIAMPermissions
(
-- * REST Resource
ProjectsTestIAMPermissionsResource
-- * Creating a Request
, projectsTestIAMPermissions
, ProjectsTestIAMPermissions
-- * Request Lenses
, ptipXgafv
, ptipUploadProtocol
, ptipPp
, ptipAccessToken
, ptipUploadType
, ptipPayload
, ptipBearerToken
, ptipResource
, ptipCallback
) where
import Network.Google.Prelude
import Network.Google.ResourceManager.Types
-- | A resource alias for @cloudresourcemanager.projects.testIamPermissions@ method which the
-- 'ProjectsTestIAMPermissions' request conforms to.
type ProjectsTestIAMPermissionsResource =
"v1" :>
"projects" :>
CaptureMode "resource" "testIamPermissions" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TestIAMPermissionsRequest :>
Post '[JSON] TestIAMPermissionsResponse
-- | Returns permissions that a caller has on the specified Project.
--
-- /See:/ 'projectsTestIAMPermissions' smart constructor.
data ProjectsTestIAMPermissions = ProjectsTestIAMPermissions'
{ _ptipXgafv :: !(Maybe Xgafv)
, _ptipUploadProtocol :: !(Maybe Text)
, _ptipPp :: !Bool
, _ptipAccessToken :: !(Maybe Text)
, _ptipUploadType :: !(Maybe Text)
, _ptipPayload :: !TestIAMPermissionsRequest
, _ptipBearerToken :: !(Maybe Text)
, _ptipResource :: !Text
, _ptipCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProjectsTestIAMPermissions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ptipXgafv'
--
-- * 'ptipUploadProtocol'
--
-- * 'ptipPp'
--
-- * 'ptipAccessToken'
--
-- * 'ptipUploadType'
--
-- * 'ptipPayload'
--
-- * 'ptipBearerToken'
--
-- * 'ptipResource'
--
-- * 'ptipCallback'
projectsTestIAMPermissions
:: TestIAMPermissionsRequest -- ^ 'ptipPayload'
-> Text -- ^ 'ptipResource'
-> ProjectsTestIAMPermissions
projectsTestIAMPermissions pPtipPayload_ pPtipResource_ =
ProjectsTestIAMPermissions'
{ _ptipXgafv = Nothing
, _ptipUploadProtocol = Nothing
, _ptipPp = True
, _ptipAccessToken = Nothing
, _ptipUploadType = Nothing
, _ptipPayload = pPtipPayload_
, _ptipBearerToken = Nothing
, _ptipResource = pPtipResource_
, _ptipCallback = Nothing
}
-- | V1 error format.
ptipXgafv :: Lens' ProjectsTestIAMPermissions (Maybe Xgafv)
ptipXgafv
= lens _ptipXgafv (\ s a -> s{_ptipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ptipUploadProtocol :: Lens' ProjectsTestIAMPermissions (Maybe Text)
ptipUploadProtocol
= lens _ptipUploadProtocol
(\ s a -> s{_ptipUploadProtocol = a})
-- | Pretty-print response.
ptipPp :: Lens' ProjectsTestIAMPermissions Bool
ptipPp = lens _ptipPp (\ s a -> s{_ptipPp = a})
-- | OAuth access token.
ptipAccessToken :: Lens' ProjectsTestIAMPermissions (Maybe Text)
ptipAccessToken
= lens _ptipAccessToken
(\ s a -> s{_ptipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ptipUploadType :: Lens' ProjectsTestIAMPermissions (Maybe Text)
ptipUploadType
= lens _ptipUploadType
(\ s a -> s{_ptipUploadType = a})
-- | Multipart request metadata.
ptipPayload :: Lens' ProjectsTestIAMPermissions TestIAMPermissionsRequest
ptipPayload
= lens _ptipPayload (\ s a -> s{_ptipPayload = a})
-- | OAuth bearer token.
ptipBearerToken :: Lens' ProjectsTestIAMPermissions (Maybe Text)
ptipBearerToken
= lens _ptipBearerToken
(\ s a -> s{_ptipBearerToken = a})
-- | REQUIRED: The resource for which the policy detail is being requested.
-- \`resource\` is usually specified as a path. For example, a Project
-- resource is specified as \`projects\/{project}\`.
ptipResource :: Lens' ProjectsTestIAMPermissions Text
ptipResource
= lens _ptipResource (\ s a -> s{_ptipResource = a})
-- | JSONP
ptipCallback :: Lens' ProjectsTestIAMPermissions (Maybe Text)
ptipCallback
= lens _ptipCallback (\ s a -> s{_ptipCallback = a})
instance GoogleRequest ProjectsTestIAMPermissions
where
type Rs ProjectsTestIAMPermissions =
TestIAMPermissionsResponse
type Scopes ProjectsTestIAMPermissions =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only"]
requestClient ProjectsTestIAMPermissions'{..}
= go _ptipResource _ptipXgafv _ptipUploadProtocol
(Just _ptipPp)
_ptipAccessToken
_ptipUploadType
_ptipBearerToken
_ptipCallback
(Just AltJSON)
_ptipPayload
resourceManagerService
where go
= buildClient
(Proxy :: Proxy ProjectsTestIAMPermissionsResource)
mempty
| rueshyna/gogol | gogol-resourcemanager/gen/Network/Google/Resource/CloudResourceManager/Projects/TestIAMPermissions.hs | mpl-2.0 | 6,471 | 0 | 19 | 1,512 | 939 | 545 | 394 | 136 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
-- Module : Gen.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
module Gen.Types
( module Gen.Types
, module Gen.Types.Help
, module Gen.Types.Id
, module Gen.Types.Map
, module Gen.Types.NS
, module Gen.Types.Schema
, module Gen.Types.Data
) where
import Control.Applicative
import Control.Lens hiding ((.=))
import Control.Monad.Except
import Control.Monad.State.Strict
import Data.Aeson hiding (Array, Bool, String)
import qualified Data.Attoparsec.Text as A
import Data.Bifunctor
import Data.CaseInsensitive (CI)
import qualified Data.CaseInsensitive as CI
import Data.Function (on)
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import Data.List (sort)
import Data.Maybe
import Data.Ord
import Data.Semigroup ((<>))
import Data.String
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.Lazy as LText
import qualified Data.Text.Lazy.Builder as Build
import Data.Text.Manipulate
import qualified Filesystem.Path.CurrentOS as Path
import Formatting
import Gen.Orphans ()
import Gen.Text
import Gen.Types.Data
import Gen.Types.Help
import Gen.Types.Id
import Gen.Types.Map
import Gen.Types.NS
import Gen.Types.Schema
import GHC.TypeLits (Symbol)
import Prelude hiding (Enum)
import Text.EDE (Template)
default (Integer)
type Set = Set.HashSet
type Error = LText.Text
type Path = Path.FilePath
newtype Version (v :: Symbol) = Version Text
deriving (Eq, Show)
instance ToJSON (Version v) where
toJSON (Version v) = toJSON v
fver :: Format a (Version v -> a)
fver = later (\(Version v) -> Build.fromText v)
type LibraryVer = Version "library"
type ClientVer = Version "client"
type CoreVer = Version "core"
data Versions = Versions
{ _libraryVersion :: LibraryVer
, _clientVersion :: ClientVer
, _coreVersion :: CoreVer
} deriving (Show)
makeClassy ''Versions
data Release
= Sandbox
| Alpha (Maybe Int) (Maybe Char)
| Beta (Maybe Int) (Maybe Char)
deriving (Eq, Ord, Show)
data ModelVersion = ModelVersion Double (Maybe Release)
deriving (Eq, Show)
instance Ord ModelVersion where
compare (ModelVersion an ar) (ModelVersion bn br) =
compare an bn <>
case (ar, br) of
(Nothing, _) -> GT
(_, Nothing) -> LT
(Just x, Just y) -> compare x y
parseVersion :: Text -> Either String ModelVersion
parseVersion x = first (mappend (Text.unpack x) . mappend " -> ") $
A.parseOnly (preface *> (empty' <|> version) <* A.endOfInput) x
where
empty' = ModelVersion 0 <$> (alpha <|> beta <|> exp')
version = ModelVersion
<$> number
<*> (alpha <|> beta <|> sandbox <|> pure Nothing)
preface = A.takeWhile (/= '_') *> void (A.char '_') <|> pure ()
number = A.takeWhile (/= 'v') *> A.char 'v' *> A.double
alpha = A.string "alpha"
*> (Alpha <$> optional A.decimal <*> optional A.letter)
<&> Just
beta = A.string "beta"
*> (Beta <$> optional A.decimal <*> optional A.letter)
<&> Just
sandbox = Just Sandbox <$ A.string "sandbox"
exp' = Just Sandbox <$ A.string "exp" <* A.decimal
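-- Illustrative examples (added here; not from the original module):
--
-- > parseVersion "storage_v1"  == Right (ModelVersion 1.0 Nothing)
-- > parseVersion "foo_v2beta3" == Right (ModelVersion 2.0 (Just (Beta (Just 3) Nothing)))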
data Model = Model
{ modelName :: Text
, modelPrefix :: Text
, modelVersion :: ModelVersion
, modelPath :: Path
}
instance Eq Model where
(==) = on (==) modelPrefix
instance Ord Model where
compare a b =
on compare modelPrefix a b
<> on compare (Down . modelVersion) a b
modelFromPath :: Path -> Model
modelFromPath x = Model n p v x
where
n = Text.init
. Text.intercalate "/"
. drop 1
. dropWhile (/= "model")
$ Text.split (== '/') p
p = toTextIgnore (Path.parent (Path.parent x))
v = either error id $ parseVersion (toTextIgnore (Path.dirname x))
data Templates = Templates
{ cabalTemplate :: Template
, tocTemplate :: Template
, readmeTemplate :: Template
, typesTemplate :: Template
, prodTemplate :: Template
, sumTemplate :: Template
, actionTemplate :: Template
}
data Imports = Imports
{ tocImports :: [NS]
, typeImports :: [NS]
, prodImports :: [NS]
, sumImports :: [NS]
, actionImports :: [NS]
}
serviceImports :: HasService a b => a -> Imports
serviceImports s = Imports
{ tocImports = [preludeNS]
, typeImports = sort [preludeNS, prodNS s, sumNS s]
, prodImports = sort [preludeNS, sumNS s]
, sumImports = [preludeNS]
, actionImports = sort [preludeNS, typesNS s]
}
tocNS, typesNS, prodNS, sumNS :: HasService a b => a -> NS
tocNS = mappend "Network.Google" . mkNS . view sCanonicalName
typesNS = (<> "Types") . tocNS
prodNS = (<> "Product") . typesNS
sumNS = (<> "Sum") . typesNS
preludeNS :: NS
preludeNS = "Network.Google.Prelude"
resourceNS, methodNS :: NS
resourceNS = "Network.Google.Resource"
methodNS = "Network.Google.Method"
exposedModules :: Library -> [NS]
exposedModules l = sort $
tocNS l
: typesNS l
: map _actNamespace (_apiResources (_lAPI l))
++ map _actNamespace (_apiMethods (_lAPI l))
otherModules :: Library -> [NS]
otherModules s = sort [prodNS s, sumNS s]
toTextIgnore :: Path -> Text
toTextIgnore = either id id . Path.toText
data Library = Library
{ _lVersions :: Versions
, _lService :: Service Global
, _lAPI :: API
, _lSchemas :: [Data]
}
makeLenses ''Library
instance HasVersions Library where
versions = lVersions
instance HasDescription Library Global where
description = lService . description
instance HasService Library Global where
service = lService
instance ToJSON Library where
toJSON l = object
-- Library
[ "libraryName" .= (l ^. sLibrary)
, "libraryTitle" .= renameTitle (l ^. dTitle)
, "libraryDescription" .= Desc 4 (l ^. dDescription)
, "libraryVersion" .= (l ^. libraryVersion)
, "coreVersion" .= (l ^. coreVersion)
, "clientVersion" .= (l ^. clientVersion)
, "exposedModules" .= exposedModules l
, "otherModules" .= otherModules l
-- Service
, "id" .= (l ^. dId)
, "name" .= (l ^. dName)
, "version" .= (l ^. dVersion)
, "title" .= (l ^. dTitle)
, "description" .= (l ^. dDescription)
, "documentationLink" .= (l ^. dDocumentationLink)
, "labels" .= (l ^. dLabels)
, "features" .= (l ^. dFeatures)
, "scopes" .= (l ^. dAuth)
-- API
, "api" .= (l ^. lAPI)
-- Schemas
, "schemas" .= (l ^. lSchemas)
]
data TType
= TType Global
| TLit Lit
| TMaybe TType
| TList TType
| TMap TType TType
deriving (Eq, Show)
data Derive
= DEq
| DOrd
| DRead
| DShow
| DEnum
| DNum
| DIntegral
| DReal
| DMonoid
| DIsString
| DData
| DTypeable
| DGeneric
deriving (Eq, Show)
data Solved = Solved
{ _additional :: Bool
, _unique :: Global
, _prefix :: Prefix
, _schema :: Schema Global
, _type :: TType
, _deriving :: [Derive]
} deriving (Show)
instance HasInfo Solved where
info = f . info
where
f = lens _schema (\s a -> s { _schema = a })
monoid :: Solved -> Bool
monoid = elem DMonoid . _deriving
setAdditional :: Solved -> Solved
setAdditional s = setRequired $ s
{ _additional = True
, _type = TMap (TLit Text) (_type s)
}
type Seen = Map (CI Text) {- Prefix -} (Set (CI Text)) {- Inhabitants -}
data Memo = Memo
{ _context :: Service (Fix Schema)
, _typed :: Map Global TType
, _derived :: Map Global [Derive]
, _reserve :: Set Global
, _schemas :: Map Global (Schema Global)
, _prefixed :: Map Global Prefix
, _branches :: Seen
, _fields :: Seen
}
initial :: Service (Fix Schema) -> Memo
initial s = Memo s mempty mempty res core mempty mempty mempty
where
-- Top-level schema definitions with ids.
res = Set.fromList
. mapMaybe (view iId)
$ Map.elems (s ^. dSchemas)
-- Types available in Network.Google.Prelude.
core = Map.fromList
[ ("Body", SLit requiredInfo RqBody)
, ("Stream", SLit requiredInfo RsBody)
, ("JSONValue", SLit requiredInfo JSONValue)
]
makeLenses ''Memo
instance HasService Memo (Fix Schema) where
service = context
type AST = ExceptT Error (State Memo)
reserveType :: Global -> AST Global
reserveType g = do
p <- uses reserve (Set.member g)
pure $!
if p
then reference g "'"
else g
reserveBranches :: AST ()
reserveBranches = do
ss <- use schemas
let bs = Set.fromList $ map (CI.mk . global) (Map.keys ss)
branches %= Map.insertWith (<>) mempty bs
reserveFields :: AST ()
reserveFields = do
ss <- use schemas
forM_ (Map.keys ss) $ \(global -> k) -> do
let p:ps = splitWords k
s = mconcat ps
fields %= Map.insertWith (<>) (CI.mk p) (Set.singleton (CI.mk s))
| rueshyna/gogol | gen/src/Gen/Types.hs | mpl-2.0 | 10,817 | 0 | 17 | 3,437 | 2,986 | 1,669 | 1,317 | 287 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.DeviceFarm.ListArtifacts
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets information about artifacts.
--
-- /See:/ <http://docs.aws.amazon.com/devicefarm/latest/APIReference/API_ListArtifacts.html AWS API Reference> for ListArtifacts.
module Network.AWS.DeviceFarm.ListArtifacts
(
-- * Creating a Request
listArtifacts
, ListArtifacts
-- * Request Lenses
, laNextToken
, laArn
, laType
-- * Destructuring the Response
, listArtifactsResponse
, ListArtifactsResponse
-- * Response Lenses
, larsArtifacts
, larsNextToken
, larsResponseStatus
) where
import Network.AWS.DeviceFarm.Types
import Network.AWS.DeviceFarm.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Represents a request to the list artifacts operation.
--
-- /See:/ 'listArtifacts' smart constructor.
data ListArtifacts = ListArtifacts'
{ _laNextToken :: !(Maybe Text)
, _laArn :: !Text
, _laType :: !ArtifactCategory
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListArtifacts' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'laNextToken'
--
-- * 'laArn'
--
-- * 'laType'
listArtifacts
:: Text -- ^ 'laArn'
-> ArtifactCategory -- ^ 'laType'
-> ListArtifacts
listArtifacts pArn_ pType_ =
ListArtifacts'
{ _laNextToken = Nothing
, _laArn = pArn_
, _laType = pType_
}
-- | An identifier that was returned from the previous call to this
-- operation, which can be used to return the next set of items in the
-- list.
laNextToken :: Lens' ListArtifacts (Maybe Text)
laNextToken = lens _laNextToken (\ s a -> s{_laNextToken = a});
-- | The Run, Job, Suite, or Test ARN.
laArn :: Lens' ListArtifacts Text
laArn = lens _laArn (\ s a -> s{_laArn = a});
-- | The artifacts\' type.
--
-- Allowed values include:
--
-- - FILE: The artifacts are files.
-- - LOG: The artifacts are logs.
-- - SCREENSHOT: The artifacts are screenshots.
laType :: Lens' ListArtifacts ArtifactCategory
laType = lens _laType (\ s a -> s{_laType = a});
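-- A minimal usage sketch (added for illustration; not part of the generated
-- module). @myArn@ and @myCategory@ are hypothetical bindings, and the lens
-- operators '(&)' and '(.~)' are assumed to be in scope (e.g. from
-- Control.Lens):
--
-- > listArtifacts myArn myCategory
-- >   & laNextToken .~ Just "next-page-token"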
instance AWSRequest ListArtifacts where
type Rs ListArtifacts = ListArtifactsResponse
request = postJSON deviceFarm
response
= receiveJSON
(\ s h x ->
ListArtifactsResponse' <$>
(x .?> "artifacts" .!@ mempty) <*>
(x .?> "nextToken")
<*> (pure (fromEnum s)))
instance ToHeaders ListArtifacts where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("DeviceFarm_20150623.ListArtifacts" :: ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON ListArtifacts where
toJSON ListArtifacts'{..}
= object
(catMaybes
[("nextToken" .=) <$> _laNextToken,
Just ("arn" .= _laArn), Just ("type" .= _laType)])
instance ToPath ListArtifacts where
toPath = const "/"
instance ToQuery ListArtifacts where
toQuery = const mempty
-- | Represents the result of a list artifacts operation.
--
-- /See:/ 'listArtifactsResponse' smart constructor.
data ListArtifactsResponse = ListArtifactsResponse'
{ _larsArtifacts :: !(Maybe [Artifact])
, _larsNextToken :: !(Maybe Text)
, _larsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListArtifactsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'larsArtifacts'
--
-- * 'larsNextToken'
--
-- * 'larsResponseStatus'
listArtifactsResponse
:: Int -- ^ 'larsResponseStatus'
-> ListArtifactsResponse
listArtifactsResponse pResponseStatus_ =
ListArtifactsResponse'
{ _larsArtifacts = Nothing
, _larsNextToken = Nothing
, _larsResponseStatus = pResponseStatus_
}
-- | Information about the artifacts.
larsArtifacts :: Lens' ListArtifactsResponse [Artifact]
larsArtifacts = lens _larsArtifacts (\ s a -> s{_larsArtifacts = a}) . _Default . _Coerce;
-- | If the number of items that are returned is significantly large, this is
-- an identifier that is also returned, which can be used in a subsequent
-- call to this operation to return the next set of items in the list.
larsNextToken :: Lens' ListArtifactsResponse (Maybe Text)
larsNextToken = lens _larsNextToken (\ s a -> s{_larsNextToken = a});
-- | The response status code.
larsResponseStatus :: Lens' ListArtifactsResponse Int
larsResponseStatus = lens _larsResponseStatus (\ s a -> s{_larsResponseStatus = a});
| olorin/amazonka | amazonka-devicefarm/gen/Network/AWS/DeviceFarm/ListArtifacts.hs | mpl-2.0 | 5,486 | 0 | 14 | 1,281 | 847 | 506 | 341 | 104 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ViewPatterns #-}
module TypeTranspiler where
import Data.Char
import Data.List
import Control.Monad
import Control.Applicative
alpha = ['a' .. 'z'] ++ ['A' .. 'Z']
digit = ['0' .. '9']
---------------------------------------------------------
----------------- my parser combinator ------------------
---------------------------------------------------------
newtype Parser a = Parser { parse :: String -> [(a, String)] }
transpile = parseCode typeP
parseCode :: Parser a -> String -> Either String a
parseCode m (parse m -> [(res, [])]) = Right res
parseCode _ _ = Left "Hugh?"
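-- Illustrative behaviour of 'transpile' (examples added for illustration,
-- not part of the original solution; they follow from the grammar below):
--
-- > transpile "List<Int>"     == Right "List<Integer>"
-- > transpile "(Int) -> Unit" == Right "Function1<Integer,Void>"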
instance Functor Parser where
fmap f (Parser ps) = Parser $ \p -> [ (f a, b) | (a, b) <- ps p ]
--
instance Applicative Parser where
pure = return
(Parser p1) <*> (Parser p2) = Parser $ \p ->
[ (f a, s2) | (f, s1) <- p1 p, (a, s2) <- p2 s1 ]
--
instance Monad Parser where
return a = Parser $ \s -> [(a, s)]
p >>= f = Parser $ concatMap (\(a, s1) -> f a <!-- s1) . parse p
--
instance MonadPlus Parser where
mzero = Parser $ const []
mplus p q = Parser $ \s -> parse p s ++ parse q s
--
instance Alternative Parser where
empty = mzero
p <|> q = Parser $ \s -> case parse p s of
[] -> parse q s
rs -> rs
--
item = Parser $ \case
[ ] -> [ ]
(h : t) -> [(h, t)]
--
satisfy p = item >>= \c -> if p c then return c else empty
option0 d p = p <|> return d
bracketsP m = do
reservedLP "("
n <- m
reservedLP ")"
return n
--
oneOf = satisfy . flip elem
charP = satisfy . (==)
reservedP = tokenP . stringP
reservedLP = tokenLP . stringP
reservedWordsLP = foldr1 (<|>) . (reservedLP <$>)
spaces0P = do
a <- many $ oneOf " \t\r"
return []
--
newLines0P = do
a <- many $ oneOf " \t\r\n"
return []
--
stringP [ ] = return []
stringP (c : cs) = do
charP c
stringP cs
return $ c : cs
--
tokenP p = do
s <- spaces0P
a <- p
return $ s ++ a
--
tokenLP = (newLines0P <++>)
seperateP ns ss = do
n <- ns
return [n] <~> do
s <- ss
r <- seperateP ns ss
return $ n : s : r
--
fromRight :: b -> Either a b -> b
fromRight r (Left _) = r
fromRight _ (Right r) = r
mapped "Unit" = "Void"
mapped "Int" = "Integer"
mapped others = others
(\|/) = flip seperateP
(<~>) = flip (<|>)
(<!--) = parse
(<|||) a = putStrLn . fromRight "Parse Error" . parseCode a
infixl 2 \|/
a <++> b = do
x <- a
y <- b
return $ x ++ y
--
typeP = do
a <- typeReferenceP
newLines0P
return a
--
typeReferenceP = tokenLP $ do
a <- functionTypeP
<|> userTypeP
<|> bracketsP typeReferenceP
r <- option0 [] $ reservedLP "?"
return a
--
userTypeP = do
ls <- reservedLP "." \|/ simpleUserTypeP
return $ join ls
--
simpleUserTypeP = tokenLP $ do
n <- simpleNameP
p <- reservedP [] <~> do
reservedLP "<"
ls <- reservedLP "," \|/ why <|> typeP <~> do
o <- option0 [] $ reservedWordsLP [ "in", "out" ]
t <- typeP
return $ case o of
"in" -> "? super " ++ t
"out" -> "? extends " ++ t
[ ] -> t
reservedLP ">"
return $ '<' : join ls ++ ">"
return $ mapped n ++ p
where
why = do
reservedLP "*"
return "?"
--
functionTypeP = do
b <- bracketsP $ option0 [] $
reservedLP "," \|/ tokenLP typeP
reservedLP "->"
c <- typeP
return $ "Function" ++ show
(let x = length b
in if x == 0
then 0
else 1 + x `div` 2) ++ "<"
++ join (mapped <$> b)
++ (if b == [] then [] else ",") ++ c ++ ">"
--
javaIdentifierP = do
c <- oneOf $ "_$" ++ alpha
r <- many $ oneOf $ "_$" ++ alpha ++ digit
return $ c : r
--
simpleNameP = javaIdentifierP <|> do
charP '`'
i <- javaIdentifierP
charP '`'
return i
--
| ice1000/OI-codes | codewars/authoring/haskell/TypeTranspiler.hs | agpl-3.0 | 3,836 | 0 | 20 | 1,071 | 1,627 | 822 | 805 | 136 | 3 |
import System.Plugins
import API
main = do
m_v <- dynload "../Plugin.o" ["../api"]
[]
"resource_dyn"
case m_v of
LoadFailure es -> mapM_ putStrLn es >> error "didn't compile"
LoadSuccess _ (Interface eq) -> do
print $ 1 `eq` 2
print $ 'a' `eq` 'b'
| Changaco/haskell-plugins | testsuite/dynload/poly/prog/Main.hs | lgpl-2.1 | 440 | 0 | 13 | 235 | 106 | 52 | 54 | 11 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Haskoin.Crypto.ExtendedKeys.Units (tests) where
import Test.HUnit (Assertion, assertBool, assertEqual)
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Data.Aeson (decode, encode)
import Data.Maybe (isJust, isNothing, fromJust)
import Data.String (fromString)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy.Char8 as B8
import Network.Haskoin.Crypto
import Network.Haskoin.Util
tests :: [Test]
tests =
[ testGroup "BIP32 derivation vector 1"
[ testCase "Chain m" $ runXKeyVec (xKeyVec !! 0)
, testCase "Chain m/0'" $ runXKeyVec (xKeyVec !! 1)
, testCase "Chain m/0'/1" $ runXKeyVec (xKeyVec !! 2)
, testCase "Chain m/0'/1/2'" $ runXKeyVec (xKeyVec !! 3)
, testCase "Chain m/0'/1/2'/2" $ runXKeyVec (xKeyVec !! 4)
, testCase "Chain m/0'/1/2'/2/1000000000" $
runXKeyVec (xKeyVec !! 5)
]
, testGroup "BIP32 subkey derivation vector 2"
[ testCase "Chain m" $ runXKeyVec (xKeyVec2 !! 0)
, testCase "Chain m/0" $ runXKeyVec (xKeyVec2 !! 1)
, testCase "Chain m/0/2147483647'" $
runXKeyVec (xKeyVec2 !! 2)
, testCase "Chain m/0/2147483647'/1" $
runXKeyVec (xKeyVec2 !! 3)
, testCase "Chain m/0/2147483647'/1/2147483646'" $
runXKeyVec (xKeyVec2 !! 4)
, testCase "Chain m/0/2147483647'/1/2147483646'/2" $
runXKeyVec (xKeyVec2 !! 5)
]
, testGroup "BIP32 subkey derivation using string path"
[ testGroup "Either Derivations" testApplyPath
, testGroup "Public Derivations" testDerivePubPath
, testGroup "Private Derivations" testDerivePrvPath
, testGroup "Path Parsing" testParsePath
, testGroup "FromJSON" testFromJsonPath
, testGroup "ToJSON" testToJsonPath
]
]
testFromJsonPath :: [Test]
testFromJsonPath = do
path <- jsonPathVectors
return $ testCase ("Path " ++ path) $
assertEqual path (Just [fromString path :: DerivPath])
(decode $ B8.pack $ "[\"" ++ path ++ "\"]")
testToJsonPath :: [Test]
testToJsonPath = do
path <- jsonPathVectors
return $ testCase ("Path " ++ path) $
assertEqual path (B8.pack $ "[\"" ++ path ++ "\"]")
(encode [fromString path :: ParsedPath])
jsonPathVectors :: [String]
jsonPathVectors =
[ "m"
, "m/0"
, "m/0'"
, "M/0'"
, "m/2147483647"
, "M/2147483647"
, "m/1/2/3/4/5/6/7/8"
, "M/1/2/3/4/5/6/7/8"
, "m/1'/2'/3/4"
, "M/1'/2'/3/4"
]
testParsePath :: [Test]
testParsePath = do
(path, t) <- parsePathVectors
return $ testCase ("Path " ++ path) $
assertBool path (t $ parsePath path)
parsePathVectors :: [(String, Maybe ParsedPath -> Bool)]
parsePathVectors =
[ ("m", isJust)
, ("m/0'", isJust)
, ("M/0'", isJust)
, ("m/2147483648", isNothing)
, ("m/2147483647", isJust)
, ("M/2147483648", isNothing)
, ("M/2147483647", isJust)
, ("M/-1", isNothing)
, ("M/-2147483648", isNothing)
, ("m/1/2/3/4/5/6/7/8", isJust)
, ("M/1/2/3/4/5/6/7/8", isJust)
, ("m/1'/2'/3/4", isJust)
, ("M/1'/2'/3/4", isJust)
, ("m/1/2'/3/4'", isJust)
, ("M/1/2'/3/4'", isJust)
, ("meh", isNothing)
, ("infinity", isNothing)
, ("NaN", isNothing)
]
testApplyPath :: [Test]
testApplyPath = do
(key, path, final) <- derivePathVectors
return $ testCase ("Path " ++ path) $
assertEqual path final $
applyPath (fromJust $ parsePath path) key
testDerivePubPath :: [Test]
testDerivePubPath = do
(key, path, final) <- derivePubPathVectors
return $ testCase ("Path " ++ path) $
assertEqual path final $
derivePubPath (fromString path :: SoftPath) key
testDerivePrvPath :: [Test]
testDerivePrvPath = do
(key, path, final) <- derivePrvPathVectors
return $ testCase ("Path " ++ path) $
assertEqual path final $
derivePath (fromString path :: DerivPath) key
derivePubPathVectors :: [(XPubKey, String, XPubKey)]
derivePubPathVectors =
[ ( xpub, "M", xpub )
, ( xpub, "M/8", pubSubKey xpub 8 )
, ( xpub, "M/8/30/1", foldl pubSubKey xpub [8,30,1] )
]
where
xprv = fromJust $ xPrvImport
"xprv9s21ZrQH143K46iDVRSyFfGfMgQjzC4BV3ZUfNbG7PHQrJjE53ofAn5gYkp6KQ\
\WzGmb8oageSRxBY8s4rjr9VXPVp2HQDbwPt4H31Gg4LpB"
xpub = deriveXPubKey xprv
derivePrvPathVectors :: [(XPrvKey, String, XPrvKey)]
derivePrvPathVectors =
[ ( xprv, "m", xprv )
, ( xprv, "M", xprv )
, ( xprv, "m/8'", hardSubKey xprv 8 )
, ( xprv, "M/8'", hardSubKey xprv 8 )
, ( xprv, "m/8'/30/1"
, foldl prvSubKey (hardSubKey xprv 8) [30,1]
)
, ( xprv, "M/8'/30/1"
, foldl prvSubKey (hardSubKey xprv 8) [30,1]
)
, ( xprv, "m/3/20"
, foldl prvSubKey xprv [3,20]
)
, ( xprv, "M/3/20"
, foldl prvSubKey xprv [3,20]
)
]
where
xprv = fromJust $ xPrvImport
"xprv9s21ZrQH143K46iDVRSyFfGfMgQjzC4BV3ZUfNbG7PHQrJjE53ofAn5gYkp6KQ\
\WzGmb8oageSRxBY8s4rjr9VXPVp2HQDbwPt4H31Gg4LpB"
derivePathVectors :: [(XKey, String, Either String XKey)]
derivePathVectors =
[ ( XPrv xprv, "m", Right $ XPrv xprv )
, ( XPrv xprv, "M", Right $ XPub xpub )
, ( XPrv xprv, "m/8'", Right $ XPrv $ hardSubKey xprv 8 )
, ( XPrv xprv, "M/8'", Right $ XPub $ deriveXPubKey $ hardSubKey xprv 8 )
, ( XPrv xprv, "m/8'/30/1"
, Right $ XPrv $ foldl prvSubKey (hardSubKey xprv 8) [30,1]
)
, ( XPrv xprv, "M/8'/30/1"
, Right $ XPub $
deriveXPubKey $ foldl prvSubKey (hardSubKey xprv 8) [30,1]
)
, ( XPrv xprv, "m/3/20"
, Right $ XPrv $ foldl prvSubKey xprv [3,20]
)
, ( XPrv xprv, "M/3/20"
, Right $ XPub $ deriveXPubKey $ foldl prvSubKey xprv [3,20]
)
]
where
xprv = fromJust $ xPrvImport
"xprv9s21ZrQH143K46iDVRSyFfGfMgQjzC4BV3ZUfNbG7PHQrJjE53ofAn5gYkp6KQ\
\WzGmb8oageSRxBY8s4rjr9VXPVp2HQDbwPt4H31Gg4LpB"
xpub = deriveXPubKey xprv
runXKeyVec :: ([ByteString], XPrvKey) -> Assertion
runXKeyVec (v, m) = do
assertBool "xPrvID" $ (encodeHex $ encode' $ xPrvID m) == v !! 0
assertBool "xPrvFP" $ (encodeHex $ encode' $ xPrvFP m) == v !! 1
assertBool "xPrvAddr" $
(addrToBase58 $ xPubAddr $ deriveXPubKey m) == v !! 2
assertBool "prvKey" $
(encodeHex $ encodePrvKey $ xPrvKey m) == v !! 3
assertBool "xPrvWIF" $ xPrvWif m == v !! 4
assertBool "pubKey" $
(encodeHex $ encode' $ xPubKey $ deriveXPubKey m) == v !! 5
assertBool "chain code" $
(encodeHex $ encode' $ xPrvChain m) == v !! 6
assertBool "Hex PubKey" $
(encodeHex $ encode' $ deriveXPubKey m) == v !! 7
assertBool "Hex PrvKey" $ (encodeHex $ encode' m) == v !! 8
assertBool "Base58 PubKey" $ (xPubExport $ deriveXPubKey m) == v !! 9
assertBool "Base58 PrvKey" $ xPrvExport m == v !! 10
-- BIP 0032 Test Vectors
-- https://en.bitcoin.it/wiki/BIP_0032_TestVectors
xKeyVec :: [([ByteString], XPrvKey)]
xKeyVec = zip xKeyResVec $ foldl f [m] der
where f acc d = acc ++ [d $ last acc]
m = makeXPrvKey $ fromJust $ decodeHex m0
der = [ flip hardSubKey 0
, flip prvSubKey 1
, flip hardSubKey 2
, flip prvSubKey 2
, flip prvSubKey 1000000000
]
xKeyVec2 :: [([ByteString], XPrvKey)]
xKeyVec2 = zip xKeyResVec2 $ foldl f [m] der
where f acc d = acc ++ [d $ last acc]
m = makeXPrvKey $ fromJust $ decodeHex m1
der = [ flip prvSubKey 0
, flip hardSubKey 2147483647
, flip prvSubKey 1
, flip hardSubKey 2147483646
, flip prvSubKey 2
]
m0 :: ByteString
m0 = "000102030405060708090a0b0c0d0e0f"
xKeyResVec :: [[ByteString]]
xKeyResVec =
[
-- m
[ "3442193e1bb70916e914552172cd4e2dbc9df811"
, "3442193e"
, "15mKKb2eos1hWa6tisdPwwDC1a5J1y9nma"
, "e8f32e723decf4051aefac8e2c93c9c5b214313817cdb01a1494b917c8436b35"
, "L52XzL2cMkHxqxBXRyEpnPQZGUs3uKiL3R11XbAdHigRzDozKZeW"
, "0339a36013301597daef41fbe593a02cc513d0b55527ec2df1050e2e8ff49c85c2"
, "873dff81c02f525623fd1fe5167eac3a55a049de3d314bb42ee227ffed37d508"
, "0488b21e000000000000000000873dff81c02f525623fd1fe5167eac3a55a049de3d314bb42ee227ffed37d5080339a36013301597daef41fbe593a02cc513d0b55527ec2df1050e2e8ff49c85c2"
, "0488ade4000000000000000000873dff81c02f525623fd1fe5167eac3a55a049de3d314bb42ee227ffed37d50800e8f32e723decf4051aefac8e2c93c9c5b214313817cdb01a1494b917c8436b35"
, "xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8"
, "xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi"
]
-- m/0'
, [ "5c1bd648ed23aa5fd50ba52b2457c11e9e80a6a7"
, "5c1bd648"
, "19Q2WoS5hSS6T8GjhK8KZLMgmWaq4neXrh"
, "edb2e14f9ee77d26dd93b4ecede8d16ed408ce149b6cd80b0715a2d911a0afea"
, "L5BmPijJjrKbiUfG4zbiFKNqkvuJ8usooJmzuD7Z8dkRoTThYnAT"
, "035a784662a4a20a65bf6aab9ae98a6c068a81c52e4b032c0fb5400c706cfccc56"
, "47fdacbd0f1097043b78c63c20c34ef4ed9a111d980047ad16282c7ae6236141"
, "0488b21e013442193e8000000047fdacbd0f1097043b78c63c20c34ef4ed9a111d980047ad16282c7ae6236141035a784662a4a20a65bf6aab9ae98a6c068a81c52e4b032c0fb5400c706cfccc56"
, "0488ade4013442193e8000000047fdacbd0f1097043b78c63c20c34ef4ed9a111d980047ad16282c7ae623614100edb2e14f9ee77d26dd93b4ecede8d16ed408ce149b6cd80b0715a2d911a0afea"
, "xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw"
, "xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7"
]
-- m/0'/1
, [ "bef5a2f9a56a94aab12459f72ad9cf8cf19c7bbe"
, "bef5a2f9"
, "1JQheacLPdM5ySCkrZkV66G2ApAXe1mqLj"
, "3c6cb8d0f6a264c91ea8b5030fadaa8e538b020f0a387421a12de9319dc93368"
, "KyFAjQ5rgrKvhXvNMtFB5PCSKUYD1yyPEe3xr3T34TZSUHycXtMM"
, "03501e454bf00751f24b1b489aa925215d66af2234e3891c3b21a52bedb3cd711c"
, "2a7857631386ba23dacac34180dd1983734e444fdbf774041578e9b6adb37c19"
, "0488b21e025c1bd648000000012a7857631386ba23dacac34180dd1983734e444fdbf774041578e9b6adb37c1903501e454bf00751f24b1b489aa925215d66af2234e3891c3b21a52bedb3cd711c"
, "0488ade4025c1bd648000000012a7857631386ba23dacac34180dd1983734e444fdbf774041578e9b6adb37c19003c6cb8d0f6a264c91ea8b5030fadaa8e538b020f0a387421a12de9319dc93368"
, "xpub6ASuArnXKPbfEwhqN6e3mwBcDTgzisQN1wXN9BJcM47sSikHjJf3UFHKkNAWbWMiGj7Wf5uMash7SyYq527Hqck2AxYysAA7xmALppuCkwQ"
, "xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs"
]
-- m/0'/1/2'
, [ "ee7ab90cde56a8c0e2bb086ac49748b8db9dce72"
, "ee7ab90c"
, "1NjxqbA9aZWnh17q1UW3rB4EPu79wDXj7x"
, "cbce0d719ecf7431d88e6a89fa1483e02e35092af60c042b1df2ff59fa424dca"
, "L43t3od1Gh7Lj55Bzjj1xDAgJDcL7YFo2nEcNaMGiyRZS1CidBVU"
, "0357bfe1e341d01c69fe5654309956cbea516822fba8a601743a012a7896ee8dc2"
, "04466b9cc8e161e966409ca52986c584f07e9dc81f735db683c3ff6ec7b1503f"
, "0488b21e03bef5a2f98000000204466b9cc8e161e966409ca52986c584f07e9dc81f735db683c3ff6ec7b1503f0357bfe1e341d01c69fe5654309956cbea516822fba8a601743a012a7896ee8dc2"
, "0488ade403bef5a2f98000000204466b9cc8e161e966409ca52986c584f07e9dc81f735db683c3ff6ec7b1503f00cbce0d719ecf7431d88e6a89fa1483e02e35092af60c042b1df2ff59fa424dca"
, "xpub6D4BDPcP2GT577Vvch3R8wDkScZWzQzMMUm3PWbmWvVJrZwQY4VUNgqFJPMM3No2dFDFGTsxxpG5uJh7n7epu4trkrX7x7DogT5Uv6fcLW5"
, "xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3ryjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM"
]
-- m/0'/1/2'/2
, [ "d880d7d893848509a62d8fb74e32148dac68412f"
, "d880d7d8"
, "1LjmJcdPnDHhNTUgrWyhLGnRDKxQjoxAgt"
, "0f479245fb19a38a1954c5c7c0ebab2f9bdfd96a17563ef28a6a4b1a2a764ef4"
, "KwjQsVuMjbCP2Zmr3VaFaStav7NvevwjvvkqrWd5Qmh1XVnCteBR"
, "02e8445082a72f29b75ca48748a914df60622a609cacfce8ed0e35804560741d29"
, "cfb71883f01676f587d023cc53a35bc7f88f724b1f8c2892ac1275ac822a3edd"
, "0488b21e04ee7ab90c00000002cfb71883f01676f587d023cc53a35bc7f88f724b1f8c2892ac1275ac822a3edd02e8445082a72f29b75ca48748a914df60622a609cacfce8ed0e35804560741d29"
, "0488ade404ee7ab90c00000002cfb71883f01676f587d023cc53a35bc7f88f724b1f8c2892ac1275ac822a3edd000f479245fb19a38a1954c5c7c0ebab2f9bdfd96a17563ef28a6a4b1a2a764ef4"
, "xpub6FHa3pjLCk84BayeJxFW2SP4XRrFd1JYnxeLeU8EqN3vDfZmbqBqaGJAyiLjTAwm6ZLRQUMv1ZACTj37sR62cfN7fe5JnJ7dh8zL4fiyLHV"
, "xprvA2JDeKCSNNZky6uBCviVfJSKyQ1mDYahRjijr5idH2WwLsEd4Hsb2Tyh8RfQMuPh7f7RtyzTtdrbdqqsunu5Mm3wDvUAKRHSC34sJ7in334"
]
-- m/0'/1/2'/2/1000000000
, [ "d69aa102255fed74378278c7812701ea641fdf32"
, "d69aa102"
, "1LZiqrop2HGR4qrH1ULZPyBpU6AUP49Uam"
, "471b76e389e528d6de6d816857e012c5455051cad6660850e58372a6c3e6e7c8"
, "Kybw8izYevo5xMh1TK7aUr7jHFCxXS1zv8p3oqFz3o2zFbhRXHYs"
, "022a471424da5e657499d1ff51cb43c47481a03b1e77f951fe64cec9f5a48f7011"
, "c783e67b921d2beb8f6b389cc646d7263b4145701dadd2161548a8b078e65e9e"
, "0488b21e05d880d7d83b9aca00c783e67b921d2beb8f6b389cc646d7263b4145701dadd2161548a8b078e65e9e022a471424da5e657499d1ff51cb43c47481a03b1e77f951fe64cec9f5a48f7011"
, "0488ade405d880d7d83b9aca00c783e67b921d2beb8f6b389cc646d7263b4145701dadd2161548a8b078e65e9e00471b76e389e528d6de6d816857e012c5455051cad6660850e58372a6c3e6e7c8"
, "xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaFcxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy"
, "xprvA41z7zogVVwxVSgdKUHDy1SKmdb533PjDz7J6N6mV6uS3ze1ai8FHa8kmHScGpWmj4WggLyQjgPie1rFSruoUihUZREPSL39UNdE3BBDu76"
]
]
m1 :: ByteString
m1 = "fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542"
xKeyResVec2 :: [[ByteString]]
xKeyResVec2 =
[
-- m
[ "bd16bee53961a47d6ad888e29545434a89bdfe95"
, "bd16bee5"
, "1JEoxevbLLG8cVqeoGKQiAwoWbNYSUyYjg"
, "4b03d6fc340455b363f51020ad3ecca4f0850280cf436c70c727923f6db46c3e"
, "KyjXhyHF9wTphBkfpxjL8hkDXDUSbE3tKANT94kXSyh6vn6nKaoy"
, "03cbcaa9c98c877a26977d00825c956a238e8dddfbd322cce4f74b0b5bd6ace4a7"
, "60499f801b896d83179a4374aeb7822aaeaceaa0db1f85ee3e904c4defbd9689"
, "0488b21e00000000000000000060499f801b896d83179a4374aeb7822aaeaceaa0db1f85ee3e904c4defbd968903cbcaa9c98c877a26977d00825c956a238e8dddfbd322cce4f74b0b5bd6ace4a7"
, "0488ade400000000000000000060499f801b896d83179a4374aeb7822aaeaceaa0db1f85ee3e904c4defbd9689004b03d6fc340455b363f51020ad3ecca4f0850280cf436c70c727923f6db46c3e"
, "xpub661MyMwAqRbcFW31YEwpkMuc5THy2PSt5bDMsktWQcFF8syAmRUapSCGu8ED9W6oDMSgv6Zz8idoc4a6mr8BDzTJY47LJhkJ8UB7WEGuduB"
, "xprv9s21ZrQH143K31xYSDQpPDxsXRTUcvj2iNHm5NUtrGiGG5e2DtALGdso3pGz6ssrdK4PFmM8NSpSBHNqPqm55Qn3LqFtT2emdEXVYsCzC2U"
]
-- m/0
, [ "5a61ff8eb7aaca3010db97ebda76121610b78096"
, "5a61ff8e"
, "19EuDJdgfRkwCmRzbzVBHZWQG9QNWhftbZ"
, "abe74a98f6c7eabee0428f53798f0ab8aa1bd37873999041703c742f15ac7e1e"
, "L2ysLrR6KMSAtx7uPqmYpoTeiRzydXBattRXjXz5GDFPrdfPzKbj"
, "02fc9e5af0ac8d9b3cecfe2a888e2117ba3d089d8585886c9c826b6b22a98d12ea"
, "f0909affaa7ee7abe5dd4e100598d4dc53cd709d5a5c2cac40e7412f232f7c9c"
, "0488b21e01bd16bee500000000f0909affaa7ee7abe5dd4e100598d4dc53cd709d5a5c2cac40e7412f232f7c9c02fc9e5af0ac8d9b3cecfe2a888e2117ba3d089d8585886c9c826b6b22a98d12ea"
, "0488ade401bd16bee500000000f0909affaa7ee7abe5dd4e100598d4dc53cd709d5a5c2cac40e7412f232f7c9c00abe74a98f6c7eabee0428f53798f0ab8aa1bd37873999041703c742f15ac7e1e"
, "xpub69H7F5d8KSRgmmdJg2KhpAK8SR3DjMwAdkxj3ZuxV27CprR9LgpeyGmXUbC6wb7ERfvrnKZjXoUmmDznezpbZb7ap6r1D3tgFxHmwMkQTPH"
, "xprv9vHkqa6EV4sPZHYqZznhT2NPtPCjKuDKGY38FBWLvgaDx45zo9WQRUT3dKYnjwih2yJD9mkrocEZXo1ex8G81dwSM1fwqWpWkeS3v86pgKt"
]
-- m/0/2147483647'
, [ "d8ab493736da02f11ed682f88339e720fb0379d1"
, "d8ab4937"
, "1Lke9bXGhn5VPrBuXgN12uGUphrttUErmk"
, "877c779ad9687164e9c2f4f0f4ff0340814392330693ce95a58fe18fd52e6e93"
, "L1m5VpbXmMp57P3knskwhoMTLdhAAaXiHvnGLMribbfwzVRpz2Sr"
, "03c01e7425647bdefa82b12d9bad5e3e6865bee0502694b94ca58b666abc0a5c3b"
, "be17a268474a6bb9c61e1d720cf6215e2a88c5406c4aee7b38547f585c9a37d9"
, "0488b21e025a61ff8effffffffbe17a268474a6bb9c61e1d720cf6215e2a88c5406c4aee7b38547f585c9a37d903c01e7425647bdefa82b12d9bad5e3e6865bee0502694b94ca58b666abc0a5c3b"
, "0488ade4025a61ff8effffffffbe17a268474a6bb9c61e1d720cf6215e2a88c5406c4aee7b38547f585c9a37d900877c779ad9687164e9c2f4f0f4ff0340814392330693ce95a58fe18fd52e6e93"
, "xpub6ASAVgeehLbnwdqV6UKMHVzgqAG8Gr6riv3Fxxpj8ksbH9ebxaEyBLZ85ySDhKiLDBrQSARLq1uNRts8RuJiHjaDMBU4Zn9h8LZNnBC5y4a"
, "xprv9wSp6B7kry3Vj9m1zSnLvN3xH8RdsPP1Mh7fAaR7aRLcQMKTR2vidYEeEg2mUCTAwCd6vnxVrcjfy2kRgVsFawNzmjuHc2YmYRmagcEPdU9"
]
-- m/0/2147483647'/1
, [ "78412e3a2296a40de124307b6485bd19833e2e34"
, "78412e3a"
, "1BxrAr2pHpeBheusmd6fHDP2tSLAUa3qsW"
, "704addf544a06e5ee4bea37098463c23613da32020d604506da8c0518e1da4b7"
, "KzyzXnznxSv249b4KuNkBwowaN3akiNeEHy5FWoPCJpStZbEKXN2"
, "03a7d1d856deb74c508e05031f9895dab54626251b3806e16b4bd12e781a7df5b9"
, "f366f48f1ea9f2d1d3fe958c95ca84ea18e4c4ddb9366c336c927eb246fb38cb"
, "0488b21e03d8ab493700000001f366f48f1ea9f2d1d3fe958c95ca84ea18e4c4ddb9366c336c927eb246fb38cb03a7d1d856deb74c508e05031f9895dab54626251b3806e16b4bd12e781a7df5b9"
, "0488ade403d8ab493700000001f366f48f1ea9f2d1d3fe958c95ca84ea18e4c4ddb9366c336c927eb246fb38cb00704addf544a06e5ee4bea37098463c23613da32020d604506da8c0518e1da4b7"
, "xpub6DF8uhdarytz3FWdA8TvFSvvAh8dP3283MY7p2V4SeE2wyWmG5mg5EwVvmdMVCQcoNJxGoWaU9DCWh89LojfZ537wTfunKau47EL2dhHKon"
, "xprv9zFnWC6h2cLgpmSA46vutJzBcfJ8yaJGg8cX1e5StJh45BBciYTRXSd25UEPVuesF9yog62tGAQtHjXajPPdbRCHuWS6T8XA2ECKADdw4Ef"
]
-- m/0/2147483647'/1/2147483646'
, [ "31a507b815593dfc51ffc7245ae7e5aee304246e"
, "31a507b8"
, "15XVotxCAV7sRx1PSCkQNsGw3W9jT9A94R"
, "f1c7c871a54a804afe328b4c83a1c33b8e5ff48f5087273f04efa83b247d6a2d"
, "L5KhaMvPYRW1ZoFmRjUtxxPypQ94m6BcDrPhqArhggdaTbbAFJEF"
, "02d2b36900396c9282fa14628566582f206a5dd0bcc8d5e892611806cafb0301f0"
, "637807030d55d01f9a0cb3a7839515d796bd07706386a6eddf06cc29a65a0e29"
, "0488b21e0478412e3afffffffe637807030d55d01f9a0cb3a7839515d796bd07706386a6eddf06cc29a65a0e2902d2b36900396c9282fa14628566582f206a5dd0bcc8d5e892611806cafb0301f0"
, "0488ade40478412e3afffffffe637807030d55d01f9a0cb3a7839515d796bd07706386a6eddf06cc29a65a0e2900f1c7c871a54a804afe328b4c83a1c33b8e5ff48f5087273f04efa83b247d6a2d"
, "xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL"
, "xprvA1RpRA33e1JQ7ifknakTFpgNXPmW2YvmhqLQYMmrj4xJXXWYpDPS3xz7iAxn8L39njGVyuoseXzU6rcxFLJ8HFsTjSyQbLYnMpCqE2VbFWc"
]
-- m/0/2147483647'/1/2147483646'/2
, [ "26132fdbe7bf89cbc64cf8dafa3f9f88b8666220"
, "26132fdb"
, "14UKfRV9ZPUp6ZC9PLhqbRtxdihW9em3xt"
, "bb7d39bdb83ecf58f2fd82b6d918341cbef428661ef01ab97c28a4842125ac23"
, "L3WAYNAZPxx1fr7KCz7GN9nD5qMBnNiqEJNJMU1z9MMaannAt4aK"
, "024d902e1a2fc7a8755ab5b694c575fce742c48d9ff192e63df5193e4c7afe1f9c"
, "9452b549be8cea3ecb7a84bec10dcfd94afe4d129ebfd3b3cb58eedf394ed271"
, "0488b21e0531a507b8000000029452b549be8cea3ecb7a84bec10dcfd94afe4d129ebfd3b3cb58eedf394ed271024d902e1a2fc7a8755ab5b694c575fce742c48d9ff192e63df5193e4c7afe1f9c"
, "0488ade40531a507b8000000029452b549be8cea3ecb7a84bec10dcfd94afe4d129ebfd3b3cb58eedf394ed27100bb7d39bdb83ecf58f2fd82b6d918341cbef428661ef01ab97c28a4842125ac23"
, "xpub6FnCn6nSzZAw5Tw7cgR9bi15UV96gLZhjDstkXXxvCLsUXBGXPdSnLFbdpq8p9HmGsApME5hQTZ3emM2rnY5agb9rXpVGyy3bdW6EEgAtqt"
, "xprvA2nrNbFZABcdryreWet9Ea4LvTJcGsqrMzxHx98MMrotbir7yrKCEXw7nadnHM8Dq38EGfSh6dqA9QWTyefMLEcBYJUuekgW4BYPJcr9E7j"
]
]
| tphyahoo/haskoin | haskoin-core/tests/Network/Haskoin/Crypto/ExtendedKeys/Units.hs | unlicense | 20,367 | 0 | 14 | 3,386 | 2,869 | 1,624 | 1,245 | 327 | 1 |
module Braxton.A282166 (a282166) where
import Helpers.BraxtonHelper (allSequences, SymmetricRelation(..), ReflexiveRelation(..))
a282166 :: Int -> Integer
a282166 n = minimum $ map sum theSequences where
theSequences = map (map fromIntegral) $ allSequences NonReflexive Symmetric n n
| peterokagey/haskellOEIS | src/Braxton/A282166.hs | apache-2.0 | 287 | 0 | 10 | 37 | 90 | 50 | 40 | 5 | 1 |
{-
Copyright 2019 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
program = drawingOf(path[start, end])
start :: Point
start = (0, 0)
end :: Point
end = [2, -4]
| pranjaltale16/codeworld | codeworld-compiler/test/testcase/test_pointType/source.hs | apache-2.0 | 707 | 0 | 8 | 135 | 56 | 33 | 23 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-
Copyright 2020 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module CodeWorld.Compile.Requirements.Language (parseRequirement) where
import CodeWorld.Compile.Framework
import CodeWorld.Compile.Requirements.LegacyLanguage
import CodeWorld.Compile.Requirements.Types
import Control.Applicative
import Data.Aeson
import Data.Aeson.Types (explicitParseFieldMaybe)
import qualified Data.Aeson.Types as Aeson
import Data.Either
import Data.Foldable
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Yaml as Yaml
import Language.Haskell.Exts.SrcLoc
instance FromJSON Requirement where
parseJSON = withObject "Requirement" $ \v ->
Requirement <$> v .: "Description"
<*> v .: "Rules"
instance FromJSON Rule where
parseJSON = withObject "Rule" $ \o -> do
choices <-
sequence
[ explicitParseFieldMaybe definedByFunction o "definedByFunction",
explicitParseFieldMaybe matchesExpected o "matchesExpected",
explicitParseFieldMaybe hasSimpleParams o "hasSimpleParams",
explicitParseFieldMaybe usesAllParams o "usesAllParams",
explicitParseFieldMaybe notDefined o "notDefined",
explicitParseFieldMaybe notUsed o "notUsed",
explicitParseFieldMaybe containsMatch o "containsMatch",
explicitParseFieldMaybe matchesRegex o "matchesRegex",
explicitParseFieldMaybe ifThen o "ifThen",
explicitParseFieldMaybe allOf o "all",
explicitParseFieldMaybe anyOf o "any",
explicitParseFieldMaybe notThis o "not",
explicitParseFieldMaybe maxLineLength o "maxLineLength",
explicitParseFieldMaybe noWarningsExcept o "noWarningsExcept",
explicitParseFieldMaybe typeSignatures o "typeSignatures",
explicitParseFieldMaybe blacklist o "blacklist",
explicitParseFieldMaybe whitelist o "whitelist"
]
case catMaybes choices of
[r] -> decorateWith o r
[] -> fail "No recognized rule type was defined."
_ -> fail "More than one type was found for a single rule."
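-- An illustrative requirements document in the YAML syntax handled by the
-- instances above (a sketch only; the field names are the ones matched by
-- the parsers in this module):
--
-- > Description: Draws a scene using a helper function
-- > Rules:
-- >   - definedByFunction:
-- >       variable: program
-- >       function: drawingOf
-- >   - maxLineLength: 80
-- >     explanation: Keep lines short.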
decorateWith :: Aeson.Object -> Rule -> Aeson.Parser Rule
decorateWith obj = wrapCustomMessage
where
wrapCustomMessage rule = do
msg <- obj .:? "explanation"
case msg of
Just str -> return (OnFailure str rule)
_ -> return rule
definedByFunction :: Aeson.Value -> Aeson.Parser Rule
definedByFunction = withObject "definedByFunction" $ \o ->
DefinedByFunction <$> o .: "variable"
<*> o .: "function"
matchesExpected :: Aeson.Value -> Aeson.Parser Rule
matchesExpected = withObject "matchesExpected" $ \o ->
MatchesExpected <$> o .: "variable"
<*> o .: "hash"
hasSimpleParams :: Aeson.Value -> Aeson.Parser Rule
hasSimpleParams = withText "hasSimpleParams" $ \t ->
return $ HasSimpleParams $ T.unpack t
usesAllParams :: Aeson.Value -> Aeson.Parser Rule
usesAllParams = withText "usesAllParams" $ \t ->
return $ UsesAllParams $ T.unpack t
notDefined :: Aeson.Value -> Aeson.Parser Rule
notDefined = withText "notDefined" $ \t ->
return $ NotDefined $ T.unpack t
notUsed :: Aeson.Value -> Aeson.Parser Rule
notUsed = withText "notUsed" $ \t ->
return $ NotUsed $ T.unpack t
containsMatch :: Aeson.Value -> Aeson.Parser Rule
containsMatch = withObject "containsMatch" $ \o ->
ContainsMatch <$> o .: "template"
<*> o .:? "topLevel" .!= True
<*> o .:? "cardinality" .!= atLeastOne
matchesRegex :: Aeson.Value -> Aeson.Parser Rule
matchesRegex = withObject "matchesRegex" $ \o ->
MatchesRegex <$> o .: "pattern"
<*> o .:? "cardinality" .!= atLeastOne
ifThen :: Aeson.Value -> Aeson.Parser Rule
ifThen = withObject "ifThen" $ \o ->
OnFailure <$> o .: "if"
<*> o .: "then"
allOf :: Aeson.Value -> Aeson.Parser Rule
allOf v = AllOf <$> withArray "all" (mapM parseJSON . toList) v
anyOf :: Aeson.Value -> Aeson.Parser Rule
anyOf v = AnyOf <$> withArray "any" (mapM parseJSON . toList) v
notThis :: Aeson.Value -> Aeson.Parser Rule
notThis v = NotThis <$> parseJSON v
maxLineLength :: Aeson.Value -> Aeson.Parser Rule
maxLineLength v = MaxLineLength <$> parseJSON v
noWarningsExcept :: Aeson.Value -> Aeson.Parser Rule
noWarningsExcept v = NoWarningsExcept <$> withArray "exceptions" (mapM parseJSON . toList) v
typeSignatures :: Aeson.Value -> Aeson.Parser Rule
typeSignatures v = TypeSignatures <$> parseJSON v
blacklist :: Aeson.Value -> Aeson.Parser Rule
blacklist v = Blacklist <$> withArray "blacklist" (mapM parseJSON . toList) v
whitelist :: Aeson.Value -> Aeson.Parser Rule
whitelist v = Whitelist <$> withArray "whitelist" (mapM parseJSON . toList) v
instance FromJSON Cardinality where
parseJSON val = parseAsNum val <|> parseAsObj val
where
parseAsNum val = do
n <- parseJSON val
return (Cardinality (Just n) (Just n))
parseAsObj = withObject "cardinality" $ \o -> do
exactly <- o .:? "exactly"
mini <- o .:? "atLeast"
maxi <- o .:? "atMost"
case (exactly, mini, maxi) of
(Just n, Nothing, Nothing) ->
return (Cardinality (Just n) (Just n))
(Nothing, Nothing, Nothing) ->
fail "Missing cardinality"
(Nothing, m, n) ->
return (Cardinality m n)
parseRequirement :: Int -> Int -> Text -> Either String Requirement
parseRequirement ln col txt
| isLegacyFormat txt = parseLegacyRequirement ln col txt
| otherwise =
either (Left . prettyPrintYamlParseException ln col) Right $
Yaml.decodeEither' (T.encodeUtf8 txt)
prettyPrintYamlParseException ln col e =
formatLocation srcSpan ++ ": " ++ Yaml.prettyPrintParseException e
where
srcSpan = SrcSpanInfo loc []
loc = SrcSpan "program.hs" ln col ln col
| google/codeworld | codeworld-compiler/src/CodeWorld/Compile/Requirements/Language.hs | apache-2.0 | 6,396 | 0 | 19 | 1,295 | 1,639 | 832 | 807 | 129 | 2 |
{-# LANGUAGE StrictData #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Provides functions that facilitate defining textcase transformations.
-- To see how these can be used, see the definitions of @addTextCase@
-- in "Citeproc.Pandoc" and "Citproc.CslJson".
module Citeproc.CaseTransform
( CaseTransformState(..)
, CaseTransformer(..)
, withUppercaseAll
, withLowercaseAll
, withCapitalizeWords
, withCapitalizeFirst
, withSentenceCase
, withTitleCase
)
where
import Data.Ord ()
import Data.Char (isUpper, isLower)
import Data.Text (Text)
import qualified Data.Text as T
import Citeproc.Types (Lang(..))
import qualified Citeproc.Unicode as Unicode
-- | Wraps a function used to define textcase transformations.
newtype CaseTransformer =
CaseTransformer
{ unCaseTransformer :: Maybe Lang -> CaseTransformState -> Text -> Text }
-- | Tracks context in textcase transformations.
data CaseTransformState =
Start
| StartSentence
| AfterWordEnd
| AfterWordChar
| AfterSentenceEndingPunctuation
| BeforeLastWord
deriving (Show, Eq)
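-- A small usage sketch (added for illustration; not part of the original
-- module): a transformer is applied chunk by chunk together with the current
-- 'CaseTransformState', e.g.
--
-- > unCaseTransformer withCapitalizeFirst Nothing Start "hello"        == "Hello"
-- > unCaseTransformer withLowercaseAll    Nothing AfterWordEnd "HELLO" == "hello"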
-- | Uppercase everything.
withUppercaseAll :: CaseTransformer
withUppercaseAll =
CaseTransformer (\mblang _ -> Unicode.toUpper mblang)
-- | Lowercase everything.
withLowercaseAll :: CaseTransformer
withLowercaseAll =
CaseTransformer (\mblang _ -> Unicode.toLower mblang)
-- | Capitalize all words.
withCapitalizeWords :: CaseTransformer
withCapitalizeWords = CaseTransformer go
where
go mblang st chunk
| isMixedCase chunk = chunk
| st == Start || st == StartSentence || st == AfterWordEnd ||
st == BeforeLastWord
= if T.all isLower chunk
then capitalizeText mblang chunk
else chunk
| otherwise = chunk
-- | Capitalize first letter.
withCapitalizeFirst :: CaseTransformer
withCapitalizeFirst = CaseTransformer go
where
go mblang st chunk
| isMixedCase chunk = chunk
| st == Start
= if T.all isLower chunk
then capitalizeText mblang chunk
else chunk
| otherwise = chunk
-- | Capitalize first letter of each sentence.
withSentenceCase :: CaseTransformer
withSentenceCase = CaseTransformer go
where
go mblang st chunk
| isCapitalized chunk
, not (st == Start || st == StartSentence)
= Unicode.toLower mblang chunk
| isCapitalized chunk || T.all isLower chunk
, st == Start || st == StartSentence
= capitalizeText mblang $ Unicode.toLower mblang chunk
| otherwise = chunk
-- | Use title case.
withTitleCase :: CaseTransformer
withTitleCase = CaseTransformer go
where
go mblang st chunk
| isMixedCase chunk = chunk
| T.all isUpper chunk = chunk -- spec doesn't say this but tests do
-- textcase_TitleCapitalization.txt
| st == StartSentence || st == Start =
capitalizeText mblang $ Unicode.toLower mblang chunk
| st == AfterWordEnd
, not (isStopWord chunk)
, T.compareLength chunk 1 == GT =
capitalizeText mblang $ Unicode.toLower mblang chunk
| st == BeforeLastWord
, T.compareLength chunk 1 == GT =
capitalizeText mblang $ Unicode.toLower mblang chunk
| otherwise = chunk
isCapitalized :: Text -> Bool
isCapitalized t =
case T.uncons t of
Just (c, t') -> isUpper c && T.all isLower t'
_ -> False
isMixedCase :: Text -> Bool
isMixedCase t = T.any isUpper t && T.any isLower t
capitalizeText :: Maybe Lang -> Text -> Text
capitalizeText mblang x =
case T.uncons x of
Just (c,x') -> Unicode.toUpper mblang (T.singleton c) <> x'
Nothing -> x
isStopWord :: Text -> Bool
isStopWord "a" = True
isStopWord "an" = True
isStopWord "and" = True
isStopWord "as" = True
isStopWord "at" = True
isStopWord "but" = True
isStopWord "by" = True
isStopWord "down" = True
isStopWord "for" = True
isStopWord "from" = True
isStopWord "in" = True
isStopWord "into" = True
isStopWord "nor" = True
isStopWord "of" = True
isStopWord "on" = True
isStopWord "onto" = True
isStopWord "or" = True
isStopWord "over" = True
isStopWord "so" = True
isStopWord "the" = True
isStopWord "till" = True
isStopWord "to" = True
isStopWord "up" = True
isStopWord "via" = True
isStopWord "with" = True
isStopWord "yet" = True
-- not in original list but seems required by test flipflop_Apostrophes
-- and textcase_SkipNameParticlesInTitleCase
isStopWord "about" = True
isStopWord "van" = True
isStopWord "von" = True
isStopWord "de" = True
isStopWord "d" = True
isStopWord "l" = True
isStopWord _ = False
| jgm/citeproc | src/Citeproc/CaseTransform.hs | bsd-2-clause | 4,574 | 0 | 16 | 984 | 1,180 | 605 | 575 | 125 | 2 |
module Handler.ListLinks where
import Import
import Control.Monad (forM)
import qualified Data.Text as T
-- | Fetch every link owned by the given user, paired with the names of the
-- tags attached to it.
findLinks :: UserId -> YesodDB sub App [(Link, [Text])]
findLinks user = do
links <- selectList [LinkOwner ==. user] []
forM links $ \(Entity linkid link) -> do
tags <- map (linkTagsTag . entityVal) <$> selectList [LinkTagsLink ==. linkid] []
return (link, tags)
-- | Render the logged-in user's links as an HTML page.
getListLinksR :: Handler RepHtml
getListLinksR = do
(Entity user _) <- requireAuth
links <- runDB $ findLinks user
defaultLayout $ do
addScriptRemote "//cdnjs.cloudflare.com/ajax/libs/jquery/1.8.2/jquery.min.js"
addScriptRemote "//cdnjs.cloudflare.com/ajax/libs/underscore.js/1.3.3/underscore-min.js"
addScriptRemote "https://raw.github.com/raimohanska/bacon.js/master/dist/Bacon.min.js"
$(widgetFile "listLinks")
| MasseR/introitu | Handler/ListLinks.hs | bsd-2-clause | 820 | 0 | 15 | 123 | 231 | 115 | 116 | 19 | 1 |
{-# LANGUAGE QuasiQuotes #-}
-----------------------------------------------------------------------------
--
-- Module      :  Parser.Publisher.AnnualRev
-- Copyright :
-- License : BSD3
--
-- Maintainer : Hiro Kai
-- Stability : Experimental
-- Portability :
--
--
--
-----------------------------------------------------------------------------
--
-- For Annual Reviews journals
--
module Parser.Publisher.AnnualRev (
annualRevReader
) where
import Parser.Import
import Text.XML.Cursor as C
import Parser.Utils
import qualified Data.Text as T
import Control.Applicative
_annualRevReader :: PaperReader
annualRevReader :: PaperReader
_supportedUrl :: PaperReader -> T.Text -> Maybe SupportLevel
_title, _journal, _volume, _pageFrom, _pageTo, _articleType, _abstract
:: ReaderElement' (Maybe Text)
_mainHtml :: ReaderElement' (Maybe PaperMainText)
_doi :: ReaderElement' Text
_year :: ReaderElement' (Maybe Int)
_authors :: ReaderElement' [Text]
_publisher :: ReaderElement' (Maybe Text)
_refs :: ReaderElement' [Reference]
_figs :: ReaderElement' [Figure]
_annualRevReader = defaultReader {
supportedUrl = _supportedUrl,
doi = anyLevel _doi,
journal = anyLevel _journal,
publisher = anyLevel _publisher,
title = anyLevel _title,
volume = anyLevel _volume,
pageFrom = anyLevel _pageFrom,
pageTo = anyLevel _pageTo,
year = anyLevel _year,
authors = anyLevel _authors,
articleType = anyLevel _articleType,
refs = onlyFullL _refs,
figs = onlyFullL _figs
}
annualRevReader
= _annualRevReader {
readerName = (\_ -> "Annual Reviews")
,abstract = absLevel _abstract
,mainHtml = onlyFull _mainHtml
}
_articleType _ _ = Nothing -- headm . getMeta "dc.Type"
_supportedUrl _ url =
if "http://www.annualreviews.org/doi/full/" `T.isPrefixOf` url then
Just SFullText
else
Nothing
_doi _ = fromMaybe "" . headm . getMeta "dc.Identifier"
_journal _ = inner . queryT [jq| h1.journalTitle |]
_publisher _ = headm . getMeta "dc.Publisher"
_title _ = inner . queryT [jq| h1.arttitle |]
-- ToDo: This is ad hoc. Rewrite these with Regexp.
_volume _ c = let ns = queryT [jq| span.breadcrumbs > a |] c in
if length ns >= 3 then
fmap (fst . T.breakOn "-") $ (inner . (:[]) . (!! 2)) ns >>= dropmt 7
else
Nothing
_pageFrom _ c =
fmap (fst . T.breakOn "-" . snd . T.breakOn ": ") (inner $ queryT [jq| div.issueInfo |] c)
>>= dropmt 2
_pageTo _ c = fmap (snd . T.breakOnEnd "-" . fst . T.breakOn " (") (inner $ queryT [jq| div.issueInfo |] c)
_year _ c = fmap (read . T.unpack) ((headm . getMeta "dc.Date") c >>= takemt 4)
_authors _ = getMeta "dc.Creator"
_abstract _ = inner . queryT [jq| div.abstractSection |]
_mainHtml _ = fmap FlatHtml . inner . map fromNode . removeQueries ["div.figureThumbnail", "div.chooseSections","div.headerSelect"] . map node . queryT [jq| div.NLM_sec_level_1 |]
_refs _ cur =
let
cs = queryT [jq| #referencesTab > ul > li |] cur
mkRef c =
let
mid = eid (node c)
mname = do
n <- inner $ queryT [jq| span.position |] c
maybeText $ fst $ T.breakOn "." n
mcittxt = do
h <- inner $ queryT [jq| div.citation |] c
t <- headMay $ T.splitOn "<script" h
return $ T.replace "</i>" "" $ T.replace "<i>" "" t
murl = do
txt <- mcittxt
return $ T.concat ["http://alocator.web.fc2.com/?q=",txt,"&redirect=yes"]
in
Reference
<$> mid
<*> mname
<*> Just Nothing -- Stub
<*> Just mcittxt
<*> Just murl
in
catMaybes $ map mkRef cs
_figs _ cur =
let
cs = queryT [jq| #figuresTab |] cur
in
[] -- Stub.
| hirokai/PaperServer | Parser/Publisher/AnnualRev.hs | bsd-2-clause | 3,774 | 1 | 18 | 915 | 1,096 | 593 | 503 | 87 | 2 |
{-# LANGUAGE BangPatterns #-}
module Database.VCache.VGetInit
( vgetInit
) where
import Data.Bits
import Foreign.Ptr
import Database.VCache.Types
import Database.VCache.VGetAux
-- | For VGet from the database, we start with just a pointer and a
-- size. To process the VGet data, we also need to read addresses
-- from a dedicated region. This is encoded from the end, as follows:
--
-- (normal data) addressN offset offset offset offset ... bytes
--
-- Here 'bytes' is basically a varNat encoded backwards for the
-- number of bytes (not counting 'bytes') back to the start of the
-- first address. This address is then encoded as a varNat, and any
-- offset is encoded as a varInt with the idea of reducing overhead
-- for encoding addresses near to each other in memory.
--
-- Addresses are encoded such that the first address to parse is last
-- in the sequence (thereby avoiding a list reverse operation).
--
-- To read addresses, we simply read the number of bytes from the end,
-- step back that far, then read the initial address and offsets until
-- we get back to the end. This must be performed before we apply the
-- normal read operation for the VGet state. It must be applied exactly
-- once for a given input.
--
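-- An illustrative example (not from the original source): to end up with
-- vget_children = [1000, 1003], the encoder writes the address 1003 as a
-- varNat, then the offset -3 as a varInt (yielding 1000), then the byte
-- count of that region encoded backwards. 'vgetInit' reads the trailing
-- count, steps back that many bytes, decodes the addresses (prepending each
-- one as it is read), and shrinks the visible input so the normal read
-- never sees the address region.
--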
vgetInit :: VGet ()
vgetInit =
readAddrBytes >>= \ nAddrBytes ->
if (0 == nAddrBytes) then return () else
VGet $ \ s ->
let bUnderflow = nAddrBytes > (vget_limit s `minusPtr` vget_target s) in
if bUnderflow then return eBadAddressRegion else
let pAddrs = vget_limit s `plusPtr` negate nAddrBytes in
let sAddrs = s { vget_target = pAddrs } in
_vget readAddrs sAddrs >>= \ mbAddrs ->
case mbAddrs of
VGetR addrs _ ->
let s' = s { vget_children = addrs, vget_limit = pAddrs } in
return (VGetR () s')
VGetE eMsg -> return (VGetE eMsg)
{-# INLINABLE vgetInit #-}
eBadAddressRegion :: VGetR a
eBadAddressRegion = VGetE "VGet: failed to read address region"
readAddrBytes :: VGet Int
readAddrBytes = readAddrBytes' 0
{-# INLINE readAddrBytes #-}
readAddrBytes' :: Int -> VGet Int
readAddrBytes' !nAccum =
getWord8FromEnd >>= \ w8 ->
let nAccum' = (nAccum `shiftL` 7) .|. (fromIntegral (0x7f .&. w8)) in
if (w8 < 0x80) then return $! nAccum' else
readAddrBytes' nAccum'
-- read a variable list of at least one address
readAddrs :: VGet [Address]
readAddrs =
getVarNat >>= \ nFirst ->
let addr0 = fromIntegral nFirst in
addr0 `seq` readAddrs' [addr0] nFirst
-- read address offsets until end of input
readAddrs' :: [Address] -> Integer -> VGet [Address]
readAddrs' addrs !nLast =
isEmpty >>= \ bEmpty ->
if bEmpty then return addrs else
getVarInt >>= \ nOff ->
let nCurr = nLast + nOff in
let addr = fromIntegral nCurr in
addr `seq` readAddrs' (addr:addrs) nCurr
| bitemyapp/haskell-vcache | hsrc_lib/Database/VCache/VGetInit.hs | bsd-2-clause | 2,936 | 0 | 27 | 721 | 564 | 308 | 256 | 46 | 4 |
module Toc1 where
import MyEither
import MyIdentity
import MyMaybe
import Test.HUnit
tests :: Test
tests = TestList
[ myIdentityTests
, myMaybeTests
, myEitherTests
]
myIdentityTests :: Test
myIdentityTests = TestCase $ show (MyIdentity "foo") @?= "MyIdentity \"foo\""
myMaybeTests :: Test
myMaybeTests = TestList
[ TestCase $ show (MyNothing :: MyMaybe String) @?= "MyNothing"
, TestCase $ show (MyJust "foo") @?= "MyJust \"foo\""
]
myEitherTests :: Test
myEitherTests = TestList
[ TestCase $ show (MyLeft "foo" :: MyEither String String) @?= "MyLeft \"foo\""
, TestCase $ show (MyRight "foo" :: MyEither String String) @?= "MyRight \"foo\""
]
| hjwylde/haskell-type-classes-workshop | test/Toc1.hs | bsd-3-clause | 691 | 0 | 11 | 142 | 192 | 103 | 89 | 20 | 1 |
module LSC2012.TestHuffman1 where
import Data.Data
import Test.LazySmallCheck2012
import Benchmarks.Huffman
import System.Environment
instance Serial a => Serial (BTree a) where
series = cons1 Leaf \/ cons2 Fork
-- | Depth-check 'prop_decEnc' at the depth read from the given string.
bench d = depthCheck (read d) prop_decEnc
| UoYCS-plasma/LazySmallCheck2012 | suite/performance/LSC2012/TestHuffman1.hs | bsd-3-clause | 260 | 0 | 7 | 39 | 81 | 42 | 39 | 8 | 1 |
module Main where
import Prelude hiding (readFile)
import System.IO(hSetBuffering,BufferMode(..),stdout)
import System.IO.UTF8(readFile)
import System.Directory(getDirectoryContents)
import System.FilePath((</>))
import System.Environment(getArgs)
import Data.List(isSuffixOf)
import Control.Monad(foldM,when)
import Crawl
import Model
import Words
import Display
pca :: String -> IO [(String, Double, Double)]
pca file = analyze file >>= pcaAnalysis
analyze :: String -> IO Model
analyze file = do
content <- readFile file >>= return.tokenizeString
dict <- tokenizeFiles [file]
trainModel 0 dict [content]
trainFiles :: [String] -> IO Model
trainFiles txts = do
dict <- tokenizeFiles txts
putStrLn $ "Encoded " ++ (show $ dictionaryLength dict) ++ " words, dim="++ (show $ encodingLength dict)
contents <- mapM (\ f -> readFile f >>= return. tokenizeString) txts
let tokens = length contents
putStrLn $ "Training " ++ (show tokens) ++ " files"
trainModel tokens dict contents
analyzeDirectory :: String -> IO Model
analyzeDirectory dir = do
  txts <- getDirectoryContents dir >>= return . map (dir </>) . filter (isSuffixOf ".txt")
  trainFiles txts
main :: IO ()
main = do
args <- getArgs
let dir = case args of
(x:xs) -> x
[] -> "."
putStrLn $ "analyzing directory "++ dir
hSetBuffering stdout NoBuffering
-- pdfs <- downloadPDFs
-- txts <- (mapM convertToText pdfs >>= return.filter (/= []))
-- m <- trainFiles txts
m <- analyzeDirectory dir
p <- pcaAnalysis m
when (length p /= (numberOfWords m))
(fail $ "PCA should have same number of words than model: "++ (show $ length p) ++ "vs. " ++ (show $ numberOfWords m))
let modelFile = (dir </> "model.vec")
let pcaFile = (dir </> "model.pca")
putStrLn $ "Writing model to file "++ modelFile
writeFile modelFile (show m)
putStrLn $ "Writing PCA to file " ++ pcaFile
writeFile pcaFile (show p)
| RayRacine/hs-word2vec | analyze.hs | bsd-3-clause | 1,932 | 0 | 14 | 385 | 652 | 329 | 323 | 50 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Data.Wikipedia.Request
( Request ()
, (<>)
, requestURL
  -- * Request properties
, action
, prop
, titles
, plnamespace
, pllimit
, plcontinue
  -- * Requests
, queryRequest
, linksRequest
, wikiLinks
) where
import Data.List (intersperse)
data Request = Request [(String, String)] deriving Show
-- | Construct single-argument request
simpleRequest :: (String, String) -> Request
simpleRequest = Request . (:[])
-- | Join two requests into one
(<>) :: Request -> Request -> Request
(Request xs) <> (Request ys) = Request (xs ++ ys)
-- | Request properties
action, prop, titles, plnamespace, pllimit, plcontinue, format
:: String -> Request
action x = simpleRequest ("action", x)
prop x = simpleRequest ("prop", x)
titles x = simpleRequest ("titles", x)
plnamespace x = simpleRequest ("plnamespace", x)
pllimit x = simpleRequest ("pllimit", x)
plcontinue x = simpleRequest ("plcontinue", x)
format x = simpleRequest ("format", x)
-- | Premade requests
queryRequest, linksRequest, wikiLinks :: Request
queryRequest = action "query" <> format "json"
linksRequest = queryRequest <> prop "links"
wikiLinks = linksRequest <> plnamespace "0"
-- | Base-url for wikipedia api
baseUrl :: String
baseUrl = "http://en.wikipedia.org/w/api.php"
-- | URL used to make request
requestURL :: Request -> String
requestURL (Request xs) = let query = concat $ intersperse "&" qargs
qargs = map (\(a, b) -> a ++ "=" ++ b) xs
in baseUrl ++ "?" ++ query
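
-- A worked example (illustrative, derived from the definitions above):
--
-- > requestURL (wikiLinks <> titles "Haskell")
-- > -- "http://en.wikipedia.org/w/api.php?action=query&format=json&prop=links&plnamespace=0&titles=Haskell"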
| awagner83/powermap | src/Data/Wikipedia/Request.hs | bsd-3-clause | 1,637 | 0 | 13 | 397 | 449 | 260 | 189 | 42 | 1 |
module TinyScheduler.SubJobs
( convertJobIntoSubJobs
, execSubJobs
) where
import Control.Concurrent
import Control.Concurrent.Async
import Data.Time
import TinyScheduler.Jobs
import Prelude hiding (id)
data SubJob a = SubJob
{ jobId :: Int
, subJobNo :: Int
, delayx :: Int
, job_ :: IO a
}
-- | Converts each hit of a job into a 'SubJob'
convertJobIntoSubJobs :: UTCTime -> Job a -> [SubJob a]
convertJobIntoSubJobs currentTime x =
let timeDelays = (delay x currentTime)
zippedDelays = zip [1 ..] timeDelays
in map (\(i, z) -> SubJob (id x) i z (job x)) zippedDelays
-- | Wait out the sub-job's delay (via 'threadDelay') and then run its action.
execSubJob :: SubJob a -> IO a
execSubJob x = threadDelay (delayx x) >> (job_ x)

-- | Run all sub-jobs concurrently, each after its own delay.
execSubJobs :: [SubJob a] -> IO [a]
execSubJobs = mapConcurrently execSubJob
| functor-soup/tiny-scheduler | TinyScheduler/SubJobs.hs | bsd-3-clause | 756 | 0 | 12 | 150 | 258 | 141 | 117 | 22 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.Init
( findCabalFiles
, initProject
, InitOpts (..)
, SnapPref (..)
, Method (..)
, makeConcreteResolver
, tryDeprecatedPath
, getImplicitGlobalProjectDir
) where
import Control.Exception (assert)
import Control.Exception.Enclosed (catchAny, handleIO)
import Control.Monad (liftM, when)
import Control.Monad.Catch (MonadMask, MonadThrow, throwM)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Lazy as L
import qualified Data.HashMap.Strict as HM
import qualified Data.IntMap as IntMap
import qualified Data.Foldable as F
import Data.List (isSuffixOf,sort)
import Data.List.Extra (nubOrd)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (mapMaybe)
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import qualified Distribution.PackageDescription as C
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.Find
import Path.IO
import Stack.BuildPlan
import Stack.Constants
import Stack.Package
import Stack.Solver
import Stack.Types
import System.Directory (getDirectoryContents)
findCabalFiles :: MonadIO m => Bool -> Path Abs Dir -> m [Path Abs File]
findCabalFiles recurse dir =
liftIO $ findFiles dir isCabal (\subdir -> recurse && not (isIgnored subdir))
where
isCabal path = ".cabal" `isSuffixOf` toFilePath path
isIgnored path = toFilePath (dirname path) `Set.member` ignoredDirs
-- | Special directories that we don't want to traverse for .cabal files
ignoredDirs :: Set FilePath
ignoredDirs = Set.fromList
[ ".git"
, "dist"
, ".stack-work"
]
-- | Generate stack.yaml
initProject :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, HasGHCVariant env, MonadLogger m, MonadBaseControl IO m)
=> Path Abs Dir
-> InitOpts
-> m ()
initProject currDir initOpts = do
let dest = currDir </> stackDotYaml
dest' = toFilePath dest
exists <- fileExists dest
when (not (forceOverwrite initOpts) && exists) $
error ("Refusing to overwrite existing stack.yaml, " <>
"please delete before running stack init " <>
"or if you are sure use \"--force\"")
cabalfps <- findCabalFiles (includeSubDirs initOpts) currDir
$logInfo $ "Writing default config file to: " <> T.pack dest'
$logInfo $ "Basing on cabal files:"
mapM_ (\path -> $logInfo $ "- " <> T.pack (toFilePath path)) cabalfps
$logInfo ""
when (null cabalfps) $ error "In order to init, you should have an existing .cabal file. Please try \"stack new\" instead"
(warnings,gpds) <- fmap unzip (mapM readPackageUnresolved cabalfps)
sequence_ (zipWith (mapM_ . printCabalFileWarning) cabalfps warnings)
(r, flags, extraDeps) <- getDefaultResolver cabalfps gpds initOpts
let p = Project
{ projectPackages = pkgs
, projectExtraDeps = extraDeps
, projectFlags = flags
, projectResolver = r
, projectExtraPackageDBs = []
}
pkgs = map toPkg cabalfps
toPkg fp = PackageEntry
{ peValidWanted = Nothing
, peExtraDepMaybe = Nothing
, peLocation = PLFilePath $
case stripDir currDir $ parent fp of
Nothing
| currDir == parent fp -> "."
| otherwise -> assert False $ toFilePath $ parent fp
Just rel -> toFilePath rel
, peSubdirs = []
}
$logInfo $ "Selected resolver: " <> resolverName r
liftIO $ L.writeFile dest' $ B.toLazyByteString $ renderStackYaml p
$logInfo $ "Wrote project config to: " <> T.pack dest'
-- | Render a stack.yaml file with comments, see:
-- https://github.com/commercialhaskell/stack/issues/226
renderStackYaml :: Project -> B.Builder
renderStackYaml p =
case Yaml.toJSON p of
Yaml.Object o -> renderObject o
_ -> assert False $ B.byteString $ Yaml.encode p
where
renderObject o =
B.byteString "# For more information, see: https://github.com/commercialhaskell/stack/blob/release/doc/yaml_configuration.md\n\n" <>
F.foldMap (goComment o) comments <>
goOthers (o `HM.difference` HM.fromList comments) <>
B.byteString
"# Control whether we use the GHC we find on the path\n\
\# system-ghc: true\n\n\
\# Require a specific version of stack, using version ranges\n\
\# require-stack-version: -any # Default\n\
\# require-stack-version: >= 0.1.4.0\n\n\
\# Override the architecture used by stack, especially useful on Windows\n\
\# arch: i386\n\
\# arch: x86_64\n\n\
\# Extra directories used by stack for building\n\
\# extra-include-dirs: [/path/to/dir]\n\
\# extra-lib-dirs: [/path/to/dir]\n"
comments =
[ ("resolver", "Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2)")
, ("packages", "Local packages, usually specified by relative directory name")
, ("extra-deps", "Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3)")
, ("flags", "Override default flag values for local packages and extra-deps")
, ("extra-package-dbs", "Extra package databases containing global packages")
]
goComment o (name, comment) =
case HM.lookup name o of
Nothing -> assert False mempty
Just v ->
B.byteString "# " <>
B.byteString comment <>
B.byteString "\n" <>
B.byteString (Yaml.encode $ Yaml.object [(name, v)]) <>
B.byteString "\n"
goOthers o
| HM.null o = mempty
| otherwise = assert False $ B.byteString $ Yaml.encode o
getSnapshots' :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m, MonadBaseControl IO m)
=> m (Maybe Snapshots)
getSnapshots' =
liftM Just getSnapshots `catchAny` \e -> do
$logError $
"Unable to download snapshot list, and therefore could " <>
"not generate a stack.yaml file automatically"
$logError $
"This sometimes happens due to missing Certificate Authorities " <>
"on your system. For more information, see:"
$logError ""
$logError " https://github.com/commercialhaskell/stack/issues/234"
$logError ""
$logError "You can try again, or create your stack.yaml file by hand. See:"
$logError ""
$logError " https://github.com/commercialhaskell/stack/blob/release/doc/yaml_configuration.md"
$logError ""
$logError $ "Exception was: " <> T.pack (show e)
return Nothing
-- | Get the default resolver value
getDefaultResolver :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, HasGHCVariant env, MonadLogger m, MonadBaseControl IO m)
=> [Path Abs File] -- ^ cabal files
-> [C.GenericPackageDescription] -- ^ cabal descriptions
-> InitOpts
-> m (Resolver, Map PackageName (Map FlagName Bool), Map PackageName Version)
getDefaultResolver cabalfps gpds initOpts =
case ioMethod initOpts of
MethodSnapshot snapPref -> do
msnapshots <- getSnapshots'
names <-
case msnapshots of
Nothing -> return []
Just snapshots -> getRecommendedSnapshots snapshots snapPref
mpair <- findBuildPlan gpds names
case mpair of
Just (snap, flags) ->
return (ResolverSnapshot snap, flags, Map.empty)
Nothing -> throwM $ NoMatchingSnapshot names
MethodResolver aresolver -> do
resolver <- makeConcreteResolver aresolver
mpair <-
case resolver of
ResolverSnapshot name -> findBuildPlan gpds [name]
ResolverCompiler _ -> return Nothing
ResolverCustom _ _ -> return Nothing
case mpair of
Just (snap, flags) ->
return (ResolverSnapshot snap, flags, Map.empty)
Nothing -> return (resolver, Map.empty, Map.empty)
MethodSolver -> do
(compilerVersion, extraDeps) <- cabalSolver Ghc (map parent cabalfps) Map.empty []
return
( ResolverCompiler compilerVersion
, Map.filter (not . Map.null) $ fmap snd extraDeps
, fmap fst extraDeps
)
getRecommendedSnapshots :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, HasGHCVariant env, MonadLogger m, MonadBaseControl IO m)
=> Snapshots
-> SnapPref
-> m [SnapName]
getRecommendedSnapshots snapshots pref = do
-- Get the most recent LTS and Nightly in the snapshots directory and
-- prefer them over anything else, since odds are high that something
-- already exists for them.
existing <-
liftM (reverse . sort . mapMaybe (parseSnapName . T.pack)) $
snapshotsDir >>=
liftIO . handleIO (const $ return [])
. getDirectoryContents . toFilePath
let isLTS LTS{} = True
isLTS Nightly{} = False
isNightly Nightly{} = True
isNightly LTS{} = False
names = nubOrd $ concat
[ take 2 $ filter isLTS existing
, take 2 $ filter isNightly existing
, map (uncurry LTS)
(take 2 $ reverse $ IntMap.toList $ snapshotsLts snapshots)
, [Nightly $ snapshotsNightly snapshots]
]
namesLTS = filter isLTS names
namesNightly = filter isNightly names
case pref of
PrefNone -> return names
PrefLTS -> return $ namesLTS ++ namesNightly
PrefNightly -> return $ namesNightly ++ namesLTS
data InitOpts = InitOpts
{ ioMethod :: !Method
-- ^ Preferred snapshots
, forceOverwrite :: Bool
-- ^ Overwrite existing files
, includeSubDirs :: Bool
-- ^ If True, include all .cabal files found in any sub directories
}
data SnapPref = PrefNone | PrefLTS | PrefNightly
-- | Method of initializing
data Method = MethodSnapshot SnapPref | MethodResolver AbstractResolver | MethodSolver
-- | Turn an 'AbstractResolver' into a 'Resolver'.
makeConcreteResolver :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m, HasHttpManager env, MonadLogger m)
=> AbstractResolver
-> m Resolver
makeConcreteResolver (ARResolver r) = return r
makeConcreteResolver ar = do
snapshots <- getSnapshots
r <-
case ar of
ARResolver r -> assert False $ return r
ARGlobal -> do
config <- asks getConfig
implicitGlobalDir <- getImplicitGlobalProjectDir config
let fp = implicitGlobalDir </> stackDotYaml
(ProjectAndConfigMonoid project _, _warnings) <-
liftIO (Yaml.decodeFileEither $ toFilePath fp)
>>= either throwM return
return $ projectResolver project
ARLatestNightly -> return $ ResolverSnapshot $ Nightly $ snapshotsNightly snapshots
ARLatestLTSMajor x ->
case IntMap.lookup x $ snapshotsLts snapshots of
Nothing -> error $ "No LTS release found with major version " ++ show x
Just y -> return $ ResolverSnapshot $ LTS x y
ARLatestLTS
| IntMap.null $ snapshotsLts snapshots -> error $ "No LTS releases found"
| otherwise ->
let (x, y) = IntMap.findMax $ snapshotsLts snapshots
in return $ ResolverSnapshot $ LTS x y
$logInfo $ "Selected resolver: " <> resolverName r
return r
-- | Get the location of the implicit global project directory.
-- If the directory already exists at the deprecated location, its location is returned.
-- Otherwise, the new location is returned.
getImplicitGlobalProjectDir
:: (MonadIO m, MonadLogger m)
=> Config -> m (Path Abs Dir)
getImplicitGlobalProjectDir config =
--TEST no warning printed
liftM fst $ tryDeprecatedPath
Nothing
dirExists
(implicitGlobalProjectDir stackRoot)
(implicitGlobalProjectDirDeprecated stackRoot)
where
stackRoot = configStackRoot config
-- | If deprecated path exists, use it and print a warning.
-- Otherwise, return the new path.
tryDeprecatedPath
:: (MonadIO m, MonadLogger m)
=> Maybe T.Text -- ^ Description of file for warning (if Nothing, no deprecation warning is displayed)
-> (Path Abs a -> m Bool) -- ^ Test for existence
-> Path Abs a -- ^ New path
-> Path Abs a -- ^ Deprecated path
-> m (Path Abs a, Bool) -- ^ (Path to use, whether it already exists)
tryDeprecatedPath mWarningDesc exists new old = do
newExists <- exists new
if newExists
then return (new, True)
else do
oldExists <- exists old
if oldExists
then do
case mWarningDesc of
Nothing -> return ()
Just desc ->
$logWarn $ T.concat
[ "Warning: Location of ", desc, " at '"
, T.pack (toFilePath old)
, "' is deprecated; rename it to '"
, T.pack (toFilePath new)
, "' instead" ]
return (old, True)
else return (new, False)
| rrnewton/stack | src/Stack/Init.hs | bsd-3-clause | 14,937 | 0 | 21 | 4,934 | 3,196 | 1,626 | 1,570 | 280 | 8 |
{-# LANGUAGE OverloadedStrings #-}
module Indexer where
import Control.Applicative
import Data.Configurator as Cfg
import Data.Configurator.Types
import Data.Text
main :: IO ()
main = do
cfg <- build =<< load configFiles
putStrLn $ show cfg
| kylcarte/harihara | src/Indexer.hs | bsd-3-clause | 249 | 0 | 9 | 42 | 67 | 37 | 30 | 10 | 1 |
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
module Logic.Herbrand where
import Prelude hiding (id,(.))
import Control.Category
import Control.Applicative
import Control.Monad.Free
import Control.Monad.Logic
import Control.Monad.Supply
import Control.Monad.State
import Data.Unifiable
import Data.Set (Set)
import qualified Data.Set as S
-- | A substitution over free-monad terms: each variable maps to a term
-- whose leaves are either 'Just' a further variable to resolve or
-- 'Nothing', meaning the variable stands for itself (still free).
newtype Subst f a b = Subst
  { subst :: a -> Free f (Maybe b)
  }
instance Functor f => Category (Subst f) where
id = Subst $ \_ -> return Nothing
g . f = Subst $ subst f >=> maybe (return Nothing) (subst g)
newtype Unify f x a = Unify
{ unUnify :: SupplyT x (StateT (Subst f x x) Logic) a
} deriving
( Functor , Applicative , Monad
, Alternative , MonadPlus
, MonadLogic
, MonadState (Subst f x x)
, MonadSupply x
)
type U f x = Unify f x (Free f x)
type U_ f = U f Int
freeVars :: (Foldable f, Ord x) => Free f x -> Set x
freeVars = foldMap S.singleton
runUnify :: (Functor f, FreshSupply x) => U f x -> Logic (Free f x)
runUnify m = uncurry (flip walk) <$> runStateT (fst <$> runSupplyT freshSupply (unUnify m)) id
-- | Resolve a single variable through the substitution until it bottoms out.
walkVar :: Functor f => Subst f x x -> x -> Free f x
walkVar s x = subst s x >>= \case
  Just y -> walkVar s y
  _ -> return x

-- | Apply the substitution to every variable in a term.
walk :: Functor f => Subst f x x -> Free f x -> Free f x
walk s u = u >>= walkVar s
fresh :: Functor f => U f x
fresh = Pure <$> supply
freshN :: Functor f => Int -> ([Free f x] -> Unify f x a) -> Unify f x a
freshN n f = replicateM n fresh >>= f
-- | Fail (via 'guard') if the variable occurs anywhere in the term.
occursCheck :: (Functor f, Foldable f, Eq x) => x -> Free f x -> Unify f x ()
occursCheck x = mapM_ $ guard . not . (==) x

-- | Bind variable @x@ to term @u@, extending the current substitution.
extendSubst :: (Functor f, Eq x) => x -> Free f x -> U f x
extendSubst x u = state $ \s ->
  ( u
  , Subst $ \y -> if x == y
      then Just <$> u
      else subst s y
  )

-- | Structurally unify two already-walked terms, extending the substitution
-- as needed.
unify :: (Unifiable f, Eq x) => Free f x -> Free f x -> U f x
unify u v =
  case (u, v) of
    (Pure x, Pure y)
      | x == y -> return u
    (Pure x, _     ) -> occursCheck x v >> extendSubst x v
    (_     , Pure y) -> occursCheck y u >> extendSubst y u
    (Free x, Free y) -> Free <$> unifyWith unify x y

-- | Walk both terms under the current substitution and unify them,
-- committing to the first result.
(===) :: (Unifiable f, Eq x) => Free f x -> Free f x -> U f x
a === b = do
  s <- get
  let u = walk s a
      v = walk s b
  once $ unify u v
data Cons a
= Nil
| Int Int
| Cons a a
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
instance Num (Cons a) where
fromInteger = Int . fromInteger
_ + _ = undefined
_ * _ = undefined
_ - _ = undefined
abs _ = undefined
signum _ = undefined
nil :: Free Cons a
nil = Free Nil
int :: Int -> Free Cons a
int = Free . Int
cons :: Free Cons a -> Free Cons a -> Free Cons a
cons a b = Free $ Cons a b
instance Num (f (Free f a)) => Num (Free f a) where
fromInteger = Free . fromInteger
_ + _ = undefined
_ * _ = undefined
_ - _ = undefined
abs _ = undefined
signum _ = undefined
instance Unifiable Cons where
unifyWith f = \case
Cons a b -> \case
Cons c d -> Cons <$> f a c <*> f b d
_ -> empty
Nil -> \case
Nil -> pure Nil
_ -> empty
Int x -> \case
Int y -> Int x <$ guard (x == y)
_ -> empty
test0 :: U_ Cons
test0 = freshN 4 $ \[q,x,y,z] -> do
x === cons 1 y
y === cons 2 z
z === cons 3 nil
q === x
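
-- Illustrative usage (not part of the original module): assuming a
-- 'FreshSupply Int' instance is in scope (from Control.Monad.Supply),
-- all solutions can be observed with
--
-- > observeAll (runUnify test0) :: [Free Cons Int]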
| kylcarte/logics | src/Logic/Herbrand.hs | bsd-3-clause | 3,552 | 0 | 15 | 978 | 1,633 | 827 | 806 | 114 | 4 |
{-# LANGUAGE ScopedTypeVariables #-}
-----------------------------------------------------------------------------
--
-- Tasks running external programs for SysTools
--
-- (c) The GHC Team 2017
--
-----------------------------------------------------------------------------
module SysTools.Tasks where
import Exception
import ErrUtils
import HscTypes
import DynFlags
import Outputable
import GHC.Platform
import Util
import Data.List
import System.IO
import System.Process
import GhcPrelude
import LlvmCodeGen.Base (LlvmVersion, llvmVersionStr, supportedLlvmVersion, parseLlvmVersion)
import SysTools.Process
import SysTools.Info
{-
************************************************************************
* *
\subsection{Running an external program}
* *
************************************************************************
-}
runUnlit :: DynFlags -> [Option] -> IO ()
runUnlit dflags args = traceToolCommand dflags "unlit" $ do
let prog = pgm_L dflags
opts = getOpts dflags opt_L
runSomething dflags "Literate pre-processor" prog
(map Option opts ++ args)
runCpp :: DynFlags -> [Option] -> IO ()
runCpp dflags args = traceToolCommand dflags "cpp" $ do
let (p,args0) = pgm_P dflags
args1 = map Option (getOpts dflags opt_P)
args2 = [Option "-Werror" | gopt Opt_WarnIsError dflags]
++ [Option "-Wundef" | wopt Opt_WarnCPPUndef dflags]
mb_env <- getGccEnv args2
runSomethingFiltered dflags id "C pre-processor" p
(args0 ++ args1 ++ args2 ++ args) Nothing mb_env
runPp :: DynFlags -> [Option] -> IO ()
runPp dflags args = traceToolCommand dflags "pp" $ do
let prog = pgm_F dflags
opts = map Option (getOpts dflags opt_F)
runSomething dflags "Haskell pre-processor" prog (args ++ opts)
-- | Run compiler of C-like languages and raw objects (such as gcc or clang).
runCc :: Maybe ForeignSrcLang -> DynFlags -> [Option] -> IO ()
runCc mLanguage dflags args = traceToolCommand dflags "cc" $ do
let p = pgm_c dflags
args1 = map Option userOpts
args2 = languageOptions ++ args ++ args1
-- We take care to pass -optc flags in args1 last to ensure that the
-- user can override flags passed by GHC. See #14452.
mb_env <- getGccEnv args2
runSomethingResponseFile dflags cc_filter "C Compiler" p args2 mb_env
where
-- discard some harmless warnings from gcc that we can't turn off
cc_filter = unlines . doFilter . lines
{-
gcc gives warnings in chunks like so:
In file included from /foo/bar/baz.h:11,
from /foo/bar/baz2.h:22,
from wibble.c:33:
/foo/flibble:14: global register variable ...
/foo/flibble:15: warning: call-clobbered r...
We break it up into its chunks, remove any call-clobbered register
warnings from each chunk, and then delete any chunks that we have
emptied of warnings.
-}
doFilter = unChunkWarnings . filterWarnings . chunkWarnings []
-- We can't assume that the output will start with an "In file inc..."
-- line, so we start off expecting a list of warnings rather than a
-- location stack.
chunkWarnings :: [String] -- The location stack to use for the next
-- list of warnings
-> [String] -- The remaining lines to look at
-> [([String], [String])]
chunkWarnings loc_stack [] = [(loc_stack, [])]
chunkWarnings loc_stack xs
= case break loc_stack_start xs of
(warnings, lss:xs') ->
case span loc_start_continuation xs' of
(lsc, xs'') ->
(loc_stack, warnings) : chunkWarnings (lss : lsc) xs''
_ -> [(loc_stack, xs)]
filterWarnings :: [([String], [String])] -> [([String], [String])]
filterWarnings [] = []
-- If the warnings are already empty then we are probably doing
-- something wrong, so don't delete anything
filterWarnings ((xs, []) : zs) = (xs, []) : filterWarnings zs
filterWarnings ((xs, ys) : zs) = case filter wantedWarning ys of
[] -> filterWarnings zs
ys' -> (xs, ys') : filterWarnings zs
unChunkWarnings :: [([String], [String])] -> [String]
unChunkWarnings [] = []
unChunkWarnings ((xs, ys) : zs) = xs ++ ys ++ unChunkWarnings zs
loc_stack_start s = "In file included from " `isPrefixOf` s
loc_start_continuation s = " from " `isPrefixOf` s
wantedWarning w
| "warning: call-clobbered register used" `isContainedIn` w = False
| otherwise = True
-- force the C compiler to interpret this file as C when
-- compiling .hc files, by adding the -x c option.
-- Also useful for plain .c files, just in case GHC saw a
-- -x c option.
(languageOptions, userOpts) = case mLanguage of
Nothing -> ([], userOpts_c)
Just language -> ([Option "-x", Option languageName], opts)
where
(languageName, opts) = case language of
LangC -> ("c", userOpts_c)
LangCxx -> ("c++", userOpts_cxx)
LangObjc -> ("objective-c", userOpts_c)
LangObjcxx -> ("objective-c++", userOpts_cxx)
LangAsm -> ("assembler", [])
RawObject -> ("c", []) -- claim C for lack of a better idea
userOpts_c = getOpts dflags opt_c
userOpts_cxx = getOpts dflags opt_cxx
isContainedIn :: String -> String -> Bool
xs `isContainedIn` ys = any (xs `isPrefixOf`) (tails ys)
-- | Run the linker with some arguments and return the output
askLd :: DynFlags -> [Option] -> IO String
askLd dflags args = traceToolCommand dflags "linker" $ do
let (p,args0) = pgm_l dflags
args1 = map Option (getOpts dflags opt_l)
args2 = args0 ++ args1 ++ args
mb_env <- getGccEnv args2
runSomethingWith dflags "gcc" p args2 $ \real_args ->
readCreateProcessWithExitCode' (proc p real_args){ env = mb_env }
runAs :: DynFlags -> [Option] -> IO ()
runAs dflags args = traceToolCommand dflags "as" $ do
let (p,args0) = pgm_a dflags
args1 = map Option (getOpts dflags opt_a)
args2 = args0 ++ args1 ++ args
mb_env <- getGccEnv args2
runSomethingFiltered dflags id "Assembler" p args2 Nothing mb_env
-- | Run the LLVM Optimiser
runLlvmOpt :: DynFlags -> [Option] -> IO ()
runLlvmOpt dflags args = traceToolCommand dflags "opt" $ do
let (p,args0) = pgm_lo dflags
args1 = map Option (getOpts dflags opt_lo)
-- We take care to pass -optlo flags (e.g. args0) last to ensure that the
-- user can override flags passed by GHC. See #14821.
runSomething dflags "LLVM Optimiser" p (args1 ++ args ++ args0)
-- | Run the LLVM Compiler
runLlvmLlc :: DynFlags -> [Option] -> IO ()
runLlvmLlc dflags args = traceToolCommand dflags "llc" $ do
let (p,args0) = pgm_lc dflags
args1 = map Option (getOpts dflags opt_lc)
runSomething dflags "LLVM Compiler" p (args0 ++ args1 ++ args)
-- | Run the clang compiler (used as an assembler for the LLVM
-- backend on OS X as LLVM doesn't support the OS X system
-- assembler)
runClang :: DynFlags -> [Option] -> IO ()
runClang dflags args = traceToolCommand dflags "clang" $ do
let (clang,_) = pgm_lcc dflags
-- be careful what options we call clang with
-- see #5903 and #7617 for bugs caused by this.
(_,args0) = pgm_a dflags
args1 = map Option (getOpts dflags opt_a)
args2 = args0 ++ args1 ++ args
mb_env <- getGccEnv args2
Exception.catch (do
runSomethingFiltered dflags id "Clang (Assembler)" clang args2 Nothing mb_env
)
(\(err :: SomeException) -> do
errorMsg dflags $
text ("Error running clang! you need clang installed to use the" ++
" LLVM backend") $+$
text "(or GHC tried to execute clang incorrectly)"
throwIO err
)
-- | Figure out which version of LLVM we are running this session
figureLlvmVersion :: DynFlags -> IO (Maybe LlvmVersion)
figureLlvmVersion dflags = traceToolCommand dflags "llc" $ do
let (pgm,opts) = pgm_lc dflags
args = filter notNull (map showOpt opts)
-- we grab the args even though they should be useless just in
-- case the user is using a customised 'llc' that requires some
-- of the options they've specified. llc doesn't care what other
-- options are specified when '-version' is used.
args' = args ++ ["-version"]
catchIO (do
(pin, pout, perr, _) <- runInteractiveProcess pgm args'
Nothing Nothing
{- > llc -version
LLVM (http://llvm.org/):
LLVM version 3.5.2
...
-}
hSetBinaryMode pout False
_ <- hGetLine pout
vline <- hGetLine pout
let mb_ver = parseLlvmVersion vline
hClose pin
hClose pout
hClose perr
return mb_ver
)
(\err -> do
debugTraceMsg dflags 2
(text "Error (figuring out LLVM version):" <+>
text (show err))
errorMsg dflags $ vcat
[ text "Warning:", nest 9 $
text "Couldn't figure out LLVM version!" $$
text ("Make sure you have installed LLVM " ++
llvmVersionStr supportedLlvmVersion) ]
return Nothing)
runLink :: DynFlags -> [Option] -> IO ()
runLink dflags args = traceToolCommand dflags "linker" $ do
-- See Note [Run-time linker info]
--
-- `-optl` args come at the end, so that later `-l` options
-- given there manually can fill in symbols needed by
-- Haskell libraries coming in via `args`.
linkargs <- neededLinkArgs `fmap` getLinkerInfo dflags
let (p,args0) = pgm_l dflags
optl_args = map Option (getOpts dflags opt_l)
args2 = args0 ++ linkargs ++ args ++ optl_args
mb_env <- getGccEnv args2
runSomethingResponseFile dflags ld_filter "Linker" p args2 mb_env
where
ld_filter = case (platformOS (targetPlatform dflags)) of
OSSolaris2 -> sunos_ld_filter
_ -> id
{-
SunOS/Solaris ld emits harmless warning messages about unresolved
symbols in case of compiling into shared library when we do not
link against all the required libs. That is the case of GHC which
does not link against RTS library explicitly in order to be able to
choose the library later based on binary application linking
parameters. The warnings look like:
Undefined first referenced
symbol in file
stg_ap_n_fast ./T2386_Lib.o
stg_upd_frame_info ./T2386_Lib.o
templatezmhaskell_LanguageziHaskellziTHziLib_litE_closure ./T2386_Lib.o
templatezmhaskell_LanguageziHaskellziTHziLib_appE_closure ./T2386_Lib.o
templatezmhaskell_LanguageziHaskellziTHziLib_conE_closure ./T2386_Lib.o
templatezmhaskell_LanguageziHaskellziTHziSyntax_mkNameGzud_closure ./T2386_Lib.o
newCAF ./T2386_Lib.o
stg_bh_upd_frame_info ./T2386_Lib.o
stg_ap_ppp_fast ./T2386_Lib.o
templatezmhaskell_LanguageziHaskellziTHziLib_stringL_closure ./T2386_Lib.o
stg_ap_p_fast ./T2386_Lib.o
stg_ap_pp_fast ./T2386_Lib.o
ld: warning: symbol referencing errors
this is actually coming from T2386 testcase. The emitting of those
warnings is also a reason why so many TH testcases fail on Solaris.
Following filter code is SunOS/Solaris linker specific and should
filter out only linker warnings. Please note that the logic is a
little bit more complex due to the simple reason that we need to preserve
any other linker emitted messages. If there are any. Simply speaking
if we see "Undefined" and later "ld: warning:..." then we omit all
text between (including) the marks. Otherwise we copy the whole output.
-}
sunos_ld_filter :: String -> String
sunos_ld_filter = unlines . sunos_ld_filter' . lines
sunos_ld_filter' x = if (undefined_found x && ld_warning_found x)
then (ld_prefix x) ++ (ld_postfix x)
else x
breakStartsWith x y = break (isPrefixOf x) y
ld_prefix = fst . breakStartsWith "Undefined"
undefined_found = not . null . snd . breakStartsWith "Undefined"
ld_warn_break = breakStartsWith "ld: warning: symbol referencing errors"
ld_postfix = tail . snd . ld_warn_break
ld_warning_found = not . null . snd . ld_warn_break
runLibtool :: DynFlags -> [Option] -> IO ()
runLibtool dflags args = traceToolCommand dflags "libtool" $ do
linkargs <- neededLinkArgs `fmap` getLinkerInfo dflags
let args1 = map Option (getOpts dflags opt_l)
args2 = [Option "-static"] ++ args1 ++ args ++ linkargs
libtool = pgm_libtool dflags
mb_env <- getGccEnv args2
runSomethingFiltered dflags id "Linker" libtool args2 Nothing mb_env
runAr :: DynFlags -> Maybe FilePath -> [Option] -> IO ()
runAr dflags cwd args = traceToolCommand dflags "ar" $ do
let ar = pgm_ar dflags
runSomethingFiltered dflags id "Ar" ar args cwd Nothing
askAr :: DynFlags -> Maybe FilePath -> [Option] -> IO String
askAr dflags mb_cwd args = traceToolCommand dflags "ar" $ do
let ar = pgm_ar dflags
runSomethingWith dflags "Ar" ar args $ \real_args ->
readCreateProcessWithExitCode' (proc ar real_args){ cwd = mb_cwd }
runRanlib :: DynFlags -> [Option] -> IO ()
runRanlib dflags args = traceToolCommand dflags "ranlib" $ do
let ranlib = pgm_ranlib dflags
runSomethingFiltered dflags id "Ranlib" ranlib args Nothing Nothing
runMkDLL :: DynFlags -> [Option] -> IO ()
runMkDLL dflags args = traceToolCommand dflags "mkdll" $ do
let (p,args0) = pgm_dll dflags
args1 = args0 ++ args
mb_env <- getGccEnv (args0++args)
runSomethingFiltered dflags id "Make DLL" p args1 Nothing mb_env
runWindres :: DynFlags -> [Option] -> IO ()
runWindres dflags args = traceToolCommand dflags "windres" $ do
let cc = pgm_c dflags
cc_args = map Option (sOpt_c (settings dflags))
windres = pgm_windres dflags
opts = map Option (getOpts dflags opt_windres)
quote x = "\"" ++ x ++ "\""
args' = -- If windres.exe and gcc.exe are in a directory containing
-- spaces then windres fails to run gcc. We therefore need
-- to tell it what command to use...
Option ("--preprocessor=" ++
unwords (map quote (cc :
map showOpt opts ++
["-E", "-xc", "-DRC_INVOKED"])))
-- ...but if we do that then if windres calls popen then
-- it can't understand the quoting, so we have to use
-- --use-temp-file so that it interprets it correctly.
-- See #1828.
: Option "--use-temp-file"
: args
mb_env <- getGccEnv cc_args
runSomethingFiltered dflags id "Windres" windres args' Nothing mb_env
touch :: DynFlags -> String -> String -> IO ()
touch dflags purpose arg = traceToolCommand dflags "touch" $
runSomething dflags purpose (pgm_T dflags) [FileOption "" arg]
-- * Tracing utility
-- | Record in the eventlog when the given tool command starts
-- and finishes, prepending the given 'String' with
-- \"systool:\", to easily be able to collect and process
-- all the systool events.
--
-- For those events to show up in the eventlog, you need
-- to run GHC with @-v2@ or @-ddump-timings@.
traceToolCommand :: DynFlags -> String -> IO a -> IO a
traceToolCommand dflags tool = withTiming
dflags (text $ "systool:" ++ tool) (const ())
| sdiehl/ghc | compiler/main/SysTools/Tasks.hs | bsd-3-clause | 15,939 | 0 | 22 | 4,339 | 3,335 | 1,704 | 1,631 | 221 | 13 |
module Data.Text.Extra
( upperCaseFirst
, lowerCaseFirst
, dropPrefix
, dropSuffix
, (<+>)
, module T
)
where
import Data.String ( IsString )
import Data.Text as T
import Data.Semigroup
import Data.Char
import Prelude
dropPrefix :: Text -> Text -> Maybe Text
dropPrefix prefix s = if prefix `isPrefixOf` s
then Just (T.drop (T.length prefix) s)
else Nothing
dropSuffix :: Text -> Text -> Maybe Text
dropSuffix suffix s = if suffix `isSuffixOf` s
then Just (T.dropEnd (T.length suffix) s)
else Nothing
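
-- Illustrative examples (not from the original module):
--
-- > dropPrefix "foo" "foobar" == Just "bar"
-- > dropPrefix "bar" "foobar" == Nothing
-- > dropSuffix "bar" "foobar" == Just "foo"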
(<+>) :: (IsString a, Semigroup a) => a -> a -> a
a <+> b = a <> " " <> b
lowerCaseFirst :: Text -> Text
lowerCaseFirst = onFirst Data.Char.toLower
upperCaseFirst :: Text -> Text
upperCaseFirst = onFirst Data.Char.toUpper . T.dropWhile (not . isAlpha)
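
-- Illustrative examples (not from the original module):
--
-- > lowerCaseFirst "Foo" == "foo"
-- > upperCaseFirst "_foo" == "Foo"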
onFirst :: (Char -> Char) -> Text -> Text
onFirst f = \case
Cons c cs -> Cons (f c) cs
t -> t
pattern Cons :: Char -> Text -> Text
pattern Cons c cs <- (uncons -> Just (c, cs))
where Cons c cs = cons c cs
| expipiplus1/vulkan | generate-new/src/Data/Text/Extra.hs | bsd-3-clause | 1,115 | 0 | 11 | 332 | 405 | 220 | 185 | -1 | -1 |
import Test.Hspec
import Test.QuickCheck
import Control.Exception (evaluate)
-- import Numeric.LinearAlgebra
main :: IO ()
main = hspec .
describe "Prelude.head" $ do
it "returns the first element of a list" $
head [23 ..] `shouldBe` (23 :: Int)
it "returns the first element of an *arbitrary* list" .
property $ \x xs -> head (x:xs) == (x :: Int)
it "throws an exception if used with an empty list" $
evaluate (head []) `shouldThrow` anyException
| pupuu/deep-neuralnet | test/Spec.hs | bsd-3-clause | 484 | 0 | 13 | 111 | 145 | 76 | 69 | 12 | 1 |
module ListUtil where
{-@ type IncrList a = [a] <{\xi xj -> xi <= xj}> @-}
| raymoo/lh-vfa-stuff | ListUtil.hs | bsd-3-clause | 76 | 0 | 2 | 17 | 5 | 4 | 1 | 1 | 0 |
{-# LANGUAGE TemplateHaskell, QuasiQuotes, TypeFamilies, OverloadedStrings #-}
import Text.Digestive.Forms.Yesod
import Control.Applicative
import Yesod hiding (Html, renderHtml, runFormPost)
import Text.Digestive
import Text.Digestive.Blaze.Html5
import Text.Blaze.Html5 (Html)
import qualified Data.ByteString.Lazy as LB
import Data.Maybe
import Network.Wai.Handler.SimpleServer
import qualified Text.Hamlet as H
data Test = Test
type Handler = GHandler Test Test
mkYesod "Test" [parseRoutes|
/ UploadR GET POST
|]
instance Yesod Test where
approot _ = ""
data Upload = Upload LB.ByteString String
deriving (Show)
resultView :: View e v -> Result e ok -> v
resultView v = unView v . resultErrors
resultErrors (Error e) = e
resultErrors _ = []
uploadForm :: YesodForm Handler Html BlazeFormHtml Upload
uploadForm = Upload
<$> fmap (fromMaybe "Unknown" . fmap snd) inputFile
<*> (<++ errors) (inputTextRead "Required" Nothing)
uploadFormWidget :: BlazeFormHtml -> GWidget Test Test ()
uploadFormWidget form =
addHamlet [$hamlet|
%form!method=post!enctype=multipart/form-data
$formHtml$
%input!type=submit
|]
where formHtml = H.toHtml form
getUploadR :: Handler RepHtml
getUploadR = do
form <- viewForm uploadForm "upload"
defaultLayout $ uploadFormWidget form
postUploadR :: Handler RepHtml
postUploadR = do
(view, res) <- runFormPost uploadForm "upload"
defaultLayout $ do
case res of
Error _ -> return ()
Ok up -> addHtml . H.toHtml $ show up
uploadFormWidget $ resultView view res
main = basicHandler 3000 Test
| softmechanics/digestive-functors-yesod | examples/simple.hs | bsd-3-clause | 1,603 | 3 | 15 | 293 | 499 | 261 | 238 | 43 | 2 |
{-# OPTIONS -Wall #-}
{-# OPTIONS -fno-warn-unused-do-bind #-}
{-# LANGUAGE OverloadedStrings #-}
module Language.Pck.Tool.Assembler (
-- * Assembler drivers
parseInst
, parseInstFile
-- * Assembler parse examples
-- $parsenote
) where
-- Attoparsec
import Data.Attoparsec.ByteString as P
import Data.Attoparsec.ByteString.Char8 (char8, endOfLine)
import qualified Data.Attoparsec.ByteString.Char8 as P8
import Control.Applicative
-- ByteString
import qualified Data.ByteString.Char8 as B
-- List
import Data.List (elemIndex, sortBy, elemIndices)
import Data.Char (toLower)
-- instruction
import Language.Pck.Cpu.Instruction
import Control.DeepSeq (force)
import Data.Either (partitionEithers)
------------------------------------------------------------
-- driver
------------------------------------------------------------
-- | parse instructions from a ByteString data
--
-- Example:
--
-- > > parseInst (B.pack "mov r0,1\n halt\n")
-- > [MOVI R0 1,HALT]
--
parseInst :: B.ByteString -> Either [String] [Inst]
parseInst inp = case (parseOnly file inp') of
Right x -> Right x
Left _ -> parseInstAnalyze $ removeComments inp'
where inp' = B.map toLower inp
-- | parse instructions from a file
--
-- Example:
--
-- > > parseInstFile "examples/test0.asm"
-- > [MOVI R0 1,HALT]
--
parseInstFile :: FilePath -> IO [Inst]
parseInstFile f = do a <- B.readFile f
case force (parseInst a) of
Right x -> return x
Left e -> mapM_ putStrLn e >> error "parse error"
------------------------------------------------------------
-- top
------------------------------------------------------------
file :: Parser [Inst]
file = do a <- many (skipElements >> instLine)
skipElements >> endOfInput
return a
type ParseInst = Parser Inst
instLine :: ParseInst
instLine = do skipSpaces
a <- inst
skipSpaces
endOfLine <|> skipLineComment <|> skipRangeComment <|> endOfInput
return a
------------------------------------------------------------
-- instructions
------------------------------------------------------------
inst :: ParseInst
inst = miscInsts
<|> movInsts
<|> arithInsts
<|> logicInsts
<|> jumpInsts
<|> memInsts
miscInsts :: ParseInst
miscInsts = inst0 NOP "nop"
<|> inst0 HALT "halt"
movInsts :: ParseInst
movInsts = inst2 MOVI "mov" greg imm
<|> inst2 MOV "mov" greg greg
<|> inst2 movpc "mov" greg pc
arithInsts :: ParseInst
arithInsts = inst3 ADD "add" greg greg greg
<|> inst3 SUB "sub" greg greg greg
<|> inst2 CMP "cmp" greg greg
<|> inst2 ABS "abs" greg greg
<|> inst3 ASH "ash" greg greg greg
<|> inst3 MUL "mul" greg greg greg
<|> inst3 DIV "div" greg greg greg
logicInsts :: ParseInst
logicInsts = inst3 AND "and" greg greg greg
<|> inst3 OR "or" greg greg greg
<|> inst2 NOT "not" greg greg
<|> inst3 XOR "xor" greg greg greg
<|> inst3 LSH "lsh" greg greg greg
jumpInsts :: ParseInst
jumpInsts = inst2 BRI "b" fcond imm
<|> inst1 JRI "jmp" imm
<|> inst1 J "jmp" greg
<|> inst1 CALL "call" greg
<|> inst0 RET "ret"
memInsts :: ParseInst
memInsts = inst2 LD "ld" greg mem
<|> inst2 ST "st" mem greg
-- asymmetric operand utility
movpc :: GReg -> b -> Inst
movpc a _ = MOVPC a
------------------------------------------------------------
-- instruction formats
------------------------------------------------------------
type F0 = Inst
type F1 a = a -> Inst
type F2 a b = a -> b -> Inst
type F3 a b c = a -> b -> c -> Inst
inst0 :: F0 -> B.ByteString -> ParseInst
inst0 f op = f <$ string op
inst1 :: F1 a -> B.ByteString -> Parser a -> ParseInst
inst1 f op p1 = f <$> (string op >> delimSpace >> p1)
inst2 :: F2 a b -> B.ByteString -> Parser a -> Parser b -> ParseInst
inst2 f op p1 p2 = f <$> (string op >> delimSpace >> p1)
<*> (delimComma >> p2)
inst3 :: F3 a b c -> B.ByteString -> Parser a -> Parser b -> Parser c
-> ParseInst
inst3 f op p1 p2 p3 = f <$> (string op >> delimSpace >> p1)
<*> (delimComma >> p2)
<*> (delimComma >> p3)
------------------------------------------------------------
-- operand patterns
------------------------------------------------------------
-- general purpose register
strGRegPref :: B.ByteString
strGRegPref = ""
greg :: Parser GReg
greg = do string strGRegPref
let reverseSortedGregNames = sortBy (flip compare) gregNames
a <- choice $ map string reverseSortedGregNames
return $ strToGReg a
-- pc
pc :: Parser ()
pc = do string "pc"
return ()
-- flag condition
fcond :: Parser FCond
fcond = do a <- (string "eq" <|> string "ne"
<|> string "lt" <|> string "le"
<|> string "gt" <|> string "ge")
return $ strToFCond (B.unpack a)
-- immediate
strImmPref :: B.ByteString
strImmPref = ""
imm :: Parser Int
imm = do string strImmPref
immMinus <|> immHex <|> immNoSign
immNoSign :: Parser Int
immNoSign = do d <- P.takeWhile1 (inClass "0123456789")
return $ read (B.unpack d)
immMinus :: Parser Int
immMinus = do char8 '-'
d <- P.takeWhile1 (inClass "0123456789")
return $ read ('-' : B.unpack d)
immHex :: Parser Int
immHex = do string "0x"
d <- P.takeWhile1 (inClass "0123456789abcdef")
return $ read ("0x" ++ B.unpack d)
-- memory operand
strMemBeg, strMemEnd :: B.ByteString
strMemBeg = "m("
strMemEnd = ")"
mem :: Parser GReg
mem = do string strMemBeg >> skipSpaces
a <- greg
skipSpaces >> string strMemEnd
return a
-- converter utility
gregNames :: [B.ByteString]
gregNames = map (B.pack . (map toLower) . show)
[(minBound :: GReg) .. (maxBound :: GReg)]
strToGReg :: B.ByteString -> GReg
strToGReg x = case (elemIndex x gregNames) of
Just n -> toEnum n
Nothing -> error $ "strToGReg" ++ (show x)
strToFCond :: String -> FCond
strToFCond "eq" = FCEQ
strToFCond "ne" = FCNE
strToFCond "lt" = FCLT
strToFCond "le" = FCLE
strToFCond "gt" = FCGT
strToFCond "ge" = FCGE
strToFCond x = error $ "strToFCond" ++ (show x)
------------------------------------------------------------
-- utility
------------------------------------------------------------
skipSpaces :: Parser ()
skipSpaces = skipWhile P8.isHorizontalSpace
delimSpace :: Parser ()
delimSpace = satisfy P8.isHorizontalSpace *> skipWhile P8.isHorizontalSpace
delimComma :: Parser ()
delimComma = do skipSpaces
char8 ','
skipSpaces
------------------------------------------------------------
-- comment and empty line
------------------------------------------------------------
-- comment
strCmntLine, strCmntRangeBeg, strCmntRangeEnd :: B.ByteString
strCmntLine = "#"
strCmntRangeBeg = "/*"
strCmntRangeEnd = "*/"
lineComment :: Parser String
lineComment = do string strCmntLine
manyTill P8.anyChar endOfLine
rangeComment :: Parser String
rangeComment = do string strCmntRangeBeg
manyTill P8.anyChar (string strCmntRangeEnd)
-- skip empty elements
skipElements :: Parser ()
skipElements = do many (skipLineComment <|> skipRangeComment <|> skipEmptyLine)
return ()
-- empty line
skipEmptyLine :: Parser ()
skipEmptyLine = do skipSpaces >> endOfLine
return ()
-- skip line comment and range comment
skipLineComment :: Parser ()
skipLineComment = do skipSpaces >> lineComment
return ()
skipRangeComment :: Parser ()
skipRangeComment = do skipSpaces >> rangeComment >> skipSpaces
return ()
------------------------------------------------------------
-- analyzing utility to generate error line numbers
-- (attoparsec is fast, but its error messages carry little information)
------------------------------------------------------------
-- line-by-line parser
parseInstAnalyze :: B.ByteString -> Either [String] [Inst]
parseInstAnalyze inp = if null l then Right r else Left l
where (l,r) = partitionEithers . map parseEachLine
. extractNonEmptyLine $ inp
parseEachLine :: (Int, B.ByteString) -> Either String Inst
parseEachLine (n, inp) = case (parseOnly instLine inp) of
Right x -> Right x
Left _ -> Left $ "parseInst: parse error at line "
++ show n ++ " : " ++ show inp
extractNonEmptyLine :: B.ByteString -> [(Int, B.ByteString)]
extractNonEmptyLine = filter (\(_,x) -> isNonEmptyLine x) . zip [1..] . B.lines
isNonEmptyLine :: B.ByteString -> Bool
isNonEmptyLine = not . B.all (`B.elem` " \t\t")
removeComments :: B.ByteString -> B.ByteString
removeComments inp = case (parseOnly commentParse inp) of
Right x -> x
_ -> error "removeComments: parse error"
commentParse :: Parser B.ByteString
commentParse = do a <- many (lineCommentEol <|> rangeCommentEol <|> normalLine)
return $ B.concat a
normalLine :: Parser B.ByteString
normalLine = do a <- P8.anyChar
return $ B.pack [a]
-- preserve end-of-line in comments
lineCommentEol :: Parser B.ByteString
lineCommentEol = do lineComment
return "\n"
rangeCommentEol :: Parser B.ByteString
rangeCommentEol = do a <- rangeComment
return $ B.pack (extractEol a)
extractEol :: String -> String
extractEol cs = replicate len '\n'
where len = length $ elemIndices '\n' cs
-- $parsenote
--
-- Parse Example:
--
-- from text to the 'Language.Pck.Cpu.Instruction.Inst' data type
--
-- > text -> Inst data type
-- > ----------------------------------------
-- > nop -> NOP
-- > halt -> HALT
-- > mov r1, 100 -> MOVI R1 100
-- > mov r1, r2 -> MOV R1 R2
-- > mov r1, pc -> MOVPC R1
-- > add r1, r2, r3 -> ADD R1 R2 R3
-- > sub r1, r2, r3 -> SUB R1 R2 R3
-- > cmp r1, r2 -> CMP R1 R2
-- > abs r1, r2 -> ABS R1 R2
-- > ash r1, r2, r3 -> ASH R1 R2 R3
-- > mul r1, r2, r3 -> MUL R1 R2 R3
-- > div r1, r2, r3 -> DIV R1 R2 R3
-- > and r1, r2, r3 -> AND R1 R2 R3
-- > or r1, r2, r3 -> OR R1 R2 R3
-- > not r1, r2 -> NOT R1 R2
-- > xor r1, r2, r3 -> XOR R1 R2 R3
-- > lsh r1, r2, r3 -> LSH R1 R2 R3
-- > b eq, -3 -> BRI FCEQ (-3)
-- > jmp 3 -> JRI 3
-- > jmp r1 -> J R1
-- > call r1 -> CALL R1
-- > ret -> RET
-- > ld r1, m(r2) -> LD R1 R2
-- > st m(r1), r2 -> ST R1 R2
--
-- Comment descriptions:
--
-- > # a comment line
-- > /* a comment block */
--
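-- A hypothetical end-to-end use of this module (the input program is made
-- up): 'removeComments' is applied first, then 'parseInstAnalyze' parses
-- line by line and reports the offending line number on failure.
--
-- > parseInstAnalyze (removeComments "mov r1, 100\nhalt\n")
-- > -- Right [MOVI R1 100, HALT]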
| takenobu-hs/processor-creative-kit | Language/Pck/Tool/Assembler.hs | bsd-3-clause | 11,182 | 0 | 14 | 3,155 | 2,665 | 1,383 | 1,282 | 208 | 2 |
module Main where
import Types
import Parser
testTask = Task "test1" (fromGregorian 2016 01 01) (Just $ fromGregorian 2016 02 01) High "note" setEmpty
main = do
putStrLn "Executable currenty does nothing"
| kvelicka/htodo | src/Main.hs | bsd-3-clause | 210 | 0 | 8 | 37 | 63 | 33 | 30 | 6 | 1 |
module Paths ( hseDirStructure
, cabalConfigLocation
, dotDirName
, constructDotDirName
, insidePathVar
) where
import Data.List (intercalate)
import System.FilePath ((</>))
import System.Directory (getCurrentDirectory)
import Util.IO (getEnvVar)
import Types
import HsenvMonad
-- returns record containing paths to all important directories
-- inside virtual environment dir structure
hseDirStructure :: Hsenv DirStructure
hseDirStructure = do
cwd <- liftIO getCurrentDirectory
dirName <- dotDirName
let hsEnvLocation = cwd
hsEnvDirLocation = hsEnvLocation </> dirName
cabalDirLocation = hsEnvDirLocation </> "cabal"
ghcDirLocation = hsEnvDirLocation </> "ghc"
return DirStructure { hsEnv = hsEnvLocation
, hsEnvDir = hsEnvDirLocation
, ghcPackagePath = hsEnvDirLocation </> "ghc_pkg_db"
, cabalDir = cabalDirLocation
, cabalBinDir = cabalDirLocation </> "bin"
, hsEnvBinDir = hsEnvDirLocation </> "bin"
, ghcDir = ghcDirLocation
, ghcBinDir = ghcDirLocation </> "bin"
}
constructDotDirName :: Options -> String
constructDotDirName opts = maybe ".hsenv" (".hsenv_" ++) (hsEnvName opts)
-- directory name of hsEnvDir
dotDirName :: Hsenv String
dotDirName = do
opts <- ask
return $ constructDotDirName opts
-- returns location of cabal's config file inside virtual environment dir structure
cabalConfigLocation :: Hsenv FilePath
cabalConfigLocation = do
dirStructure <- hseDirStructure
return $ cabalDir dirStructure </> "config"
-- returns value of $PATH env variable to be used inside virtual environment
insidePathVar :: Hsenv String
insidePathVar = do
oldPathVar <- liftIO $ getEnvVar "PATH"
let oldPathVarSuffix = case oldPathVar of
Nothing -> ""
Just x -> ':' : x
dirStructure <- hseDirStructure
ghc <- asks ghcSource
let extraPathElems = case ghc of
System -> [cabalBinDir dirStructure]
_ -> [cabalBinDir dirStructure, ghcBinDir dirStructure]
return $ intercalate ":" extraPathElems ++ oldPathVarSuffix
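-- For example, with a system GHC, no custom hsenv name, and an existing
-- $PATH of "/usr/bin", this yields roughly
-- "<project>/.hsenv/cabal/bin:/usr/bin" (illustrative; the exact prefix
-- comes from 'hseDirStructure').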
| tmhedberg/hsenv | src/Paths.hs | bsd-3-clause | 2,367 | 0 | 14 | 711 | 445 | 236 | 209 | 49 | 3 |
-- | Values with a @waynodes@ string accessor.
module Data.Geo.OSM.Lens.WaynodesL where
import Data.Geo.OSM.Waynodes
import Control.Lens.Lens
class WaynodesL a where
waynodesL ::
Lens' a Waynodes
| tonymorris/geo-osm | src/Data/Geo/OSM/Lens/WaynodesL.hs | bsd-3-clause | 205 | 0 | 7 | 33 | 42 | 26 | 16 | 6 | 0 |
{-# LANGUAGE OverloadedStrings, GeneralizedNewtypeDeriving #-}
module Clay.Time
(
-- * Time type.
Time
-- * Time constructors.
, sec
, ms
)
where
import Data.Monoid
import Data.Text (pack)
import Clay.Common
import Clay.Property
-------------------------------------------------------------------------------
newtype Time = Time Value
deriving (Val, Auto, Normal, Inherit, None, Other)
-- | Time in seconds.
sec :: Double -> Time
sec i = Time (value (pack (show i) <> "s"))
-- | Time in milliseconds.
ms :: Double -> Time
ms i = Time (value (pack (show i) <> "ms"))
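-- For reference, 'sec 1.5' renders as the CSS value "1.5s" and 'ms 150.5'
-- as "150.5ms"; whole-number literals pick up a trailing ".0" (e.g. 'sec 2'
-- yields "2.0s") because the underlying 'Double' is shown directly.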
instance Num Time where
fromInteger = sec . fromInteger
(+) = error "plus not implemented for Time"
(*) = error "times not implemented for Time"
abs = error "abs not implemented for Time"
signum = error "signum not implemented for Time"
instance Fractional Time where
fromRational = sec . fromRational
| bergmark/clay | src/Clay/Time.hs | bsd-3-clause | 913 | 0 | 12 | 180 | 237 | 131 | 106 | 24 | 1 |
-- | Simple value types and functions.
module Analyze.Values where
import Analyze.Common (Data)
import Control.Monad.Catch (Exception, MonadThrow (..))
import Data.Text (Text)
import Data.Typeable (Typeable)
-- | Singleton type for value types.
data ValueType =
ValueTypeText
| ValueTypeInteger
| ValueTypeDouble
| ValueTypeBool
deriving (Show, Eq, Enum, Bounded)
-- | Union type for values.
data Value =
ValueText Text
| ValueInteger Integer
| ValueDouble Double
| ValueBool Bool
deriving (Show, Eq)
-- | Returns the type of the value.
valueToType :: Value -> ValueType
valueToType (ValueText _) = ValueTypeText
valueToType (ValueInteger _) = ValueTypeInteger
valueToType (ValueDouble _) = ValueTypeDouble
valueToType (ValueBool _) = ValueTypeBool
-- | Extracts 'Text' from the 'Value'.
getText :: Value -> Maybe Text
getText (ValueText s) = Just s
getText _ = Nothing
-- | Extracts 'Integer' from the 'Value'.
getInteger :: Value -> Maybe Integer
getInteger (ValueInteger i) = Just i
getInteger _ = Nothing
-- | Extracts 'Double' from the 'Value'.
getDouble :: Value -> Maybe Double
getDouble (ValueDouble d) = Just d
getDouble _ = Nothing
-- | Extracts 'Bool' from the 'Value'.
getBool :: Value -> Maybe Bool
getBool (ValueBool b) = Just b
getBool _ = Nothing
-- | Exception for when we encounter unexpected values.
data ValueTypeError k = ValueTypeError k ValueType Value deriving (Show, Eq, Typeable)
instance (Show k, Typeable k) => Exception (ValueTypeError k)
-- | Use with 'Analyze.Decoding.requireWhere' to read 'Text' values.
textual :: (Data k, MonadThrow m) => k -> Value -> m Text
textual _ (ValueText s) = pure s
textual k v = throwM (ValueTypeError k ValueTypeText v)
-- | Use with 'Analyze.Decoding.requireWhere' to read 'Integer' values.
integral :: (Data k, MonadThrow m) => k -> Value -> m Integer
integral _ (ValueInteger s) = pure s
integral k v = throwM (ValueTypeError k ValueTypeInteger v)
-- | Use with 'Analyze.Decoding.requireWhere' to read 'Double' values.
floating :: (Data k, MonadThrow m) => k -> Value -> m Double
floating _ (ValueDouble s) = pure s
floating k v = throwM (ValueTypeError k ValueTypeDouble v)
-- | Use with 'Analyze.Decoding.requireWhere' to read 'Bool' values.
boolean :: (Data k, MonadThrow m) => k -> Value -> m Bool
boolean _ (ValueBool s) = pure s
boolean k v = throwM (ValueTypeError k ValueTypeBool v)
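-- A small illustration of the extractors above, instantiated at 'Maybe'
-- (any 'MonadThrow' works; the keys are made up and the key type is assumed
-- to satisfy the 'Data' constraint, as 'Text' typically does here):
--
-- > textual ("name" :: Text) (ValueText "alice") == Just "alice"
-- > integral ("age" :: Text) (ValueText "alice") == Nothing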
| ejconlon/analyze | src/Analyze/Values.hs | bsd-3-clause | 2,569 | 0 | 8 | 587 | 699 | 366 | 333 | 48 | 1 |
{-# LANGUAGE OverloadedStrings, DataKinds, ScopedTypeVariables #-}
module Main (main) where
import Prelude hiding (mod, div)
import Data.Monoid
import Data.Boolean
import Data.Boolean.Numbers
import Data.Default.Class
import System.FilePath ( (</>) )
import Language.Sunroof
import Language.Sunroof.JS.Canvas
import Language.Sunroof.JS.Browser hiding ( eval )
import Language.Sunroof.JS.JQuery
import Language.Sunroof.JS.Date
import Paths_sunroof_examples
main :: IO ()
main = do
-- Copy the index HTML and jquery.js to the current directory.
dataDir <- getDataDir
readFile (dataDir </> "js/jquery.js") >>= writeFile "jquery.js"
readFile (dataDir </> "examples/clock/index.html") >>= writeFile "clock.html"
-- Compile the JavaScript and also write it to the current directory.
sunroofCompileJSA def "main" clockJS >>= writeFile "main.js"
clockJS :: JS 'A (JSFunction () ())
clockJS = function $ \() -> do
-- Renders a single line (with number) of the clock face.
renderClockFaceLine <- function $ \(c :: JSCanvas, u :: JSNumber, n :: JSNumber) -> do
c # save
-- Draw one of the indicator lines
c # beginPath
c # moveTo (0, -u * 1.0)
ifB (n `mod` 5 ==* 0)
(c # lineTo (0, -u * 0.8)) -- Minute line
(c # lineTo (0, -u * 0.9)) -- Hour line
ifB (n `mod` 15 ==* 0)
(c # lineWidth := 8) -- Quarter line
(c # lineWidth := 3) -- Non-Quarter line
c # stroke
c # closePath
-- Draw of the hour numbers
ifB (n `mod` 5 ==* 0)
(do
c # translate (-u * 0.75, 0)
c # rotate (-2 * pi / 4)
c # fillText (cast $ n `div` 5) (0, 0)
) (return ())
c # restore
-- Renders a single clock pointer.
renderClockPointer <- function $ \(c :: JSCanvas, u :: JSNumber, angle :: JSNumber, width' :: JSNumber, len :: JSNumber) -> do
c # save
c # lineCap := "round"
c # rotate angle
c # lineWidth := width'
c # beginPath
c # moveTo (0, u * 0.1)
c # lineTo (0, -u * len)
c # stroke
c # closePath
c # restore
-- Renders the clocks pointers for hours, minutes and seconds.
renderClockPointers <- function $ \(c :: JSCanvas, u :: JSNumber) -> do
(h, m, s) <- currentTime
c # save
c # lineCap := "round"
-- Hour pointer
renderClockPointer $$
(c, u, (2 * pi / 12) * ((h `mod` 12) + (m `mod` 60) / 60), 15, 0.4)
-- Minute pointer
renderClockPointer $$
( c, u, (2 * pi / 60) * ((m `mod` 60) + (s `mod` 60) / 60), 10, 0.7)
-- Second pointer
c # strokeStyle := "red"
renderClockPointer $$ ( c, u, (2 * pi / 60) * (s `mod` 60), 4, 0.9)
-- Restore everything
c # restore
-- Renders the complete face of the clock, without pointers.
renderClockFace <- function $ \(c :: JSCanvas, u :: JSNumber) -> do
c # save
c # rotate (2 * pi / 4) -- 0 degrees is at the top
-- Draw all hour lines.
lines' <- array [1..60::Int]
lines' # forEach $ \n -> do
c # save
c # rotate ((2 * pi / 60) * n)
renderClockFaceLine $$ (c, u, n)
c # restore
c # restore -- Undo all the rotation.
-- Renders the complete clock.
renderClock <- continuation $ \() -> do
u <- clockUnit
(w,h) <- canvasSize
c <- context
-- Basic setup
c # save
c # fillStyle := "black"
c # strokeStyle := "black"
c # lineCap := "round"
c # textAlign := "center"
c # font := ((cast $ u * 0.1) <> "px serif")
c # textBaseline := "top"
c # clearRect (0,0) (w,h)
c # translate (w / 2, h / 2)
-- Draw all hour lines.
renderClockFace $$ (c, u)
-- Draw the clock pointers
renderClockPointers $$ (c, u)
c # restore
return ()
_ <- window # setInterval (goto renderClock) 1000
-- and draw one now, rather than wait till later
_ <- goto renderClock ()
return ()
canvas :: JS t JSObject
canvas = document # getElementById "canvas"
context :: JS t JSCanvas
context = canvas >>= getContext "2d"
clockUnit :: JS t JSNumber
clockUnit = do
(w, h) <- canvasSize
return $ (maxB w h) / 2
canvasSize :: JS t (JSNumber, JSNumber)
canvasSize = do
c <- jQuery "#canvas"
w <- c # invoke "innerWidth" ()
h <- c # invoke "innerHeight" ()
return (w, h)
currentTime :: JS t (JSNumber, JSNumber, JSNumber)
currentTime = do
date <- newDate ()
h <- date # getHours
m <- date # getMinutes
s <- date # getSeconds
return (h, m, s)
| ku-fpg/sunroof-examples | examples/clock/Main.hs | bsd-3-clause | 4,421 | 0 | 24 | 1,182 | 1,657 | 867 | 790 | 114 | 1 |
{-# Language OverloadedStrings, GADTs #-}
{-|
Module : Client.Commands.Arguments.Renderer
Description : Interpretation of argument specification as a renderer
Copyright : (c) Eric Mertens, 2017
License : ISC
Maintainer : [email protected]
-}
module Client.Commands.Arguments.Renderer (render) where
import Client.Commands.Arguments.Spec
import Client.Image.MircFormatting
import Client.Image.PackedImage
import Client.Image.Palette
import Control.Applicative.Free
import Control.Lens
import Control.Monad.Trans.State
import Data.Functor.Compose
import qualified Data.Text as Text
import Graphics.Vty (wcswidth)
import Graphics.Vty.Attributes
render ::
Palette {- ^ palette -} ->
r {- ^ environment -} ->
Bool {- ^ render placeholders -} ->
Args r a {- ^ specification -} ->
String {- ^ user input -} ->
Image'
render pal env placeholders spec str = extend (addExcess img)
where
(img, excess) = flip runState str . getState
$ renderArgs pal env placeholders spec
addExcess
| any (' '/=) excess = (<> string defAttr excess)
| otherwise = id
extend i
| imageWidth i < minLen = resizeImage minLen i
| otherwise = i
where minLen = wcswidth str
renderArgs :: Palette -> r -> Bool -> Args r a -> Renderer a
renderArgs pal r placeholders = runAp (renderArg pal r placeholders)
------------------------------------------------------------------------
type Renderer = Compose (State String) (Const Image')
getState :: Renderer a -> State String Image'
getState = fmap getConst . getCompose
putState :: State String Image' -> Renderer a
putState = Compose . fmap Const
------------------------------------------------------------------------
renderArg :: Palette -> r -> Bool -> Arg r a -> Renderer b
renderArg pal r placeholders spec = putState $
let placeholder name
| placeholders = return (" " <> string (view palCommandPlaceholder pal) name)
| otherwise = return mempty
draw = parseIrcText' True pal . Text.pack
in
case spec of
Optional subspec -> getState (renderArgs pal r placeholders subspec)
Extension name ext ->
do (lead,tok) <- state token
if null tok then
placeholder name
else do
rest <- case ext r tok of
Nothing -> return mempty
Just subspec -> getState (renderArgs pal r placeholders subspec)
return (draw (lead++tok) <> rest)
Argument TokenArgument name _ ->
do (lead,tok) <- state token
if null tok then
placeholder name
else
return (draw (lead++tok))
Argument RemainingArgument name _ ->
do rest <- state (\x -> (x,""))
if all (' '==) rest then
placeholder name
else
return (draw rest)
token :: String -> ((String, String), String)
token xs =
let (lead, xs1) = span (' '==) xs
(tok , xs2) = break (' '==) xs1
in ((lead, tok), xs2)
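-- For example, @token "  mov r1"@ yields @(("  ", "mov"), " r1")@: the
-- leading spaces, the first token, and the rest of the input (illustrative).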
| dolio/irc-core | src/Client/Commands/Arguments/Renderer.hs | isc | 3,175 | 0 | 26 | 931 | 889 | 455 | 434 | 69 | 8 |
{-# LANGUAGE RecordWildCards #-}
-- | Types which are stored in memory.
module Pos.DB.Update.MemState.Types
( MemPool (..)
, UpdateProposals
, LocalVotes
, MemState (..)
, MemVar (..)
, newMemVar
) where
import Universum
import Data.Default (Default (def))
import Serokell.Data.Memory.Units (Byte)
import Pos.Chain.Block (HeaderHash)
import Pos.Chain.Update (LocalVotes, PollModifier, UpdateProposals)
import Pos.Core (SlotCount, SlotId (..), localSlotIndexMinBound)
import Pos.Core.Slotting (MonadSlots (getCurrentSlot))
import Pos.DB.Class (MonadDBRead)
import Pos.DB.GState.Common (getTip)
-- | MemPool is the data maintained by a node to be included into blocks and
-- relayed to other nodes.
data MemPool = MemPool
{ mpProposals :: !UpdateProposals
, mpLocalVotes :: !LocalVotes
, mpSize :: !Byte
} deriving (Show)
instance Default MemPool where
def = MemPool mempty mempty 2
-- | MemState contains all in-memory data necessary for the Update System.
data MemState = MemState
{ msSlot :: !SlotId
-- ^ Slot for which data is valid.
-- In reality EpochIndex should be enough, but we sometimes
-- overgeneralize things.
, msTip :: !HeaderHash
-- ^ Tip for which data is valid.
, msPool :: !MemPool
-- ^ Pool of data to be included into block.
, msModifier :: !PollModifier
-- ^ Modifier of GState corresponding to 'msPool'.
}
-- | MemVar stores MemState inside 'TVar'.
newtype MemVar = MemVar
{ mvState :: TVar MemState -- ^ MemState itself.
}
-- | Create new 'MemVar' using slotting and read-only access to DB.
newMemVar
:: (MonadIO m, MonadDBRead m, MonadSlots ctx m) => SlotCount -> m MemVar
newMemVar epochSlots = do
let slot0 = SlotId 0 localSlotIndexMinBound
msSlot <- fromMaybe slot0 <$> getCurrentSlot epochSlots
msTip <- getTip
let ms = MemState { msPool = def, msModifier = mempty, .. }
liftIO $ MemVar <$> newTVarIO ms
| input-output-hk/pos-haskell-prototype | db/src/Pos/DB/Update/MemState/Types.hs | mit | 2,087 | 0 | 11 | 544 | 401 | 239 | 162 | 53 | 1 |
sth-echo "\b\b\b\b" | sth-unescape | sth-translit "\b" "x"
>>>
xxxx
>>>= 0
| nbloomf/st-haskell | test/translit/escape.hs | gpl-3.0 | 75 | 6 | 8 | 12 | 45 | 19 | 26 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.SNS
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Amazon Simple Notification Service
--
-- Amazon Simple Notification Service (Amazon SNS) is a web service that
-- enables you to build distributed web-enabled applications. Applications
-- can use Amazon SNS to easily push real-time notification messages to
-- interested subscribers over multiple delivery protocols. For more
-- information about this product see
-- <http://aws.amazon.com/sns/ http:\/\/aws.amazon.com\/sns>. For detailed
-- information about Amazon SNS features and their associated API calls,
-- see the
-- <http://docs.aws.amazon.com/sns/latest/dg/ Amazon SNS Developer Guide>.
--
-- We also provide SDKs that enable you to access Amazon SNS from your
-- preferred programming language. The SDKs contain functionality that
-- automatically takes care of tasks such as: cryptographically signing
-- your service requests, retrying requests, and handling error responses.
-- For a list of available SDKs, go to
-- <http://aws.amazon.com/tools/ Tools for Amazon Web Services>.
--
-- /See:/ <http://docs.aws.amazon.com/sns/latest/api/Welcome.html AWS API Reference>
module Network.AWS.SNS
(
-- * Service Configuration
sNS
-- * Errors
-- $errors
-- ** EndpointDisabledException
, _EndpointDisabledException
-- ** AuthorizationErrorException
, _AuthorizationErrorException
-- ** InvalidParameterException
, _InvalidParameterException
-- ** SubscriptionLimitExceededException
, _SubscriptionLimitExceededException
-- ** PlatformApplicationDisabledException
, _PlatformApplicationDisabledException
-- ** InternalErrorException
, _InternalErrorException
-- ** InvalidParameterValueException
, _InvalidParameterValueException
-- ** NotFoundException
, _NotFoundException
-- ** TopicLimitExceededException
, _TopicLimitExceededException
-- * Waiters
-- $waiters
-- * Operations
-- $operations
-- ** DeleteEndpoint
, module Network.AWS.SNS.DeleteEndpoint
-- ** RemovePermission
, module Network.AWS.SNS.RemovePermission
-- ** DeleteTopic
, module Network.AWS.SNS.DeleteTopic
-- ** ListTopics (Paginated)
, module Network.AWS.SNS.ListTopics
-- ** CreatePlatformEndpoint
, module Network.AWS.SNS.CreatePlatformEndpoint
-- ** SetPlatformApplicationAttributes
, module Network.AWS.SNS.SetPlatformApplicationAttributes
-- ** ListSubscriptionsByTopic (Paginated)
, module Network.AWS.SNS.ListSubscriptionsByTopic
-- ** GetTopicAttributes
, module Network.AWS.SNS.GetTopicAttributes
-- ** CreatePlatformApplication
, module Network.AWS.SNS.CreatePlatformApplication
-- ** GetPlatformApplicationAttributes
, module Network.AWS.SNS.GetPlatformApplicationAttributes
-- ** ListEndpointsByPlatformApplication (Paginated)
, module Network.AWS.SNS.ListEndpointsByPlatformApplication
-- ** SetTopicAttributes
, module Network.AWS.SNS.SetTopicAttributes
-- ** DeletePlatformApplication
, module Network.AWS.SNS.DeletePlatformApplication
-- ** ListPlatformApplications (Paginated)
, module Network.AWS.SNS.ListPlatformApplications
-- ** AddPermission
, module Network.AWS.SNS.AddPermission
-- ** GetEndpointAttributes
, module Network.AWS.SNS.GetEndpointAttributes
-- ** ListSubscriptions (Paginated)
, module Network.AWS.SNS.ListSubscriptions
-- ** GetSubscriptionAttributes
, module Network.AWS.SNS.GetSubscriptionAttributes
-- ** CreateTopic
, module Network.AWS.SNS.CreateTopic
-- ** Subscribe
, module Network.AWS.SNS.Subscribe
-- ** Unsubscribe
, module Network.AWS.SNS.Unsubscribe
-- ** SetEndpointAttributes
, module Network.AWS.SNS.SetEndpointAttributes
-- ** SetSubscriptionAttributes
, module Network.AWS.SNS.SetSubscriptionAttributes
-- ** ConfirmSubscription
, module Network.AWS.SNS.ConfirmSubscription
-- ** Publish
, module Network.AWS.SNS.Publish
-- * Types
-- ** Endpoint
, Endpoint
, endpoint
, eAttributes
, eEndpointARN
-- ** MessageAttributeValue
, MessageAttributeValue
, messageAttributeValue
, mavBinaryValue
, mavStringValue
, mavDataType
-- ** PlatformApplication
, PlatformApplication
, platformApplication
, paPlatformApplicationARN
, paAttributes
-- ** Subscription
, Subscription
, subscription
, sProtocol
, sOwner
, sTopicARN
, sEndpoint
, sSubscriptionARN
-- ** Topic
, Topic
, topic
, tTopicARN
) where
import Network.AWS.SNS.AddPermission
import Network.AWS.SNS.ConfirmSubscription
import Network.AWS.SNS.CreatePlatformApplication
import Network.AWS.SNS.CreatePlatformEndpoint
import Network.AWS.SNS.CreateTopic
import Network.AWS.SNS.DeleteEndpoint
import Network.AWS.SNS.DeletePlatformApplication
import Network.AWS.SNS.DeleteTopic
import Network.AWS.SNS.GetEndpointAttributes
import Network.AWS.SNS.GetPlatformApplicationAttributes
import Network.AWS.SNS.GetSubscriptionAttributes
import Network.AWS.SNS.GetTopicAttributes
import Network.AWS.SNS.ListEndpointsByPlatformApplication
import Network.AWS.SNS.ListPlatformApplications
import Network.AWS.SNS.ListSubscriptions
import Network.AWS.SNS.ListSubscriptionsByTopic
import Network.AWS.SNS.ListTopics
import Network.AWS.SNS.Publish
import Network.AWS.SNS.RemovePermission
import Network.AWS.SNS.SetEndpointAttributes
import Network.AWS.SNS.SetPlatformApplicationAttributes
import Network.AWS.SNS.SetSubscriptionAttributes
import Network.AWS.SNS.SetTopicAttributes
import Network.AWS.SNS.Subscribe
import Network.AWS.SNS.Types
import Network.AWS.SNS.Unsubscribe
import Network.AWS.SNS.Waiters
{- $errors
Error matchers are designed for use with the functions provided by
<http://hackage.haskell.org/package/lens/docs/Control-Exception-Lens.html Control.Exception.Lens>.
This allows catching (and rethrowing) service specific errors returned
by 'SNS'.
-}
{- $operations
Some AWS operations return results that are incomplete and require subsequent
requests in order to obtain the entire result set. The process of sending
subsequent requests to continue where a previous request left off is called
pagination. For example, the 'ListObjects' operation of Amazon S3 returns up to
1000 objects at a time, and you must send subsequent requests with the
appropriate Marker in order to retrieve the next page of results.
Operations that have an 'AWSPager' instance can transparently perform subsequent
requests, correctly setting Markers and other request facets to iterate through
the entire result set of a truncated API operation. Operations which support
this have an additional note in the documentation.
Many operations have the ability to filter results on the server side. See the
individual operation parameters for details.
-}
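{-
A rough usage sketch (illustrative only, not part of this module's API):
with an amazonka 'Env' in scope, a single request is typically issued via
'Network.AWS.send', and paginated operations such as 'ListTopics' can be
driven with 'Network.AWS.paginate', e.g.
> runResourceT . runAWS env $ send listTopics
Exact runner names and environment setup vary across amazonka versions.
-}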
{- $waiters
Waiters poll by repeatedly sending a request until some remote success condition
configured by the 'Wait' specification is fulfilled. The 'Wait' specification
determines how many attempts should be made, in addition to delay and retry strategies.
-}
| fmapfmapfmap/amazonka | amazonka-sns/gen/Network/AWS/SNS.hs | mpl-2.0 | 7,859 | 0 | 5 | 1,514 | 578 | 438 | 140 | 89 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
module NLP.Partage.AStar.Item
( Span (..)
, beg
, end
, gap
, Active (..)
, state
, spanA
, Passive (..)
, dagID
, spanP
, isAdjoinedTo
, regular
, auxiliary
, isRoot
-- #ifdef DebugOn
, printActive
, printPassive
-- #endif
-- * Provisional
, nonTerm
)
where
import Data.Lens.Light
import Data.Maybe (isJust, isNothing)
import Prelude hiding (span)
import Data.DAWG.Ord (ID)
import NLP.Partage.AStar.Base (Pos)
import NLP.Partage.DAG (DID)
import qualified NLP.Partage.DAG as DAG
import NLP.Partage.AStar.Base (nonTerm')
import NLP.Partage.AStar.Auto (Auto (..), NotFoot(..))
data Span = Span {
-- | The starting position.
_beg :: Pos
-- | The ending position (or rather the position of the dot).
, _end :: Pos
-- | Coordinates of the gap (if applies)
, _gap :: Maybe (Pos, Pos)
} deriving (Show, Eq, Ord)
$( makeLenses [''Span] )
-- | Active chart item : state reference + span.
data Active = Active {
_state :: ID
, _spanA :: Span
} deriving (Show, Eq, Ord)
$( makeLenses [''Active] )
-- | Passive chart item : label + span.
data Passive n t = Passive
{ _dagID :: DID
-- ^ The `DID` of the elementary tree node
, _spanP :: Span
-- ^ Span of the chart item
, _isAdjoinedTo :: Bool
-- ^ Was the node represented by the item already adjoined to?
} deriving (Show, Eq, Ord)
$( makeLenses [''Passive] )
-- | Does it represent regular rules?
regular :: Span -> Bool
regular = isNothing . getL gap
-- | Does it represent auxiliary rules?
auxiliary :: Span -> Bool
auxiliary = isJust . getL gap
-- | Does it represent a root?
isRoot :: Either n DID -> Bool
isRoot x = case x of
Left _ -> True
Right _ -> False
-- #ifdef DebugOn
-- | Print an active item.
printSpan :: Span -> IO ()
printSpan span = do
putStr . show $ getL beg span
putStr ", "
case getL gap span of
Nothing -> return ()
Just (p, q) -> do
putStr $ show p
putStr ", "
putStr $ show q
putStr ", "
putStr . show $ getL end span
-- | Print an active item.
printActive :: Active -> IO ()
printActive p = do
putStr "("
putStr . show $ getL state p
putStr ", "
printSpan $ getL spanA p
putStrLn ")"
-- | Print a passive item.
printPassive :: (Show n, Show t) => Passive n t -> Auto n t -> IO ()
printPassive p auto = do
let did = getL dagID p
putStr "("
putStr $
show (DAG.unDID did) ++ "[" ++
show (nonTerm did auto) ++ "]"
putStr ", "
putStr $ "root=" ++ show (DAG.isRoot did (gramDAG auto))
putStr ", "
printSpan $ getL spanP p
putStrLn ")"
-- #endif
-- | Take the non-terminal of the underlying DAG node.
nonTerm :: DAG.DID -> Auto n t -> n
nonTerm i =
check . nonTerm' i . gramDAG
where
check Nothing = error "nonTerm: not a non-terminal ID"
check (Just x) = x
| kawu/factorized-tag-parser | src/NLP/Partage/AStar/Item.hs | bsd-2-clause | 3,107 | 0 | 14 | 950 | 901 | 481 | 420 | 90 | 2 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP
, NoImplicitPrelude
, ExistentialQuantification
#-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
{-# OPTIONS_HADDOCK not-home #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Handle.Types
-- Copyright : (c) The University of Glasgow, 1994-2009
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable
--
-- Basic types for the implementation of IO Handles.
--
-----------------------------------------------------------------------------
module GHC.IO.Handle.Types (
Handle(..), Handle__(..), showHandle,
checkHandleInvariants,
BufferList(..),
HandleType(..),
isReadableHandleType, isWritableHandleType, isReadWriteHandleType,
BufferMode(..),
BufferCodec(..),
NewlineMode(..), Newline(..), nativeNewline,
universalNewlineMode, noNewlineTranslation, nativeNewlineMode
) where
#undef DEBUG
import GHC.Base
import GHC.MVar
import GHC.IO
import GHC.IO.Buffer
import GHC.IO.BufferedIO
import GHC.IO.Encoding.Types
import GHC.IORef
import GHC.Show
import GHC.Read
import GHC.Word
import GHC.IO.Device
import Data.Typeable
#if defined(DEBUG)
import Control.Monad
#endif
-- ---------------------------------------------------------------------------
-- Handle type
-- A Handle is represented by (a reference to) a record
-- containing the state of the I/O port/device. We record
-- the following pieces of info:
-- * type (read,write,closed etc.)
-- * the underlying file descriptor
-- * buffering mode
-- * buffer, and spare buffers
-- * user-friendly name (usually the
-- FilePath used when IO.openFile was called)
-- Note: when a Handle is garbage collected, we want to flush its buffer
-- and close the OS file handle, so as to free up a (precious) resource.
-- | Haskell defines operations to read and write characters from and to files,
-- represented by values of type @Handle@. Each value of this type is a
-- /handle/: a record used by the Haskell run-time system to /manage/ I\/O
-- with file system objects. A handle has at least the following properties:
--
-- * whether it manages input or output or both;
--
-- * whether it is /open/, /closed/ or /semi-closed/;
--
-- * whether the object is seekable;
--
-- * whether buffering is disabled, or enabled on a line or block basis;
--
-- * a buffer (whose length may be zero).
--
-- Most handles will also have a current I\/O position indicating where the next
-- input or output operation will occur. A handle is /readable/ if it
-- manages only input or both input and output; likewise, it is /writable/ if
-- it manages only output or both input and output. A handle is /open/ when
-- first allocated.
-- Once it is closed it can no longer be used for either input or output,
-- though an implementation cannot re-use its storage while references
-- remain to it. Handles are in the 'Show' and 'Eq' classes. The string
-- produced by showing a handle is system dependent; it should include
-- enough information to identify the handle for debugging. A handle is
-- equal according to '==' only to itself; no attempt
-- is made to compare the internal state of different handles for equality.
data Handle
= FileHandle -- A normal handle to a file
FilePath -- the file (used for error messages
-- only)
!(MVar Handle__)
| DuplexHandle -- A handle to a read/write stream
FilePath -- file for a FIFO, otherwise some
-- descriptive string (used for error
-- messages only)
!(MVar Handle__) -- The read side
!(MVar Handle__) -- The write side
-- NOTES:
-- * A 'FileHandle' is seekable. A 'DuplexHandle' may or may not be
-- seekable.
-- | @since 4.1.0.0
instance Eq Handle where
(FileHandle _ h1) == (FileHandle _ h2) = h1 == h2
(DuplexHandle _ h1 _) == (DuplexHandle _ h2 _) = h1 == h2
_ == _ = False
data Handle__
= forall dev enc_state dec_state . (IODevice dev, BufferedIO dev, Typeable dev) =>
Handle__ {
haDevice :: !dev,
haType :: HandleType, -- type (read/write/append etc.)
haByteBuffer :: !(IORef (Buffer Word8)), -- See [note Buffering Implementation]
haBufferMode :: BufferMode,
haLastDecode :: !(IORef (dec_state, Buffer Word8)),
haCharBuffer :: !(IORef (Buffer CharBufElem)), -- See [note Buffering Implementation]
haBuffers :: !(IORef (BufferList CharBufElem)), -- spare buffers
haEncoder :: Maybe (TextEncoder enc_state),
haDecoder :: Maybe (TextDecoder dec_state),
haCodec :: Maybe TextEncoding,
haInputNL :: Newline,
haOutputNL :: Newline,
haOtherSide :: Maybe (MVar Handle__) -- ptr to the write side of a
-- duplex handle.
}
-- we keep a few spare buffers around in a handle to avoid allocating
-- a new one for each hPutStr. These buffers are *guaranteed* to be the
-- same size as the main buffer.
data BufferList e
= BufferListNil
| BufferListCons (RawBuffer e) (BufferList e)
-- Internally, we classify handles as being one
-- of the following:
data HandleType
= ClosedHandle
| SemiClosedHandle
| ReadHandle
| WriteHandle
| AppendHandle
| ReadWriteHandle
isReadableHandleType :: HandleType -> Bool
isReadableHandleType ReadHandle = True
isReadableHandleType ReadWriteHandle = True
isReadableHandleType _ = False
isWritableHandleType :: HandleType -> Bool
isWritableHandleType AppendHandle = True
isWritableHandleType WriteHandle = True
isWritableHandleType ReadWriteHandle = True
isWritableHandleType _ = False
isReadWriteHandleType :: HandleType -> Bool
isReadWriteHandleType ReadWriteHandle{} = True
isReadWriteHandleType _ = False
-- INVARIANTS on Handles:
--
-- * A handle *always* has a buffer, even if it is only 1 character long
-- (an unbuffered handle needs a 1 character buffer in order to support
-- hLookAhead and hIsEOF).
-- * In a read Handle, the byte buffer is always empty (we decode when reading)
-- * In a write Handle, the Char buffer is always empty (we encode when writing)
--
checkHandleInvariants :: Handle__ -> IO ()
#if defined(DEBUG)
checkHandleInvariants h_ = do
bbuf <- readIORef (haByteBuffer h_)
checkBuffer bbuf
cbuf <- readIORef (haCharBuffer h_)
checkBuffer cbuf
when (isWriteBuffer cbuf && not (isEmptyBuffer cbuf)) $
errorWithoutStackTrace ("checkHandleInvariants: char write buffer non-empty: " ++
summaryBuffer bbuf ++ ", " ++ summaryBuffer cbuf)
when (isWriteBuffer bbuf /= isWriteBuffer cbuf) $
errorWithoutStackTrace ("checkHandleInvariants: buffer modes differ: " ++
summaryBuffer bbuf ++ ", " ++ summaryBuffer cbuf)
#else
checkHandleInvariants _ = return ()
#endif
-- ---------------------------------------------------------------------------
-- Buffering modes
-- | Three kinds of buffering are supported: line-buffering,
-- block-buffering or no-buffering. These modes have the following
-- effects. For output, items are written out, or /flushed/,
-- from the internal buffer according to the buffer mode:
--
-- * /line-buffering/: the entire output buffer is flushed
-- whenever a newline is output, the buffer overflows,
-- a 'System.IO.hFlush' is issued, or the handle is closed.
--
-- * /block-buffering/: the entire buffer is written out whenever it
-- overflows, a 'System.IO.hFlush' is issued, or the handle is closed.
--
-- * /no-buffering/: output is written immediately, and never stored
-- in the buffer.
--
-- An implementation is free to flush the buffer more frequently,
-- but not less frequently, than specified above.
-- The output buffer is emptied as soon as it has been written out.
--
-- Similarly, input occurs according to the buffer mode for the handle:
--
-- * /line-buffering/: when the buffer for the handle is not empty,
-- the next item is obtained from the buffer; otherwise, when the
-- buffer is empty, characters up to and including the next newline
-- character are read into the buffer. No characters are available
-- until the newline character is available or the buffer is full.
--
-- * /block-buffering/: when the buffer for the handle becomes empty,
-- the next block of data is read into the buffer.
--
-- * /no-buffering/: the next input item is read and returned.
-- The 'System.IO.hLookAhead' operation implies that even a no-buffered
-- handle may require a one-character buffer.
--
-- The default buffering mode when a handle is opened is
-- implementation-dependent and may depend on the file system object
-- which is attached to that handle.
-- For most implementations, physical files will normally be block-buffered
-- and terminals will normally be line-buffered.
data BufferMode
= NoBuffering -- ^ buffering is disabled if possible.
| LineBuffering
-- ^ line-buffering should be enabled if possible.
| BlockBuffering (Maybe Int)
-- ^ block-buffering should be enabled if possible.
-- The size of the buffer is @n@ items if the argument
-- is 'Just' @n@ and is otherwise implementation-dependent.
deriving ( Eq -- ^ @since 4.2.0.0
, Ord -- ^ @since 4.2.0.0
, Read -- ^ @since 4.2.0.0
, Show -- ^ @since 4.2.0.0
)
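-- Callers normally select a buffering mode with 'System.IO.hSetBuffering',
-- for example:
--
-- > hSetBuffering stdout LineBuffering
-- > hSetBuffering h (BlockBuffering (Just 4096))
--
-- where @h@ is any open 'Handle' (illustrative).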
{-
[note Buffering Implementation]
Each Handle has two buffers: a byte buffer (haByteBuffer) and a Char
buffer (haCharBuffer).
[note Buffered Reading]
For read Handles, bytes are read into the byte buffer, and immediately
decoded into the Char buffer (see
GHC.IO.Handle.Internals.readTextDevice). The only way there might be
some data left in the byte buffer is if there is a partial multi-byte
character sequence that cannot be decoded into a full character.
Note that the buffering mode (haBufferMode) makes no difference when
reading data into a Handle. When reading, we can always just read all
the data there is available without blocking, decode it into the Char
buffer, and then provide it immediately to the caller.
[note Buffered Writing]
Characters are written into the Char buffer by e.g. hPutStr. At the
end of the operation, or when the char buffer is full, the buffer is
decoded to the byte buffer (see writeCharBuffer). This is so that we
can detect encoding errors at the right point.
Hence, the Char buffer is always empty between Handle operations.
[note Buffer Sizing]
The char buffer is always a default size (dEFAULT_CHAR_BUFFER_SIZE).
The byte buffer size is chosen by the underlying device (via its
IODevice.newBuffer). Hence the size of these buffers is not under
user control.
There are certain minimum sizes for these buffers imposed by the
library (but not checked):
- we must be able to buffer at least one character, so that
hLookAhead can work
- the byte buffer must be able to store at least one encoded
character in the current encoding (6 bytes?)
- when reading, the char buffer must have room for two characters, so
that we can spot the \r\n sequence.
How do we implement hSetBuffering?
For reading, we have never used the user-supplied buffer size, because
there's no point: we always pass all available data to the reader
immediately. Buffering would imply waiting until a certain amount of
data is available, which has no advantages. So hSetBuffering is
essentially a no-op for read handles, except that it turns on/off raw
mode for the underlying device if necessary.
For writing, the buffering mode is handled by the write operations
themselves (hPutChar and hPutStr). Every write ends with
writeCharBuffer, which checks whether the buffer should be flushed
according to the current buffering mode. Additionally, we look for
newlines and flush if the mode is LineBuffering.
[note Buffer Flushing]
** Flushing the Char buffer
We must be able to flush the Char buffer, in order to implement
hSetEncoding, and things like hGetBuf which want to read raw bytes.
Flushing the Char buffer on a write Handle is easy: it is always empty.
Flushing the Char buffer on a read Handle involves rewinding the byte
buffer to the point representing the next Char in the Char buffer.
This is done by
- remembering the state of the byte buffer *before* the last decode
- re-decoding the bytes that represent the chars already read from the
Char buffer. This gives us the point in the byte buffer that
represents the *next* Char to be read.
In order for this to work, after readTextHandle we must NOT MODIFY THE
CONTENTS OF THE BYTE OR CHAR BUFFERS, except to remove characters from
the Char buffer.
** Flushing the byte buffer
The byte buffer can be flushed if the Char buffer has already been
flushed (see above). For a read Handle, flushing the byte buffer
means seeking the device back by the number of bytes in the buffer,
and hence it is only possible on a seekable Handle.
-}
-- ---------------------------------------------------------------------------
-- Newline translation
-- | The representation of a newline in the external file or stream.
data Newline = LF -- ^ @\'\\n\'@
| CRLF -- ^ @\'\\r\\n\'@
deriving ( Eq -- ^ @since 4.2.0.0
, Ord -- ^ @since 4.3.0.0
, Read -- ^ @since 4.3.0.0
, Show -- ^ @since 4.3.0.0
)
-- | Specifies the translation, if any, of newline characters between
-- internal Strings and the external file or stream. Haskell Strings
-- are assumed to represent newlines with the @\'\\n\'@ character; the
-- newline mode specifies how to translate @\'\\n\'@ on output, and what to
-- translate into @\'\\n\'@ on input.
data NewlineMode
= NewlineMode { inputNL :: Newline,
-- ^ the representation of newlines on input
outputNL :: Newline
-- ^ the representation of newlines on output
}
deriving ( Eq -- ^ @since 4.2.0.0
, Ord -- ^ @since 4.3.0.0
, Read -- ^ @since 4.3.0.0
, Show -- ^ @since 4.3.0.0
)
-- | The native newline representation for the current platform: 'LF'
-- on Unix systems, 'CRLF' on Windows.
nativeNewline :: Newline
#if defined(mingw32_HOST_OS)
nativeNewline = CRLF
#else
nativeNewline = LF
#endif
-- | Map @\'\\r\\n\'@ into @\'\\n\'@ on input, and @\'\\n\'@ to the native newline
-- representation on output. This mode can be used on any platform, and
-- works with text files using any newline convention. The downside is
-- that @readFile >>= writeFile@ might yield a different file.
--
-- > universalNewlineMode = NewlineMode { inputNL = CRLF,
-- > outputNL = nativeNewline }
--
universalNewlineMode :: NewlineMode
universalNewlineMode = NewlineMode { inputNL = CRLF,
outputNL = nativeNewline }
-- | Use the native newline representation on both input and output
--
-- > nativeNewlineMode = NewlineMode { inputNL = nativeNewline
-- > outputNL = nativeNewline }
--
nativeNewlineMode :: NewlineMode
nativeNewlineMode = NewlineMode { inputNL = nativeNewline,
outputNL = nativeNewline }
-- | Do no newline translation at all.
--
-- > noNewlineTranslation = NewlineMode { inputNL = LF, outputNL = LF }
--
noNewlineTranslation :: NewlineMode
noNewlineTranslation = NewlineMode { inputNL = LF, outputNL = LF }
-- ---------------------------------------------------------------------------
-- Show instance for Handles
-- handle types are 'show'n when printing error msgs, so
-- we provide a more user-friendly Show instance for it
-- than the derived one.
-- | @since 4.1.0.0
instance Show HandleType where
showsPrec _ t =
case t of
ClosedHandle -> showString "closed"
SemiClosedHandle -> showString "semi-closed"
ReadHandle -> showString "readable"
WriteHandle -> showString "writable"
AppendHandle -> showString "writable (append)"
ReadWriteHandle -> showString "read-writable"
-- | @since 4.1.0.0
instance Show Handle where
showsPrec _ (FileHandle file _) = showHandle file
showsPrec _ (DuplexHandle file _ _) = showHandle file
showHandle :: FilePath -> String -> String
showHandle file = showString "{handle: " . showString file . showString "}"
| sdiehl/ghc | libraries/base/GHC/IO/Handle/Types.hs | bsd-3-clause | 16,949 | 0 | 13 | 3,978 | 1,360 | 823 | 537 | 141 | 1 |
module Distribution.Server.Features.Core (
CoreFeature,
coreResource,
CoreResource(..),
indexExtras, --FIXME: this is internal state and should not be exported.
initCoreFeature,
basicPackageSection,
withPackageId,
withPackageName,
withPackage,
withPackagePath,
withPackageAll,
withPackageAllPath,
withPackageVersion,
withPackageVersionPath,
withPackageTarball,
packageExists,
packageIdExists,
doDeletePackage,
doAddPackage,
doMergePackage,
-- * Hooks
packageAddHook,
packageRemoveHook,
packageChangeHook,
packageIndexChange,
newPackageHook,
noPackageHook,
tarballDownload,
) where
import Distribution.Server.Acid (query, update)
import Distribution.Server.Framework
import Distribution.Server.Framework.BackupDump
import Distribution.Server.Packages.Backup
import Distribution.Server.Packages.Types
import Distribution.Server.Packages.State
import Distribution.Server.Users.Backup
import Distribution.Server.Users.State
import qualified Distribution.Server.Framework.Cache as Cache
import qualified Distribution.Server.Packages.Index as Packages.Index
import qualified Codec.Compression.GZip as GZip
import qualified Distribution.Server.Framework.ResourceTypes as Resource
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
import Distribution.Server.Packages.PackageIndex (PackageIndex)
import qualified Distribution.Server.Framework.BlobStorage as BlobStorage
import Distribution.Server.Framework.BlobStorage (BlobStorage)
import Control.Monad (liftM3, guard, mzero, when)
import Control.Monad.Trans (MonadIO, liftIO)
import Data.Time.Clock (UTCTime)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid (mconcat)
--TODO: why are we importing xhtml here!?
import Text.XHtml.Strict (Html, toHtml, unordList, h3, (<<), anchor, href, (!))
import Data.Ord (comparing)
import Data.List (sortBy, find)
import qualified Data.ByteString.Lazy.Char8 as BS
import Data.ByteString.Lazy.Char8 (ByteString)
import Distribution.Text (display)
import Distribution.Package
import Distribution.Version (Version(..))
data CoreFeature = CoreFeature {
featureInterface :: HackageFeature,
coreResource :: CoreResource,
-- index.tar.gz
cacheIndexTarball :: Cache.Cache ByteString,
-- other files to put in the index tarball like preferred-versions
indexExtras :: Cache.Cache (Map String (ByteString, UTCTime)),
-- Updating top-level packages
-- This is run after a package is added
packageAddHook :: Hook (PkgInfo -> IO ()),
-- This is run after a package is removed (but the PkgInfo is retained just for the hook)
packageRemoveHook :: Hook (PkgInfo -> IO ()),
-- This is run after a package is changed in some way (essentially added, then removed)
packageChangeHook :: Hook (PkgInfo -> PkgInfo -> IO ()),
-- This is called whenever any of the above three hooks is called, but
-- also for other updates of the index tarball (e.g. when indexExtras is updated)
packageIndexChange :: Hook (IO ()),
-- A package is added where no package by that name existed previously.
newPackageHook :: Hook (PkgInfo -> IO ()),
-- A package is removed such that no more versions of that package exists.
noPackageHook :: Hook (PkgInfo -> IO ()),
-- For download counters
tarballDownload :: Hook (PackageId -> IO ())
}
data CoreResource = CoreResource {
coreIndexPage :: Resource,
coreIndexTarball :: Resource,
corePackagesPage :: Resource,
corePackagePage :: Resource,
corePackageRedirect :: Resource,
coreCabalFile :: Resource,
corePackageTarball :: Resource,
indexTarballUri :: String,
indexPackageUri :: String -> String,
corePackageUri :: String -> PackageId -> String,
corePackageName :: String -> PackageName -> String,
coreCabalUri :: PackageId -> String,
coreTarballUri :: PackageId -> String
}
instance IsHackageFeature CoreFeature where
getFeatureInterface = featureInterface
initCoreFeature :: ServerEnv -> IO CoreFeature
initCoreFeature config = do
-- Caches
indexTar <- Cache.newCacheable BS.empty
extraMap <- Cache.newCacheable Map.empty
let computeCache = do
users <- query GetUserDb
index <- fmap packageList $ query GetPackagesState
extras <- Cache.getCache extraMap
Cache.putCache indexTar (GZip.compress $ Packages.Index.write users extras index)
downHook <- newHook
addHook <- newHook
removeHook <- newHook
changeHook <- newHook
indexHook <- newHook
newPkgHook <- newHook
noPkgHook <- newHook
registerHook indexHook computeCache
let store = serverBlobStore config
resources = CoreResource {
-- the rudimentary HTML resources are for when we don't want an additional HTML feature
coreIndexPage = (resourceAt "/.:format") { resourceGet = [("html", indexPage $ serverStaticDir config)] }
, coreIndexTarball = (resourceAt "/packages/index.tar.gz") { resourceGet = [("tarball", Cache.respondCache indexTar Resource.IndexTarball)] }
, corePackagesPage = (resourceAt "/packages/.:format") { resourceGet = [] } -- -- have basic packages listing?
, corePackagePage = (resourceAt "/package/:package.:format") { resourceGet = [("html", basicPackagePage resources)] }
, corePackageRedirect = (resourceAt "/package/") { resourceGet = [("", \_ -> seeOther "/packages/" $ toResponse ())] }
, corePackageTarball = (resourceAt "/package/:package/:tarball.tar.gz") { resourceGet = [("tarball", runServerPartE . servePackageTarball downHook store)] }
, coreCabalFile = (resourceAt "/package/:package/:cabal.cabal") { resourceGet = [("cabal", runServerPartE . serveCabalFile)] }
, indexTarballUri = renderResource (coreIndexTarball resources) []
, indexPackageUri = \format -> renderResource (corePackagesPage resources) [format]
, corePackageUri = \format pkgid -> renderResource (corePackagePage resources) [display pkgid, format]
, corePackageName = \format pkgname -> renderResource (corePackagePage resources) [display pkgname, format]
, coreCabalUri = \pkgid -> renderResource (coreCabalFile resources) [display pkgid, display (packageName pkgid)]
, coreTarballUri = \pkgid -> renderResource (corePackageTarball resources) [display pkgid, display pkgid]
}
return CoreFeature {
featureInterface = (emptyHackageFeature "core") {
featureResources = map ($ resources)
[ coreIndexPage, coreIndexTarball, corePackagesPage, corePackagePage
, corePackageRedirect, corePackageTarball, coreCabalFile ]
, featurePostInit = runHook indexHook
, featureDumpRestore = Just (dumpBackup store, restoreBackup store, testRoundtrip store)
}
, coreResource = resources
, cacheIndexTarball = indexTar
, indexExtras = extraMap
, packageAddHook = addHook
, packageRemoveHook = removeHook
, packageChangeHook = changeHook
, packageIndexChange = indexHook
, newPackageHook = newPkgHook
, noPackageHook = noPkgHook
, tarballDownload = downHook
}
where
indexPage staticDir _ = serveFile (const $ return "text/html") (staticDir ++ "/hackage.html")
dumpBackup store = do
users <- query GetUserDb
packages <- query GetPackagesState
admins <- query GetHackageAdmins
packageEntries <- readExportBlobs store $ indexToAllVersions packages
return $ packageEntries ++ [csvToBackup ["users.csv"] $ usersToCSV users, csvToBackup ["admins.csv"] $ groupToCSV admins]
restoreBackup store = mconcat [userBackup, packagesBackup store, groupBackup ["admins.csv"] ReplaceHackageAdmins]
testRoundtrip store = testRoundtripByQuery' (liftM3 (,,) (query GetUserDb) (query GetPackagesState) (query GetHackageAdmins)) $ \(_, packages, _) ->
testBlobsExist store [ blob
| pkgInfo <- PackageIndex.allPackages (packageList packages)
, (tarball, _) <- pkgTarball pkgInfo
, blob <- [pkgTarballGz tarball, pkgTarballNoGz tarball]
]
-- Should probably look more like an Apache index page (Name / Last modified / Size / Content-type)
basicPackagePage :: CoreResource -> DynamicPath -> ServerPart Response
basicPackagePage r dpath = runServerPartE $ withPackagePath dpath $ \_ pkgs ->
return $ toResponse $ Resource.XHtml $ showAllP $ sortBy (flip $ comparing packageVersion) pkgs
where
showAllP :: [PkgInfo] -> Html
showAllP pkgs = toHtml [
h3 << "Downloads",
unordList $ map (basicPackageSection (coreCabalUri r)
(coreTarballUri r)) pkgs
]
basicPackageSection :: (PackageId -> String)
-> (PackageId -> String)
-> PkgInfo -> [Html]
basicPackageSection cabalUrl tarUrl pkgInfo =
let pkgId = packageId pkgInfo; pkgStr = display pkgId in [
toHtml pkgStr,
unordList $ [
[anchor ! [href (cabalUrl pkgId)] << "Package description",
toHtml " (included in the package)"],
case pkgTarball pkgInfo of
[] -> [toHtml "Package not available"];
_ -> [anchor ! [href (tarUrl pkgId)] << (pkgStr ++ ".tar.gz"),
toHtml " (Cabal source package)"]
]
]
------------------------------------------------------------------------------
packageExists, packageIdExists :: (Package pkg, Package pkg') => PackageIndex pkg -> pkg' -> Bool
packageExists state pkg = not . null $ PackageIndex.lookupPackageName state (packageName pkg)
packageIdExists state pkg = maybe False (const True) $ PackageIndex.lookupPackageId state (packageId pkg)
withPackageId :: DynamicPath -> (PackageId -> ServerPartE a) -> ServerPartE a
withPackageId dpath = require (return $ lookup "package" dpath >>= fromReqURI)
withPackageName :: MonadIO m => DynamicPath -> (PackageName -> ServerPartT m a) -> ServerPartT m a
withPackageName dpath = require (return $ lookup "package" dpath >>= fromReqURI)
packageError :: [MessageSpan] -> ServerPartE a
packageError = errNotFound "Package not found"
withPackage :: PackageId -> (PkgInfo -> [PkgInfo] -> ServerPartE a) -> ServerPartE a
withPackage pkgid func = query GetPackagesState >>= \state ->
case PackageIndex.lookupPackageName (packageList state) (packageName pkgid) of
[] -> packageError [MText "No such package in package index"]
pkgs | pkgVersion pkgid == Version [] [] ->
-- pkgs is sorted by version number and non-empty
func (last pkgs) pkgs
pkgs -> case find ((== packageVersion pkgid) . packageVersion) pkgs of
Nothing -> packageError [MText $ "No such package version for " ++ display (packageName pkgid)]
Just pkg -> func pkg pkgs
withPackagePath :: DynamicPath -> (PkgInfo -> [PkgInfo] -> ServerPartE a) -> ServerPartE a
withPackagePath dpath func = withPackageId dpath $ \pkgid -> withPackage pkgid func
withPackageAll :: PackageName -> ([PkgInfo] -> ServerPartE a) -> ServerPartE a
withPackageAll pkgname func = query GetPackagesState >>= \state ->
case PackageIndex.lookupPackageName (packageList state) pkgname of
[] -> packageError [MText "No such package in package index"]
pkgs -> func pkgs
withPackageAllPath :: DynamicPath -> (PackageName -> [PkgInfo] -> ServerPartE a) -> ServerPartE a
withPackageAllPath dpath func = withPackageName dpath $ \pkgname -> withPackageAll pkgname (func pkgname)
withPackageVersion :: PackageId -> (PkgInfo -> ServerPartE a) -> ServerPartE a
withPackageVersion pkgid func = do
guard (packageVersion pkgid /= Version [] [])
query GetPackagesState >>= \state -> case PackageIndex.lookupPackageId (packageList state) pkgid of
Nothing -> packageError [MText $ "No such package version for " ++ display (packageName pkgid)]
Just pkg -> func pkg
withPackageVersionPath :: DynamicPath -> (PkgInfo -> ServerPartE a) -> ServerPartE a
withPackageVersionPath dpath func = withPackageId dpath $ \pkgid -> withPackageVersion pkgid func
withPackageTarball :: DynamicPath -> (PackageId -> ServerPartE a) -> ServerPartE a
withPackageTarball dpath func = withPackageId dpath $ \(PackageIdentifier name version) ->
require (return $ lookup "tarball" dpath >>= fromReqURI) $ \pkgid@(PackageIdentifier name' version') -> do
-- rules:
-- * the package name and tarball name must be the same
-- * the tarball must specify a version
-- * the package must either have no version or the same version as the tarball
guard $ name == name' && version' /= Version [] [] && (version == version' || version == Version [] [])
func pkgid
------------------------------------------------------------------------
-- result: tarball or not-found error
servePackageTarball :: Hook (PackageId -> IO ()) -> BlobStorage -> DynamicPath -> ServerPartE Response
servePackageTarball hook store dpath = withPackageTarball dpath $ \pkgid ->
withPackageVersion pkgid $ \pkg ->
case pkgTarball pkg of
[] -> errNotFound "Tarball not found" [MText "No tarball exists for this package version."]
((tb, _):_) -> do
let blobId = pkgTarballGz tb
file <- liftIO $ BlobStorage.fetch store blobId
liftIO $ runHook' hook pkgid
return $ toResponse $ Resource.PackageTarball file blobId (pkgUploadTime pkg)
-- result: cabal file or not-found error
serveCabalFile :: DynamicPath -> ServerPartE Response
serveCabalFile dpath = withPackagePath dpath $ \pkg _ -> do
-- check that the cabal name matches the package
case lookup "cabal" dpath == Just (display $ packageName pkg) of
True -> return $ toResponse (Resource.CabalFile (cabalFileByteString (pkgData pkg)))
False -> mzero
-- A wrapper around DeletePackageVersion that runs the proper hooks.
-- (no authentication though)
doDeletePackage :: CoreFeature -> PackageId -> ServerPartE ()
doDeletePackage core pkgid = withPackageVersion pkgid $ \pkg -> do
update $ DeletePackageVersion pkgid
nowPkgs <- fmap (flip PackageIndex.lookupPackageName (packageName pkgid) . packageList) $ query GetPackagesState
runHook' (packageRemoveHook core) pkg
runHook (packageIndexChange core)
when (null nowPkgs) $ runHook' (noPackageHook core) pkg
return ()
-- This is a wrapper around InsertPkgIfAbsent that runs the necessary hooks in core.
doAddPackage :: CoreFeature -> PkgInfo -> IO Bool
doAddPackage core pkgInfo = do
state <- fmap packageList $ query GetPackagesState
success <- update $ InsertPkgIfAbsent pkgInfo
when success $ do
let existedBefore = packageExists state pkgInfo
when (not existedBefore) $ do
runHook' (newPackageHook core) pkgInfo
runHook' (packageAddHook core) pkgInfo
runHook (packageIndexChange core)
return success
-- A wrapper around MergePkg.
doMergePackage :: CoreFeature -> PkgInfo -> IO ()
doMergePackage core pkgInfo = do
state <- fmap packageList $ query GetPackagesState
let mprev = PackageIndex.lookupPackageId state (packageId pkgInfo)
nameExists = packageExists state pkgInfo
update $ MergePkg pkgInfo
when (not nameExists) $ do
runHook' (newPackageHook core) pkgInfo
case mprev of
-- TODO: modify MergePkg to get the newly merged package info, not the pre-merge argument
Just prev -> runHook'' (packageChangeHook core) prev pkgInfo
Nothing -> runHook' (packageAddHook core) pkgInfo
runHook (packageIndexChange core)
| isomorphism/hackage2 | Distribution/Server/Features/Core.hs | bsd-3-clause | 15,859 | 0 | 19 | 3,361 | 3,950 | 2,084 | 1,866 | 255 | 4 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
-- FunDeps example
class Foo a b c | a b -> c where
bar :: a -> b -> c
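-- An illustrative instance: the functional dependency @a b -> c@ means the
-- argument types determine the result type, for example
--
-- > instance Foo Int Bool String where
-- >   bar i b = show i ++ " " ++ show b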
| mpickering/ghc-exactprint | tests/examples/ghc710/FunDeps.hs | bsd-3-clause | 155 | 0 | 6 | 34 | 37 | 21 | 16 | 4 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Route53.GetCheckerIpRanges
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | To retrieve a list of the IP ranges used by Amazon Route 53 health checkers
-- to check the health of your resources, send a 'GET' request to the '2013-04-01/checkeripranges' resource. You can use these IP addresses to configure router and firewall
-- rules to allow health checkers to check the health of your resources.
--
-- <http://docs.aws.amazon.com/Route53/latest/APIReference/API_GetCheckerIpRanges.html>
module Network.AWS.Route53.GetCheckerIpRanges
(
-- * Request
GetCheckerIpRanges
-- ** Request constructor
, getCheckerIpRanges
-- * Response
, GetCheckerIpRangesResponse
-- ** Response constructor
, getCheckerIpRangesResponse
-- ** Response lenses
, gcirrCheckerIpRanges
) where
import Network.AWS.Prelude
import Network.AWS.Request.RestXML
import Network.AWS.Route53.Types
import qualified GHC.Exts
data GetCheckerIpRanges = GetCheckerIpRanges
deriving (Eq, Ord, Read, Show, Generic)
-- | 'GetCheckerIpRanges' constructor.
getCheckerIpRanges :: GetCheckerIpRanges
getCheckerIpRanges = GetCheckerIpRanges
newtype GetCheckerIpRangesResponse = GetCheckerIpRangesResponse
{ _gcirrCheckerIpRanges :: List "CheckerIpRanges" Text
} deriving (Eq, Ord, Read, Show, Monoid, Semigroup)
instance GHC.Exts.IsList GetCheckerIpRangesResponse where
type Item GetCheckerIpRangesResponse = Text
fromList = GetCheckerIpRangesResponse . GHC.Exts.fromList
toList = GHC.Exts.toList . _gcirrCheckerIpRanges
-- | 'GetCheckerIpRangesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gcirrCheckerIpRanges' @::@ ['Text']
--
getCheckerIpRangesResponse :: GetCheckerIpRangesResponse
getCheckerIpRangesResponse = GetCheckerIpRangesResponse
{ _gcirrCheckerIpRanges = mempty
}
-- | A complex type that contains sorted list of IP ranges in CIDR format for
-- Amazon Route 53 health checkers.
gcirrCheckerIpRanges :: Lens' GetCheckerIpRangesResponse [Text]
gcirrCheckerIpRanges =
lens _gcirrCheckerIpRanges (\s a -> s { _gcirrCheckerIpRanges = a })
. _List
instance ToPath GetCheckerIpRanges where
toPath = const "/2013-04-01/checkeripranges"
instance ToQuery GetCheckerIpRanges where
toQuery = const mempty
instance ToHeaders GetCheckerIpRanges
instance ToXMLRoot GetCheckerIpRanges where
toXMLRoot = const (namespaced ns "GetCheckerIpRanges" [])
instance ToXML GetCheckerIpRanges
instance AWSRequest GetCheckerIpRanges where
type Sv GetCheckerIpRanges = Route53
type Rs GetCheckerIpRanges = GetCheckerIpRangesResponse
request = get
response = xmlResponse
instance FromXML GetCheckerIpRangesResponse where
parseXML x = GetCheckerIpRangesResponse
<$> x .@? "CheckerIpRanges" .!@ mempty
| romanb/amazonka | amazonka-route53/gen/Network/AWS/Route53/GetCheckerIpRanges.hs | mpl-2.0 | 3,808 | 0 | 10 | 738 | 421 | 253 | 168 | 55 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-SP">
<title>Call Home Add-On</title>
<maps>
<homeID>callhome</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/callhome/src/main/javahelp/org/zaproxy/addon/callhome/resources/help_sr_SP/helpset_sr_SP.hs | apache-2.0 | 966 | 77 | 67 | 157 | 413 | 209 | 204 | -1 | -1 |
{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.InterfaceFile
-- Copyright : (c) David Waern 2006-2009,
-- Mateusz Kowalczyk 2013
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Reading and writing the .haddock interface file
-----------------------------------------------------------------------------
module Haddock.InterfaceFile (
InterfaceFile(..), ifModule, ifPackageKey,
readInterfaceFile, nameCacheFromGhc, freshNameCache, NameCacheAccessor,
writeInterfaceFile, binaryInterfaceVersion, binaryInterfaceVersionCompatibility
) where
import Haddock.Types
import Haddock.Utils hiding (out)
import Control.Monad
import Data.Array
import Data.IORef
import Data.List
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Word
import BinIface (getSymtabName, getDictFastString)
import Binary
import FastMutInt
import FastString
import GHC hiding (NoLink)
import GhcMonad (withSession)
import HscTypes
import IfaceEnv
import Name
import UniqFM
import UniqSupply
import Unique
data InterfaceFile = InterfaceFile {
ifLinkEnv :: LinkEnv,
ifInstalledIfaces :: [InstalledInterface]
}
ifModule :: InterfaceFile -> Module
ifModule if_ =
case ifInstalledIfaces if_ of
[] -> error "empty InterfaceFile"
iface:_ -> instMod iface
ifPackageKey :: InterfaceFile -> PackageKey
ifPackageKey = modulePackageKey . ifModule
binaryInterfaceMagic :: Word32
binaryInterfaceMagic = 0xD0Cface
-- IMPORTANT: Since datatypes in the GHC API might change between major
-- versions, and because we store GHC datatypes in our interface files, we need
-- to make sure we version our interface files accordingly.
--
-- If you change the interface file format or adapt Haddock to work with a new
-- major version of GHC (so that the format changes indirectly) *you* need to
-- follow these steps:
--
-- (1) increase `binaryInterfaceVersion`
--
-- (2) set `binaryInterfaceVersionCompatibility` to [binaryInterfaceVersion]
--
binaryInterfaceVersion :: Word16
#if (__GLASGOW_HASKELL__ >= 709) && (__GLASGOW_HASKELL__ < 711)
binaryInterfaceVersion = 27
binaryInterfaceVersionCompatibility :: [Word16]
binaryInterfaceVersionCompatibility = [binaryInterfaceVersion]
#else
#error Unsupported GHC version
#endif
initBinMemSize :: Int
initBinMemSize = 1024*1024
writeInterfaceFile :: FilePath -> InterfaceFile -> IO ()
writeInterfaceFile filename iface = do
bh0 <- openBinMem initBinMemSize
put_ bh0 binaryInterfaceMagic
put_ bh0 binaryInterfaceVersion
-- remember where the dictionary pointer will go
dict_p_p <- tellBin bh0
put_ bh0 dict_p_p
-- remember where the symbol table pointer will go
symtab_p_p <- tellBin bh0
put_ bh0 symtab_p_p
  -- Make some initial state
symtab_next <- newFastMutInt
writeFastMutInt symtab_next 0
symtab_map <- newIORef emptyUFM
let bin_symtab = BinSymbolTable {
bin_symtab_next = symtab_next,
bin_symtab_map = symtab_map }
dict_next_ref <- newFastMutInt
writeFastMutInt dict_next_ref 0
dict_map_ref <- newIORef emptyUFM
let bin_dict = BinDictionary {
bin_dict_next = dict_next_ref,
bin_dict_map = dict_map_ref }
-- put the main thing
let bh = setUserData bh0 $ newWriteState (putName bin_symtab)
(putFastString bin_dict)
put_ bh iface
-- write the symtab pointer at the front of the file
symtab_p <- tellBin bh
putAt bh symtab_p_p symtab_p
seekBin bh symtab_p
-- write the symbol table itself
symtab_next' <- readFastMutInt symtab_next
symtab_map' <- readIORef symtab_map
putSymbolTable bh symtab_next' symtab_map'
  -- write the dictionary pointer at the front of the file
dict_p <- tellBin bh
putAt bh dict_p_p dict_p
seekBin bh dict_p
-- write the dictionary itself
dict_next <- readFastMutInt dict_next_ref
dict_map <- readIORef dict_map_ref
putDictionary bh dict_next dict_map
-- and send the result to the file
writeBinMem bh filename
return ()
type NameCacheAccessor m = (m NameCache, NameCache -> m ())
nameCacheFromGhc :: NameCacheAccessor Ghc
nameCacheFromGhc = ( read_from_session , write_to_session )
where
read_from_session = do
ref <- withSession (return . hsc_NC)
liftIO $ readIORef ref
write_to_session nc' = do
ref <- withSession (return . hsc_NC)
liftIO $ writeIORef ref nc'
freshNameCache :: NameCacheAccessor IO
freshNameCache = ( create_fresh_nc , \_ -> return () )
where
create_fresh_nc = do
u <- mkSplitUniqSupply 'a' -- ??
return (initNameCache u [])
-- | Read a Haddock (@.haddock@) interface file. Return either an
-- 'InterfaceFile' or an error message.
--
-- This function can be called in two ways. Within a GHC session it will
-- use and update the session's name cache. Outside a GHC session
-- a new empty name cache is used. The function is therefore generic in the
-- monad being used. The exact monad is whichever monad the first
-- argument, the getter and setter of the name cache, requires.
--
readInterfaceFile :: forall m.
MonadIO m
=> NameCacheAccessor m
-> FilePath
-> m (Either String InterfaceFile)
readInterfaceFile (get_name_cache, set_name_cache) filename = do
bh0 <- liftIO $ readBinMem filename
magic <- liftIO $ get bh0
version <- liftIO $ get bh0
case () of
_ | magic /= binaryInterfaceMagic -> return . Left $
"Magic number mismatch: couldn't load interface file: " ++ filename
| version `notElem` binaryInterfaceVersionCompatibility -> return . Left $
"Interface file is of wrong version: " ++ filename
| otherwise -> with_name_cache $ \update_nc -> do
dict <- get_dictionary bh0
-- read the symbol table so we are capable of reading the actual data
bh1 <- do
let bh1 = setUserData bh0 $ newReadState (error "getSymtabName")
(getDictFastString dict)
symtab <- update_nc (get_symbol_table bh1)
return $ setUserData bh1 $ newReadState (getSymtabName (NCU (\f -> update_nc (return . f))) dict symtab)
(getDictFastString dict)
-- load the actual data
iface <- liftIO $ get bh1
return (Right iface)
where
with_name_cache :: forall a.
((forall n b. MonadIO n
=> (NameCache -> n (NameCache, b))
-> n b)
-> m a)
-> m a
with_name_cache act = do
nc_var <- get_name_cache >>= (liftIO . newIORef)
x <- act $ \f -> do
nc <- liftIO $ readIORef nc_var
(nc', x) <- f nc
liftIO $ writeIORef nc_var nc'
return x
liftIO (readIORef nc_var) >>= set_name_cache
return x
get_dictionary bin_handle = liftIO $ do
dict_p <- get bin_handle
data_p <- tellBin bin_handle
seekBin bin_handle dict_p
dict <- getDictionary bin_handle
seekBin bin_handle data_p
return dict
get_symbol_table bh1 theNC = liftIO $ do
symtab_p <- get bh1
data_p' <- tellBin bh1
seekBin bh1 symtab_p
(nc', symtab) <- getSymbolTable bh1 theNC
seekBin bh1 data_p'
return (nc', symtab)
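-- A minimal usage sketch for 'readInterfaceFile' (added for illustration; the
-- helper name and the choice of printed field are assumptions, not part of
-- this module):
--
-- > printIfaceCount :: FilePath -> IO ()
-- > printIfaceCount path = do
-- >   result <- readInterfaceFile freshNameCache path
-- >   either putStrLn (print . length . ifInstalledIfaces) result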
-------------------------------------------------------------------------------
-- * Symbol table
-------------------------------------------------------------------------------
putName :: BinSymbolTable -> BinHandle -> Name -> IO ()
putName BinSymbolTable{
bin_symtab_map = symtab_map_ref,
bin_symtab_next = symtab_next } bh name
= do
symtab_map <- readIORef symtab_map_ref
case lookupUFM symtab_map name of
Just (off,_) -> put_ bh (fromIntegral off :: Word32)
Nothing -> do
off <- readFastMutInt symtab_next
writeFastMutInt symtab_next (off+1)
writeIORef symtab_map_ref
$! addToUFM symtab_map name (off,name)
put_ bh (fromIntegral off :: Word32)
data BinSymbolTable = BinSymbolTable {
bin_symtab_next :: !FastMutInt, -- The next index to use
bin_symtab_map :: !(IORef (UniqFM (Int,Name)))
-- indexed by Name
}
putFastString :: BinDictionary -> BinHandle -> FastString -> IO ()
putFastString BinDictionary { bin_dict_next = j_r,
bin_dict_map = out_r} bh f
= do
out <- readIORef out_r
let unique = getUnique f
case lookupUFM out unique of
Just (j, _) -> put_ bh (fromIntegral j :: Word32)
Nothing -> do
j <- readFastMutInt j_r
put_ bh (fromIntegral j :: Word32)
writeFastMutInt j_r (j + 1)
writeIORef out_r $! addToUFM out unique (j, f)
data BinDictionary = BinDictionary {
bin_dict_next :: !FastMutInt, -- The next index to use
bin_dict_map :: !(IORef (UniqFM (Int,FastString)))
-- indexed by FastString
}
putSymbolTable :: BinHandle -> Int -> UniqFM (Int,Name) -> IO ()
putSymbolTable bh next_off symtab = do
put_ bh next_off
let names = elems (array (0,next_off-1) (eltsUFM symtab))
mapM_ (\n -> serialiseName bh n symtab) names
getSymbolTable :: BinHandle -> NameCache -> IO (NameCache, Array Int Name)
getSymbolTable bh namecache = do
sz <- get bh
od_names <- replicateM sz (get bh)
let arr = listArray (0,sz-1) names
(namecache', names) = mapAccumR (fromOnDiskName arr) namecache od_names
return (namecache', arr)
type OnDiskName = (PackageKey, ModuleName, OccName)
fromOnDiskName
:: Array Int Name
-> NameCache
-> OnDiskName
-> (NameCache, Name)
fromOnDiskName _ nc (pid, mod_name, occ) =
let
modu = mkModule pid mod_name
cache = nsNames nc
in
case lookupOrigNameCache cache modu occ of
Just name -> (nc, name)
Nothing ->
let
us = nsUniqs nc
u = uniqFromSupply us
name = mkExternalName u modu occ noSrcSpan
new_cache = extendNameCache cache modu occ name
in
case splitUniqSupply us of { (us',_) ->
( nc{ nsUniqs = us', nsNames = new_cache }, name )
}
serialiseName :: BinHandle -> Name -> UniqFM (Int,Name) -> IO ()
serialiseName bh name _ = do
let modu = nameModule name
put_ bh (modulePackageKey modu, moduleName modu, nameOccName name)
-------------------------------------------------------------------------------
-- * GhcBinary instances
-------------------------------------------------------------------------------
instance (Ord k, Binary k, Binary v) => Binary (Map k v) where
put_ bh m = put_ bh (Map.toList m)
get bh = fmap (Map.fromList) (get bh)
instance Binary InterfaceFile where
put_ bh (InterfaceFile env ifaces) = do
put_ bh env
put_ bh ifaces
get bh = do
env <- get bh
ifaces <- get bh
return (InterfaceFile env ifaces)
instance Binary InstalledInterface where
put_ bh (InstalledInterface modu info docMap argMap
exps visExps opts subMap fixMap) = do
put_ bh modu
put_ bh info
put_ bh docMap
put_ bh argMap
put_ bh exps
put_ bh visExps
put_ bh opts
put_ bh subMap
put_ bh fixMap
get bh = do
modu <- get bh
info <- get bh
docMap <- get bh
argMap <- get bh
exps <- get bh
visExps <- get bh
opts <- get bh
subMap <- get bh
fixMap <- get bh
return (InstalledInterface modu info docMap argMap
exps visExps opts subMap fixMap)
instance Binary DocOption where
put_ bh OptHide = do
putByte bh 0
put_ bh OptPrune = do
putByte bh 1
put_ bh OptIgnoreExports = do
putByte bh 2
put_ bh OptNotHome = do
putByte bh 3
put_ bh OptShowExtensions = do
putByte bh 4
get bh = do
h <- getByte bh
case h of
0 -> do
return OptHide
1 -> do
return OptPrune
2 -> do
return OptIgnoreExports
3 -> do
return OptNotHome
4 -> do
return OptShowExtensions
_ -> fail "invalid binary data found"
instance Binary Example where
put_ bh (Example expression result) = do
put_ bh expression
put_ bh result
get bh = do
expression <- get bh
result <- get bh
return (Example expression result)
instance Binary Hyperlink where
put_ bh (Hyperlink url label) = do
put_ bh url
put_ bh label
get bh = do
url <- get bh
label <- get bh
return (Hyperlink url label)
instance Binary Picture where
put_ bh (Picture uri title) = do
put_ bh uri
put_ bh title
get bh = do
uri <- get bh
title <- get bh
return (Picture uri title)
instance Binary a => Binary (Header a) where
put_ bh (Header l t) = do
put_ bh l
put_ bh t
get bh = do
l <- get bh
t <- get bh
return (Header l t)
instance Binary Meta where
put_ bh Meta { _version = v } = put_ bh v
get bh = (\v -> Meta { _version = v }) <$> get bh
instance (Binary mod, Binary id) => Binary (MetaDoc mod id) where
put_ bh MetaDoc { _meta = m, _doc = d } = do
put_ bh m
put_ bh d
get bh = do
m <- get bh
d <- get bh
return $ MetaDoc { _meta = m, _doc = d }
{-* Generated by DrIFT : Look, but Don't Touch. *-}
instance (Binary mod, Binary id) => Binary (DocH mod id) where
put_ bh DocEmpty = do
putByte bh 0
put_ bh (DocAppend aa ab) = do
putByte bh 1
put_ bh aa
put_ bh ab
put_ bh (DocString ac) = do
putByte bh 2
put_ bh ac
put_ bh (DocParagraph ad) = do
putByte bh 3
put_ bh ad
put_ bh (DocIdentifier ae) = do
putByte bh 4
put_ bh ae
put_ bh (DocModule af) = do
putByte bh 5
put_ bh af
put_ bh (DocEmphasis ag) = do
putByte bh 6
put_ bh ag
put_ bh (DocMonospaced ah) = do
putByte bh 7
put_ bh ah
put_ bh (DocUnorderedList ai) = do
putByte bh 8
put_ bh ai
put_ bh (DocOrderedList aj) = do
putByte bh 9
put_ bh aj
put_ bh (DocDefList ak) = do
putByte bh 10
put_ bh ak
put_ bh (DocCodeBlock al) = do
putByte bh 11
put_ bh al
put_ bh (DocHyperlink am) = do
putByte bh 12
put_ bh am
put_ bh (DocPic x) = do
putByte bh 13
put_ bh x
put_ bh (DocAName an) = do
putByte bh 14
put_ bh an
put_ bh (DocExamples ao) = do
putByte bh 15
put_ bh ao
put_ bh (DocIdentifierUnchecked x) = do
putByte bh 16
put_ bh x
put_ bh (DocWarning ag) = do
putByte bh 17
put_ bh ag
put_ bh (DocProperty x) = do
putByte bh 18
put_ bh x
put_ bh (DocBold x) = do
putByte bh 19
put_ bh x
put_ bh (DocHeader aa) = do
putByte bh 20
put_ bh aa
get bh = do
h <- getByte bh
case h of
0 -> do
return DocEmpty
1 -> do
aa <- get bh
ab <- get bh
return (DocAppend aa ab)
2 -> do
ac <- get bh
return (DocString ac)
3 -> do
ad <- get bh
return (DocParagraph ad)
4 -> do
ae <- get bh
return (DocIdentifier ae)
5 -> do
af <- get bh
return (DocModule af)
6 -> do
ag <- get bh
return (DocEmphasis ag)
7 -> do
ah <- get bh
return (DocMonospaced ah)
8 -> do
ai <- get bh
return (DocUnorderedList ai)
9 -> do
aj <- get bh
return (DocOrderedList aj)
10 -> do
ak <- get bh
return (DocDefList ak)
11 -> do
al <- get bh
return (DocCodeBlock al)
12 -> do
am <- get bh
return (DocHyperlink am)
13 -> do
x <- get bh
return (DocPic x)
14 -> do
an <- get bh
return (DocAName an)
15 -> do
ao <- get bh
return (DocExamples ao)
16 -> do
x <- get bh
return (DocIdentifierUnchecked x)
17 -> do
ag <- get bh
return (DocWarning ag)
18 -> do
x <- get bh
return (DocProperty x)
19 -> do
x <- get bh
return (DocBold x)
20 -> do
aa <- get bh
return (DocHeader aa)
_ -> error "invalid binary data found in the interface file"
instance Binary name => Binary (HaddockModInfo name) where
put_ bh hmi = do
put_ bh (hmi_description hmi)
put_ bh (hmi_copyright hmi)
put_ bh (hmi_license hmi)
put_ bh (hmi_maintainer hmi)
put_ bh (hmi_stability hmi)
put_ bh (hmi_portability hmi)
put_ bh (hmi_safety hmi)
put_ bh (fromEnum <$> hmi_language hmi)
put_ bh (map fromEnum $ hmi_extensions hmi)
get bh = do
descr <- get bh
copyr <- get bh
licen <- get bh
maint <- get bh
stabi <- get bh
porta <- get bh
safet <- get bh
langu <- fmap toEnum <$> get bh
exten <- map toEnum <$> get bh
return (HaddockModInfo descr copyr licen maint stabi porta safet langu exten)
instance Binary DocName where
put_ bh (Documented name modu) = do
putByte bh 0
put_ bh name
put_ bh modu
put_ bh (Undocumented name) = do
putByte bh 1
put_ bh name
get bh = do
h <- getByte bh
case h of
0 -> do
name <- get bh
modu <- get bh
return (Documented name modu)
1 -> do
name <- get bh
return (Undocumented name)
_ -> error "get DocName: Bad h"
| lamefun/haddock | haddock-api/src/Haddock/InterfaceFile.hs | bsd-2-clause | 19,191 | 10 | 28 | 6,630 | 5,430 | 2,569 | 2,861 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -optP -C -optP -ffreestanding #-}
#define A 'a'
main :: IO ()
main = putStrLn [A,
-- /*
'b',
-- */
'c']
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/driver/T3389.hs | bsd-3-clause | 178 | 0 | 6 | 65 | 33 | 20 | 13 | 6 | 1 |
-- !!! Exporting non-existent datatype transparently
module M(T(K1)) where
x = 'a' -- dummy definition to get round a separate bug
| siddhanathan/ghc | testsuite/tests/module/mod10.hs | bsd-3-clause | 131 | 0 | 5 | 21 | 20 | 14 | 6 | 4 | 1 |
module Language.Haskell.Session.GHC.Util where
import qualified GHC
import Outputable (Outputable)
import qualified Outputable
dshow :: Outputable a => GHC.DynFlags -> a -> String
dshow dflags = Outputable.showSDoc dflags . Outputable.ppr
| pmlodawski/ghc-session | src/Language/Haskell/Session/GHC/Util.hs | mit | 253 | 0 | 7 | 43 | 66 | 38 | 28 | 6 | 1 |
--mkPerson.hs
module MkPerson where
type Name = String
type Age = Integer
data Person = Person Name Age deriving Show
data PersonInvalid = NameEmpty
| AgeTooLow
| PersonInvalidUnknown String
deriving (Eq, Show)
mkPerson :: Name -> Age -> Either PersonInvalid Person
mkPerson name age
| name /= "" && age > 0 = Right $ Person name age
| name == "" = Left NameEmpty
| not (age > 0) = Left AgeTooLow
| otherwise = Left $ PersonInvalidUnknown $
"Name was: " ++ show name ++
" Age was: " ++ show age
gimmePerson :: IO ()
gimmePerson = do
putStr "Name: "
name <- getLine
putStr "Age: "
age <- getLine
let result = mkPerson name (read age) in
case result of
Right p -> putStrLn $ "Yay! " ++ show p
Left p -> putStrLn $ "Oh No! " ++ show p | deciduously/Haskell-First-Principles-Exercises | 4-Getting real/13-Building Projects/code/mkPerson.hs | mit | 926 | 0 | 13 | 339 | 295 | 143 | 152 | 26 | 2 |
{-|
Module : BreadU.Pages.Markup.IndexPage
Description : HTML markup for the index page.
Stability : experimental
Portability : POSIX
HTML markup for the index page.
-}
module BreadU.Pages.Markup.IndexPage where
import BreadU.Types ( FoodName, LangCode(..) )
import BreadU.Pages.Types ( IndexPage(..)
, IndexBodyContent(..)
, HeaderContent(..)
)
import BreadU.Pages.CSS.Names ( ClassName(..) )
import BreadU.Pages.JS.Own ( removeDOMItemBy, ajaxPOST )
import BreadU.Pages.Names ( ElementName(..) )
import BreadU.API ( indexPageLink
, addFoodLink
, calculateFoodLink
)
import BreadU.Pages.Content.IndexBody ( indexBodyContent )
import BreadU.Pages.Markup.Common.HeadTag ( commonHeadTag )
import BreadU.Pages.Markup.Common.Header ( commonHeader )
import BreadU.Pages.Markup.Common.Footer ( commonFooter )
import BreadU.Pages.Markup.Common.Resources ( allScripts )
import BreadU.Pages.Markup.Common.Utils
import Prelude hiding ( div, head, span )
import Data.Text ( Text, unpack, isSuffixOf )
import Data.Text.Lazy ( toStrict )
import Data.Monoid ( (<>) )
import TextShow ( showt )
import Text.Blaze.Html5
import qualified Text.Blaze.Html5.Attributes as A
import Text.Blaze.Html.Renderer.Text ( renderHtml )
{-|
Markup for index page. To serve HTML via Servant we
should provide 'ToMarkup' instance for 'IndexPage' type.
-}
instance ToMarkup IndexPage where
-- Render HTML for fake index page.
toMarkup (RedirectTo lang) = redirectImmediatelyTo $ indexPageLink lang
-- Render markup for the real, localized index page.
toMarkup IndexPage{..} = do
        -- We're using the blaze-html DSL to build HTML. The markup is built
        -- in the monadic context 'Html', so we can compose the whole DOM
        -- from its parts within the same context.
docType
html ! A.lang (toValue $ showt langCode) $ do
commonHeadTag (siteTitle headerContentForIndex)
(metaDescription headerContentForIndex)
body $ do
div ! A.class_ "container" $ do
commonHeader headerContentForIndex langCode
foodFormBlock langCode bodyContentForIndex
commonFooter footerContentForIndex langCode
allScripts
    -- Render preEscaped text: it's useful when the text contains raw HTML tags.
preEscapedToMarkup = toMarkup
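-- A sketch of how such a 'ToMarkup' instance is typically consumed on the Servant
-- side (an illustration only; the endpoint name below is made up, and 'HTML' refers
-- to the content type provided by the servant-blaze package):
--
-- > type IndexEndpoint = Get '[HTML] IndexPage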
-- | HTML-based redirection to language-specific index page.
redirectImmediatelyTo :: Text -> Html
redirectImmediatelyTo langSpecificLink = docTypeHtml $ do
head $ meta ! A.httpEquiv "refresh" ! A.content (toValue $ "0; URL='" <> langSpecificLink <> "'")
body mempty
-- | Block for the main food form.
foodFormBlock :: LangCode -> IndexBodyContent -> Html
foodFormBlock langCode content@IndexBodyContent{..} = div ! A.class_ (toValue FormBlock) $
    -- This form is actually submitted via an AJAX POST request; see the JS/Own.hs module.
form ! A.method "post"
! A.id (toValue $ show FoodFormId <> unpack (showt langCode))
! A.action (toValue $ calculateFoodLink langCode) $ do
firstFoodItem content
totalBUQuantity
row_ $ do
div ! A.class_ "col-3 col-xs-5" $ addFoodButton
div ! A.class_ "col-9 col-xs-7" $ calculateButton
where
addFoodButton = div ! A.class_ (toValue AddFood) $
button ! A.class_ (toValue $ "btn btn-secondary btn-lg btn-block waves-effect " <> showt AddFoodButton)
! A.id (toValue AddFoodButtonId)
! A.type_ "button"
! A.title (toValue addFoodTitle)
! A.onclick (toValue addNewFoodItem) $
span ! A.class_ (toValue MainButtonIcon) $ fa "fa-plus"
    -- This button should be a submit button so that it can be triggered with the Enter key.
calculateButton = div ! A.class_ (toValue Calculate) $
button ! A.class_ (toValue $ "btn btn-info btn-lg btn-block waves-effect " <> showt CalculateButton)
! A.id (toValue CalculateButtonId)
! A.type_ "submit"
! A.title (toValue calculateTitle) $ do
span ! A.class_ (toValue MainButtonIcon) $ fa "fa-calculator"
span ! A.class_ (toValue MainButtonIconSeparator) $ mempty
toHtml calculateLabel
totalBUQuantity = div ! A.class_ (toValue TotalBUQuantity) $ do
span $ toHtml totalBULabel
span ! A.id (toValue TotalBUQuantityId) $ "0"
    -- | When the user clicks the Add button, an AJAX POST request is sent and a new food item is added.
addNewFoodItem :: Text
addNewFoodItem = ajaxPOST (addFoodLink langCode) $
"$('#" <> showt FoodFormFirstItem <> "').append(response.itemHTML)"
{-|
The first food item is always present, always at the top, and cannot be removed,
so we can hardcode the id/name attributes of its inputs.
-}
firstFoodItem :: IndexBodyContent -> Html
firstFoodItem IndexBodyContent{..} =
div ! A.class_ (toValue FoodFormFirstItem)
! A.id (toValue FoodFormFirstItem) $
div ! A.class_ (toValue FoodFormItemInputs) $ do
row_ $ do
col_6 $ foodDataInput (showt FirstFoodPrefix <> showt FoodNameInputPostfix) FoodInputClass foodNameLabel
col_2 $ div ! A.class_ (toValue Or) $ toHtml orAnotherValue
col_4 $ foodDataInput (showt FirstFoodPrefix <> showt CarbsInputPostfix) CarbsInputClass carbsLabel
div ! A.class_ (toValue FoodFormRowsSeparator) $ mempty
row_ $ do
col_5 $ foodDataInput (showt FirstFoodPrefix <> showt BUInputPostfix) BUInputClass buLabel
col_2 $ div ! A.class_ (toValue Or) $ toHtml orAnotherValue
col_5 $ foodDataInput (showt FirstFoodPrefix <> showt GramsInputPostfix) GramsInputClass gramsLabel
-- | Return 'Text' with rendered HTML. It is sent as the response to the AJAX POST request
-- issued after the user clicks Add to include a new food item in the calculation.
newFoodItem :: LangCode -> Text -> Text
newFoodItem langCode idPrefix = toStrict . renderHtml $
newFoodItemCommon (indexBodyContent langCode) idPrefix
-- | New food item, language-agnostic variant.
newFoodItemCommon :: IndexBodyContent -> Text -> Html
newFoodItemCommon IndexBodyContent{..} idPrefix =
div ! A.class_ (toValue FoodFormItem)
! A.id (toValue thisFoodItemId) $ do
div ! A.class_ (toValue FoodFormItemInputs) $ do
row_ $ do
col_6 $ foodDataInput (idPrefix <> showt FoodNameInputPostfix) FoodInputClass foodNameLabel
col_2 $ div ! A.class_ (toValue Or) $ toHtml orAnotherValue
col_4 $ foodDataInput (idPrefix <> showt CarbsInputPostfix) CarbsInputClass carbsLabel
div ! A.class_ (toValue FoodFormRowsSeparator) $ mempty
row_ $ do
col_5 $ foodDataInput (idPrefix <> showt BUInputPostfix) BUInputClass buLabel
col_2 $ div ! A.class_ (toValue Or) $ toHtml orAnotherValue
col_5 $ foodDataInput (idPrefix <> showt GramsInputPostfix) GramsInputClass gramsLabel
div ! A.class_ (toValue RemoveIconFoodForm) $
a ! A.onclick (toValue $ removeDOMItemBy thisFoodItemId)
! A.title (toValue removeFoodItemTitle) $
preEscapedToMarkup ("×" :: String)
where
thisFoodItemId :: Text
thisFoodItemId = idPrefix <> showt FoodFormItemId
data AutoFocus = AutoFocus | NoAutoFocus
-- | One input for a food-related value.
-- For simplicity, 'commonName' is used for both the name and id attributes.
foodDataInput :: Text -> ClassName -> Text -> Html
foodDataInput commonName additionalClass aLabel = div ! A.class_ "md-form" $ do
input ! A.type_ "text"
! A.class_ (toValue $ "form-control " <> showt additionalClass)
! foodInputDatalistIdIfRequired
! A.autocomplete "off" -- We don't need browser's default autocomplete feature.
! A.id (toValue commonName)
! A.name (toValue commonName)
label ! A.for (toValue commonName) $ toHtml aLabel
foodInputDatalistIfRequired
where
thisIsFoodInput = showt FoodNameInputPostfix `isSuffixOf` commonName
foodInputDatalistIdIfRequired =
if thisIsFoodInput then A.list (toValue datalistId) else mempty
foodInputDatalistIfRequired =
-- <datalist>-tag is supported by almost all modern browsers. Sorry, Safari...
if thisIsFoodInput then datalist ! A.id (toValue datalistId) $ mempty else mempty
datalistId = commonName <> "datalist"
-- | Options for the datalist with food suggestions.
-- Again, we render this small piece of HTML here, not in JavaScript.
optionsForDatalist :: [FoodName] -> Text
optionsForDatalist = toStrict . renderHtml . mapM_ addOption
where
addOption :: FoodName -> Html
addOption suggestion = option ! A.value (toValue suggestion) $ mempty
| denisshevchenko/breadu.info | src/lib/BreadU/Pages/Markup/IndexPage.hs | mit | 9,731 | 0 | 19 | 2,973 | 2,030 | 1,028 | 1,002 | -1 | -1 |
module Dianoga.Minification.Css where
| iand675/dianoga | src/Dianoga/Minification/Css.hs | mit | 38 | 0 | 3 | 3 | 7 | 5 | 2 | 1 | 0 |
module Data.Bson
( module Data.Bson.Class
, module Data.Bson.Parser
, module Data.Bson.Types
, module Data.Bson.Utils
) where
-- Instance Binary Document
import Data.Bson.Binary ()
import Data.Bson.Class (ToBson(..), FromBson(..))
import Data.Bson.Parser (Parser, parse, parseMaybe, parseEither)
import Data.Bson.Types (Document, Label, Value(..), Binary(..), ObjectId(..),
Array, RegexOption(..), RegexOptions)
import Data.Bson.Utils (document, (!?), (=:))
| lambda-llama/bresson | src/Data/Bson.hs | mit | 486 | 2 | 6 | 76 | 159 | 109 | 50 | 11 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
module HttpApp.PlayNow.Api
( module HttpApp.PlayNow.Api.Types
, Protected
) where
import Servant ((:<|>), (:>), Delete, Get, JSON,
Post, ReqBody)
import HttpApp.PlayNow.Api.Types
type Protected =
New
:<|> Active
:<|> Del
type New = "new" :> ReqBody '[JSON] PNNewRq :> Post '[JSON] PNNewResp
type Active = "active" :> Get '[JSON] PNActiveResp
type Del = ReqBody '[JSON] PNDeleteRq :> Delete '[JSON] ()
| rubenmoor/skull | skull-server/src/HttpApp/PlayNow/Api.hs | mit | 567 | 0 | 9 | 174 | 159 | 96 | 63 | 15 | 0 |
module Game.Input.Events where
data InputEvent = Key KeyEvent
| Mouse MouseEvent
| Quit
| NoInput
type KeyEvent = Maybe Key
type MouseEvent = Maybe MouseButton
data Key = KeySpace
| Key1
| Key2
| Key3
deriving (Show, Eq)
data MouseButton = Left
| Right
deriving (Show, Eq)
| flomerz/SchaffschNie | src/Game/Input/Events.hs | mit | 396 | 0 | 6 | 170 | 93 | 55 | 38 | 15 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-firehoseaction.html
module Stratosphere.ResourceProperties.IoTTopicRuleFirehoseAction where
import Stratosphere.ResourceImports
-- | Full data type definition for IoTTopicRuleFirehoseAction. See
-- 'ioTTopicRuleFirehoseAction' for a more convenient constructor.
data IoTTopicRuleFirehoseAction =
IoTTopicRuleFirehoseAction
{ _ioTTopicRuleFirehoseActionDeliveryStreamName :: Val Text
, _ioTTopicRuleFirehoseActionRoleArn :: Val Text
, _ioTTopicRuleFirehoseActionSeparator :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON IoTTopicRuleFirehoseAction where
toJSON IoTTopicRuleFirehoseAction{..} =
object $
catMaybes
[ (Just . ("DeliveryStreamName",) . toJSON) _ioTTopicRuleFirehoseActionDeliveryStreamName
, (Just . ("RoleArn",) . toJSON) _ioTTopicRuleFirehoseActionRoleArn
, fmap (("Separator",) . toJSON) _ioTTopicRuleFirehoseActionSeparator
]
-- | Constructor for 'IoTTopicRuleFirehoseAction' containing required fields
-- as arguments.
ioTTopicRuleFirehoseAction
:: Val Text -- ^ 'ittrfaDeliveryStreamName'
-> Val Text -- ^ 'ittrfaRoleArn'
-> IoTTopicRuleFirehoseAction
ioTTopicRuleFirehoseAction deliveryStreamNamearg roleArnarg =
IoTTopicRuleFirehoseAction
{ _ioTTopicRuleFirehoseActionDeliveryStreamName = deliveryStreamNamearg
, _ioTTopicRuleFirehoseActionRoleArn = roleArnarg
, _ioTTopicRuleFirehoseActionSeparator = Nothing
}
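-- A usage sketch (the stream name and role ARN below are made-up values; it assumes
-- the IsString instance for 'Val Text' together with OverloadedStrings, plus the
-- usual lens operators (&) and (?~) being in scope):
--
-- > exampleAction :: IoTTopicRuleFirehoseAction
-- > exampleAction =
-- >   ioTTopicRuleFirehoseAction "my-stream" "arn:aws:iam::123456789012:role/firehose"
-- >     & ittrfaSeparator ?~ "\n"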
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-firehoseaction.html#cfn-iot-topicrule-firehoseaction-deliverystreamname
ittrfaDeliveryStreamName :: Lens' IoTTopicRuleFirehoseAction (Val Text)
ittrfaDeliveryStreamName = lens _ioTTopicRuleFirehoseActionDeliveryStreamName (\s a -> s { _ioTTopicRuleFirehoseActionDeliveryStreamName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-firehoseaction.html#cfn-iot-topicrule-firehoseaction-rolearn
ittrfaRoleArn :: Lens' IoTTopicRuleFirehoseAction (Val Text)
ittrfaRoleArn = lens _ioTTopicRuleFirehoseActionRoleArn (\s a -> s { _ioTTopicRuleFirehoseActionRoleArn = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-firehoseaction.html#cfn-iot-topicrule-firehoseaction-separator
ittrfaSeparator :: Lens' IoTTopicRuleFirehoseAction (Maybe (Val Text))
ittrfaSeparator = lens _ioTTopicRuleFirehoseActionSeparator (\s a -> s { _ioTTopicRuleFirehoseActionSeparator = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/IoTTopicRuleFirehoseAction.hs | mit | 2,686 | 0 | 13 | 268 | 356 | 202 | 154 | 34 | 1 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}
module Main
where
import Control.Concurrent
import Control.Exception (SomeException)
import Control.Distributed.Process
import Control.Distributed.Process.Closure
import Control.Distributed.Process.Node
import Control.Distributed.Process.Backend.ZMQ
import Control.Distributed.Process.Backend.ZMQ.Channel
import Control.Monad
import qualified Data.ByteString.Char8 as B8
import Data.Typeable
import Data.List.NonEmpty
import Network.Transport.ZMQ.Internal.Types as NTZ
import Network.Transport.TCP
import System.Random
import qualified System.ZMQ4 as ZMQ
import Text.Printf
server :: NTZ.TransportInternals -> Process ()
server transport = forever $ do
(chIn, chOut) <- pair (ZMQ.Pub, ZMQ.Sub) (PairOptions (Just "tcp://127.0.0.1:5423"))
Just port <- registerSend transport (chIn :: ChanAddrIn ZMQ.Pub (Int,Int))
-- create thread that will produce information
spawnLocal $ forever $ do
zipcode <- liftIO $ randomRIO ( 0, 1000000::Int)
temperature <- liftIO $ randomRIO (-80, 135::Int)
humidity <- liftIO $ randomRIO ( 10, 60::Int)
sendEx port ((B8.pack (show zipcode)),(temperature,humidity))
forever $ do
pid <- expect
send pid chOut
client :: NTZ.TransportInternals -> ProcessId -> MVar () -> Process ()
client transport pid end = do
me <- getSelfPid
send pid me
chOut <- expect :: Process (ChanAddrOut ZMQ.Sub (Int,Int))
Just ch <- registerReceive transport (SubReceive ("10001":|[])) chOut
records <- replicateM 5 $ receiveChanEx ch :: Process [(Int,Int)]
liftIO $ do
print records
putMVar end ()
main = do
zmq <- fakeTransport
Right transport <- createTransport "localhost" "8232" defaultTCPParameters
node <- newLocalNode transport initRemoteTable
end <- newEmptyMVar
runProcess node $ do
srv <- spawnLocal (server zmq)
spawnLocal (client zmq srv end)
liftIO $ takeMVar end
| qnikst/distributed-process-zmq | examples/weather.hs | mit | 1,965 | 0 | 16 | 358 | 629 | 327 | 302 | 49 | 1 |
-- simple function definition in haskell
--
-- funName params = expression
--
-- 1. a function name can't start with an uppercase letter
-- 2. functions can call each other regardless of the order in which
--    they are defined (see the sketch after this comment block)
-- 3. a function without parameters is simply used as a name (a constant)
-- 4. use let to define local variables inside a function
-- let definition
-- definition
-- ...
-- in expression
-- 5. if then else expression
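-- Extra illustration of rule 2 (a small sketch added for clarity; these two
-- definitions are not part of the original file): `twice` calls `increment`,
-- which is only defined afterwards, and the module still compiles.
twice x = increment (increment x)
increment x = x + 1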
ddchen'sWife = "kino"
square x = x * x
squareArea h w = square h + square w
circleArea r = let pi = 3.14
in pi * square r
cutHalfBig x = if x > 100 then x / 2 else x
main = do
print ddchen'sWife
print $ circleArea 10
| ddki/my_study_project | language/haskell/grammer/base/simpleFunctionDef.hs | mit | 643 | 0 | 8 | 170 | 122 | 65 | 57 | 9 | 2 |
{-# Language GADTs #-}
module Unison.Runtime.Stream where
import Control.Monad
import Data.Either
import Data.Maybe
import Unison.Runtime.Free (Free)
import qualified Unison.Runtime.Free as Free
import Prelude hiding (head)
data StreamF f a r where
Effect :: f r -> StreamF f a r
Emit :: a -> StreamF f a ()
type Stream f a r = Free (StreamF f a) r
eval :: f r -> Stream f a r
eval fr = Free.eval (Effect fr)
append :: Stream f a r1 -> Stream f a r2 -> Stream f a r2
append s1 s2 = s1 >> s2
empty :: Stream f a ()
empty = pure ()
emit :: a -> Stream f a ()
emit a = Free.eval (Emit a)
emits :: [a] -> Stream f a ()
emits as = foldr append (pure ()) (map emit as)
uncons :: Stream f a r -> Free f (Either r (a, Stream f a r))
uncons s = case s of
Free.Pure r -> pure (Left r)
Free.Bind (Effect fx) k -> Free.eval fx >>= (uncons . k)
Free.Bind (Emit a) k -> pure (Right (a, k ()))
uncons' :: Stream f a r -> Stream f x (Either r (a, Stream f a r))
uncons' s = Free.translate (\f -> Effect f) (uncons s)
head :: Stream f a r -> Free f (Maybe a)
head s = either (const Nothing) (Just . fst) <$> uncons s
headOr :: a -> Stream f a r -> Free f a
headOr a s = fromMaybe a <$> head s
mapEmits :: (a -> b) -> Stream f a r -> Stream f b r
mapEmits f s = case s of
Free.Pure r -> pure r
Free.Bind (Effect fx) k -> Free.Bind (Effect fx) (mapEmits f . k)
Free.Bind (Emit a) k -> Free.Bind (Emit (f a)) (mapEmits f . k)
bindEmits :: (a -> Stream f b r) -> Stream f a r -> Stream f b r
bindEmits f s = case s of
Free.Pure r -> pure r
Free.Bind (Effect fx) k -> Free.Bind (Effect fx) (bindEmits f . k)
Free.Bind (Emit a) k -> f a `append` bindEmits f (k ())
| nightscape/platform | node/src/Unison/Runtime/Stream.hs | mit | 1,679 | 0 | 13 | 402 | 950 | 475 | 475 | 43 | 3 |
{-|
Module : Main
Description : Main module for a basic bare-minimum Jupyter kernel created using the @jupyter@ library.
Copyright : (c) Andrew Gibiansky, 2016
License : MIT
Maintainer : [email protected]
Stability : stable
Portability : POSIX
This module is the Main module for @kernel-stdin@, a bare-minimum Jupyter kernel which uses the
@stdin@ channel (with 'KernelRequest's), created using the @jupyter@ library. It is intended to
demo the bare minimum amount of code required to create a Jupyter kernel which simulates using
a standard input channel.
-}
{-# Language OverloadedStrings #-}
{-# Language PatternSynonyms #-}
module Main(main) where
-- Imports from 'base'
import Control.Monad (when)
import System.Environment (getArgs)
import System.Exit (exitFailure)
import System.IO (stderr)
-- Imports from 'text'
import Data.Text (Text)
import qualified Data.Text.IO as T
-- Imports from 'jupyter'
import Jupyter.Install (installKernel, simpleKernelspec, InstallUser(..), InstallResult(..),
Kernelspec)
import Jupyter.Kernel (readProfile, simpleKernelInfo, serve, defaultCommHandler,
defaultClientRequestHandler, KernelCallbacks(..), KernelProfile,
ClientRequestHandler)
import Jupyter.Messages (KernelOutput(..), KernelReply(..), displayPlain, ClientRequest(..),
pattern ExecuteOk, CodeBlock(..), KernelRequest(..),
ClientReply(..), InputOptions(..))
-- | In `main`, support two commands:
--
-- - `kernel-stdin install`: Register this kernel with Jupyter.
-- - `kernel-stdin kernel $FILE`: Serve a kernel given ports in connection file $FILE.
main :: IO ()
main = do
args <- getArgs
case args of
["install"] -> runInstall
["kernel", profilePath] -> runKernel profilePath
_ -> putStrLn $ "Invalid arguments: " ++ show args
-- | Register this kernel with Jupyter.
runInstall :: IO ()
runInstall =
installKernel InstallLocal stdinKernelspec >>= handleInstallResult
where
-- A basic kernelspec with limited info for testing stdin.
stdinKernelspec :: Kernelspec
stdinKernelspec =
simpleKernelspec "Stdin" "stdin" $ \exe connect -> [exe, "kernel", connect]
-- Print an error message and exit with non-zero exit code if the install failed.
handleInstallResult :: InstallResult -> IO ()
handleInstallResult installResult =
case installResult of
InstallSuccessful -> return ()
InstallFailed reason -> do
T.hPutStrLn stderr reason
exitFailure
-- | Run the kernel on ports determined by parsing the connection file provided.
runKernel :: FilePath -> IO ()
runKernel profilePath = do
Just profile <- readProfile profilePath
serve profile defaultCommHandler $ clientRequestHandler profile
-- | Client request handler which acts in all ways as the default, except that for execute
-- requests it reads data from stdin and writes it back to the client as a display data message.
clientRequestHandler :: KernelProfile -> ClientRequestHandler
clientRequestHandler profile callbacks req =
case req of
ExecuteRequest (CodeBlock code) _ -> do
sendKernelOutput callbacks $ ExecuteInputOutput 1 (CodeBlock code)
echoStdin code callbacks
return $ ExecuteReply 1 ExecuteOk
_ -> defaultClientRequestHandler profile (simpleKernelInfo "Stdin") callbacks req
-- | Read some text from the client stdin using the 'KernelCallbacks', then publish that text back
-- to the client as a 'DisplayDataOutput'.
--
-- If the execute prompt is "password", then the input is done in password mode.
echoStdin :: Text -> KernelCallbacks -> IO ()
echoStdin code callbacks =
when (code /= "skip") $ do
clientReply <- sendKernelRequest callbacks $
InputRequest
InputOptions { inputPrompt = code, inputPassword = code == "password" }
case clientReply of
InputReply stdinText ->
sendKernelOutput callbacks $ DisplayDataOutput $ displayPlain stdinText
| gibiansky/jupyter-haskell | examples/stdin/Main.hs | mit | 4,193 | 0 | 13 | 978 | 668 | 367 | 301 | 58 | 3 |
-- | Layout routine for Data.Tree
-- $Id$
module Tree
( module Data.Tree
, module Autolib.Dot.Dot
, module Tree.Class
, peng
)
where
import Data.Tree
import Autolib.Dot.Dot
import Autolib.Dot.Dotty
import Tree.Class
import Tree.Dot
| florianpilz/autotool | src/Tree.hs | gpl-2.0 | 242 | 0 | 5 | 43 | 57 | 38 | 19 | 10 | 0 |
module Data.CommandLineParser where
import Options.Applicative
data GlobalOpts = Global GlobalCmd
data GlobalCmd = Config ConfigOpts
| Repo RepoOpts
| Term TermOpts
| Course CourseOpts
| Group GroupOpts
| Project ProjectOpts
| Submit SubmitOpts
| Worktrain WorktrainOpts
data ConfigOpts = ConfigOpts ConfigCmd
data ConfigCmd = ConfigThreshold ConfigThresholdOpts
| ConfigTermDate ConfigTermDateOpts
| ConfigProjectDate ConfigProjectDateOpts
| ConfigAcceptExec ConfigAcceptExecOpts
| ConfigTimeLimit ConfigTimeLimitOpts
| ConfigSpaceLimit ConfigSpaceLimitOpts
| ConfigAdminGroups ConfigAdminGroupsOpts
| ConfigTeacherGroups ConfigTeacherGroupsOpts
| ConfigCorrector ConfigCorrectorOpts
-- Config threshold
data ConfigThresholdOpts = ConfigThresholdOpts ConfigThresholdCmd
data ConfigThresholdCmd = ConfigThresholdSet ConfigThresholdSetOpts
| ConfigThresholdList ConfigThresholdListOpts
data ConfigThresholdSetOpts = ConfigThresholdSetOpts
{ configThresholdSetCurrent :: Maybe String
, configThresholdSetChoose :: Maybe String }
data ConfigThresholdListOpts = ConfigThresholdListOpts
-- Config term date
data ConfigTermDateOpts = ConfigTermDateOpts ConfigTermDateCmd
data ConfigTermDateCmd = ConfigTermDateSet ConfigTermDateSetOpts
| ConfigTermDateList ConfigTermDateListOpts
data ConfigTermDateSetOpts = ConfigTermDateSetOpts
{ configTermDateSetTerm1 :: Maybe String
, configTermDateSetTerm2 :: Maybe String
, configTermDateSetTerm3 :: Maybe String }
data ConfigTermDateListOpts = ConfigTermDateListOpts
-- Config project date
data ConfigProjectDateOpts = ConfigProjectDateOpts ConfigProjectDateCmd
data ConfigProjectDateCmd = ConfigProjectDateSet ConfigProjectDateSetOpts
| ConfigProjectDateList ConfigProjectDateListOpts
data ConfigProjectDateSetOpts = ConfigProjectDateSetOpts
{ configProjectDateSetEnd :: Maybe String
, configProjectDateSetLate :: Maybe String }
data ConfigProjectDateListOpts = ConfigProjectDateListOpts
-- Config accept exec
data ConfigAcceptExecOpts = ConfigAcceptExecOpts ConfigAcceptExecCmd
data ConfigAcceptExecCmd = ConfigAcceptExecSet ConfigAcceptExecSetOpts
| ConfigAcceptExecList ConfigAcceptExecListOpts
data ConfigAcceptExecSetOpts = ConfigAcceptExecSetOpts
{ configAcceptExecSetFlag :: Bool }
data ConfigAcceptExecListOpts = ConfigAcceptExecListOpts
-- Config time limit
data ConfigTimeLimitOpts = ConfigTimeLimitOpts ConfigTimeLimitCmd
data ConfigTimeLimitCmd = ConfigTimeLimitSet ConfigTimeLimitSetOpts
| ConfigTimeLimitList ConfigTimeLimitListOpts
data ConfigTimeLimitSetOpts = ConfigTimeLimitSetOpts
{ configTimeLimitSetSeconds :: Double }
data ConfigTimeLimitListOpts = ConfigTimeLimitListOpts
-- Config space limit
data ConfigSpaceLimitOpts = ConfigSpaceLimitOpts ConfigSpaceLimitCmd
data ConfigSpaceLimitCmd = ConfigSpaceLimitSet ConfigSpaceLimitSetOpts
| ConfigSpaceLimitList ConfigSpaceLimitListOpts
data ConfigSpaceLimitSetOpts = ConfigSpaceLimitSetOpts
{ configSpaceLimitSetBytes :: Integer }
data ConfigSpaceLimitListOpts = ConfigSpaceLimitListOpts
-- Config admin groups
data ConfigAdminGroupsOpts = ConfigAdminGroupsOpts ConfigAdminGroupsCmd
data ConfigAdminGroupsCmd = CongigAdminGroupsSet ConfigAdminGroupsSetOpts
| CongigAdminGroupsList ConfigAdminGroupsListOpts
data ConfigAdminGroupsSetOpts = ConfigAdminGroupsSetOpts
{ configAdminGroupsSetNames :: [String] }
data ConfigAdminGroupsListOpts = ConfigAdminGroupsListOpts
-- Config teacher groups
data ConfigTeacherGroupsOpts = ConfigTeacherGroupsOpts ConfigTeacherGroupsCmd
data ConfigTeacherGroupsCmd = CongigTeacherGroupsSet ConfigTeacherGroupsSetOpts
| CongigTeacherGroupsList ConfigTeacherGroupsListOpts
data ConfigTeacherGroupsSetOpts = ConfigTeacherGroupsSetOpts
{ configTeacherGroupsSetNames :: [String] }
data ConfigTeacherGroupsListOpts = ConfigTeacherGroupsListOpts
-- Config corrector
data ConfigCorrectorOpts = ConfigCorrectorOpts ConfigCorrectorCmd
data ConfigCorrectorCmd = ConfigCorrectorIs ConfigCorrectorIsOpts
| ConfigCorrectorAdd ConfigCorrectorAddOpts
| ConfigCorrectorRemove ConfigCorrectorRemoveOpts
data ConfigCorrectorIsOpts = ConfigCorrectorIsOpts
{ configCorrectorIsName :: String }
data ConfigCorrectorAddOpts = ConfigCorrectorAddOpts
{ configCorrectorAddName :: String }
data ConfigCorrectorRemoveOpts = ConfigCorrectorRemoveOpts
{ configCorrectorRemoveName :: String }
-- Repo
data RepoOpts = RepoOpts RepoCmd
data RepoCmd = RepoAdd RepoAddOpts
| RepoRemove RepoRemoveOpts
| RepoList RepoListOpts
data RepoAddOpts = RepoAddOpts
{ repoAddRepoName :: String }
data RepoRemoveOpts = RepoRemoveOpts
{ repoRemoveRepoName :: Maybe String }
data RepoListOpts = RepoListOpts
-- Term
data TermOpts = TermOpts TermCmd
data TermCmd = TermAdd TermAddOpts
| TermRemove TermRemoveOpts
| TermList TermListOpts
| TermDate TermDateOpts
data TermAddOpts = TermAddOpts
{ termAddRepoName :: Maybe String
, termAddTermName :: String }
data TermRemoveOpts = TermRemoveOpts
{ termRemoveRepoName :: Maybe String
, termRemoveTermName :: Maybe String }
data TermListOpts = TermListOpts
{ termListRepoName :: Maybe String }
data TermDateOpts = TermDateOpts TermDateCmd
data TermDateCmd = TermDateSet TermDateSetOpts
| TermDateList TermDateListOpts
data TermDateSetOpts = TermDateSetOpts
{ termDateSetRepoName :: Maybe String
, termDateSetTermName :: Maybe String
, termDateSetStart :: Maybe String
, termDateSetEnd :: Maybe String }
data TermDateListOpts = TermDateListOpts
{ termDateListRepoName :: Maybe String
, termDateListTermName :: Maybe String }
-- Course
data CourseOpts = CourseOpts CourseCmd
data CourseCmd = CourseAdd CourseAddOpts
               | CourseRemove CourseRemoveOpts
| CourseList CourseListOpts
               | CourseTeacher CourseTeacherOpts
| CourseCorrector CourseCorrectorOpts
data CourseAddOpts = CourseAddOpts
{ courseAddRepoName :: Maybe String
, courseAddTermName :: Maybe String
, courseAddCourseName :: String }
data CourseRemoveOpts = CourseRemoveOpts
{ courseRemoveRepoName :: Maybe String
, courseRemoveTermName :: Maybe String
, courseRemoveCourseName :: Maybe String }
| AntoineSavage/haskell | turninH/src/Data/CommandLineParser.hs | gpl-2.0 | 6,887 | 0 | 9 | 1,498 | 990 | 573 | 417 | 130 | 0 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 1.0//EN"
"http://java.sun.com/products/javahelp/helpset_1_0.dtd">
<helpset version="1.0">
<title>My JavaHelp System</title>
<maps>
<mapref location="Map.jhm"/>
<homeID>overview</homeID>
</maps>
<view>
<name>TOC</name>
<label>TOC</label>
<type>javax.help.TOCView</type>
<data>TOC.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>Index.xml</data>
</view>
</helpset>
| newrol/ideasbook | MyJavaHelp/HelpSet.hs | gpl-2.0 | 691 | 53 | 43 | 182 | 270 | 138 | 132 | -1 | -1 |
module Main where
import Test.HUnit
import Data.Ix
import Data.SuffixArray
import Data.CountingSort
import qualified Data.Vector as V
testCountOccurences :: (Ix a, Ord a, Bounded a, Show a) => [a] -> [Int] -> Test
testCountOccurences testData answer = TestCase (
assertEqual ("countOccurences " ++ (show testData))
answer (V.toList $ countOccurences (V.fromList testData))
)
testPartialSums :: [Int] -> [Int] -> Test
testPartialSums testData answer = TestCase (
assertEqual ("partialSums " ++ (show testData))
answer (V.toList $ partialSums $ V.fromList testData)
)
testCountingSort :: String -> [Int] -> [Int] -> Test
testCountingSort testList testIndexes answer = TestCase (
assertEqual
("countingSort " ++ (show testList) ++ " " ++ (show testIndexes))
answer (V.toList (countingSort (V.fromList testList) (V.fromList testIndexes)))
)
testSuffixArray :: String -> [Int] -> Test
testSuffixArray testData answer = TestCase (
assertEqual ("suffixArray " ++ (show testData))
answer (V.toList myAnswer)
)
where SuffixArray _ myAnswer = suffixArray $ V.fromList testData
testComposeLists :: [Int] -> [Int] -> [Int] -> Test
testComposeLists testData testIndexes answer = TestCase (
assertEqual
("composeLists" ++ (show testData) ++ " " ++ (show testIndexes))
answer (V.toList (composeLists testData' (V.fromList testIndexes)))
)
where testData' = V.fromList testData
tests = TestList
[ TestLabel "countOccurences tests" $ TestList
[ testCountOccurences "abc" [1, 1, 1]
, testCountOccurences "abb" [1, 2]
, testCountOccurences "hello" [1, 0, 0, 1, 0, 0, 0, 2, 0, 0, 1]
, testCountOccurences (([1, 1, 1])::[Int]) [3]
, testCountOccurences (([9, 10, 12])::[Int]) [1, 1, 0, 1]
, testCountOccurences "aabc" [2, 1, 1]
]
, TestLabel "partialSums tests" $ TestList
[ testPartialSums [1, 1, 1] [1, 2, 3]
, testPartialSums [1, 0, 5, 2] [1, 1, 6, 8]
]
, TestLabel "countingSort tests" $ TestList
[ testCountingSort "bcb" [0, 1, 2] [0, 2, 1]
, testCountingSort "bcb" [2, 1, 0] [2, 0, 1]
]
, TestLabel "suffixArray tests" $ TestList
[ testSuffixArray "abcb" [0, 3, 1, 2]
, testSuffixArray "aaba" [3, 0, 1, 2]
, testSuffixArray "abc" [0, 1, 2]
, testSuffixArray "eefdcba" [6, 5, 4, 3, 0, 1, 2]
-- the difference in the following 2 tests is due to the fact
-- that the algorithm builds ordered cyclic shifts of the string.
-- to get an actual suffix array, a "sentinel" element that is
-- lexicographically smaller than all other elements in the input
-- must be appended to the input sequence. see
-- https://github.com/VictorDenisov/suffixarray/issues/1 for details.
, testSuffixArray "ababcabab" [5, 7, 0, 2, 6, 8, 1, 3, 4]
, testSuffixArray "ababcabab\0" [9, 7, 5, 0, 2, 8, 6, 1, 3, 4]
]
, TestLabel "composeLists tests" $ TestList
[ testComposeLists [0, 1, 2] [0, 1, 2] [0, 1, 2]
, testComposeLists [0, 1, 2] [1, 0, 2] [1, 0, 2]
, testComposeLists [3, 0, 2, 1] [0, 2, 1, 3] [3, 2, 0, 1]
]
]
main = runTestTT tests
| VictorDenisov/suffixarray | Tests/Tests.hs | gpl-2.0 | 3,422 | 0 | 14 | 983 | 1,152 | 654 | 498 | 56 | 1 |
module Util.UnionFind(
Element,
T,
find,
fromElement,
getW,
new,
new_,
putW,
union,
union_,
updateW
) where
import Control.Monad.Trans
import Data.IORef
import Data.Unique
import Monad(when,liftM)
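-- | A mutable union-find (disjoint-set) structure in 'IO': each element carries a
-- user-supplied weight @w@ that is combined on 'union'; 'find' performs path
-- compression and 'union' links the smaller set under the larger one (union by size).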
data Element w a = Element a {-# UNPACK #-} !Unique {-# UNPACK #-} !(IORef (Link w a))
data Link w a = Weight {-# UNPACK #-} !Int w | Next (Element w a)
type T = Element
new :: MonadIO m => w -> a -> m (Element w a)
new w x = liftIO $ do
r <- newIORef (Weight 1 w)
n <- newUnique
return $ Element x n r
new_ :: MonadIO m => a -> m (Element () a)
new_ x = new () x
find :: MonadIO m => Element w a -> m (Element w a)
find x@(Element a _ r) = liftIO $ do
e <- readIORef r
case e of
Weight _ _ -> return x
Next next -> do
last <- Util.UnionFind.find next
when (next /= last) $ writeIORef r (Next last)
return last
getW :: MonadIO m => Element w a -> m w
getW x = liftIO $ do
Element _ _ r <- find x
Weight _ w <- readIORef r
return w
updateW :: MonadIO m => (w -> w) -> Element w a -> m ()
updateW f x = liftIO $ do
Element _ _ r <- find x
modifyIORef r (\ (Weight s w) -> Weight s (f w))
putW :: MonadIO m => Element w a -> w -> m ()
putW e w = liftIO $ do
Element _ _ r <- find e
modifyIORef r (\ (Weight s _) -> Weight s w)
union :: MonadIO m => (w -> w -> w) -> Element w a -> Element w a -> m ()
union comb e1 e2 = liftIO $ do
e1'@(Element _ _ r1) <- find e1
e2'@(Element _ _ r2) <- find e2
when (r1 /= r2) $ do
Weight w1 x1 <- readIORef r1
Weight w2 x2 <- readIORef r2
if w1 <= w2 then do
writeIORef r1 (Next e2')
writeIORef r2 $! (Weight (w1 + w2) (comb x1 x2))
else do
writeIORef r1 $! (Weight (w1 + w2) (comb x1 x2))
writeIORef r2 (Next e1')
union_ :: MonadIO m => Element () a -> Element () a -> m ()
union_ x y = union (\_ _ -> ()) x y
fromElement :: Element w a -> a
fromElement (Element a _ _) = a
instance Eq (Element w a) where
Element _ x _ == Element _ y _ = x == y
Element _ x _ /= Element _ y _ = x /= y
instance Ord (Element w a) where
Element _ x _ `compare` Element _ y _ = x `compare` y
Element _ x _ <= Element _ y _ = x <= y
Element _ x _ >= Element _ y _ = x >= y
instance Show a => Show (Element w a) where
showsPrec n (Element x _ _) = showsPrec n x
| dec9ue/jhc_copygc | src/Util/UnionFind.hs | gpl-2.0 | 2,457 | 0 | 18 | 794 | 1,271 | 614 | 657 | 74 | 2 |
{-# LANGUAGE LambdaCase #-}
module Main where
import BruteForce (bruteForce, bruteForce')
import System.Console.GetOpt
import System.Environment (getArgs)
import System.TimeIt (timeIt)
-- | all possible flags that can be passed to the executable
data Flag = ShowVersion -- ^ displays the version number
| ShowHelp -- ^ displays a help dialog
| Count String -- ^ limits the program's output to string of specific length
| EnableUpperCase -- ^ enables upper case ASCII
| EnableLowerCase -- ^ enables lower case ASCII
| EnableNumbers -- ^ enables ASCII numerals (0-9)
| EnableAdditional String -- ^ enables additional specified characters
| TimeWord String -- ^ searches for a specific word and displays the elapsed time
deriving (Eq)
-- | brute's options with description
options :: [OptDescr Flag]
options =
[ Option ['v'] ["version"] (NoArg ShowVersion) "show version number"
, Option ['h'] ["help"] (NoArg ShowHelp) "show this dialog"
, Option ['u'] ["uppercase"] (NoArg EnableUpperCase) "enable upper case"
, Option ['l'] ["lowercase"] (NoArg EnableLowerCase) "enable lower case"
, Option ['n'] ["numbers"] (NoArg EnableNumbers) "enable numbers"
, Option ['a'] ["alphabet"] (ReqArg EnableAdditional "CHARS") "enable specific characters"
, Option ['c'] ["count"] (ReqArg Count "INT") "only search for strings of specific length"
, Option ['w'] ["word"] (ReqArg TimeWord "WORD") "search for specific word; display elapsed time"
]
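-- Illustrative invocations (a sketch added here, not present in the original
-- source): "brute -l -n -c 3" enumerates all strings of length 3 over
-- lower-case letters and digits, while "brute -u -w HI" searches for the word
-- "HI" over upper-case letters and reports the elapsed time.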
-- | parses command line options
bruteOpts :: [String] -- ^ an argument vector
-> IO ([Flag], [String]) -- ^ a tuple of all the parsed flags and additional args
bruteOpts argv =
case getOpt Permute options argv of
(o, n, []) -> return (o, n)
(_, _, er) -> ioError $ userError (concat er ++ helpDialog)
-- | the help/usage dialog for brute
helpDialog :: String
helpDialog = usageInfo header options
where header = "Usage: brute [OPTIONS...]"
-- | parses command line arguments and brute-forces according to them
main :: IO ()
main =
do (opts, _) <- bruteOpts =<< getArgs
case () of
() | ShowVersion `elem` opts -> putStrLn "brute 0.1.0.0"
| null opts || ShowHelp `elem` opts -> putStrLn helpDialog
| otherwise ->
let alphabet = buildAlphabet opts
isCount = \case Count{} -> True; _ -> False
isTimeWord = \case TimeWord{} -> True; _ -> False
in case filter isTimeWord opts of
(TimeWord str:_) ->
timeIt $
do mapM_ putStrLn $ takeWhile (/= str) $ bruteForce' alphabet (length str)
putStrLn $ "\nFound " ++ str ++ "!\n"
[] ->
mapM_ putStrLn $
case filter isCount opts of
(Count len:_) -> bruteForce' alphabet (read len)
[] -> bruteForce alphabet
where
buildAlphabet :: [Flag] -> String
buildAlphabet =
concatMap $
\case
EnableUpperCase -> ['A'..'Z']
EnableLowerCase -> ['a'..'z']
EnableNumbers -> ['0'..'9']
EnableAdditional cs -> cs
_ -> []
| kmein/brute | Main.hs | gpl-3.0 | 3,316 | 0 | 22 | 985 | 811 | 437 | 374 | 63 | 9 |
import Data.List
hasThreeWovels = (>=3) . length . filter (\c -> c `elem` "aeiou")
pairs s = zip s (tail s)
hasTwoInARow s = 1 <= length [(x,y) | (x, y) <- pairs s, x==y]
doesNotContainUnwantedPattern s = let unwanted_patterns = [('a', 'b'), ('c', 'd'), ('p', 'q'), ('x', 'y')]
unwanted = filter (\p -> p `elem` unwanted_patterns) (pairs s)
in null unwanted
isNice1 s = hasThreeVowels s && hasTwoInARow s && doesNotContainUnwantedPattern s
containsTwoPairs [] = False
containsTwoPairs (x:[]) = False
containsTwoPairs (x:y:xs) = ((x, y) `elem` (pairs xs)) || containsTwoPairs (y:xs)
repeatsWithOneInBetween [] = False
repeatsWithOneInBetween (x:[]) = False
repeatsWithOneInBetween (x:y:[]) = False
repeatsWithOneInBetween (x:y:xs) | x == (head xs) = True
| otherwise = repeatsWithOneInBetween (y:xs)
isNice2 s = containsTwoPairs s && repeatsWithOneInBetween s
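-- Quick sanity check (added for illustration; the sample strings are the
-- well-known ones from the puzzle statement and are assumed here):
niceExamples :: Bool
niceExamples = isNice1 "ugknbfddgicrmopn" && isNice2 "qjhvhtzxzqqjkmpb"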
main = do l <- fmap (lines) getContents
let niceLines1 = filter isNice1 l
print $ length niceLines1
let niceLines2 = filter isNice2 l
print $ length niceLines2
| plilja/adventofcode | 2015/day05/day05.hs | gpl-3.0 | 1,174 | 0 | 12 | 309 | 490 | 253 | 237 | 22 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import qualified Data.Vector.Unboxed as VU
import qualified Data.Vector as V
import qualified Data.HashMap.Strict as HM
import qualified Data.IntMap as IM
import qualified Data.List as L
import qualified System.Random as R
import Criterion.Main
import System.Random.TF.Instances
import System.Random.TF.Init
import System.Random.TF.Gen
import Control.DeepSeq
import Hammer.Graph
import qualified Data.Graph.Sparse as S
import Hammer.MicroGraph
import Hammer.VTK
import Hammer.VoxBox
import Hammer.VoxConn
import Linear.Vect
import Linear.Mat
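-- Descriptive note (added): generates n random edges of unit weight between
-- vertices drawn from [0 .. n `div` 10], so the graph is dense and may contain
-- parallel edges.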
genGraph :: (RandomGen g) => g -> Int -> [((Int, Int), Double)]
genGraph g n = let
(g1, g2) = split g
ns1 = randomRs (0, div n 10) g1
ns2 = randomRs (0, div n 10) g2
in take n $ zipWith (\a b -> ((a, b), 1.0) ) ns1 ns2
testmap :: Int -> HM.HashMap Int Int
testmap n =
L.foldl' (\m k -> HM.insert k 1 m) HM.empty [0..n]
testintmap :: Int -> IM.IntMap Int
testintmap n =
L.foldl' (\m k -> IM.insert k 1 m) IM.empty [0..n]
testuvec :: Int -> VU.Vector Int
testuvec n =
L.foldl' (\m k -> k `VU.cons` m) VU.empty [0..n]
testvec :: Int -> V.Vector Int
testvec n =
L.foldl' (\m k -> k `V.cons` m) V.empty [0..n]
instance NFData (Graph Int Double) where
rnf = rnf . graph
main :: IO ()
main = do
gen <- initTFGen
let
gs = genGraph gen 4000
gg = mkUniGraph [] gs
mm = S.graphToCRS gg
n = 10
f1 x = L.foldl' multGraph x $ replicate n x
f2 x = L.foldl' S.multMM x $ replicate n x
f3 x = L.foldl' S.multMMsmrt x $ replicate n x
defaultMain $
[ bench "expGraph" $ nf f1 gg
, bench "expMM" $ nf f2 mm
, bench "expMM-smrt" $ nf f3 mm
, bench "mclSparse" $ nf (\x -> runMCL defaultMCL x) gg
]
-- ++ benchmarkMicroVoxel vboxTest
{--
--}
vboxTest :: VoxBox Int
vboxTest = VoxBox { dimension = mkStdVoxBoxRange (VoxBoxDim 21 15 5)
, origin = VoxBoxOrigin 0 0 0
, spacing = VoxelDim 1 1 1
, grainID = g }
where
g :: VU.Vector Int
g = VU.fromList
[2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,3,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,3,3,3,3,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,3,3,2,3,3,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,3,3,2,3,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,3,2,3,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,3,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,3,3,3,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,3,4,4,4,4,4,4,4,4,4,4,4
,2,2,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4
,2,2,3,2,2,2,2,2,2,2,3,3,3,3,4,4,4,4,4,4,4
,2,2,3,2,2,2,2,2,2,2,3,4,4,4,4,4,4,4,4,4,4
,2,2,3,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4
,2,2,2,2,2,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4
,2,2,2,2,2,3,2,1,1,1,1,1,4,4,3,4,4,4,4,4,4
,2,2,2,2,2,3,2,1,2,2,4,1,4,4,3,4,4,4,4,4,4
,2,2,2,2,2,3,2,1,1,2,4,1,4,4,3,4,4,4,4,4,4
,2,2,2,2,2,3,2,2,2,2,4,4,4,4,3,4,4,4,4,4,4
,2,2,2,2,2,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4
,2,2,2,2,3,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,3,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,3,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,3,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,3,2,2,2,2,2,2,2,2,2,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,1,1,1,1,1,1,1,1,1,4,4,4,4,4,4,4,4,4,4,4
,2,1,2,2,2,2,2,2,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,1,2,1,1,1,1,2,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,1,2,2,2,2,1,2,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,1,2,1,1,1,1,2,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,1,2,1,2,2,2,2,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,1,1,1,2,1,1,1,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,1,2,1,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,1,2,2,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,1,1,1,1,1,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,1,4,4,4,4,4,4,4,4,4,4,4
,2,2,2,2,2,2,2,2,2,1,4,4,4,4,4,4,4,4,4,4,4
]
| lostbean/Hammer | profile/Benchmark.hs | gpl-3.0 | 6,348 | 0 | 13 | 1,592 | 5,564 | 3,606 | 1,958 | 140 | 1 |
import SimpleEuterpea
--cs Major 4/4
--Surprised that a bit of messing around got me my second section, which I thought would be the hardest to come up with
arp' c a t = arp (pitches c) a t
block' c t = blockChord (pitches c) [0,1,2] t
sarp = [s c0]
earp = [e c0]
qarp = [q c0]
hb = [h c0]
wb = [w c0]
alter = [5,4,5,4,5,4,5,4,5,4,5,4,5,4,5,4]
test = rpt 2 (block' plr1 wb + block' plr2 wb + block' plr3 wb + block' plr4 wb + block' plr5 wb + block' plr6 wb)
key = makeChord cs1 cs3 "major" -- (C# F G#)
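-- Note (added, an interpretation): judging by the chord spellings in the
-- inline comments, tp/tl/tr appear to be the neo-Riemannian P (parallel),
-- L (leading-tone exchange) and R (relative) transformations.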
plr1 = key --(C# F G#)(C# F G#) cs Major
plr2 = tp plr1 --(C# E G#)(C# E G#) cs minor 1
plr3 = tl plr2 --(C# E A) (A C# E) A Major
plr4 = tr plr3 --(C# F# A)(F# A C#) fs minor
plr5 = tp plr4 --(C# F# A#)(F# A# C#)fs Major
plr6 = tl plr5 --(C# F A#)(A# C# F) as minor
mKeep1 = tl (tp plr2) --(F G# C) f minor
mKeep2 = tp (tl plr2) --(A C E) a minor 3
mKeep3 = tp (tr plr2) --(E G B) e minor
mKeep4 = tl (tr plr2) --(G# B D#) gs minor
mKeep11 = tr (tp mKeep1) --(D F A) d minor
mKeep12 = tl (tr mKeep1) --(C D# G) c minor 2
mKeep31 = tp (tr mKeep3) --(G A# D) g minor
mKeep41 = tp (tr mKeep4) --(B D F#) b minor
mKeep42 = tl (tr mKeep4) --(D# F# A#) ds minor
--Into the unknown
--Just the working force
ph2p1a = arp' plr1 alter sarp
ph2p1b = rpt 4 (block' plr1 qarp)
ph2p1 = ph2p1a * ph2p1b
ph2p2a = arp' plr2 alter sarp
ph2p2b = rpt 4 (block' plr2 qarp)
ph2p2 = ph2p2a * ph2p2b
ph2p3a = arp' plr3 alter sarp
ph2p3b = rpt 4 (block' plr3 qarp)
ph2p3 = ph2p3a * ph2p3b
ph2p4a = arp' plr5 alter sarp
ph2p4b = rpt 4 (block' plr5 qarp)
ph2p4 = ph2p4a * ph2p4b
phrase2 = rpt 2 (ph2p1 + ph2p2) + ph2p3 + ph2p2 + ph2p3 + ph2p4
--Enlightenment | WSCU/JSEuterpea | Euterpea Examples/Transformations.hs | gpl-3.0 | 1,745 | 0 | 12 | 468 | 625 | 335 | 290 | 39 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Game.Grid.GridWorld
(
GridWorld (..),
GridEvent (..),
module Game.Grid.GridWorld.Camera,
module Game.Grid.GridWorld.CameraCommand,
module Game.Grid.GridWorld.Path,
module Game.Grid.GridWorld.SegmentArray,
module Game.Grid.GridWorld.Segment,
module Game.Grid.GridWorld.Node,
module Game.Grid.GridWorld.Turn,
) where
import MyPrelude
import Game
import Game.Grid.GridWorld.Camera
import Game.Grid.GridWorld.CameraCommand
import Game.Grid.GridWorld.Path
import Game.Grid.GridWorld.SegmentArray
import Game.Grid.GridWorld.Segment
import Game.Grid.GridWorld.Node
import Game.Grid.GridWorld.Turn
data GridWorld =
GridWorld
{
gridTick :: !Tick,
gridEvents :: [GridEvent],
gridCamera :: !Camera,
gridCameraCommands :: ![CameraCommand],
gridCameraCommandTick :: !Tick,
gridCameraCommandScale :: !Float,
gridCameraCommandCount :: !UInt,
-- physical objects:
--gridPaths :: [Path]
gridPath :: !Path
}
--------------------------------------------------------------------------------
--
-- | GridEvent
data GridEvent
--EventCameraCommandsComplete
instance World GridWorld GridEvent where
worldTick =
gridTick
worldTickModify grid f =
grid { gridTick = f $ gridTick grid }
worldAllEvents =
gridEvents
worldPushEvent grid e =
grid { gridEvents = gridEvents grid ++ [e] }
--------------------------------------------------------------------------------
--
| karamellpelle/grid | source/Game/Grid/GridWorld.hs | gpl-3.0 | 2,384 | 0 | 10 | 485 | 309 | 208 | 101 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Hedgehog
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
data Foo = Foo { bar :: Int, baz :: Char } deriving (Eq, Show)
newtype Trace = Trace [Foo] deriving (Eq, Show)
randomFoo :: Gen Foo
randomFoo = do
n <- Gen.filter (\x -> (42 <= x) && (x<100)) $ Gen.integral (Range.linear 1 1000)
c <- Gen.filter (`elem` ("?!" :: String)) Gen.ascii
return $ Foo n c
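-- Descriptive note (added): like 'randomFoo' but built from hand-rolled
-- rejection loops instead of 'Gen.filter', with a far stricter predicate
-- (the integral part only accepts 567). It feeds 'randomTraceND', whose
-- memory blow-up during shrinking is noted further down.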
randomFooND :: Gen Foo
randomFooND = do
n <- intInRange
c <- charInRange
return $ Foo n c
where
intInRange = do
n <- Gen.integral (Range.constant 0 100000)
if (567 <= n) && (n<568)
then return n
else intInRange
charInRange = do
c <- Gen.ascii
if c `elem` ("?!" :: String)
then return c
else charInRange
anyFoo :: Gen Foo
anyFoo = do
n <- Gen.integral (Range.linear 1 1000)
c <- Gen.ascii
return $ Foo n c
smallFoo :: Gen Foo
smallFoo = do
n <- Gen.integral (Range.constant 1 3)
c <- Gen.element "!abcd"
return $ Foo n c
randomTrace :: Gen Trace
randomTrace = Trace <$> Gen.list (Range.linear 1 1000) randomFoo
randomTraceND :: Gen Trace
randomTraceND = Trace <$> Gen.list (Range.linear 1 1000) randomFooND
randomTraceAnyFoo :: Gen Trace
randomTraceAnyFoo = Trace <$> Gen.list (Range.linear 1 1000) anyFoo
foosAreWrong :: Foo -> Bool
foosAreWrong (Foo n c) = 200 < n && c == 'a'
noTracesAreValid :: Property
noTracesAreValid = property $
-- Using 'randomTraceND' will probably eat up all the system's ram. Might
-- have to do with the huge number of shrinks that are generated!
forAll randomTraceND >>= \(Trace tr) -> assert (any foosAreWrong tr)
--------------------------------------------------------------------------------
-- Some record
--
-- Example taken from: https://www.youtube.com/watch?v=AIv_9T0xKEo
--------------------------------------------------------------------------------
data SomeRecord
= SomeRecord
{ someInt :: Int
, someList :: Trace
} deriving (Show, Eq)
arbitraryRecord :: Gen SomeRecord
arbitraryRecord = do
i <- Gen.integral (Range.linear 1 1000)
xs <- randomTraceAnyFoo
return $ SomeRecord i xs
-- where
-- arbitraryPair = (,) <$> arbitraryInt <*> arbitraryString
-- arbitraryInt = Gen.integral (Range.linear 1 1000)
-- arbitraryString = Gen.string (Range.linear 1 100) Gen.ascii
recordsAreEqual :: Property
recordsAreEqual = property $ do
r0 <- forAll arbitraryRecord
r1 <- forAll arbitraryRecord
r0 === r1
--------------------------------------------------------------------------------
--
--------------------------------------------------------------------------------
stringsAreWrong :: Property
stringsAreWrong = property $ do
xs <- forAll arbitraryString
xs === "hello"
where
arbitraryString = Gen.list (Range.constant 1 4) (Gen.element "abcdefghijklmn")
aSpecificChar :: Gen Char
aSpecificChar = do
c <- Gen.element "abxcd"
if c `elem` ("x" :: String)
then return c
else aSpecificChar
aFilteredchar :: Gen Char
aFilteredchar =
Gen.filter (`elem` ("x" :: String)) (Gen.element "ax")
charMustNotBeX :: Property
charMustNotBeX = property $ do
c <- forAll aFilteredchar
c /== 'x'
charsMustNotBeX :: Property
charsMustNotBeX = property $ do
c0 <- forAll aFilteredchar
c1 <- forAll aFilteredchar
assert (c0 /= 'x' || c1 /= 'x')
--------------------------------------------------------------------------------
-- Try to generate non-minimal counter examples
--------------------------------------------------------------------------------
notLargeOrBothNotEmpty :: Property
notLargeOrBothNotEmpty = property $ do
xs <- forAll randomIntLists
ys <- forAll randomIntLists
assert $ length xs < 10 && (xs /= [] && ys /=[])
where
randomIntLists = Gen.frequency
[ (1, Gen.list (Range.constant 0 1) randomInt)
, (10, Gen.list (Range.constant 1 100000) randomInt)
]
randomInt = Gen.integral (Range.constant 1 100)
-- I don't know whether this is a good example either, since hedgehog will
-- shrink towards the first element in the list.
main :: IO Bool
main = checkParallel $ Group "Test.Example"
[("Produce a minimal counter-example", notLargeOrBothNotEmpty)]
-- main :: IO Bool
-- main = checkParallel $ Group "Test.Example"
-- [ ("No traces are valid", noTracesAreValid)
-- , ("Records are equal", recordsAreEqual)
-- , ("Strings are wrong", stringsAreWrong)
-- , ("Chars must not be x", charsMustNotBeX)
-- , ("Produce a minimal counter-example", notLargeOrBothNotEmpty)
-- ]
| capitanbatata/sandbox | hedgehog-vs-qc/test/hedgehog/Main.hs | gpl-3.0 | 4,589 | 0 | 14 | 857 | 1,205 | 630 | 575 | 98 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TupleSections #-}
module Visuals where
import Prelude hiding (floor, mod, lines)
import LambdaDesigner.Op
import LambdaDesigner.ParsedOps
import LambdaDesigner.Lib
import Data.Char
import Data.IORef
import Data.List.Split
import Data.Maybe
import Debug.Trace
import Control.Lens
import Data.Matrix
import qualified Data.ByteString.Char8 as BS
import qualified Data.List as L
data VoteType = Movie | Effect deriving Eq
data VoteEffect = VoteEffect VoteType BS.ByteString BS.ByteString BS.ByteString deriving Eq
-- Utility
constnamevalfuncs :: [((Maybe (Tree BS.ByteString) -> Identity (Maybe (Tree BS.ByteString))) -> CHOP -> Identity CHOP, (Maybe (Tree Float) -> Identity (Maybe (Tree Float))) -> CHOP -> Identity CHOP)]
constnamevalfuncs =
zip
[ constantCHOPname0
, constantCHOPname1
, constantCHOPname2
, constantCHOPname3
, constantCHOPname4
, constantCHOPname5
, constantCHOPname6
, constantCHOPname7
, constantCHOPname8
, constantCHOPname9
, constantCHOPname10
, constantCHOPname11
, constantCHOPname12
, constantCHOPname13
, constantCHOPname14
, constantCHOPname15
, constantCHOPname16
, constantCHOPname17
, constantCHOPname18
, constantCHOPname19
, constantCHOPname20
, constantCHOPname21
, constantCHOPname22
, constantCHOPname23
, constantCHOPname24
, constantCHOPname25
, constantCHOPname26
, constantCHOPname27
, constantCHOPname28
, constantCHOPname29
]
[ constantCHOPvalue0
, constantCHOPvalue1
, constantCHOPvalue2
, constantCHOPvalue3
, constantCHOPvalue4
, constantCHOPvalue5
, constantCHOPvalue6
, constantCHOPvalue7
, constantCHOPvalue8
, constantCHOPvalue9
, constantCHOPvalue10
, constantCHOPvalue11
, constantCHOPvalue12
, constantCHOPvalue13
, constantCHOPvalue14
, constantCHOPvalue15
, constantCHOPvalue16
, constantCHOPvalue17
, constantCHOPvalue18
, constantCHOPvalue19
, constantCHOPvalue20
, constantCHOPvalue21
, constantCHOPvalue22
, constantCHOPvalue23
, constantCHOPvalue24
, constantCHOPvalue25
, constantCHOPvalue26
, constantCHOPvalue27
, constantCHOPvalue28
, constantCHOPvalue29
]
constnameval :: Int -> String -> Tree Float -> (CHOP -> CHOP)
constnameval i s f =
(\(n, v) -> (n ?~ str s) . (v ?~ f)) . flip (!!) i $ constnamevalfuncs
constnamesvals :: [(String, Tree Float)] -> Tree CHOP
constnamesvals namevals =
flip constantCHOP [] . foldl (.) id $
zipWith (\(s, f) (n, v) -> (n ?~ str s) . (v ?~ f)) namevals constnamevalfuncs
const1 = constantCHOP (constantCHOPvalue0 ?~ float 1) []
constx x = constantCHOP (constantCHOPvalue0 ?~ x) []
constxr x r = constnamesvals $ zip (repeat "a") (replicate r x)
selChans :: String -> Tree CHOP -> Tree CHOP
selChans n c = selectCHOP (selectCHOPchannames ?~ str n) [c]
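-- Descriptive comment (added): apply a CHOP transform to only the channels
-- matching the given name pattern, then splice the transformed channels back
-- over the untouched original via a Replace CHOP.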
overChans :: String -> (Tree CHOP -> Tree CHOP) -> Tree CHOP -> Tree CHOP
overChans n f c = replaceCHOP id [c, renameCHOP id . (:[selChans n c]) $ f $ selChans n c]
multchops :: [Tree CHOP] -> Tree CHOP
multchops cs = mathCHOP (mathCHOPchopop ?~ int 3) cs
-- Audio input
ain' m = mathCHOP (mathCHOPgain ?~ float m) [audiodeviceinCHOP id & resampleCHOP ((resampleCHOPtimeslice ?~ bool False) . (resampleCHOPmethod ?~ int 0) . (resampleCHOPrelative ?~ int 0) . (resampleCHOPend ?~ float 0.03)) . (:[])]
ain = ain' 1
atex = choptoTOP (choptoTOPchop ?~ ain)
aspect = audiospectrumCHOP id $ audiodeviceinCHOP id
aspecttex = choptoTOP (choptoTOPchop ?~ aspect)
analyze i = analyzeCHOP (analyzeCHOPfunction ?~ int i)
volume = analyze 6 ain
volc = chan0f volume
lowPass = audiofilterCHOP (audiofilterCHOPfilter ?~ int 0) . (:[])
lowv' = analyze 6 . lowPass . ain'
lowv = lowv' 4
lowvc' = chan0f . lowv'
lowvc = lowvc' 4
highPass = audiofilterCHOP (audiofilterCHOPfilter ?~ int 1) . (:[])
highv = analyze 6 $ highPass ain
highvc = chan0f highv
bandPass b = audiofilterCHOP ((audiofilterCHOPfilter ?~ int 2) . (audiofilterCHOPcutofflog ?~ (b !* float 4.5))) . (:[])
bandv b = analyze 6 $ bandPass b ain
bandvc = chan0f . bandv
mchan :: String -> Tree Float
mchan s = chanNamef s $ midiinmapCHOP id
mchop :: String -> Tree CHOP
mchop s = midiinmapCHOP id & selectCHOP (selectCHOPchannames ?~ str s) . (:[])
lmf h fing xyz = leapmotionCHOP id & selectCHOP (selectCHOPchannames ?~ str ("hand" ++ show h ++ "/finger" ++ show fing ++ ":t" ++ xyz)) . (:[])
lmf0 h fing = chan0f . lmf h fing
lmp h v = leapmotionCHOP id & selectCHOP (selectCHOPchannames ?~ str ("hand" ++ show h ++ "/palm:" ++ v)) . (:[])
lmp0 h = chan0f . lmp h
lms = leapmotionCHOP id & selectCHOP (selectCHOPchannames ?~ str "swipe0:tracking") . (:[])
lms0 = chan0f $ lms
cTSMod tf s = choptoSOP (
(choptoSOPattscope ?~ str "P") .
(choptoSOPchop ?~ (tf (soptoCHOP (soptoCHOPsop ?~ s))))
)
amult t = mathCHOP (
(mathCHOPalign ?~ int 7) .
(mathCHOPchopop ?~ int 3)
) [t, mathCHOP ((mathCHOPpostoff ?~ (float 1)) . (mathCHOPgain ?~ float 2)) [ain]]
acirc = cTSMod amult (circleSOP ((circleSOParc ?~ int 1) . (circleSOPtype ?~ int 2)) [])
asphere = cTSMod amult $ sphereSOP (sphereSOPtype ?~ int 3) []
mnoise t s = noiseCHOP ((noiseCHOPtype ?~ int 2) .
(noiseCHOPperiod ?~ float s) .
(noiseCHOPt . _2 ?~ t) .
(noiseCHOPtimeslice ?~ bool True)) []
mnoisec t s = chan0f $ mnoise t s
-- launchmapping = strobe (float 10 !* mchan "s1c")
-- $ scalexy (float 0.2 !* mchan "s1b")
-- $ fade (mchan "s1")
-- $ foldr (.) id (zipWith ($) (reverse effects) (reverse [1..(length effects)]))
-- $ switchT (float (-1) !+ (chan0f $ hold buttons buttons))
-- [adata (mchan "s1a" !* float 2), shapes (float 3 !+ scycle 1 3) (volc !* mchan "s2a") (mchan "s2b")]
-- effects = [ \n -> palettecycle' (passmchan n) neon seconds
-- , \n -> translatex' (passmchan n) (mchan ("s" ++ show n) !* seconds)
-- , \n -> paletterepeatT' (passmchan n) neon (float 20 !* mchan ("s" ++ show n))
-- , \n -> mirror' (passmchan n)
-- , \n -> mosaic' (passmchan n) (seconds !* float 20) (float 60)
-- , \n -> blur' (passmchan n) (float 56 !* mchan ("s" ++ show n))
-- ]
-- passmchan m = topPasses ?~ casti (mchan $ "b" ++ show m)
-- buttons = mathCHOP (mathCombChops ?~ int 1) $ mheld <$> [1..4]
-- mheld n = constC [float (fromIntegral n) !* (mchan $ "b" ++ show (n + 8))]
-- stresstest = fadeops (float 0.5) [
-- noisedisplace (float 10) $ mosaic (seconds !* float 20) (float 100) $
-- fade (float 0.96) $ blur (float 128) $ palettecycle neon seconds $
-- flocking (float 0.5, float 1) (float 10 !* volc),
-- blur (float 27) $
-- fade (float 0.99) $ flocking (float 0.4, float 1) (float 10 !* volc)]
-- -------------------
-- sphereNoise = geo' id $ outS asphere
-- -- Gens
adata m = tdata m atex
flocking (c, s) sp = tox0 "toxes/Visuals/flockingGpu.tox" [ ("Cohesion", ResolveP c)
, ("Separation", ResolveP s)
, ("Alignment", ResolveP c)
, ("Speed", ResolveP sp)
]
lines w s = frag "lines.frag" [("i_width", xV4 w), ("i_spacing", xV4 s)] []
-- metaballs mat = let wrapJust n x = Just $ chan0f x !* float n
-- mball n r tx ty = metaball' ((metaballRadius .~ (wrapJust n r, wrapJust n r, wrapJust n r)) .
-- (metaballCenter .~ (Just tx, Just ty, Nothing)))
-- noiset m = noiseC' ((chopTimeSlice ?~ bool True) .
-- (noiseCTranslate._1 ?~ seconds !* float 0.3) .
-- (noiseCTranslate._3 ?~ float (m * 3)) .
-- (noiseCAmplitude ?~ (float (m + 1)) !* volc) .
-- (noiseCChannels ?~ str "chan[1-3]"))
-- noisex = noiset 1
-- noisey = noiset 0
-- lagmodC = lag (float 0) (float 0.2)
-- in rendered . geo' (geoMat ?~ mat) .
-- outS $ mergeS [ mball 1 (lagmodC lowv) (chanf 0 noisex !+ float 0.2)
-- (chan0f noisey)
-- , mball 9 (lagmodC highv)
-- (chanf 1 noisex !+ float (-0.2))
-- (chanf 1 noisey !+ float 0.7)
-- , mball 4 (lagmodC $ bandv (float 0.5))
-- (chanf 2 noisex !+ float (-0.2))
-- (chanf 2 noisey !+ float (-0.7))
-- ]
-- movingSquiggly = geo' ((geoTranslate .~ (Just $ mnoisec (seconds !* float 20) 5, Just $ mnoisec (seconds !* float 20) 10, Just $ float 0)) .
-- (geoScale.each ?~ float 0.3) .
-- (geoMat ?~ constM' (constColor .~ (Just $ osin $ seconds, Just $ osin $ (seconds !* float 2), Just $ osin $ (seconds !* chan0f volume)))))
-- $ outS acirc
particlemover v a p s f = tox0 "toxes/Visuals/particlemover.tox" [ ("Palette", ResolveP $ palette p)
, ("Vmult", ResolveP v)
, ("Emitalpha", ResolveP a)
, ("Force", ResolveP f)
, ("Shape", ResolveP s)
]
shapes sides w s = frag "shapes.frag" [ ("i_size", xV4 s)
, ("i_width", xV4 w)
, ("i_sides", xV4 sides)
] []
sineT x s a = frag "sine.frag" [("i_time", xV4 $ x), ("i_scale", xV4 $ s), ("i_amplitude", xV4 $ a)] []
stringtheory t a = frag "string_theory.frag" [ ("i_time", xV4 t)
, ("i_angle", xV4 a)
, ("i_angle_delta", xV4 $ float 0.2)
, ("i_xoff", xV4 $ float 0)
] []
movie s f = moviefileinTOP ((moviefileinTOPplaymode ?~ int 1) .
(moviefileinTOPindex ?~ castf s) .
(moviefileinTOPresolutionw ?~ int 1920) .
(moviefileinTOPresolutionh ?~ int 1080) .
(moviefileinTOPfile ?~ (str $ "videos/" ++ f)))
-- geoT tr sc top sop = render [geo' ((geoTranslate .~ tr) . (geoScale .~ sc) . (geoMat ?~ topM top)) (outS sop)] cam
commandCode t = textTOP ((textTOPresolutionw ?~ int 1920)
. (textTOPresolutionh ?~ int 1080)
. (textTOPfontsizey ?~ float 16)
. (textTOPalignx ?~ int 2)
. (textTOPaligny ?~ int 0)
. (textTOPtext ?~ str t)
) []
-- -- Geometry generators and utilities
-- sinC i = waveC' (waveCNames ?~ str "rz") i $ osin (castf sampleIndex) !* float 360
-- scaleC i n = waveC' (waveCNames ?~ str "sx") i $ castf sampleIndex !* n
-- sidesTorus sides scale = torus' ((torusOrientation ?~ int 2) . (torusRows ?~ int 10) . (torusColumns ?~ sides) . (torusRadius .~ v2 (scale !* float 1) (scale !* float 0.5)))
-- lineLines width scale inst sop =
-- let
-- instances = casti inst !+ int 2
-- in
-- lineGeo (mathCHOP (mathCHOPgain ?~ scale) [ain]) (sinC instances) (scaleC instances $ float 0.1) (scaleC instances $ float 0.1) sop width instances wireframeM
-- lineGeo ty rz sx sy sop width instances mat =
-- let
-- ain = mathCHOP (mathCHOPgain ?~ float 10) [audioIn]
-- sgeo = instanceGeo' ((geoMat ?~ mat)) poses (outS $ sop)
-- poses = mergeC' (mergeCAlign ?~ int 7) [tx, ty', rz & renameC (str "rz"), sx & renameC (str "sx"), sy & renameC (str "sy")]
-- tx = waveC' (waveCNames ?~ str "tx") instances $ ((castf (sampleIndex !* casti width !* int 2)) !/ castf instances) !- width
-- ty' = ty & resampleC' ((resampleEnd ?~ instances) . (resampleRate ?~ instances)) False & renameC (str "ty")
-- centerCam t r = cam' ((camTranslate .~ t) . (camPivot .~ v3mult (float (-1)) t) . (camRotate .~ r))
-- volume = analyze (int 6) ain
-- volc = chan0f volume
-- in
-- render [sgeo] (centerCam (v3 (float 0) (float 0) (float 50)) emptyV3)
-- spiralGeo inst speed sop =
-- let
-- sgeo = instanceGeo' ((geoMat ?~ wireframeM) . (geoUniformScale ?~ float 0.1)) poses (outS $ sop)
-- instances = casti $ inst
-- poses = mergeC' (mergeCAlign ?~ int 7) [ty, tx, tz]
-- instanceIter n = (castf sampleIndex !+ (speed !* float n) !% castf instances)
-- tx = waveC' (waveCNames ?~ str "tx") instances $ ocos (castf sampleIndex !* float 60 !+ instanceIter 0.2) !* ((instanceIter 10 !* float 0.1) !+ float 4)
-- ty = waveC' (waveCNames ?~ str "ty") instances $ osin (castf sampleIndex !* float 60 !+ instanceIter 0.2) !* ((instanceIter 10 !* float 0.1) !+ float 4)
-- tz = waveC' (waveCNames ?~ str "tz") instances $ instanceIter 10 !* float 1 !- float 50
-- centerCam t r = cam' ((camTranslate .~ t) . (camPivot .~ v3mult (float (-1)) t) . (camRotate .~ r))
-- in
-- render [sgeo] (centerCam (v3 (float 0) (float 0) (float 5)) emptyV3)
-- -- vidIn
-- -- Effects
xV2 x = (Just x, Nothing)
xV3 x = (Just x, Nothing, Nothing)
xV4 x = (Just x, Nothing, Nothing, Nothing)
fade' f l o t = feedbackT t (\t' -> l $ compositeTOP (compositeTOPoperand ?~ int 0) [t, levelTOP (levelTOPopacity ?~ o) t']) f
fade = fade' id id
over' f l o t = feedbackT t (\t' -> l $ compositeTOP (compositeTOPoperand ?~ int 31) [t, levelTOP (levelTOPopacity ?~ o) t']) f
over = over' id id
brightness' f b = levelTOP (levelTOPbrightness1 ?~ b)
brightness = brightness' id
--blur
crosshatch' f = frag' f "crosshatch.frag" [] . (:[])
paletterepeatT' f p r top = frag' f "color_repeat.frag" [("i_repeat", xV4 r)] [top, palette p]
paletterepeatT = paletterepeatT' id
edgesc' f c t = compositeTOP (compositeTOPoperand ?~ int 0) [edgeTOP f t, levelTOP (levelTOPopacity ?~ c) t]
edgesc = edgesc' id
flowermod s = frag' id "flower_mod.frag" [("uSeconds", xV4 s)] . (:[])
hue h = hsvadjustTOP (hsvadjustTOPhueoffset ?~ h)
littleplanet' f = frag' f "little_planet.frag" [] . (:[])
littleplanet = littleplanet' id
lumidots' f = frag' f "lumidots.frag" [] . (:[])
lumidots = lumidots' id
mirror' f t = compositeTOP ((compositeTOPoperand ?~ int 0) . f) [flipTOP ((flipTOPflipx ?~ bool True) . (flipTOPflipy ?~ bool True)) t, t]
mirror = mirror' id
mosaic' f t s top = frag' f "mosaic.frag" [("uTime", xV4 t), ("uScale", xV4 s)] [top]
mosaic = mosaic' id
noisedisplace' f d top = frag' f "noise_displace.frag" [("uTime", xV4 seconds), ("uDisplacement", xV4 d)] [top]
noisedisplace = noisedisplace' id
palettecycle' f p@(Palette _ s) t = compositeTOP ((compositeTOPoperand ?~ int 27) . (compositeTOPformat ?~ int 4) . f) [cropTOP ((cropTOPcropleft ?~ s) . (cropTOPcropright ?~ s)) $ palette p, t]
palettecycle = palettecycle' id
palettemap' f p@(Palette _ o) t = frag' f "palette_map.frag" [("uOffset", xV4 o), ("uSamples", xV4 $ float 16)] [t, palette p]
palettemap = palettemap' id
repeatT' f s = transformscale' f s 2
repeatT = repeatT' id
repeatTxy' f r = repeatT' f (Just r, Just r)
repeatTxy = repeatTxy' id
rgbsplit' s top = frag' id "rgbsplit.frag" [("uFrames", xV4 s)] [top]
rotate' f r = transformTOP (transformTOProtate ?~ r)
rotate = rotate' id
sat s = hsvadjustTOP (hsvadjustTOPsaturationmult ?~ s)
scale' f s = transformscale' f s 1
scale = scale' id
scalexy' f s = scale' f (Just s, Just s)
scalexy = scalexy' id
strobe' f s top = frag' f "strobe.frag" [("uSpeed", xV4 s), ("uTime", xV4 seconds)] [top]
strobe = strobe' id
transformext' f e = transformTOP (f . (transformTOPextend ?~ (int e)))
transformscale' f s e = transformext' (f . (transformTOPs .~ ((!^ (float (-1))) <$> fst s, (!^ (float (-1))) <$> snd s) )) e
transformscale = transformscale' id
translate' f t = transformTOP (f . (transformTOPextend ?~ int 3) . (transformTOPt .~ t))
translate (a, b) = translate' id (Just a, Just b)
translatex' f x = translate' f $ (Just x, Just $ float 0)
translatex = translatex' id
translatey' f y = translate' f $ (Just $ float 0, Just y)
translatey = translatey' id
val v = hsvadjustTOP (hsvadjustTOPvaluemult ?~ v)
clone c (tx, ty) (sx, sy) top = frag' id "clone.frag" [("uClones", xV4 c), ("uTranslate", emptyV4 & _1 ?~ tx & _2 ?~ ty), ("uScale", emptyV4 & _1 ?~ sx & _2 ?~ sy)] [top]
-- -- combiners
addops = compositeTOP (compositeTOPoperand ?~ int 0)
fadeops f = switchTOP ((switchTOPblend ?~ bool True) . (switchTOPindex ?~ f))
multops = compositeTOP (compositeTOPoperand ?~ int 27)
overops = compositeTOP (compositeTOPoperand ?~ int 31)
triggercount f l = countCHOP ((countCHOPthreshold ?~ bool True) .
(countCHOPthreshup ?~ float 0.5) .
(countCHOPlimitmax ?~ float (fromIntegral $ l)) .
(countCHOPoutput ?~ int 1)
) f
triggerops f tops = switchTOP (switchTOPindex ?~ chan0f (triggercount f (length tops - 1))) tops
-- showwork g es =
-- let
-- scans = scanl (\g e -> e g) g es
-- in
-- addops $ (head $ reverse scans):(zipWith (\i -> (translate (float (-0.45) !+ (float 0.1 !* float i), float 0.4)) . scalexy (float 10)) [0..] (take (length es) $ scans))
-- -- led
-- (>>>) = flip (.)
-- constled f n = constC' (constCEndFrames ?~ int n) [f]
-- rgbled n r g b = mergeC [r & stretchC (int n), g & stretchC (int n), b & stretchC (int n)]
-- & shuffleC (int 6)
-- topToLed n =
-- crop' ((cropTop ?~ float 0.00008))
-- >>> topToC
-- >>> selectC' (selectCNames ?~ str "r g b")
-- >>> stretchC (int n)
-- >>> shuffleC (int 6)
-- bottomLedSplit n t = (topToLed n t, t)
-- ledPalette p = flip lookupC (topToC' (topToChopAName ?~ str "") $ palette (p. (switchTOPindex ?~ )))
-- palettes
data Palette = Palette [Color] (Tree Float)
data Color = Hex BS.ByteString | RGB Int Int Int
neon = Palette $ Hex <$> ["A9336B", "5F2F88", "CB673D", "87BB38"]
fire = Palette $ Hex . BS.pack . fmap toUpper <$> ["F07F13", "800909", "F27D0C", "FDCF58"]
buddhist = Palette $ Hex . BS.pack . fmap toUpper <$> ["0000FF", "FFFF00", "FF0000", "FFFFFF", "FF9800"]
tealcontrast = Palette [RGB 188 242 246, RGB 50 107 113, RGB 211 90 30, RGB 209 122 43, RGB 188 242 246]
purplish = Palette [RGB 150 110 100, RGB 223 143 67, RGB 76 73 100 , RGB 146 118 133, RGB 165 148 180]
sunset = Palette [RGB 185 117 19, RGB 228 187 108, RGB 251 162 1, RGB 255 243 201]
coolpink = Palette [RGB 215 40 26, RGB 157 60 121, RGB 179 83 154, RGB 187 59 98]
darkestred = Palette [RGB 153 7 17, RGB 97 6 11, RGB 49 7 8, RGB 13 7 7, RGB 189 5 13]
nature = Palette [RGB 63 124 7, RGB 201 121 66, RGB 213 101 23, RGB 177 201 80, RGB 180 207 127]
greenpurple = Palette [RGB 42 4 74, RGB 11 46 89, RGB 13 103 89, RGB 122 179 23, RGB 160 197 95]
tealblue = Palette [RGB 188 242 246, RGB 50 107 113, RGB 188 242 246, RGB 165 148 180]
flower = Palette $ Hex <$> ["000E00", "003D00", "E4A900", "FEDEEF", "C99CB8"]
bluepink = Palette $ Hex <$> ["F2C6F2", "F8F0F0", "A6D1FF", "3988E1", "4C8600"]
lime = Palette $ Hex <$> ["FF4274", "DCD549", "ABDFAB", "437432", "033B45"]
palettergb :: Palette -> Tree CHOP
palettergb (Palette colors sel) =
crossCHOP (crossCHOPcross ?~ (sel !% float 1) !* (float $ fromIntegral $ length colors)) $ constnamesvals . zip ["r", "g", "b"] . (fmap float) . colorToFloat <$> (last colors):colors
palettergba :: Palette -> Tree CHOP
palettergba (Palette colors sel) =
crossCHOP (crossCHOPcross ?~ (sel !% float 1) !* (float $ fromIntegral $ length colors)) $ constnamesvals . zip ["r", "g", "b", "a"] . (fmap float) . colorToFloat <$> (last colors):colors
paletteMapSel :: (Tree Float -> Tree Float) -> Palette -> Palette
paletteMapSel f (Palette p s) = Palette p (f s)
paletteClampColor :: Palette -> Palette
paletteClampColor (Palette cs s) = Palette cs (floor s !/ float (fromIntegral $ length cs))
-- ------------------------
-- lagmod l = chan0f . lag (float 0) (float l) . constC . (:[])
scr = (++) "scripts/Visuals/"
frag = frag' id
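-- Descriptive comment (added): builds a 1920x1080 GLSL TOP from a fragment
-- shader under scripts/Visuals/, binding the supplied (name, value) pairs to
-- uniform slots; note that only the first four uniform slots are wired up.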
frag' f s params =
glslmultiTOP (
f . (glslmultiTOPresolutionw ?~ int 1920) .
(glslmultiTOPresolutionh ?~ int 1080) .
(glslmultiTOPpixeldat ?~ textDAT (textDATfile ?~ str (scr s)) []) .
(glslmultiTOPoutputresolution ?~ int 9) .
(glslmultiTOPformat ?~ int 4) .
(foldl (.) id $ zipWith3 (\(pn, pv) n v -> (n ?~ str pn) . (v .~ pv)) params
[glslmultiTOPuniname0, glslmultiTOPuniname1, glslmultiTOPuniname2, glslmultiTOPuniname3]
[glslmultiTOPvalue0, glslmultiTOPvalue1, glslmultiTOPvalue2, glslmultiTOPvalue3]))
-- rendered g = renderTOP ((renderTOPlights .~ [lightCOMP id []]) . (renderTOPgeometry ?~ ResolveP g) . (renderTOPcamera ?~ ResolveP (cameraCOMP id [])))
tdata v t = frag "audio_data.frag" [("i_volume", xV4 v)] [t]
palette p = rampTOP ((rampTOPresolutionw ?~ int 128) . (rampTOPresolutionh ?~ int 1) . (rampTOPdat ?~ paletteDat p)) []
paletteDat (Palette colors sel) = tableDAT (textBlob ?~ BS.intercalate "\n" (BS.intercalate "\t" <$> ["r", "g", "b", "a", "pos"]:(zipWith (colorToBS (length colors)) [0..] colors)))
colorToBS :: Int -> Int -> Color -> [BS.ByteString]
colorToBS n i c = ((BS.pack . show) <$> colorToFloat c) ++ [BS.pack . show $ fromIntegral i / fromIntegral n]
colorToFloat :: Color -> [Float]
colorToFloat (Hex str) =
let
hexes = take 3 . chunksOf 2 . drop 1
todig = flip L.elemIndex "0123456789ABCDEF"
toIntList = fmap todig
toInt = foldl (\acc i -> acc * 16 + i) 0
toHex = fmap toInt . sequence . toIntList
hextorgb = fmap ((/ 256) . fromIntegral)
in
catMaybes $ (hextorgb <$> (toHex <$> hexes (show str))) ++ [Just 1]
colorToFloat (RGB r g b) = fmap ((/ 256) . fromIntegral) [r, g, b, 256]
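-- Illustrative sanity check (added, not part of the original module): a hex
-- colour unpacks into r/g/b normalised by 256 plus a constant alpha of 1.
colorToFloatExample :: Bool
colorToFloatExample = colorToFloat (Hex "FF0000") == [255 / 256, 0, 0, 1]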
-- ------------------------------
-- -- Randos
-- tidalmessages = oscinD' (oscInDAddressScope ?~ str "/vis") 7010 & selectD' ((selectDRStartI ?~ int 1) . (selectDREndI ?~ int 1) . (selectDCStartI ?~ int 5) . (selectDCEndI ?~ int 5))
-- tidalvis = tox "toxes/tidal_patterns.tox" [] (Just tidalmessages)
-- tidalvistop = tidalvis & nullC & chopToT' (chopToTopFormat ?~ int 2)
-- sensel
sensel = cplusplusCHOP ( (cplusplusCHOPplugin ?~ str "C:/Users/ulyssesp/Development/SenselCHOP/Release/CPlusPlusCHOPExample.dll")) []
senselchop = sensel & selectCHOP (selectCHOPchannames ?~ str "chan") . (:[]) & shuffleCHOP ((shuffleCHOPmethod ?~ int 8) . (shuffleCHOPnval ?~ int 185))
senseltop f =
choptoTOP (choptoTOPchop ?~ f senselchop)
& flipTOP (flipTOPflipy ?~ bool True)
& resolutionTOP ((resolutionTOPresolutionw ?~ int 1920) . (resolutionTOPresolutionh ?~ int 1080) . (resolutionTOPoutputresolution ?~ int 9))
& reorderTOP ((reorderTOPformat ?~ int 26) . (reorderTOPoutputalphachan ?~ int 0)) . (:[])
senseltouches = sensel & selectCHOP (selectCHOPchannames ?~ str "chan1") . (:[]) & deleteCHOP ((deleteCHOPdelsamples ?~ bool True)) . (:[])
gesture mchan = gestureCHOP id [sensel & selectCHOP (selectCHOPchannames ?~ str "chan") . (:[]) & shuffleCHOP (shuffleCHOPmethod ?~ int 1), mchop mchan] & shuffleCHOP ((shuffleCHOPmethod ?~ int 4) . (shuffleCHOPnval ?~ int 185) . (shuffleCHOPfirstsample ?~ bool True))
-- beats
data Beat =
Beat
{ beatpulse :: Tree CHOP
, bps :: Tree CHOP
}
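-- Descriptive comment (added; partly inferred from the operator chain): a
-- tap-tempo helper where 'i' is the tap trigger and 'g' scales the derived
-- tempo. Recent taps are trailed and analysed to estimate beats per second,
-- and that rate is re-integrated and thresholded into a clean beat pulse.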
tapbeat :: Tree Float -> Tree Float -> Beat
tapbeat i g =
let
beat = constantCHOP (constantCHOPvalue0 ?~ i) [] & logicCHOP (logicCHOPpreop ?~ int 5) . (:[])
beathold = holdCHOP id [speedCHOP id [const1, beat] & delayCHOP ((delayCHOPdelay ?~ float 1) . (delayCHOPdelayunit ?~ int 1)), beat] & nullCHOP (nullCHOPcooktype ?~ int 2)
beattrail =
trailCHOP ((trailCHOPwlength ?~ float 8) . (trailCHOPwlengthunit ?~ int 1) . (trailCHOPcapture ?~ int 1)) [beathold]
& deleteCHOP ((deleteCHOPdelsamples ?~ bool True) . (deleteCHOPcondition ?~ int 5) . (deleteCHOPinclvalue1 ?~ bool False)) . (:[])
bps = mathCHOP ((mathCHOPpostop ?~ int 5) . (mathCHOPgain ?~ g)) . (:[]) $ analyze 0 beattrail
beataccum = speedCHOP id [bps, beat]
finalbeat =
beataccum
& limitCHOP ((limitCHOPmax ?~ float 1) . (limitCHOPtype ?~ int 2) . (limitCHOPmin ?~ float 0))
& logicCHOP ((logicCHOPboundmax ?~ float 0.08) . (logicCHOPpreop ?~ int 5) . (logicCHOPconvert ?~ int 2)) . (:[])
in
Beat finalbeat bps
tapbeatm9 = tapbeat (mchan "b9") (float 2 !^ (floor $ (mchan "s1a" !- float 0.5) !* float 4))
tapbeatm9sec = beatseconds tapbeatm9
beatramp :: Beat -> Tree CHOP
beatramp (Beat beat bps) = speedCHOP (speedCHOPresetcondition ?~ int 2) [bps, beat]
beatxcount :: Float -> Tree CHOP -> Beat -> Tree CHOP
beatxcount x reset (Beat beat _) = countCHOP ((countCHOPoutput ?~ int 1) . (countCHOPlimitmax ?~ float (x - 1))) [beat, reset]
beatxpulse :: Float -> Tree CHOP -> Beat -> Tree CHOP
beatxpulse x reset = logicCHOP (logicCHOPpreop ?~ int 6) . (:[]) . beatxcount x reset
beatxramp :: Float -> Tree CHOP -> Beat -> Tree CHOP
beatxramp x reset beat@(Beat bpulse bps) = speedCHOP id [bps & mathCHOP (mathCHOPgain ?~ float (1/x)) . (:[]), beatxpulse x reset beat]
beatseconds :: Beat -> Tree Float
beatseconds b = seconds !* (chan0f $ bps b)
beatsecondschop :: Beat -> Tree CHOP
beatsecondschop b = speedCHOP id [bps b]
-- dj midi clock
midi = midiinCHOP (
(midiinCHOPid ?~ str "2") .
(midiinCHOPsimplified ?~ bool False) .
(midiinCHOPpulsename ?~ str "beat") .
(midiinCHOPchannel ?~ str "1-8") .
(midiinCHOPcontrolind ?~ str "1-16")
)
midisyncbeat = tapbeat (chan0f $ midi & selectCHOP (selectCHOPchannames ?~ str "beat") . (:[])) (float 2 !^ (floor $ (mchan "s1a" !- float 0.5) !* float 4))
-- DMX
data DMXColor = DMXColor (Tree CHOP)
data DMX =
COLORado (Tree CHOP) DMXColor
| GenericRGB DMXColor
| Laluce (Tree CHOP) DMXColor (Tree CHOP)
| Fill Int
| FillVal Int Float
| StormFX (Tree CHOP) (Tree CHOP) (Tree CHOP) (Tree CHOP)
dmxToChop :: DMX -> Tree CHOP
dmxToChop (COLORado dim (DMXColor color)) =
mergeCHOP id
[ dim
, color
, constnamesvals $ (, float 0) <$> ["color_macro", "strobe", "progs", "auto_speed", "dimmer_speed"]
]
dmxToChop (GenericRGB (DMXColor tch)) = tch
dmxToChop (Laluce dim (DMXColor color) white) =
mergeCHOP id
[ dim
, color
, white
, constnamesvals $ (, float 0) <$> ["hopping", "strobe", "macro"]
]
dmxToChop (Fill i) = constnamesvals $ zip (repeat "a") (replicate i (float 0))
dmxToChop (FillVal i f) = constnamesvals $ zip (repeat "a") (replicate i (float f))
dmxToChop (StormFX ledp ledstrobe lasers laserstrobe) =
mergeCHOP id
[ constnamesvals [("auto", float 1)]
, ledp
, constnamesvals [("auto_speed", float 1)]
, ledstrobe
, constnamesvals [("white", float 1)]
, lasers
, laserstrobe
, constnamesvals [("mled", float 0), ("mlaser", float 0)]
]
dmxColor = DMXColor $ constnamesvals [("r", float 0), ("g", float 0), ("b", float 0)]
laluce = Laluce (const1) dmxColor (constnamesvals [("w", float 0)])
colorado = COLORado (const1) dmxColor
stormfx =
StormFX
(constnamesvals [("led", float (160.0/255.0))])
(constnamesvals [("ledstrobe", float 0.5)])
(constnamesvals [("lasers", float (160.0/255.0))])
(constnamesvals [("laserstrobe", float 0.5)])
dmxColorRGB :: Tree Float -> Tree Float -> Tree Float -> DMXColor
dmxColorRGB r g b = DMXColor $ constnamesvals [("r", r), ("g", g), ("b", b)]
dimDmx :: Tree CHOP -> DMX -> DMX
dimDmx f (COLORado dim c) = COLORado (multchops [dim, f]) c
dimDmx f (Laluce dim c w) = Laluce (multchops [dim, f]) c w
dimDmx f (GenericRGB (DMXColor tch)) = GenericRGB (DMXColor $ multchops [tch, f])
dimDmx _ d = d
dmxMapColor :: (DMXColor -> DMXColor) -> DMX -> DMX
dmxMapColor f (COLORado dim dmxc) = COLORado dim (f dmxc)
dmxMapColor f (GenericRGB dmxc) = GenericRGB (f dmxc)
dmxMapColor f (Laluce dim dmxc w) = Laluce dim (f dmxc) w
dmxMapColor _ d = d
dmxColorMapR :: (Tree CHOP -> Tree CHOP) -> DMXColor -> DMXColor
dmxColorMapR f (DMXColor c) = DMXColor $ overChans "r" f c
dmxColorMapG :: (Tree CHOP -> Tree CHOP) -> DMXColor -> DMXColor
dmxColorMapG f (DMXColor c) = DMXColor $ overChans "g" f c
dmxColorMapB :: (Tree CHOP -> Tree CHOP) -> DMXColor -> DMXColor
dmxColorMapB f (DMXColor c) = DMXColor $ overChans "b" f c
dmxColorMapAll :: (Tree CHOP -> Tree CHOP) -> DMXColor -> DMXColor
dmxColorMapAll f (DMXColor c) = DMXColor $ overChans "r g b" f c
sendDmx :: [DMX] -> Tree CHOP
sendDmx = dmxoutCHOP (
(dmxoutCHOPinterface ?~ int 3) .
(dmxoutCHOPrate ?~ int 40) .
(dmxoutCHOPnetaddress ?~ str "10.7.224.159") .
(dmxoutCHOPlocaladdress ?~ str "10.7.224.158")) . mathCHOP (mathCHOPtorange2 ?~ float 255) . (:[]) . mergeCHOP id . fmap dmxToChop
paletteDmx :: Palette -> DMX -> DMX
paletteDmx p d = dmxMapColor (dmxColorMapAll (const $ palettergb p)) d
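-- Descriptive comment (added): spreads a single palette across a list of
-- fixtures by offsetting each fixture's palette position by its index, so
-- neighbouring fixtures sample adjacent colours.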
paletteDmxList :: Palette -> [DMX] -> [DMX]
paletteDmxList p@(Palette _ f) ds = zipWith (\i -> paletteDmx (paletteMapSel ((!+) (float $ fromIntegral i / fromIntegral (length ds))) p)) [0..] ds
-- DMX configs
homeDmx = [laluce]
bizarre =
[ colorado, Fill 23
, colorado, Fill 23
, colorado, Fill 23
, colorado, Fill 23
, colorado, Fill 23
, colorado, Fill 23
, colorado, Fill 23
, colorado, Fill 23
, colorado, Fill 23
]
bizarremessedup =
[ GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
]
holo =
[ GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, FillVal 1 0
, FillVal 1 1
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, GenericRGB dmxColor
, FillVal 1 0
, FillVal 1 1
] | ulyssesp/oscillare | src/Visuals.hs | gpl-3.0 | 30,638 | 0 | 18 | 7,780 | 9,188 | 4,853 | 4,335 | -1 | -1 |
{-
The Delve Programming Language
Copyright 2009 John Morrice
Distributed under the terms of the GNU General Public License v3, or ( at your option ) any later version.
This file is part of Delve.
Delve is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Delve is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Delve. If not, see <http://www.gnu.org/licenses/>.
-}
{-# OPTIONS -XTypeFamilies
-XEmptyDataDecls
-XMultiParamTypeClasses
-XRankNTypes
-XTypeSynonymInstances
-XFlexibleInstances
-XFlexibleContexts
-XUndecidableInstances #-}
-- A compiler from LocalCore to VMCode
-- A lot of the operations are very very inefficient
-- It would be better to pass around the continuation, than to add commands to the end of the program
module VMCompiler where
import Data.List.Stream as L
import Data.Word
import Data.Tuple.Select
import Data.Maybe
import Control.Monad.Stream as C
import Control.Applicative as A
import Backend
import Core
import Compiler
import LocalCore
import DMachineState as D
import UniqueVars hiding ( local )
import Util
-- TODO: Raise compiler errors when fed invalid code
-- TODO: Quantify exactly what counts as 'invalid code'
-- Any instruction with an empty path is invalid.
-- TODO: Run within VMVars -- need to assign and then immediately call 'Begin' blocks
-- This is best done at this stage, rather than earlier, as this is rather a large change of semantics, changing the meaning of the core languages ( which are similar )
-- The compiler instance for compiling VM code
data VMCompiler
instance Compiler VMCompiler where
data Source VMCompiler = LocalCore Word64 LCore
newtype Target VMCompiler = VMCode Code
compile ( LocalCore uniq lcore ) =
VMCode $ sel1 $ runRWS ( vm_core lcore ) ( ) uniq
type VMVars = SimpleVars
class Optimization o Expr => VMCode a o where
to_vm :: o -> a -> VMVars Code
-- instances for optimized LExecStmts
instance VMCode LStmt o => VMCode LExecStmt o where
to_vm o lexec =
case lexec of
LStmt stmt -> to_vm o stmt
LExecExpr lexec -> do
execs <- vm_exec lexec
return $ execs L.++ oppop o
-- a new type representing a simple block of code
newtype SimpleBlock = SimpleBlock LExecCore
-- compile a simple block of code
instance VMCode LExecStmt o => VMCode SimpleBlock o where
to_vm o ( SimpleBlock core ) = do
stmts <- tail_analyze_block o core
return $ PushLocal : stmts
-- popping, taking into account the optimization
instance Optimization None Expr where
oppop _ = A.empty
instance Optimization Local Expr where
oppop _ = A.pure PopLocal
instance Optimization Frame Expr where
oppop _ = A.pure PopFrame
instance VMCode s o => Backend o s VMVars Expr where
backend = to_vm
-- instances for optimized Stmts
-- These operations are highly inefficient
instance VMCode LStmt None where
to_vm o stmt =
let pops = [ ] in
-- compile a LStmt to Code
case stmt of
LStandalone cexp ->
to_vm none cexp
LSetHere sym eexp -> do
vm_code <- to_vm none eexp
return $ vm_code L.++ ( AssignLocal 0 [ ] sym : pops )
LSetLocal i path eexp -> do
vm_code <- to_vm none eexp
return $ vm_code L.++ ( AssignLocal i ( drop_lst path ) ( L.last path ) : pops )
LSetObject path eexp -> do
vm_code <- to_vm none eexp
return $ vm_code L.++ ( AssignObject ( drop_lst path ) ( L.last path ) : pops )
LBegin lcore ->
to_vm local $ SimpleBlock lcore
LCoreSpecial _ _ ->
error "Attempted to compile special data into bytecode!\nSpecial data sections are reserved for embedding Haskell into Delve."
instance VMCode LStmt Local where
to_vm o stmt =
let pops = oppop o in
-- compile a LStmt to Code
case stmt of
LStandalone cexp ->
to_vm local cexp
LSetHere sym eexp -> do
vm_code <- to_vm none eexp
return $ vm_code L.++ ( AssignLocal 0 [ ] sym : pops )
LSetLocal i path eexp -> do
vm_code <- to_vm none eexp
return $ vm_code L.++ ( AssignLocal i ( drop_lst path ) ( L.last path ) : pops )
LSetObject path eexp -> do
vm_code <- to_vm none eexp
return $ vm_code L.++ ( AssignObject ( drop_lst path ) ( L.last path ) : pops )
LBegin lcore -> do
exps <- to_vm local $ SimpleBlock lcore
return $ exps L.++ pops
instance VMCode LStmt Frame where
to_vm o stmt =
let pops = oppop o in
-- compile a LStmt to Code
case stmt of
LStandalone cexp ->
to_vm frame cexp
LSetHere sym eexp -> do
vm_code <- to_vm none eexp
return $ vm_code L.++ ( AssignLocal 0 [ ] sym : pops )
LSetLocal i path eexp -> do
vm_code <- to_vm none eexp
return $ vm_code L.++ ( AssignLocal i ( drop_lst path ) ( L.last path ) : pops )
LSetObject path eexp -> do
vm_code <- to_vm none eexp
return $ vm_code L.++ ( AssignObject ( drop_lst path ) ( L.last path ) : pops )
LBegin lcore ->
to_vm frame $ SimpleBlock lcore
embed_core :: VMCode LExecStmt o => ( LVar -> Code ) -> o -> LCoreExpr -> VMVars Code
embed_core c o cexp =
let pops = oppop o in
case cexp of
LApp e args ->
return $ push_args args L.++ c e
LCoreMatch lvar alts ->
case lvar of
LLocalVar i p ->
fmap ( return . MatchLocal i p ) $ vm_alts o alts
LObjVar p ->
fmap ( return . MatchObj p ) $ vm_alts o alts
LSimple s ->
return $ vm_simple s : pops
instance VMCode LCoreExpr None where
-- compile a LCoreExpr to Code
to_vm o c =
embed_core no_opt_call o c
instance VMCode LCoreExpr Local where
-- compile a LCoreExpr to Code
to_vm o c =
embed_core local_tail_call o c
instance VMCode LCoreExpr Frame where
-- compile a LCoreExpr to Code
to_vm o c =
embed_core frame_tail_call o c
-- compile LAlternatives into Alternatives
vm_alts :: VMCode LExecStmt o => o -> LAlternatives -> VMVars Alternatives
vm_alts o =
C.mapM $ vm_alt o
-- compile LCoreAlternative into Alternative
vm_alt :: VMCode LExecStmt o => o -> LCoreAlternative -> VMVars D.Alternative
vm_alt o ( sym , lcore ) = do
dcore <- to_vm o $ SimpleBlock lcore
return ( sym , dcore )
-- compile LCore to Code
vm_core :: LCore -> VMVars Code
vm_core = fmap L.concat . C.mapM ( to_vm none )
-- write the arguments to the argument stack
push_args :: [ LVar ] -> Code
push_args as = L.zipWith push_arg as [ 0 .. ]
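-- Illustrative note (added): the k-th variable in the list lands in argument
-- slot k, e.g. push_args [v0, v1] compiles to code pushing v0 into slot 0 and
-- v1 into slot 1.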
push_arg :: LVar -> Word8 -> Expr
push_arg v n =
case v of
LLocalVar i p ->
PushLocalArg i p n
LObjVar p ->
PushObjArg p n
-- There is duplication between a couple of backends here
-- But I have yet to separate the generation of the call code from these functions ( no_opt_call, local_tail_call, frame_tail_call )
-- Is it really needed?
no_opt_call v =
on_var v call_local call_object
local_tail_call v =
on_var v local_tail_call_local local_tail_call_obj
frame_tail_call v =
on_var v frame_tail_call_local frame_tail_call_obj
-- call a path from the local scope
call_local :: Int -> [ Symbol ] -> Code
call_local i path =
case path of
( _ :_ : _ ) ->
[ LoadLocal i ( drop_lst path ) , CallObj $ return $ L.last path , PopObject ]
( p : [ ] ) ->
[ CallLocal i path ]
-- local tail call a path from the local scope
local_tail_call_local :: Int -> [ Symbol ] -> Code
local_tail_call_local i path =
case path of
( _ : _ : _ ) ->
[ LoadLocal i ( drop_lst path ) , LocalTailCallObj True $ return $ L.last path ]
( p : [ ] ) ->
[ LocalTailCallLocal False i path ]
-- local tail call a path from the current object
local_tail_call_obj :: [ Symbol ] -> Code
local_tail_call_obj path =
case path of
( _ : _ : _ ) ->
[ LoadObj ( drop_lst path ) , LocalTailCallObj True $ return $ L.last path ]
( p : [ ] ) ->
[ LocalTailCallObj False path ]
-- frame tail call a path from the local scope
frame_tail_call_local :: Int -> [ Symbol ] -> Code
frame_tail_call_local i path =
case path of
( _ : _ : _ ) ->
[ LoadLocal i ( drop_lst path ) , FrameTailCallObj $ return $ L.last path ]
( p : [ ] ) ->
[ FrameTailCallLocal i path ]
-- frame tail call a path from the current object
frame_tail_call_obj :: [ Symbol ] -> Code
frame_tail_call_obj path =
case path of
( _ : _ : _ ) ->
[ LoadObj ( drop_lst path ) , FrameTailCallObj $ return $ L.last path ]
( p : [ ] ) ->
[ FrameTailCallObj path ]
-- call from an object
call_object :: [ Symbol ] -> Code
call_object =
return . CallObj
-- compile a LSimpleExpr to an Expr
vm_simple :: LSimpleExpr -> Expr
vm_simple ls =
case ls of
LReserved r ->
vm_reserved r
LLit l ->
vm_lit l
LVar v ->
vm_var v
-- compile a lit into an Expr
vm_lit :: Lit -> Expr
vm_lit l =
case l of
CoreSymbol s ->
NewSymbol s
CorePrim p ->
NewPrim p
-- compile reserved into an Expr
vm_reserved :: Reserved -> Expr
vm_reserved Self =
RefSelf
-- compile a var into an Expr
vm_var :: LVar -> Expr
vm_var v =
case v of
LLocalVar i path ->
RefLocal i path
LObjVar path ->
RefObj path
-- create a block for a bit of callable code ( Function or Method )
callable_block :: LExecCore -> [ Symbol ] -> VMVars Expr
callable_block lcore args =
fmap ( NewBlock . (:) PushLocal . (L.++) ( write_args args ) ) $ tail_analyze_block frame lcore
-- compile an LExecExpr to Code
vm_exec :: LExecExpr -> VMVars Code
vm_exec exec =
case exec of
LFunction args lcore ->
if not $ L.null lcore
then do
n <- new_name
cb <- callable_block lcore args
return $ cb : remember_whole_scope n
else return [ NewBlock [ ] ]
LMethod args lcore ->
if not $ L.null lcore
then do
n <- new_name
cb <- callable_block lcore args
return $ cb : remember_local_scope n
else return [ NewBlock [ ] ]
-- Write the arguments from the argument stack into the current local scope
write_args :: [ Symbol ] -> Code
write_args args = L.zipWith WriteArg [ 0 .. ] args
-- Assign a bit of code to the variable n, and bind it to the local scope and the current object
remember_whole_scope :: Symbol -> Code
remember_whole_scope n =
[ AssignLocal 0 [ ] n , RememberLocalLocal 0 [ n ] , RememberObjLocal 0 [ n ] ]
-- Assign a bit of code to the variable n, and bind it to only the local scope
remember_local_scope :: Symbol -> Code
remember_local_scope n =
[ AssignLocal 0 [ ] n , RememberLocalLocal 0 [ n ] ]
| elginer/Delve | src/VMCompiler.hs | gpl-3.0 | 11,764 | 0 | 19 | 3,587 | 2,871 | 1,428 | 1,443 | -1 | -1 |
module Shiny.Hardware.Serial where
import Shiny.Shiny
import Shiny.Hardware
import System.Hardware.Serialport
import Control.Concurrent (threadDelay)
import qualified Data.ByteString as B
import Data.IORef
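-- Descriptive comment (added): every refresh sends one serial write of three
-- bytes (r, g, b) per LED at 115200 baud; 'resetDisplay' pulses the DTR line,
-- which on typical USB-serial boards resets the attached microcontroller
-- (that reset behaviour is an assumption about the target hardware).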
mkSerialHardware :: FilePath -> Int -> IO (Hardware)
mkSerialHardware path size = do
port <- openSerial path (defaultSerialSettings {commSpeed = CS115200})
-- store a local copy of the display
ref <- newIORef (emptyDisplay size)
let toBytes (RGB r g b) = B.pack [r, g, b]
readDisplay = readIORef ref
writeDisplay disp = do
writeIORef ref disp
sent <- send port . B.concat . map toBytes $ disp
putStrLn ("Sent " ++ show sent ++ " bytes")
return ()
displaySize = return size
resetDisplay = do
setDTR port True
threadDelay (500*1000)
setDTR port False
threadDelay (500*1000)
return ()
return $ Hardware readDisplay writeDisplay displaySize resetDisplay | dhrosa/shiny | Shiny/Hardware/Serial.hs | gpl-3.0 | 1,002 | 0 | 16 | 269 | 309 | 153 | 156 | 26 | 1 |
module Robot where
import Linear
import Arm
data Robot = Robot
{ start :: V3 Float
, arms :: [Arm]
}
| dvolk/hobo | src/Robot.hs | gpl-3.0 | 119 | 0 | 9 | 39 | 37 | 23 | 14 | 6 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.DescribeTags
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes one or more of the tags for your EC2 resources.
--
-- For more information about tags, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html Tagging Your Resources> in the /Amazon Elastic Compute Cloud User Guide for Linux/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeTags.html>
module Network.AWS.EC2.DescribeTags
(
-- * Request
DescribeTags
-- ** Request constructor
, describeTags
-- ** Request lenses
, dtDryRun
, dtFilters
, dtMaxResults
, dtNextToken
-- * Response
, DescribeTagsResponse
-- ** Response constructor
, describeTagsResponse
-- ** Response lenses
, dtrNextToken
, dtrTags
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data DescribeTags = DescribeTags
{ _dtDryRun :: Maybe Bool
, _dtFilters :: List "Filter" Filter
, _dtMaxResults :: Maybe Int
, _dtNextToken :: Maybe Text
} deriving (Eq, Read, Show)
-- | 'DescribeTags' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dtDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'dtFilters' @::@ ['Filter']
--
-- * 'dtMaxResults' @::@ 'Maybe' 'Int'
--
-- * 'dtNextToken' @::@ 'Maybe' 'Text'
--
describeTags :: DescribeTags
describeTags = DescribeTags
{ _dtDryRun = Nothing
, _dtFilters = mempty
, _dtMaxResults = Nothing
, _dtNextToken = Nothing
}
dtDryRun :: Lens' DescribeTags (Maybe Bool)
dtDryRun = lens _dtDryRun (\s a -> s { _dtDryRun = a })
-- | One or more filters.
--
-- 'key' - The tag key.
--
-- 'resource-id' - The resource ID.
--
-- 'resource-type' - The resource type ('customer-gateway' | 'dhcp-options' | 'image'
-- | 'instance' | 'internet-gateway' | 'network-acl' | 'network-interface' | 'reserved-instances' | 'route-table' | 'security-group' | 'snapshot' | 'spot-instances-request' | 'subnet'
-- | 'volume' | 'vpc' | 'vpn-connection' | 'vpn-gateway').
--
-- 'value' - The tag value.
--
--
dtFilters :: Lens' DescribeTags [Filter]
dtFilters = lens _dtFilters (\s a -> s { _dtFilters = a }) . _List
-- | The maximum number of items to return for this call. The call also returns a
-- token that you can specify in a subsequent call to get the next set of
-- results. If the value is greater than 1000, we return only 1000 items.
dtMaxResults :: Lens' DescribeTags (Maybe Int)
dtMaxResults = lens _dtMaxResults (\s a -> s { _dtMaxResults = a })
-- | The token for the next set of items to return. (You received this token from
-- a prior call.)
dtNextToken :: Lens' DescribeTags (Maybe Text)
dtNextToken = lens _dtNextToken (\s a -> s { _dtNextToken = a })
data DescribeTagsResponse = DescribeTagsResponse
{ _dtrNextToken :: Maybe Text
, _dtrTags :: List "item" TagDescription
} deriving (Eq, Read, Show)
-- | 'DescribeTagsResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dtrNextToken' @::@ 'Maybe' 'Text'
--
-- * 'dtrTags' @::@ ['TagDescription']
--
describeTagsResponse :: DescribeTagsResponse
describeTagsResponse = DescribeTagsResponse
{ _dtrTags = mempty
, _dtrNextToken = Nothing
}
-- | The token to use when requesting the next set of items. If there are no
-- additional items to return, the string is empty.
dtrNextToken :: Lens' DescribeTagsResponse (Maybe Text)
dtrNextToken = lens _dtrNextToken (\s a -> s { _dtrNextToken = a })
-- | A list of tags.
dtrTags :: Lens' DescribeTagsResponse [TagDescription]
dtrTags = lens _dtrTags (\s a -> s { _dtrTags = a }) . _List
instance ToPath DescribeTags where
toPath = const "/"
instance ToQuery DescribeTags where
toQuery DescribeTags{..} = mconcat
[ "DryRun" =? _dtDryRun
, "Filter" `toQueryList` _dtFilters
, "MaxResults" =? _dtMaxResults
, "NextToken" =? _dtNextToken
]
instance ToHeaders DescribeTags
instance AWSRequest DescribeTags where
type Sv DescribeTags = EC2
type Rs DescribeTags = DescribeTagsResponse
request = post "DescribeTags"
response = xmlResponse
instance FromXML DescribeTagsResponse where
parseXML x = DescribeTagsResponse
<$> x .@? "nextToken"
<*> x .@? "tagSet" .!@ mempty
instance AWSPager DescribeTags where
page rq rs
| stop (rs ^. dtrNextToken) = Nothing
| otherwise = (\x -> rq & dtNextToken ?~ x)
<$> (rs ^. dtrNextToken)
| dysinger/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeTags.hs | mpl-2.0 | 5,543 | 0 | 11 | 1,233 | 794 | 475 | 319 | 81 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.DeregisterRdsDbInstance
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deregisters an Amazon RDS instance.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack, or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_DeregisterRdsDbInstance.html>
module Network.AWS.OpsWorks.DeregisterRdsDbInstance
(
-- * Request
DeregisterRdsDbInstance
-- ** Request constructor
, deregisterRdsDbInstance
-- ** Request lenses
, drdiRdsDbInstanceArn
-- * Response
, DeregisterRdsDbInstanceResponse
-- ** Response constructor
, deregisterRdsDbInstanceResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
newtype DeregisterRdsDbInstance = DeregisterRdsDbInstance
{ _drdiRdsDbInstanceArn :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DeregisterRdsDbInstance' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'drdiRdsDbInstanceArn' @::@ 'Text'
--
deregisterRdsDbInstance :: Text -- ^ 'drdiRdsDbInstanceArn'
-> DeregisterRdsDbInstance
deregisterRdsDbInstance p1 = DeregisterRdsDbInstance
{ _drdiRdsDbInstanceArn = p1
}
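-- Example (editor's sketch; the ARN shown is hypothetical):
--
-- > deregisterRdsDbInstance "arn:aws:rds:us-east-1:123456789012:db:mydb"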
-- | The Amazon RDS instance's ARN.
drdiRdsDbInstanceArn :: Lens' DeregisterRdsDbInstance Text
drdiRdsDbInstanceArn =
lens _drdiRdsDbInstanceArn (\s a -> s { _drdiRdsDbInstanceArn = a })
data DeregisterRdsDbInstanceResponse = DeregisterRdsDbInstanceResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeregisterRdsDbInstanceResponse' constructor.
deregisterRdsDbInstanceResponse :: DeregisterRdsDbInstanceResponse
deregisterRdsDbInstanceResponse = DeregisterRdsDbInstanceResponse
instance ToPath DeregisterRdsDbInstance where
toPath = const "/"
instance ToQuery DeregisterRdsDbInstance where
toQuery = const mempty
instance ToHeaders DeregisterRdsDbInstance
instance ToJSON DeregisterRdsDbInstance where
toJSON DeregisterRdsDbInstance{..} = object
[ "RdsDbInstanceArn" .= _drdiRdsDbInstanceArn
]
instance AWSRequest DeregisterRdsDbInstance where
type Sv DeregisterRdsDbInstance = OpsWorks
type Rs DeregisterRdsDbInstance = DeregisterRdsDbInstanceResponse
request = post "DeregisterRdsDbInstance"
response = nullResponse DeregisterRdsDbInstanceResponse
| dysinger/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/DeregisterRdsDbInstance.hs | mpl-2.0 | 3,613 | 0 | 9 | 685 | 349 | 214 | 135 | 48 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Routers.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified Router resource.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.routers.delete@.
module Network.Google.Resource.Compute.Routers.Delete
(
-- * REST Resource
RoutersDeleteResource
-- * Creating a Request
, routersDelete
, RoutersDelete
-- * Request Lenses
, rddProject
, rddRouter
, rddRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.routers.delete@ method which the
-- 'RoutersDelete' request conforms to.
type RoutersDeleteResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"routers" :>
Capture "router" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Operation
-- | Deletes the specified Router resource.
--
-- /See:/ 'routersDelete' smart constructor.
data RoutersDelete = RoutersDelete'
{ _rddProject :: !Text
, _rddRouter :: !Text
, _rddRegion :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RoutersDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rddProject'
--
-- * 'rddRouter'
--
-- * 'rddRegion'
routersDelete
:: Text -- ^ 'rddProject'
-> Text -- ^ 'rddRouter'
-> Text -- ^ 'rddRegion'
-> RoutersDelete
routersDelete pRddProject_ pRddRouter_ pRddRegion_ =
RoutersDelete'
{ _rddProject = pRddProject_
, _rddRouter = pRddRouter_
, _rddRegion = pRddRegion_
}
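-- Example (editor's sketch; the project, router and region names are hypothetical):
--
-- > routersDelete "my-project" "my-router" "us-central1"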
-- | Project ID for this request.
rddProject :: Lens' RoutersDelete Text
rddProject
= lens _rddProject (\ s a -> s{_rddProject = a})
-- | Name of the Router resource to delete.
rddRouter :: Lens' RoutersDelete Text
rddRouter
= lens _rddRouter (\ s a -> s{_rddRouter = a})
-- | Name of the region for this request.
rddRegion :: Lens' RoutersDelete Text
rddRegion
= lens _rddRegion (\ s a -> s{_rddRegion = a})
instance GoogleRequest RoutersDelete where
type Rs RoutersDelete = Operation
type Scopes RoutersDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient RoutersDelete'{..}
= go _rddProject _rddRegion _rddRouter (Just AltJSON)
computeService
where go
= buildClient (Proxy :: Proxy RoutersDeleteResource)
mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/Routers/Delete.hs | mpl-2.0 | 3,439 | 0 | 16 | 834 | 463 | 276 | 187 | 73 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Dataproc.Projects.Regions.Clusters.GetIAMPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the access control policy for a resource. Returns an empty policy
-- if the resource exists and does not have a policy set.
--
-- /See:/ <https://cloud.google.com/dataproc/ Cloud Dataproc API Reference> for @dataproc.projects.regions.clusters.getIamPolicy@.
module Network.Google.Resource.Dataproc.Projects.Regions.Clusters.GetIAMPolicy
(
-- * REST Resource
ProjectsRegionsClustersGetIAMPolicyResource
-- * Creating a Request
, projectsRegionsClustersGetIAMPolicy
, ProjectsRegionsClustersGetIAMPolicy
-- * Request Lenses
, prcgipXgafv
, prcgipUploadProtocol
, prcgipAccessToken
, prcgipUploadType
, prcgipPayload
, prcgipResource
, prcgipCallback
) where
import Network.Google.Dataproc.Types
import Network.Google.Prelude
-- | A resource alias for @dataproc.projects.regions.clusters.getIamPolicy@ method which the
-- 'ProjectsRegionsClustersGetIAMPolicy' request conforms to.
type ProjectsRegionsClustersGetIAMPolicyResource =
"v1" :>
CaptureMode "resource" "getIamPolicy" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] GetIAMPolicyRequest :>
Post '[JSON] Policy
-- | Gets the access control policy for a resource. Returns an empty policy
-- if the resource exists and does not have a policy set.
--
-- /See:/ 'projectsRegionsClustersGetIAMPolicy' smart constructor.
data ProjectsRegionsClustersGetIAMPolicy =
ProjectsRegionsClustersGetIAMPolicy'
{ _prcgipXgafv :: !(Maybe Xgafv)
, _prcgipUploadProtocol :: !(Maybe Text)
, _prcgipAccessToken :: !(Maybe Text)
, _prcgipUploadType :: !(Maybe Text)
, _prcgipPayload :: !GetIAMPolicyRequest
, _prcgipResource :: !Text
, _prcgipCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsRegionsClustersGetIAMPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prcgipXgafv'
--
-- * 'prcgipUploadProtocol'
--
-- * 'prcgipAccessToken'
--
-- * 'prcgipUploadType'
--
-- * 'prcgipPayload'
--
-- * 'prcgipResource'
--
-- * 'prcgipCallback'
projectsRegionsClustersGetIAMPolicy
:: GetIAMPolicyRequest -- ^ 'prcgipPayload'
-> Text -- ^ 'prcgipResource'
-> ProjectsRegionsClustersGetIAMPolicy
projectsRegionsClustersGetIAMPolicy pPrcgipPayload_ pPrcgipResource_ =
ProjectsRegionsClustersGetIAMPolicy'
{ _prcgipXgafv = Nothing
, _prcgipUploadProtocol = Nothing
, _prcgipAccessToken = Nothing
, _prcgipUploadType = Nothing
, _prcgipPayload = pPrcgipPayload_
, _prcgipResource = pPrcgipResource_
, _prcgipCallback = Nothing
}
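-- Example (editor's sketch): @req@ stands for a 'GetIAMPolicyRequest' value built
-- from 'Network.Google.Dataproc.Types'; the resource path shown is hypothetical.
--
-- > projectsRegionsClustersGetIAMPolicy req
-- >   "projects/my-project/regions/us-central1/clusters/my-cluster"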
-- | V1 error format.
prcgipXgafv :: Lens' ProjectsRegionsClustersGetIAMPolicy (Maybe Xgafv)
prcgipXgafv
= lens _prcgipXgafv (\ s a -> s{_prcgipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
prcgipUploadProtocol :: Lens' ProjectsRegionsClustersGetIAMPolicy (Maybe Text)
prcgipUploadProtocol
= lens _prcgipUploadProtocol
(\ s a -> s{_prcgipUploadProtocol = a})
-- | OAuth access token.
prcgipAccessToken :: Lens' ProjectsRegionsClustersGetIAMPolicy (Maybe Text)
prcgipAccessToken
= lens _prcgipAccessToken
(\ s a -> s{_prcgipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
prcgipUploadType :: Lens' ProjectsRegionsClustersGetIAMPolicy (Maybe Text)
prcgipUploadType
= lens _prcgipUploadType
(\ s a -> s{_prcgipUploadType = a})
-- | Multipart request metadata.
prcgipPayload :: Lens' ProjectsRegionsClustersGetIAMPolicy GetIAMPolicyRequest
prcgipPayload
= lens _prcgipPayload
(\ s a -> s{_prcgipPayload = a})
-- | REQUIRED: The resource for which the policy is being requested. See the
-- operation documentation for the appropriate value for this field.
prcgipResource :: Lens' ProjectsRegionsClustersGetIAMPolicy Text
prcgipResource
= lens _prcgipResource
(\ s a -> s{_prcgipResource = a})
-- | JSONP
prcgipCallback :: Lens' ProjectsRegionsClustersGetIAMPolicy (Maybe Text)
prcgipCallback
= lens _prcgipCallback
(\ s a -> s{_prcgipCallback = a})
instance GoogleRequest
ProjectsRegionsClustersGetIAMPolicy
where
type Rs ProjectsRegionsClustersGetIAMPolicy = Policy
type Scopes ProjectsRegionsClustersGetIAMPolicy =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsRegionsClustersGetIAMPolicy'{..}
= go _prcgipResource _prcgipXgafv
_prcgipUploadProtocol
_prcgipAccessToken
_prcgipUploadType
_prcgipCallback
(Just AltJSON)
_prcgipPayload
dataprocService
where go
= buildClient
(Proxy ::
Proxy ProjectsRegionsClustersGetIAMPolicyResource)
mempty
| brendanhay/gogol | gogol-dataproc/gen/Network/Google/Resource/Dataproc/Projects/Regions/Clusters/GetIAMPolicy.hs | mpl-2.0 | 6,021 | 0 | 16 | 1,291 | 781 | 457 | 324 | 121 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Arbitrary instances and data types for use in test suites.
module Network.Haskoin.Test.Stratum
( -- * Arbitrary Data
ReqRes(..)
) where
import Control.Applicative
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Gen
import Data.Aeson.Types hiding (Error)
import Data.Text (Text)
import qualified Data.Text as T
import Network.JsonRpc
import Network.Haskoin.Test.Crypto
import Network.Haskoin.Test.Transaction
import Network.Haskoin.Transaction.Types
import Network.Haskoin.Stratum
-- | A pair of a request and its corresponding response.
-- Id and version should match.
data ReqRes q r = ReqRes !(Request q) !(Response r)
deriving (Show, Eq)
instance Arbitrary (ReqRes Value Value) where
arbitrary = do
rq <- arbitrary
rs <- arbitrary
let rs' = rs { getResId = getReqId rq, getResVer = getReqVer rq }
return $ ReqRes rq rs'
instance Arbitrary Text where
arbitrary = T.pack <$> arbitrary
instance Arbitrary Ver where
arbitrary = elements [V1, V2]
instance (Arbitrary q, ToRequest q) => Arbitrary (Request q) where
arbitrary = do
q <- arbitrary
v <- arbitrary
let m = requestMethod q
Request v m q <$> arbitrary
instance (Arbitrary n, ToNotif n) => Arbitrary (Notif n) where
arbitrary = do
n <- arbitrary
v <- arbitrary
let m = notifMethod n
return $ Notif v m n
instance Arbitrary r => Arbitrary (Response r) where
arbitrary = Response <$> arbitrary <*> arbitrary <*> arbitrary
instance Arbitrary ErrorObj where
arbitrary = ErrorObj <$> arbitrary <*> arbitrary <*> arbitrary
<*> arbitrary <*> arbitrary
instance ( Arbitrary q, Arbitrary n, Arbitrary r
, ToRequest q, ToNotif n, ToJSON r )
=> Arbitrary (Message q n r)
where
arbitrary = oneof [ MsgRequest <$> arbitrary
, MsgNotif <$> arbitrary
, MsgResponse <$> arbitrary
, MsgError <$> arbitrary ]
instance Arbitrary Id where
arbitrary = oneof [IdInt <$> arbitrary, IdTxt <$> arbitrary]
instance Arbitrary Value where
arbitrary = resize 10 $ oneof [val, lsn, objn] where
val = oneof [ toJSON <$> (arbitrary :: Gen String)
, toJSON <$> (arbitrary :: Gen Int)
, toJSON <$> (arbitrary :: Gen Double)
, toJSON <$> (arbitrary :: Gen Bool) ]
ls = toJSON <$> listOf val
obj = object . map (\(t, v) -> (T.pack t, v)) <$> listOf ps
ps = (,) <$> (arbitrary :: Gen String) <*> oneof [val, ls]
lsn = toJSON <$> listOf (oneof [ls, obj, val])
objn = object . map (\(t, v) -> (T.pack t, v)) <$> listOf psn
psn = (,) <$> (arbitrary :: Gen String) <*> oneof [val, ls, obj]
--
-- Stratum
--
instance Arbitrary StratumTxInfo where
arbitrary = StratumTxInfo <$> arbitrary <*> arbitrary
instance Arbitrary StratumCoin where
arbitrary = do
h <- arbitrary
ArbitraryOutPoint o@(OutPoint i _) <- arbitrary
let t = StratumTxInfo h i
StratumCoin o t <$> arbitrary
instance Arbitrary StratumRequest where
arbitrary = oneof
[ StratumReqVersion <$> arbitrary <*> arbitrary
, StratumReqHistory <$> aAddr
, StratumReqBalance <$> aAddr
, StratumReqUnspent <$> aAddr
, StratumReqTx <$> arbitrary
, StratumBcastTx <$> aTx
, StratumSubAddr <$> aAddr
]
where
aAddr = arbitrary >>= \(ArbitraryAddress a) -> return a
aTx = arbitrary >>= \(ArbitraryTx tx) -> return tx
instance Arbitrary StratumNotif where
arbitrary =
StratumNotifAddr <$> aAddr <*> arbitrary
where
aAddr = arbitrary >>= \(ArbitraryAddress a) -> return a
instance Arbitrary StratumResult where
arbitrary = oneof
[ StratumSrvVersion <$> arbitrary
, StratumAddrHistory <$> arbitrary
, StratumAddrBalance <$> arbitrary <*> arbitrary
, StratumAddrUnspent <$> arbitrary
, StratumAddrStatus <$> arbitrary
, StratumTx <$> (arbitrary >>= \(ArbitraryTx tx) -> return tx)
, StratumBcastId <$> arbitrary
]
instance Arbitrary (ReqRes StratumRequest StratumResult) where
arbitrary = do
(q, s) <- oneof
[ (,) <$> (StratumReqVersion <$> arbitrary <*> arbitrary)
<*> (StratumSrvVersion <$> arbitrary)
, (,) <$> (StratumReqHistory <$> aAddr)
<*> (StratumAddrHistory <$> arbitrary)
, (,) <$> (StratumReqBalance <$> aAddr)
<*> (StratumAddrBalance <$> arbitrary <*> arbitrary)
, (,) <$> (StratumReqUnspent <$> aAddr)
<*> (StratumAddrUnspent <$> arbitrary)
, (,) <$> (StratumReqTx <$> arbitrary)
<*> (StratumTx <$> aTx)
, (,) <$> (StratumBcastTx <$> aTx)
<*> (StratumBcastId <$> arbitrary)
]
i <- arbitrary
ver <- arbitrary
return $ ReqRes (Request ver (requestMethod q) q i) (Response ver s i)
where
aAddr = arbitrary >>= \(ArbitraryAddress a) -> return a
aTx = arbitrary >>= \(ArbitraryTx tx) -> return tx
| nuttycom/haskoin | Network/Haskoin/Test/Stratum.hs | unlicense | 5,391 | 0 | 15 | 1,626 | 1,562 | 838 | 724 | 122 | 0 |
-- Zipping exercises
myZip :: [a] -> [b] -> [(a, b)]
myZip [] _ = []
myZip _ [] = []
myZip (x:xs) (y:ys) = (x,y):myZip xs ys
myZipWith :: (a -> b -> c) -> [a] -> [b] -> [c]
myZipWith _ [] _ = []
myZipWith _ _ [] = []
myZipWith f (x:xs) (y:ys) = (f x y):myZipWith f xs ys
myZip' :: [a] -> [b] -> [(a, b)]
myZip' = myZipWith (,)
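-- Examples (editor's sketch):
--
-- >>> myZip [1,2,3] "ab"
-- [(1,'a'),(2,'b')]
--
-- >>> myZipWith (+) [1,2,3] [10,20,30]
-- [11,22,33]
--
-- >>> myZip' "ab" [True,False]
-- [('a',True),('b',False)]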
| dmp1ce/Haskell-Programming-Exercises | Chapter 9/Zipping exercises.hs | unlicense | 330 | 0 | 8 | 79 | 250 | 136 | 114 | 10 | 1 |
{- Copyright 2015 David Farrell <[email protected]>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
{-# LANGUAGE DeriveDataTypeable #-}
module Ext.Help
( module Data.Dynamic
, ExtHelp(..)
, defaultExtHelp
, Topic(..)
, Topics
) where
import Data.Dynamic (Typeable(..), toDyn, fromDyn, fromDynamic)
import Arata.Types (Arata)
data ExtHelp = ExtHelp
{ short :: String
, long :: String
, aboutSyntax :: String
, aboutTopics :: String
, aboutCommands :: String
, subTopics :: Arata Topics
} deriving Typeable
defaultExtHelp :: ExtHelp
defaultExtHelp = ExtHelp
{ short = "Short command description"
, long = "This is the long description of a command."
, aboutSyntax = "Syntax"
, aboutTopics = "The following topics are available"
, aboutCommands = "The following sub-commands are available"
, subTopics = return []
}
data Topic = Topic
{ topicName :: String
, topicShort :: String
, topicLong :: String
}
type Topics = [Topic]
| shockkolate/arata | plugins/Ext/Help.hs | apache-2.0 | 1,543 | 0 | 9 | 374 | 204 | 131 | 73 | 29 | 1 |
{-# LANGUAGE TemplateHaskell, MultiWayIf #-}
module Brain where
import Control.Lens
import Control.Monad.State
import Control.Monad.Reader
import Control.Monad.Writer
import Data.Ord
import Data.Maybe
import Data.List
import Data.Monoid
import qualified Data.Map.Strict as Map
import Debug.Trace
import GameInitModel
import CarPositionsModel
import qualified CarPositionsModel as CPM
import qualified GameInitModel as GIM
type Tick = Int
type LaneNumber = Int
type Speed = Float
type Angle = Float
type TrackSegment = [(Int, Piece)]
type CarDatas = Map.Map String CarData
data CarData = CarData { cdSpeed :: Speed
, cdPrevSpeed :: Speed
, cdAngleSpeed :: Speed
, cdCrashed :: Bool
, cdAngle :: Float
, cdPosition :: PiecePosition
, cdCrashTick :: Tick
, cdDNF :: Bool
}
deriving (Show, Read)
data PieceData = PieceData { pdPosition :: CarPosition
, pdSpeed :: Speed
}
deriving (Show, Read)
data PieceKnowledge = PieceKnowledge
{ speed :: Speed
, maxSpeed :: Speed
, crashed :: Bool
}
deriving (Show, Read)
data Brain = Brain { _bTrack :: Track
, _bSwitchPending :: Bool
, _bSwitchingLane :: Bool
, _bMyPosition :: PiecePosition
, _bMyCarPosition :: CarPosition
, _bCurrentThrottle :: Float
, _bCurrentSpeed :: Float
, _bPieceKnowledge :: [PieceKnowledge]
, _bCars :: CarDatas
, _bCarPieceData :: Map.Map String [PieceData]
, _bRegulateIntegral :: Float
, _bRegulateError :: Float
, _bTurboAvailable :: Bool
, _bTurboFactor :: Float
, _bTurboOn :: Bool
, _bTick :: Tick
, _bDrag :: Maybe Float
, _bMass :: Maybe Float
}
makeLenses ''Brain
drag :: Brain -> Float
drag = fromMaybe 0.02 . _bDrag
mass :: Brain -> Float
mass = fromMaybe 0.02 . _bMass
otherCars :: Brain -> CarDatas
otherCars b = Map.filterWithKey (\k _ -> k /= myName b) $ _bCars b
myName :: Brain -> String
myName = carIdName . CPM.carId . _bMyCarPosition
adjustThrottleForTurbo :: Brain -> Float -> Float
adjustThrottleForTurbo b t | not (_bTurboOn b) = t
                           | otherwise         = t / _bTurboFactor b
segmentsBy :: Brain -> ((Int, Piece) -> Bool) -> [TrackSegment]
segmentsBy b f = filter (f . head) . groupBy (\a b -> f a == f b) $ zip [0..] (pieces $ _bTrack b)
defaultKnowledge :: Track -> (Int, Piece) -> PieceKnowledge
defaultKnowledge t (i, StraightPiece _ _) = PieceKnowledge 1 1 False
defaultKnowledge t (i, p) = PieceKnowledge (calcSegSpeed i t) 1 False
pieceCrashed :: Speed -> PieceKnowledge -> PieceKnowledge
pieceCrashed s p = p { maxSpeed = min s (maxSpeed p - 0.01), speed = clampThrottle newSpeed, crashed = True }
where
newSpeed = speed p - (speed p / 4)
knownSpeed :: Int -> Brain -> Speed
knownSpeed pieceIdx b = speed ((_bPieceKnowledge b) !! pieceIdx)
nextPiece :: (Monad a) => BotM a Piece
nextPiece = get >>= (\b -> return ((cycle $ pieces $ _bTrack b) !! ((pieceIndex $ _bMyPosition b) + 1)))
clampThrottle :: Float -> Speed
clampThrottle = clamp 0 1
clampSpeed :: Float -> Speed
clampSpeed = max 0
isOnTrack :: CarData -> Bool
isOnTrack cd = not (cdDNF cd) && not (cdCrashed cd)
initCarData :: [Car] -> CarDatas
initCarData = Map.fromList . map toData
where
toData (Car { GameInitModel.carId = c }) = (carIdName c, CarData 0 0 0 False 0 (PiecePosition 0 0 (CarLane 0 0) 0) 0 False)
initCarPieceData :: [Car] -> Map.Map String [PieceData]
initCarPieceData cs = Map.fromList $ map conv cs
where
conv (Car { GIM.carId = cid }) = (carIdName cid, [])
leaderboard :: CarDatas -> [String]
leaderboard cds = map fst $ sortBy rank $ Map.toList cds
where
rank (_, a) (_, b) = comparing pi a b `mappend` comparing pd a b
pi = pieceIndex . cdPosition
pd = inPieceDistance . cdPosition
updateCarData :: Track -> CarPosition -> CarData -> CarData
updateCarData t cp cd = cd { cdAngle = CPM.angle cp, cdPosition = pp, cdSpeed = newSpeed, cdPrevSpeed = cdSpeed cd, cdAngleSpeed = newAngleSpeed }
where
pp = piecePosition cp
newSpeed = calculateSpeed (pieces t !! (pieceIndex $ cdPosition cd)) (cdPosition cd) pp (lanes t !! (endLaneIndex $ lane pp))
newAngleSpeed = cdAngle cd - CPM.angle cp
calculateSpeed :: Piece -> PiecePosition -> PiecePosition -> Lane -> Speed
calculateSpeed oldPiece oldPos newPos newLane = if positionDiff >= 0
then positionDiff / 10
else (positionDiff + pieceLen (distanceFromCenter newLane) oldPiece) / 10
where
positionDiff = inPieceDistance newPos - inPieceDistance oldPos
pdPiecePosition :: PieceData -> PiecePosition
pdPiecePosition = piecePosition . pdPosition
pdPieceIndex :: PieceData -> Int
pdPieceIndex = pieceIndex . pdPiecePosition
newPieceData :: PiecePosition -> CarPosition -> Piece -> Lane -> PieceData
newPieceData old newCp p l = PieceData { pdPosition = newCp
, pdSpeed = calculateSpeed p old (piecePosition newCp) l
}
updateCarPieceData :: [PieceData] -> PieceData -> [PieceData]
updateCarPieceData pds pd | null pds  = [pd]
                          | otherwise = if pdPieceIndex (last pds) /= pdPieceIndex pd
then [pd]
else pds ++ [pd]
data PlayMode = Create | Join | DefaultJoin deriving (Show, Read, Eq)
data BotConfig = BotConfig { bcName :: String, bcTrackName :: String, bcPassword :: Maybe String, bcMode :: PlayMode, bcNumCars :: Int }
type BotM a = StateT Brain (ReaderT BotConfig a)
runBotM :: BotM IO a -> BotConfig -> Brain -> IO (a, Brain)
runBotM f c b = runReaderT (runStateT f b) c
data ActionChoice = SwitchChoice Side
| BrakeChoice
| TurboChoice
| AccelerateChoice Speed
| PingChoice
| NoChoice
deriving (Show, Read, Eq, Ord)
noChoice :: (Monad m) => BotM m ActionChoice
noChoice = return NoChoice
choose :: (Monad m) => ActionChoice -> BotM m ActionChoice
choose = return
segmentLength :: Int -> TrackSegment -> Float
segmentLength offset segment = sum $ map (pieceLen offset . snd) segment
data BendDirection = BendLeft | BendRight deriving (Show, Read, Eq)
data Side = Left | Right deriving (Show, Read, Eq, Ord)
laneSwitchSide :: Lane -> Lane -> Maybe Side
laneSwitchSide from to = if | fromDist == toDist -> Nothing
| fromDist > toDist -> Just Brain.Left
| otherwise -> Just Brain.Right
where
fromDist = distanceFromCenter from
toDist = distanceFromCenter to
laneAfterSwitch :: Track -> Side -> LaneNumber -> LaneNumber
laneAfterSwitch t s current = clamp 0 maxLane (change current)
where
maxLane = (length $ lanes t) - 1
    change = case s of
               Brain.Left  -> subtract 1 -- move one lane toward index 0
               Brain.Right -> (+) 1      -- move one lane away from index 0
slowCarOnLane :: Brain -> PiecePosition -> Float -> Lane -> Bool
slowCarOnLane b pp distance l = any (\cd -> distanceToCar pp (cdPosition cd) l ps < distance) slower
where
slower = filter (\cd -> isOnTrack cd && isSlower cd) $ map snd $ Map.toList $ otherCars b
isSlower cd = let s = cdSpeed cd + 0.3 in s < _bCurrentSpeed b && s < knownSpeed (pieceIndex $ cdPosition cd) b
ps = pieces $ _bTrack b
getSegment :: Track -> Int -> Int -> TrackSegment
getSegment t start end = zip [start..end] $ take (end - start) $ drop start ps
where
ps = pieces t
switchToAvoidSlow :: Brain -> Maybe Side
switchToAvoidSlow b = if not $ slowCarOnLane b pp 50 myLane
then Nothing
else mSwitch >>= (\s -> bestSwitchDir b s `mplus` (availableLane >>= laneSwitchSide myLane))
where
pp = _bMyPosition b
myLane = (lanes $ _bTrack b) !! (endLaneIndex $ lane pp)
mSwitch = nextSwitchIndex $ drop (pieceIndex pp) (pieces $ _bTrack b)
availableLane = find (not . slowCarOnLane b pp 50) $ possibleLanes b
possibleLanes :: Brain -> [Lane]
possibleLanes b = map getLane $ filter (\n -> n >= 0 && n < laneCount) [currentLane + 1, currentLane - 1]
where
laneCount = length $ lanes $ _bTrack b
currentLane = endLaneIndex $ lane $ _bMyPosition b
getLane i = (lanes $ _bTrack b) !! i
bestSwitchDir :: Brain -> Int -> Maybe Side
bestSwitchDir b switchIdx = switchDir >>= (\s -> if shouldSwitch s then Just s else Nothing)
where
circularTrack = circularize (pieces $ _bTrack b)
(nextSwitchIdx, _) = fromJust $ find (isSwitch . snd) $ drop (switchIdx + 1) circularTrack
bends = map snd $ filter (\(i, (_, p)) -> (i < nextSwitchIdx) && (isBend p)) $ drop (switchIdx + 1) $ zip [0..] circularTrack
(lefts, rights) = partition (\(_, p) -> (bendDirection p) == BendLeft) bends
switchDir = if (length lefts) == (length rights)
then Nothing
else Just (if (length lefts) > (length rights) then Brain.Left else Brain.Right)
myLane = endLaneIndex $ lane $ _bMyPosition b
shouldSwitch Brain.Left = (myLane - 1) >= 0
shouldSwitch Brain.Right = (myLane + 1) < (length $ lanes $ _bTrack b)
needsSwitch :: Brain -> Maybe Side
needsSwitch b = if (_bSwitchPending b) || (_bSwitchingLane b) || (not $ fromMaybe False $ liftM2 (\a b -> (fst a) <= (fst b)) mSwitch mBend)
then trace "No switch" Nothing
else
let (switchIdx, _) = fromJust mSwitch
in bestSwitchDir b switchIdx
where
circularTrack = concat $ replicate 2 $ zip [0..] (pieces $ _bTrack b)
mSwitch = find (isSwitch . snd) $ drop (pieceIndex $ _bMyPosition b) $ circularTrack
mBend = mSwitch >>= (\(i, _) -> find (isBend . snd) $ drop (i + 1) $ circularTrack)
nextSwitchIndex :: [Piece] -> Maybe Int
nextSwitchIndex = findIndex isSwitch
nextBendIndex :: [Piece] -> Int -> Maybe Int
nextBendIndex ps start = fmap fst $ find (isBend . snd) $ drop start $ circularize ps
circularize :: [a] -> [(Int, a)]
circularize = concat . replicate 2 . zip [0..]
bendDirection :: Piece -> BendDirection
bendDirection (StraightPiece _ _) = BendLeft
bendDirection p = if bendAngle p < 0 then BendLeft else BendRight
isSwitch :: Piece -> Bool
isSwitch = switch
isBend :: Piece -> Bool
isBend (BendPiece {}) = True
isBend _ = False
isStraight :: Piece -> Bool
isStraight p@(StraightPiece {}) = not $ switch p
isStraight _ = False
clamp :: (Num n, Ord n) => n -> n -> n -> n
clamp a b = min b . max a
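-- Example (editor's sketch): @clamp 0 10 42 == 10@ and @clamp 0 10 (-3) == 0@.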
distanceTo' :: PiecePosition -> Lane -> Int -> [Piece] -> Float
distanceTo' pos lane targetIndex pieces = firstLen + (sum $ map (pieceLen offset . snd) $ takeWhile (\(i, _) -> i /= targetIndex) $ drop (1 + (pieceIndex pos)) cps)
where
cps = circularize pieces
offset = distanceFromCenter lane
firstLen = (pieceLen offset $ head pieces) - (inPieceDistance pos)
distanceToCar :: PiecePosition -> PiecePosition -> Lane -> [Piece] -> Float
distanceToCar src dst lane ps = (distanceTo' src lane (pieceIndex dst) ps) + (inPieceDistance dst)
-- | Return number of ticks at speed it would take to travel distance
ticksForDistance :: Speed -> Float -> Tick
ticksForDistance speed dist = ceiling (dist / (speed * 10))
pieceRadius :: Float -> Piece -> Float
pieceRadius def (StraightPiece {}) = def
pieceRadius _ p@(BendPiece {}) = bendRadius p
pieceAngle :: Float -> Piece -> Float
pieceAngle def (StraightPiece {}) = def
pieceAngle _ p@(BendPiece {}) = bendAngle p
calcSegSpeed :: Int -> Track -> Speed
calcSegSpeed startPoint t = k / (turnAmount / angleAmount)
where
seg = drop startPoint . circularize $ pieces t
k = 0.063
maxLen = 160 :: Float
(turnAmount, angleAmount) = foldl sumAll (0, 0) $ takeWhile (\(d,_,_,_) -> d < maxLen) $ iterate calc (0, 0, 0, map snd seg)
calc (d, ta, aa, p:ps) = let pLen = pieceLen 0 p
takeL = maxLen - d
takeL' = if takeL > pLen then pLen else takeL
anglePerc = (abs $ pieceAngle 0 p) * takeL' / pLen
in if not $ isBend p
then (d + pLen, ta, aa, ps)
else (d + pLen, ta + anglePerc / sqrt (pieceRadius 0 p), aa + anglePerc, ps)
calc (_, ta, aa, []) = (maxLen, ta, aa, [])
sumAll (t, a) (_, t', a', _) = (t + t', a + a')
pieceLen :: Int -> Piece -> Float
pieceLen _ p@(StraightPiece {}) = straightLength p
pieceLen offset p = abs ((fixedOffset + bendRadius p) * (deg2rad $ bendAngle p))
where
fixedOffset = fromIntegral $ if (bendDirection p) == BendRight then offset * (-1) else offset
deg2rad :: Float -> Float
deg2rad d = d / 180 * pi
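-- Editor's note (not in the original source): 'calculateBrakeDistance' below assumes
-- the speed decays exponentially under drag, v(t) = v0 * exp (-drag/mass * t); 'ticks'
-- is the time for that decay to reach the target speed, and the result is the integral
-- of v(t) over that interval (speeds are scaled by 10 to track units first).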
calculateBrakeDistance :: Brain -> Speed -> Speed -> Float
calculateBrakeDistance b tar cur = (mass b / drag b) * current * (1 - (exp ((0 - drag b) * ticks / mass b)))
where
ticks = (log (target / current)) * (mass b) / (0 - drag b)
current = cur * 10
target = tar * 10
crashDuration :: Tick
crashDuration = 400
| jhartikainen/hwo2014bot | Brain.hs | apache-2.0 | 14,364 | 0 | 15 | 4,652 | 4,800 | 2,556 | 2,244 | 255 | 5 |
--
-- Copyright : (c) T.Mishima 2014
-- License : Apache-2.0
--
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
module Main where
import Bindings.OculusRift
import Bindings.OculusRift.Types
import Control.Exception ( bracket )
import Debug.Trace ( traceIO )
import Foreign.C.String ( peekCString )
import Foreign.Storable ( peek )
import Data.Maybe ( isJust,fromJust )
import Data.Bits
import Control.Monad ( forM_, forM )
import Control.Concurrent (threadDelay)
import GLFWWindow
import GLView
import Graphics.Rendering.OpenGL as GL
import Graphics.GLUtil
import Foreign.Ptr (nullPtr)
import Linear.V4
-------------
import Bindings.Utils.Windows
-------------
winSize = (1920,1080)
main :: IO ()
main = bracket
(do
!b <- ovr_Initialize
!ghmd <- initGLFW winSize "oculus test" False
return (b,ghmd))
(\ (_,ghmd) -> do
ovr_Shutdown
exitGLFW ghmd
traceIO "exit")
(\ (b,ghmd) -> if b
then do
traceIO "init OK"
bracket
(do
!maxIdx <- ovrHmd_Detect
traceIO $ "detect = " ++ show maxIdx
!hmd <- ovrHmd_Create (maxIdx - 1)
return hmd)
(\ hmd' -> if isJust hmd'
then do
ovrHmd_Destroy $ fromJust hmd'
traceIO "destroy hmd"
else do
traceIO "hmd is Null"
return ())
(mainProcess ghmd)
else traceIO "init NG")
mainProcess _ Nothing = traceIO "create hmd NG"
mainProcess ghmd hmd' = do
!glhdl <- initGL
let hmd = fromJust hmd'
traceIO $ "create hmd OK : " ++ show hmd
!msg <- ovrHmd_GetLastError hmd
traceIO $ "GetLastError = " ++ msg ++ " Msg End"
traceIO " == Print HmdDesc =="
hmdDesc <- castToOvrHmdDesc hmd
printHmdDesc hmdDesc
traceIO " ==================="
!r <- ovrHmd_ConfigureTracking hmd
( ovrTrackingCap_Orientation
.|. ovrTrackingCap_MagYawCorrection
.|. ovrTrackingCap_Position)
ovrTrackingCap_None
traceIO $ "ConfigureTracking : " ++ show r
--
!hwnd <- getWindowHandle "oculus test"
traceIO $ "windowHandle : " ++ show hwnd
-- !hdc <- getWinDC hwnd
!ba <- ovrHmd_AttachToWindow hmd hwnd
Nothing Nothing
traceIO $ "AttachToWindow : " ++ show (ba,hwnd)
recommenedTex0Size <- ovrHmd_GetDefaultFovTextureSize
hmd ovrEye_Left 1.0
recommenedTex1Size <- ovrHmd_GetDefaultFovTextureSize
hmd ovrEye_Right 1.0
traceIO $ "recommentedTexSize L : "
++ show recommenedTex0Size
++ " R : "
++ show recommenedTex1Size
let !renderTargetSizeW = (si_w recommenedTex0Size)
+ (si_w recommenedTex1Size)
!renderTargetSizeH = max (si_h recommenedTex0Size)
(si_h recommenedTex1Size)
twidth = fromIntegral renderTargetSizeW
theight = fromIntegral renderTargetSizeH
!tex <- genColorTexture 0 twidth theight
!fbo <- genColorFrameBuffer tex twidth theight
--
let !eyeTexture = genEyeTextureData tex renderTargetSizeW
renderTargetSizeH
!hd = OvrRenderAPIConfigHeader
ovrRenderAPI_OpenGL
(resolution hmdDesc)
0 -- 1
!apiconf = OvrRenderAPIConfig hd (Just hwnd) Nothing -- (Just hdc)
!caps =
ovrDistortionCap_Vignette
-- .|. ovrDistortionCap_SRGB
.|. ovrDistortionCap_Overdrive
.|. ovrDistortionCap_TimeWarp
.|. ovrDistortionCap_ProfileNoTimewarpSpinWaits
.|. ovrDistortionCap_HqDistortion
-- .|. ovrDistortionCap_ComputeShader
--
-- .|. ovrDistortionCap_Chromatic
-- .|. ovrDistortionCap_NoRestore
-- .|. ovrDistortionCap_FlipInput
traceIO $ "OvrEyeTexture : " ++ show eyeTexture
traceIO $ "OvrRenderAPIConfigHeader : " ++ show hd
traceIO $ "render caps : " ++ show caps
!lfv <- ovrHmd_GetDefaultFov hmd ovrEye_Left
!rfv <- ovrHmd_GetDefaultFov hmd ovrEye_Right
!(bret, eyeRD) <- ovrHmd_ConfigureRendering hmd
(Just apiconf) caps [lfv,rfv]
traceIO $ "ConfigureRendering : " ++ show (bret,eyeRD)
--
ovrHmd_SetEnabledCaps hmd (
-- ovrHmdCap_Present
-- .|. ovrHmdCap_Available
-- .|. ovrHmdCap_Captured
-- ovrHmdCap_ExtendDesktop
-- .|. ovrHmdCap_DisplayOff
ovrHmdCap_LowPersistence
.|. ovrHmdCap_DynamicPrediction
-- .|. ovrHmdCap_NoMirrorToWindow
-- .|. ovrHmdCap_NoVSync
)
-- !tis <- ovr_GetTimeInSeconds
-- traceIO $ "GetTimeInSeconds : " ++ (show tis)
msg2 <- ovrHmd_GetLastError hmd
traceIO $ "GetLastError 2 = " ++ msg2 ++ " Msg End"
printError
ovrHmd_RecenterPose hmd
tex <- loadTextureObj "test/sample_tex.png"
mainLoop hmd ghmd glhdl (eyeTexture,tex,fbo) eyeRD tex 0
--
ovrHmd_ConfigureRendering hmd Nothing caps [lfv,rfv]
return ()
where
genColorTexture textureUnitNo width height = do
tex <- genObjectName
withTexturesAt Texture2D [(tex,textureUnitNo)] $ do
texImage2D Texture2D NoProxy 0 RGBA'
(TextureSize2D width height) 0
(PixelData RGBA UnsignedByte nullPtr)
textureFilter Texture2D $= ((Nearest, Nothing), Nearest)
texture2DWrap $= (Repeated, ClampToEdge)
--textureBorderColor Texture2D $= Color4 1.0 0.0 0.0 (0.0::GLfloat)
--textureMaxAnisotropy Texture2D $= 1.0
return tex
genColorFrameBuffer tex width height = do
traceIO $ "tex size = " ++ (show (width,height))
!fbo <- genObjectName :: IO FramebufferObject
bindFramebuffer Framebuffer $= fbo
!rbo <- genObjectName :: IO RenderbufferObject
bindRenderbuffer Renderbuffer $= rbo
renderbufferStorage Renderbuffer DepthComponent'
(RenderbufferSize width height)
framebufferRenderbuffer Framebuffer DepthAttachment Renderbuffer rbo
framebufferTexture2D Framebuffer (ColorAttachment 0) Texture2D tex 0
drawBuffers $= [FBOColorAttachment 0]
-- unbind
bindRenderbuffer Renderbuffer $= noRenderbufferObject
bindFramebuffer Framebuffer $= defaultFramebufferObject
return fbo
genEyeTextureData tex width height =
[ OvrTexture hd0 texID , OvrTexture hd1 texID ]
where
texID = (\ (TextureObject t') -> t' ) tex
vpSize = OvrSizei (div width 2) height
hd0 = OvrTextureHeader
{ apiT = ovrRenderAPI_OpenGL
, textureSize = OvrSizei width height
, renderViewport = OvrRecti (OvrVector2i 0 0) vpSize
}
hd1 = OvrTextureHeader
{ apiT = ovrRenderAPI_OpenGL
, textureSize = OvrSizei width height
, renderViewport = OvrRecti (OvrVector2i (div width 2) 0) vpSize
}
mainLoop hmd glfwHdl glhdl (eyeTexture,texobj,fbo) eyeRD tex frameNo = do
pollGLFW
--threadDelay 10000
--threadDelay 1000
--threadDelay 1000000
dt <- getDeltTime glfwHdl
exitflg' <- getExitReqGLFW glfwHdl
--ts <- ovrHmd_GetTrackingState hmd =<< ovr_GetTimeInSeconds
--traceIO $ show ts
ovrHmd_BeginFrame hmd frameNo
bindFramebuffer Framebuffer $= fbo
let (winW,winH) = winSize
withViewport (Position 0 0) (Size (fromIntegral winW) (fromIntegral winH)) $
clear [GL.ColorBuffer, GL.DepthBuffer]
(poseL:poseR:_) <- ovrHmd_GetEyePoses hmd frameNo $ map hmdToEyeViewOffset eyeRD
renderPose <- forM [(ovrEye_Left,0,poseL),(ovrEye_Right,1,poseR)]
$ \ (eyeType,i,pose) -> do
--pose <- ovrHmd_GetHmdPosePerEye hmd eyeType
(OvrMatrix4f m) <- ovrMatrix4f_Projection (fov (eyeRD !! i)) 0.1 20 True
let pm = (\ [v1,v2,v3,v4] -> V4 v1 v2 v3 v4) $
map (\ l -> (\ [a,b,c,d] -> V4 a b c d) $ map realToFrac l ) m
(OvrQuatf qx qy qz qw) = orientation pose
--traceIO $ "pose : " ++ (show eyeType) ++ " : " ++ (show pose)
--textureBinding Texture2D $= Just texobj
let fov' = fov $ head eyeRD
vPos = if eyeType == ovrEye_Left
then Position 0 0
else Position 1182 0
withViewport vPos (Size 1182 1461) $ render glhdl tex pm (qx,qy,qz,qw)
flush
return pose
bindFramebuffer Framebuffer $= defaultFramebufferObject
--traceIO $ "renderPose = " ++ (show renderPose)
--traceIO $ "eyeTexture = " ++ (show eyeTexture)
ovrHmd_EndFrame hmd renderPose eyeTexture
--swapBuff glfwHdl
--msg <- ovrHmd_GetLastError hmd
--traceIO $ "GetLastError 3 = " ++ msg ++ " Msg End"
--printError
if exitflg'
then return ()
else mainLoop hmd glfwHdl glhdl (eyeTexture,texobj,fbo) eyeRD tex (frameNo + 1)
| tmishima/bindings-Oculus | test/case2/Main.hs | apache-2.0 | 8,738 | 0 | 22 | 2,326 | 2,100 | 1,044 | 1,056 | 183 | 3 |
#!/usr/bin/env stack
-- stack --resolver lts-9.1 runghc --package minio-hs
--
-- Minio Haskell SDK, (C) 2017 Minio, Inc.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
{-# LANGUAGE OverloadedStrings #-}
import Network.Minio
import qualified Data.ByteString.Char8 as B
import Data.CaseInsensitive (original)
-- | The following example uses minio's play server at
-- https://play.minio.io:9000. The endpoint and associated
-- credentials are provided via the libary constant,
--
-- > minioPlayCI :: ConnectInfo
--
main :: IO ()
main = do
let
-- Use headers to set user-metadata - note that this header will
-- need to be set when the URL is used to make an upload.
headers = [("x-amz-meta-url-creator",
"minio-hs-presigned-put-example")]
res <- runMinio minioPlayCI $ do
-- generate a URL with 7 days expiry time
presignedPutObjectURL "my-bucket" "my-object" (7*24*3600) headers
case res of
Left e -> putStrLn $ "presignedPutObject URL failed." ++ show e
Right url -> do
-- We generate a curl command to demonstrate usage of the signed
-- URL.
let
hdrOpt (k, v) = B.concat ["-H '", original k, ": ", v, "'"]
curlCmd = B.intercalate " " $
["curl "] ++ map hdrOpt headers ++
["-T /tmp/myfile", B.concat ["'", url, "'"]]
B.putStrLn $ "The following curl command would use the presigned " ++
"URL to upload the file at \"/tmp/myfile\":"
B.putStrLn curlCmd
| donatello/minio-hs | examples/PresignedPutObject.hs | apache-2.0 | 2,035 | 0 | 19 | 473 | 272 | 154 | 118 | 22 | 2 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Admin where
import Import
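-- | Admin dashboard: counts plugins, posts and users in a single database
-- transaction and renders the admin homepage widget.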
getAdminR :: Handler RepHtml
getAdminR = do
(plugins, posts, users) <- runDB $ do
plugins <- count [PluginTitle !=. ""]
posts <- count [EntrySlug !=. ""]
users <- count [UserIdent !=. ""]
return (plugins, posts, users)
defaultLayout $ do
setTitle "Admin page"
$(widgetFile "admin/homepage")
| ModernSteward/blog | Handler/Admin.hs | bsd-2-clause | 442 | 0 | 14 | 114 | 132 | 66 | 66 | 13 | 1 |
{-# LANGUAGE TypeFamilies, FlexibleInstances, MultiParamTypeClasses #-}
module Data.Vector.Fixed.Linear where
import Data.Vector.Fixed.Internal
import Linear
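-- Editor's note: these are orphan 'Vector' instances that let the fixed-size
-- @linear@ types 'V2', 'V3' and 'V4' be used through the @fixed-vector@ generic
-- interface ('construct' and 'inspect' over the 'Fun' continuation type).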
type instance Dim V2 = S (S Z)
type instance Dim V3 = S (S (S Z))
type instance Dim V4 = S (S (S (S Z)))
instance Vector V2 a where
construct = Fun V2
inspect (V2 x y) (Fun f) = f x y
instance Vector V3 a where
construct = Fun V3
inspect (V3 x y z) (Fun f) = f x y z
instance Vector V4 a where
construct = Fun V4
inspect (V4 x y z w) (Fun f) = f x y z w
| acowley/fixed-linear | src/Data/Vector/Fixed/Linear.hs | bsd-3-clause | 528 | 0 | 11 | 119 | 247 | 128 | 119 | 16 | 0 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -Wall #-}
-- | Surface chart combinators.
--
-- A common chart is to present a set of rectangles on the XY plane with colour representing values of the underlying data. This library uses the term /surface/ chart but it is often referred to as a heatmap.
--
module Chart.Surface
( SurfaceData (..),
SurfaceOptions (..),
defaultSurfaceOptions,
SurfaceStyle (..),
defaultSurfaceStyle,
mkSurfaceData,
surfaces,
surfacef,
surfacefl,
SurfaceLegendOptions (..),
defaultSurfaceLegendOptions,
surfaceAxisOptions,
)
where
import Chart.Data
import Chart.Hud
import Chart.Primitive
import Chart.Style
import Data.Bifunctor
import Data.Bool
import Data.Colour
import Data.Foldable
import Data.FormatN
import Data.Text (Text)
import GHC.Generics
import Optics.Core
import Prelude
-- | Options for a Surface chart.
data SurfaceOptions = SurfaceOptions
{ -- | surface style
soStyle :: SurfaceStyle,
-- | The grain or granularity of the chart
soGrain :: Point Int,
-- | Chart range
soRange :: Rect Double
}
deriving (Show, Eq, Generic)
-- | official style
defaultSurfaceOptions :: SurfaceOptions
defaultSurfaceOptions =
SurfaceOptions defaultSurfaceStyle (Point 10 10) one
-- | A surface chart is a specialization of a 'RectChart'
--
-- >>> defaultSurfaceStyle
-- SurfaceStyle {surfaceColors = [Colour 0.02 0.73 0.80 1.00,Colour 0.02 0.29 0.48 1.00], surfaceRectStyle = RectStyle {borderSize = 0.0, borderColor = Colour 0.00 0.00 0.00 0.00, color = Colour 0.05 0.05 0.05 1.00}}
--
-- 
data SurfaceStyle = SurfaceStyle
{ -- | list of colours to interpolate between.
surfaceColors :: [Colour],
surfaceRectStyle :: RectStyle
}
deriving (Show, Eq, Generic)
-- | The official surface style.
defaultSurfaceStyle :: SurfaceStyle
defaultSurfaceStyle =
SurfaceStyle (palette1 <$> [0 .. 1]) (blob dark)
-- | Main surface data elements
data SurfaceData = SurfaceData
{ -- | XY Coordinates of surface.
surfaceRect :: Rect Double,
-- | Surface colour.
surfaceColor :: Colour
}
deriving (Show, Eq, Generic)
-- | surface chart without any hud trimmings
surfaces :: RectStyle -> [SurfaceData] -> [Chart]
surfaces rs ps =
( \(SurfaceData r c) ->
RectChart
(rs & #color .~ c)
[r]
)
<$> ps
-- | Create surface data from a function on a Point
mkSurfaceData ::
(Point Double -> Double) ->
Rect Double ->
Grid (Rect Double) ->
[Colour] ->
([SurfaceData], Range Double)
mkSurfaceData f r g cs = ((\(x, y) -> SurfaceData x (mixes y cs)) <$> ps', unsafeSpace1 rs)
where
ps = gridF f r g
rs = snd <$> ps
rs' = project (unsafeSpace1 rs :: Range Double) (Range 0 1) <$> rs
ps' = zip (fst <$> ps) rs'
-- | Create a surface chart from a function.
surfacef :: (Point Double -> Double) -> SurfaceOptions -> ([Chart], Range Double)
surfacef f cfg =
first (surfaces (cfg ^. #soStyle % #surfaceRectStyle)) $
mkSurfaceData
f
(cfg ^. #soRange)
(cfg ^. #soGrain)
(toList $ cfg ^. #soStyle % #surfaceColors)
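-- Example (editor's sketch): surface charts for @\(Point x y) -> x * y@ over the
-- default options, discarding the data range that is returned alongside them.
--
-- > fst $ surfacef (\(Point x y) -> x * y) defaultSurfaceOptions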
-- | Create a surface chart and accompanying legend from a function.
surfacefl :: (Point Double -> Double) -> SurfaceOptions -> SurfaceLegendOptions -> ([Chart], [Hud])
surfacefl f po slo =
( cs,
[Hud 10 (legendHud (slo ^. #sloLegendOptions) (surfaceLegendChart dr slo))]
)
where
(cs, dr) = surfacef f po
-- | Legend specialization for a surface chart.
data SurfaceLegendOptions = SurfaceLegendOptions
{ sloStyle :: SurfaceStyle,
sloTitle :: Text,
-- | Width of the legend glyph
sloWidth :: Double,
-- | Resolution of the legend glyph
sloResolution :: Int,
sloAxisOptions :: AxisOptions,
sloLegendOptions :: LegendOptions
}
deriving (Eq, Show, Generic)
-- | 'AxisOptions' for a surface chart.
surfaceAxisOptions :: Colour -> AxisOptions
surfaceAxisOptions c =
AxisOptions
Nothing
Nothing
( Ticks
(TickRound (FormatN FSPrec (Just 3) True) 4 NoTickExtend)
(Just (defaultGlyphTick & #borderColor .~ c & #color .~ c & #shape .~ VLineGlyph, 0.01))
(Just (defaultTextTick & #color .~ c, 0.03))
Nothing
)
PlaceRight
-- | official surface legend options
defaultSurfaceLegendOptions :: Colour -> Text -> SurfaceLegendOptions
defaultSurfaceLegendOptions c t =
SurfaceLegendOptions defaultSurfaceStyle t 0.05 100 (surfaceAxisOptions c) surfaceLegendOptions
surfaceLegendOptions :: LegendOptions
surfaceLegendOptions =
defaultLegendOptions
& #place .~ PlaceRight
& #overallScale .~ 0.9
& #size .~ 0.5
& #vgap .~ 0.05
& #hgap .~ 0.01
& #innerPad .~ 0.05
& #outerPad .~ 0.02
& #textStyle % #hsize .~ 0.5
& #textStyle % #size .~ 0.1
& #frame .~ Nothing
-- | Creation of the classical heatmap glyph within a legend context.
surfaceLegendChart :: Range Double -> SurfaceLegendOptions -> ChartTree
surfaceLegendChart dataRange l =
legendFrame (view #sloLegendOptions l) hs
where
a = makeSurfaceTick l (named "pchart" pchart)
pchart
| l ^. #sloLegendOptions % #place == PlaceBottom
|| l ^. #sloLegendOptions % #place == PlaceTop =
vertGlyph
| otherwise = horiGlyph
t = TextChart (l ^. #sloLegendOptions % #textStyle & #anchor .~ AnchorStart) [(l ^. #sloTitle, zero)]
hs = vert (l ^. #sloLegendOptions % #vgap) [a, unnamed [t]]
vertGlyph :: [Chart]
vertGlyph =
zipWith
(\r c -> RectChart (blob c) [r])
( (\xr -> Ranges xr (Range 0 (l ^. #sloWidth)))
<$> gridSpace
dataRange
(l ^. #sloResolution)
)
( (\x -> mixes x (toList $ l ^. #sloStyle % #surfaceColors))
<$> grid MidPos (Range 0 1) (l ^. #sloResolution)
)
horiGlyph :: [Chart]
horiGlyph =
zipWith
(\r c -> RectChart (blob c) [r])
( (\yr -> Ranges (Range 0 (l ^. #sloWidth)) yr)
<$> gridSpace
dataRange
(l ^. #sloResolution)
)
( (\x -> mixes x (toList $ l ^. #sloStyle % #surfaceColors))
<$> grid MidPos (Range 0 1) (l ^. #sloResolution)
)
isHori :: SurfaceLegendOptions -> Bool
isHori l =
l ^. #sloLegendOptions % #place == PlaceBottom
|| l ^. #sloLegendOptions % #place == PlaceTop
makeSurfaceTick :: SurfaceLegendOptions -> ChartTree -> ChartTree
makeSurfaceTick l pchart = case view styleBox' pchart of
Nothing -> pchart
Just r' -> phud
where
r'' = bool (Rect 0 (l ^. #sloWidth) 0 (l ^. #sloLegendOptions % #size)) (Rect 0 (l ^. #sloLegendOptions % #size) 0 (l ^. #sloWidth)) (isHori l)
(hs, db) = toHuds (mempty & set #chartAspect ChartAspect & set #axes [(9, l ^. #sloAxisOptions & #place .~ bool PlaceRight PlaceBottom (isHori l))]) r'
phud = runHudWith r'' db hs pchart
| tonyday567/chart-svg | src/Chart/Surface.hs | bsd-3-clause | 6,973 | 0 | 26 | 1,640 | 1,872 | 1,022 | 850 | 157 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Data.FAlgebra.Tree.Indexed
( module Data.FAlgebra.Base
, module Data.FAlgebra.Annotation
, module Data.FAlgebra.Tree
, module Data.FAlgebra.Tree.Zipper
, idx
, idxSlot
) where
import Prelude hiding (zip)
import Control.Applicative (Const(..), getConst)
import Data.FAlgebra.Annotation
import Data.FAlgebra.Base
import Data.FAlgebra.Tree
import Data.FAlgebra.Tree.Zipper
import Lens.Micro
-- |Get a zipper for the ith element of a tree.
idx :: forall a t. (FCoalgebra (TreeF a) t, Annotated Size t) => Int -> t -> TreeZip a t
idx i = idx' (Size i) . root
where
idx' i z = case (coalg z :: TreeF a (TreeZip a t)) of
Empty -> z
Branch a b1 b2 -> let s = getSize (view _here b1) in
case compare i s of
LT -> idx' i b1
EQ -> z
GT -> idx' (i - s - 1) b2
-- |Get a zipper the slot such that inserting there makes the inserted element the ith element.
idxSlot :: forall a t. (FCoalgebra (TreeF a) t, Annotated Size t) => Int -> t -> TreeZip a t
idxSlot i = idxSlot' (Size i) . root
where
idxSlot' i z = case (coalg z :: TreeF a (TreeZip a t)) of
Empty -> z
Branch _ b1 b2 -> let s = getSize (view _here b1) in
case compare i s of
LT -> idxSlot' i b1
EQ -> idxSlot' i b1
GT -> idxSlot' (i - s - 1) b2
| bhamrick/fixalgs | Data/FAlgebra/Tree/Indexed.hs | bsd-3-clause | 1,473 | 0 | 18 | 441 | 504 | 269 | 235 | 34 | 4 |
{-# OPTIONS_GHC -Wno-missing-signatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
module GCounter where
import Test.QuickCheck ((===))
import CRDT.Cv.GCounter (GCounter (..), increment, query)
import CRDT.Laws (cvrdtLaws)
test_Cv = cvrdtLaws @(GCounter Int)
prop_increment (counter :: GCounter Int) pid =
query (increment pid counter) === succ (query counter)
| cblp/crdt | crdt-test/test/GCounter.hs | bsd-3-clause | 433 | 0 | 8 | 87 | 110 | 63 | 47 | 10 | 1 |
module Dang.ModuleSystem.Env (
NameTrie(),
NameNode(..),
Def(..),
envVal, envType, envMod,
qualify,
insertPName,
lookupVal,
lookupType,
lookupMod,
lookupPName,
openMod,
shadowing,
intersectionWith,
) where
import Dang.Syntax.AST (PName(..))
import Dang.Utils.PP
import Dang.Utils.Panic (panic)
import Control.Monad (mplus)
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
-- Naming Environment ----------------------------------------------------------
data Def = DefMod !T.Text
| DefVal !T.Text
| DefType !T.Text
deriving (Eq,Ord,Show)
instance PP Def where
ppr (DefMod n) = ppr n
ppr (DefVal n) = ppr n
ppr (DefType n) = ppr n
newtype NameTrie a = NameTrie (Map.Map Def (NameNode a))
deriving (Show)
data NameNode a = NameNode (Maybe a) (NameTrie a)
deriving (Show)
instance Monoid a => Monoid (NameTrie a) where
mempty = NameTrie Map.empty
mappend (NameTrie a) (NameTrie b) = NameTrie (Map.unionWith merge a b)
where
merge (NameNode xs x) (NameNode ys y) =
NameNode (mappend xs ys) (mappend x y)
{-# INLINE mempty #-}
{-# INLINE mappend #-}
-- | Merge the names from the left environment into the right environment,
-- allowing shadowing of names in the right environment.
shadowing :: NameTrie a -> NameTrie a -> NameTrie a
shadowing (NameTrie l) (NameTrie r) = NameTrie (Map.unionWith merge l r)
where
merge (NameNode a l') (NameNode b r') = NameNode (a `mplus` b) (shadowing l' r')
qualify :: [T.Text] -> NameTrie a -> NameTrie a
qualify ns t = foldr step t ns
where
step n acc = NameTrie (Map.singleton (DefMod n) (NameNode Nothing acc))
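-- Example (editor's sketch): @qualify ["A","B"] env@ nests @env@ under the module
-- path @A.B@, so an entry for @x@ in @env@ is reached by looking up @A.B.x@.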
envVal, envType, envMod :: Monoid a => PName -> a -> NameTrie a
envVal = singleton DefVal
envType = singleton DefType
envMod = singleton DefMod
singleton :: Monoid a => (T.Text -> Def) -> PName -> a -> NameTrie a
singleton mkDef pn n =
case pn of
PQual _ ns p -> qualify ns (mk p)
PUnqual _ p -> mk p
where
mk p = NameTrie (Map.singleton (mkDef p) (NameNode (Just n) mempty))
insertPName :: Monoid a => (T.Text -> Def) -> PName -> a -> NameTrie a -> NameTrie a
insertPName mkDef pn a =
case pn of
PUnqual _ p -> go (mkDef p) []
PQual _ ns p ->
case map DefMod ns ++ [mkDef p] of
n:ns' -> go n ns'
_ -> panic (text "Invalid qualified name")
where
go n ns (NameTrie m) = NameTrie (Map.alter upd n m)
where
upd mb =
case ns of
n':rest ->
case mb of
Just (NameNode x sub) -> Just (NameNode x (go n' rest sub))
Nothing -> Just (NameNode Nothing (go n' rest mempty))
[] ->
case mb of
Just (NameNode x sub) -> Just (NameNode (Just a `mappend` x) sub)
Nothing -> Just (NameNode (Just a) mempty)
lookupVal, lookupType, lookupMod :: PName -> NameTrie a -> Maybe a
lookupVal pn t =
case lookupPName DefVal pn t of
Just (NameNode mb _) -> mb
Nothing -> Nothing
lookupType pn t =
case lookupPName DefType pn t of
Just (NameNode mb _) -> mb
Nothing -> Nothing
lookupMod pn t =
case lookupPName DefMod pn t of
Just (NameNode mb _) -> mb
Nothing -> Nothing
lookupPName :: (T.Text -> Def) -> PName -> NameTrie a -> Maybe (NameNode a)
lookupPName mkDef pn =
case pn of
PQual _ ns p -> go (map DefMod ns ++ [mkDef p])
PUnqual _ p -> go [mkDef p]
where
go (n:ns) (NameTrie m) =
do t@(NameNode _ m') <- Map.lookup n m
if null ns
then return t
else go ns m'
go [] _ = error "Impossible"
-- | Open the module with the name N in the environment E.
openMod :: PName -> NameTrie a -> NameTrie a
openMod pn e =
case lookupPName DefMod pn e of
Just (NameNode _ ds) -> ds `shadowing` e
Nothing -> e
intersectionWith :: (Maybe a -> Maybe b -> Maybe c)
-> NameTrie a -> NameTrie b -> NameTrie c
intersectionWith f = go
where
go (NameTrie l) (NameTrie r) =
NameTrie (Map.intersectionWith merge l r)
merge (NameNode xs l') (NameNode ys r') =
NameNode (f xs ys) (go l' r')
| elliottt/dang | src/Dang/ModuleSystem/Env.hs | bsd-3-clause | 4,324 | 0 | 20 | 1,287 | 1,691 | 855 | 836 | 118 | 6 |
--------------------------------------------------------------------
-- |
-- Module : Text.Feed.Import
-- Copyright : (c) Galois, Inc. 2008,
-- (c) Sigbjorn Finne 2009-
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <[email protected]>
-- Stability : provisional
--
-- Convert from XML to Feeds.
--
--------------------------------------------------------------------
{-# LANGUAGE CPP #-}
module Text.Feed.Import
( parseFeedFromFile -- :: FilePath -> IO Feed
, parseFeedString -- :: String -> IO Feed
-- if you know your format, use these directly:
, readRSS2 -- :: XML.Element -> Maybe Feed
, readRSS1 -- :: XML.Element -> Maybe Feed
, readAtom -- :: XML.Element -> Maybe Feed
) where
import Text.Atom.Feed.Import as Atom
import Text.RSS.Import as RSS
import Text.RSS1.Import as RSS1
import Text.Feed.Types
import Text.XML.Light as XML
import Text.XML.Light.Lexer ( XmlSource )
import Control.Monad
import Prelude hiding (readFile)
#if MIN_VERSION_utf8_string(1,0,0)
-- Read the file as a packed ByteString and then apply utf8 decoder.
-- System.IO.readFile looks at the the current locale to choose a decoder,
-- but here we always want to use the utf8 decoder.
import qualified Data.ByteString as BS (readFile)
import Data.ByteString.UTF8 as UTF8 (toString)
readFile :: FilePath -> IO String
readFile path = BS.readFile path >>= return . UTF8.toString
{-
-- Equivalent:
import System.IO (withBinaryFile, IOMode(ReadMode), hSetEncoding, utf8, hGetContents)
readFile :: FilePath -> IO String
readFile path = withBinaryFile path ReadMode $ \ h -> hSetEncoding h utf8 >> hGetContents h
-}
#else
import qualified System.IO.UTF8 as UTF8 ( readFile )
-- | Use the UTF8 version of readfile from the old utf8-string package.
readFile :: FilePath -> IO String
readFile = UTF8.readFile
#endif
-- | 'parseFeedFromFile fp' reads in the contents of the file at @fp@;
-- the assumed encoding is UTF-8.
parseFeedFromFile :: FilePath -> IO Feed
parseFeedFromFile fp = do
ls <- readFile fp
case parseFeedString ls of
Nothing -> fail ("parseFeedFromFile: not a well-formed XML content in: " ++ fp)
Just f -> return f
-- | 'parseFeedWithParser' tries to parse the string @str@
-- as one of the feed formats. First as Atom, then RSS2 before
-- giving RSS1 a try. @Nothing@ is, rather unhelpfully, returned
-- as an indication of error.
parseFeedWithParser :: XmlSource s => (s -> Maybe Element) -> s -> Maybe Feed
parseFeedWithParser parser str =
case parser str of
Nothing -> Nothing
Just e ->
readAtom e `mplus`
readRSS2 e `mplus`
readRSS1 e `mplus`
Just (XMLFeed e)
-- | 'parseFeedString str' tries to parse the string @str@ as
-- one of the feed formats. First as Atom, then RSS2 before
-- giving RSS1 a try. @Nothing@ is, rather unhelpfully, returned
-- as an indication of error.
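--
-- For instance, assuming @s@ holds an RSS 2.0 document:
--
-- > case parseFeedString s of
-- >   Just (RSSFeed _) -> "recognised as RSS 2.x"
-- >   Just _           -> "some other feed flavour"
-- >   Nothing          -> "not well-formed XML"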
parseFeedString :: String -> Maybe Feed
parseFeedString = parseFeedWithParser parseXMLDoc
-- | 'readRSS2 elt' tries to derive an RSS2.x, RSS-0.9x feed document
-- from the XML element @e@.
readRSS2 :: XML.Element -> Maybe Feed
readRSS2 e = fmap RSSFeed $ RSS.elementToRSS e
-- | 'readRSS1 elt' tries to derive an RSS1.0 feed document
-- from the XML element @e@.
readRSS1 :: XML.Element -> Maybe Feed
readRSS1 e = fmap RSS1Feed $ RSS1.elementToFeed e
-- | 'readAtom elt' tries to derive an Atom feed document
-- from the XML element @e@.
readAtom :: XML.Element -> Maybe Feed
readAtom e = fmap AtomFeed $ Atom.elementFeed e
| seereason/feed | Text/Feed/Import.hs | bsd-3-clause | 3,555 | 0 | 12 | 696 | 484 | 279 | 205 | 41 | 2 |
module Types where
import qualified Data.Map as M
data Result = Sat | Unsat | Unknown | OptFound deriving (Eq,Show)
data Lit = PosLit Var | NegLit Var deriving (Eq,Show)
data Constraint = Geq Lhs Rhs | Eq Lhs Rhs deriving (Show,Eq)
data Term = MkTrm Integer Var deriving (Show,Eq)
type Assignment = M.Map Var Bool
makeAssignment :: [Lit] -> Assignment
makeAssignment = foldl go M.empty
where
go as (PosLit x) = M.insert x True as
go as (NegLit x) = M.insert x False as
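-- For example, makeAssignment [PosLit (X 1), NegLit (X 2)] yields a map
-- sending X 1 to True and X 2 to False.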
data Var = X Integer deriving (Eq,Show,Ord)
type Rhs = Integer
type Lhs = [Term]
type MinFunction = [Term]
| EJahren/PBCompOutputChecker | src/Types.hs | bsd-3-clause | 617 | 0 | 9 | 145 | 252 | 141 | 111 | 15 | 2 |
module Physics.Falling2d.UnitSphere2d
(
)
where
import System.Random
import Data.Random.Normal
import Data.Vect.Double.Base
import Physics.Falling.Math.UnitSphere
instance UnitSphere Normal2 where
unitSphereSamples = _randomCirclePoints
nUnitSphereSamples n = map (\ang -> toNormalUnsafe $ Vec2 (cos ang) (sin ang)) subdivs
where
subdivs = [ fromIntegral i * 2.0 * pi / fromIntegral n | i <- [ 0 .. n - 1 ] ]
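-- For instance, nUnitSphereSamples 4 yields the unit directions at angles
-- 0, pi/2, pi and 3*pi/2.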
_randomCirclePoints :: [Normal2]
_randomCirclePoints = map mkNormal $ _doubleList2Vec2List $ normals $ mkStdGen 0
_doubleList2Vec2List :: [Double] -> [Vec2]
_doubleList2Vec2List (a:b:l) = Vec2 a b : _doubleList2Vec2List l
_doubleList2Vec2List _ = []
| sebcrozet/falling2d | Physics/Falling2d/UnitSphere2d.hs | bsd-3-clause | 728 | 0 | 12 | 159 | 223 | 121 | 102 | 15 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
--------------------------------------------------------------------------------
-- |
-- Module : $Header$
-- Copyright  : © 2012-2015 Nicola Squartini
-- License : BSD3
--
-- Maintainer : Nicola Squartini <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-- This module define a datatype @'Tensor'@ which implements the classes and
-- methods defined in "Data.Tensor", but is represented internally as a
-- @'V.Vector'@, and is therefore faster and more memory efficient than the
-- standard @'T.Tensor'@.
--
--------------------------------------------------------------------------------
module Data.Tensor.Vector
( MultiIndex
, unMultiIndex
, Tensor
, Vector
, Matrix
, ColumnVector
, vector2ColumnVector
, columnVector2Vector
, RowVector
, fromList
-- , fromVector
, module Data.Indexable
, module Data.MultiIndex
, module Data.Sliceable
, module Data.Tensor
, Slicer
) where
import Control.Applicative
import Control.Arrow
import Control.DeepSeq
import Control.Exception (throw)
import Control.Monad (liftM)
import Data.Function (on)
import Data.Maybe (fromJust)
import Data.Singletons
import Data.Singletons.Prelude.List hiding (Reverse)
import qualified Data.Vector as V
import Data.Vector.Generic hiding (Vector, fromList,
replicate, toList, (++))
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Unboxed as U
import Data.Word
import GHC.Exts (IsList (..))
import Prelude hiding (drop, head, init, length,
null, product, reverse, splitAt,
tail, take, zipWith)
import qualified Prelude as P
import Prelude.Unicode
import System.Random
import Data.Indexable
import qualified Data.Indexable as I
import Data.MultiIndex hiding (MultiIndex, fromList,
toList)
import qualified Data.MultiIndex as M
import Data.Sliceable hiding (Slicer)
import qualified Data.Sliceable as S
import Data.Tensor hiding (ColumnVector, Matrix,
RowVector, Slicer (..), Tensor,
Vector)
-- | An @'IsMultiIndex'@ type optimized for indexing @'Tensor'@. It is
-- internally represented as a @'U.Vector'@ of @'Word'@s.
newtype MultiIndex (is ∷ [PI]) = MultiIndex
  { unMultiIndex ∷ U.Vector Word -- ^ Yield the internal representation of
-- @'MultiIndex'@.
} deriving Eq
-- | Standard total order on a @'MultiIndex'@ of length 1 (@[i] ≤ [j]@ iff @i ≤
-- j@). Methods have @O(1)@ complexity.
instance Ord (MultiIndex '[i]) where
  compare = compare `on` (head ∘ unMultiIndex)
instance IsMultiIndex MultiIndex where
nil = MultiIndex G.empty
  oneCons = MultiIndex ∘ cons 1 ∘ unMultiIndex
  headSucc = MultiIndex ∘ succHead ∘ unMultiIndex
toMultiIndex = toM sing
where
      toM ∷ Shape is → MultiIndex is → M.MultiIndex is
toM SNil = const Nil
toM (SCons SOne sh) =
        OneCons ∘ toM sh ∘ MultiIndex ∘ tail ∘ unMultiIndex
toM (SCons (SS i) sh) =
        headSucc ∘ toM (SCons i sh) ∘ MultiIndex ∘ predHead ∘ unMultiIndex
  toList = P.map fromIntegral ∘ toList ∘ unMultiIndex
predHead ∷ U.Vector Word → U.Vector Word
predHead = imap (\i → if i ≡ 0 then pred else id)
succHead ∷ U.Vector Word → U.Vector Word
succHead = imap (\i → if i ≡ 0 then succ else id)
-- | An @'IsTensor'@ type, internally represented as a @'V.Vector'@. It features
-- @O(r)@ @('I.!')@.
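--
-- For instance, a @2x3@ matrix is stored with @form = [2,3]@ and a @content@
-- vector of 6 elements in row-major order.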
data Tensor (is ∷ [PI]) e = Tensor
  { form    ∷ U.Vector Word
  , content ∷ V.Vector e
} deriving Eq
-- | A vector.
type Vector i = Tensor '[i]
-- | A matrix.
type Matrix i j = Tensor '[i, j]
-- | A matrix with only one column.
type ColumnVector n = Matrix n 'One
-- | Transform a vector into a one-column matrix.
vector2ColumnVector :: Vector n e -> ColumnVector n e
vector2ColumnVector (Tensor ds x) = (Tensor (ds `snoc` 1) x)
-- | Transform a one-column matrix into a vector.
columnVector2Vector :: ColumnVector n e -> Vector n e
columnVector2Vector (Tensor ds x) = (Tensor (init ds) x)
-- | A matrix with only one row.
type RowVector n = Matrix 'One n
linearize ∷ U.Vector Word -- ^ Dimension array
          → U.Vector Word -- ^ Index array
          → Int
linearize ds is = fromIntegral $ go ds is 0
where go es js acc =
              if length js ≡ 0
then acc
else let t = tail es in
               go t (tail js) (acc + (head js - 1) ⋅ product t)
unlinearize ∷ U.Vector Word -- ^ Dimension array
            → Int           -- ^ Linearized position (starting from 0)
            → U.Vector Word
unlinearize ds i = go ds (fromIntegral i) G.empty
where go es j acc = if G.null es
then acc
else let t = tail es
(q,r) = quotRem j (product t)
in go t r (acc `snoc` (q + 1))
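-- For example, with dimension array @[2,3]@ the multi-index @[2,1]@
-- linearizes to @(2-1)*3 + (1-1) = 3@, and @unlinearize [2,3] 3@ yields
-- @[2,1]@ again.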
unsafeTensorGet :: Tensor i e -> U.Vector Word -> e
unsafeTensorGet (Tensor ds x) = (G.!) x ∘ linearize ds
unsafeTensorGen :: U.Vector Word -> (U.Vector Word -> e) -> Tensor i e
unsafeTensorGen ds f =
  Tensor ds $ G.generate (fromIntegral $ product ds) (f ∘ (unlinearize ds))
----------------------------------- Functor -----------------------------------
-- | @'fmap' = 'I.map'@.
instance Functor (Tensor is) where
fmap = I.map
--------------------------------- Applicative ---------------------------------
-- | @'pure' a@ yields a @'Tensor'@ with all components equal to @a@. @'<*>'@
-- applies the @'Tensor'@ of functions componentwise.
instance SingI is ⇒ Applicative (Tensor is) where
  pure = Tensor r ∘ G.replicate (fromIntegral $ product r)
    where r = fromList $ fromShape (sing ∷ Shape is)
(<*>) = ap
------------------------------------ Show ------------------------------------
-- | Rank 0 @'Tensor'@s are shown as a single element, rank 1 as lists, rank 2
-- as lists of lists, and so on, using [row-major
-- order](http://en.wikipedia.org/wiki/Row-major_order).
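--
-- For example, a @2x3@ matrix whose row-major contents are @1..6@ is shown
-- as @[[1,2,3],[4,5,6]]@.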
instance Show e ⇒ Show (Tensor i e) where
showsPrec _ (Tensor ds v) =
let sd = reverse ds
l = length v
r = length ds
    in if r ≡ 0
then shows $ v G.! 0
       else showsT (toList sd) l (replicate r 1) 1 ∘
            (shows $ v G.! (l-1)) ∘
(replicate r ']' ++)
where showsT sd l ys n =
let (zs,k) = match sd ys
in if n < l
               then showsT sd l zs (n+1) ∘
                    (shows $ v G.! (l-n-1)) ∘
                    (replicate k ']' ++) ∘
                    (',':) ∘ (replicate k '[' ++)
else (replicate k '[' ++)
match is js = match' is js [] 0
where match' [] _ zs n = (zs,n)
match' _ [] zs n = (zs,n)
match' (x:xs) (y:ys) zs n
              | x ≡ y = match' xs ys (zs ++ [1]) (n+1)
| otherwise = (zs ++ ((y+1):ys),n)
---------------------------------- Indexable ----------------------------------
-- | @('I.!')@ has @O(r)@ complexity.
instance Indexable Tensor where
type Index Tensor = MultiIndex
Tensor d u ! ix = u G.! linearize d (unMultiIndex ix)
generate = gen sing
where
      gen ∷ Shape is → (MultiIndex is → e) → Tensor is e
gen s f = let r = fromList $ fromShape s
in Tensor r $ G.generate (fromIntegral $ product r)
                                       (f ∘ MultiIndex ∘ (unlinearize r))
generateA = genA sing
where
      genA ∷ Applicative f ⇒ Shape is → (MultiIndex is → f e) → f (Tensor is e)
genA s f = let r = fromList $ fromShape s
in Tensor r <$> genAV (fromIntegral $ product r)
                                     (f ∘ MultiIndex ∘ (unlinearize r))
      genAV ∷ Applicative f ⇒ Int → (Int → f a) → f (V.Vector a)
      genAV n g | n ≡ 0 = pure G.empty
| otherwise = liftA2 cons (g 0)
                                     (genAV (n-1) (g ∘ succ))
generateM = genM sing
where
      genM ∷ Monad m ⇒ Shape is → (MultiIndex is → m e) → m (Tensor is e)
genM s f = do let r = fromList $ fromShape s
Tensor r `liftM` G.generateM (fromIntegral $ product r)
                                           (f ∘ MultiIndex ∘ (unlinearize r))
map f (Tensor d u) = Tensor d $ fmap f u
ap (Tensor _ f) (Tensor d u) = Tensor d (zipWith ($) f u)
------------------------------- MultiIndexable -------------------------------
-- | Methods are usually implemented using @'G.generate'@ to produce the
-- underlying @'V.Vector'@.
instance MultiIndexable Tensor where
  t0 = Tensor G.empty ∘ singleton
  unT0 = head ∘ content
concat (Tensor ds u) =
let h = head ds
t = tail ds
l = fromIntegral $ product t
j = h `cons` form (u G.! 0)
      in Tensor t $ G.generate l (\x → Tensor j
$ G.concat
                                       $ [ content (u G.! (x + k ⋅ l)) | k ← [0..pred (fromIntegral h)] ])
unConcat (Tensor ds u) =
let h = head es
          l = fromIntegral (product ds ⋅ h)
in Tensor (h `cons` ds) $ G.generate l f
where f n = let (i, is) = quotRem n $ fromIntegral $ product ds
                  in Tensor t $ G.slice (i ⋅ tl) tl $ content (u G.! is)
es = form (u G.! 0)
t = tail es
tl = fromIntegral $ product t
Tensor ds u `at` ix =
let h = head ds
t = tail ds
l = fromIntegral $ product t
i = linearize t $ unMultiIndex ix
in Tensor (singleton h) $
       G.generate (fromIntegral h) (\x → u G.! ((x ⋅ l) + i))
ix `ta` Tensor ds u = let t = tail ds
i = fromIntegral $ head $ unMultiIndex ix
l = fromIntegral $ product t
in Tensor t $
                           G.slice (i ⋅ l) l u
rev (Tensor sh v) =
let hs = reverse sh
zh = hs G.++ form (head v)
s = fromIntegral $ product sh
in Tensor zh $ G.concat $
       P.map (content ∘ (G.!) v ∘ transposeV sh) [0 .. pred s]
unRev (Tensor sh v) =
Tensor zh $ G.generate (fromIntegral $ product zh) gnr
where gnr i = let (q,r) = quotRem i s
in content (v G.! r) G.! transposeV rh q
zh = reverse rh G.++ sh
s = fromIntegral $ product sh
rh = form $ head v
-- | Index permutation yielding the @'V.Vector'@ representation of a transposed
-- tensor: (x^T)_i = x_(σi). The first argument is the dimensions array of the
-- original tensor.
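-- For instance, @transposeV [2,3] 1 = 3@: linear position 1 of the
-- transposed (3x2) tensor is read from linear position 3 of the original
-- row-major layout.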
transposeV ∷ U.Vector Word → Int → Int
transposeV ds = linearize ds ∘ G.reverse ∘ unlinearize (reverse ds)
---------------------------------- IsTensor ----------------------------------
-- | @'append'@ uses @'G.generate'@ to produce the underlying @'V.Vector'@,
-- @'split'@ uses @'imap'@.
instance IsTensor Tensor where
t1 (Tensor d u) = Tensor (1 `cons` d) u
unT1 (Tensor d u) = Tensor (tail d) u
(Tensor _ u) |: (Tensor d v) = let h = head d
t = tail d
in Tensor (succ h `cons` t) (u G.++ v)
unCons (Tensor d u) = let t = tail d
(v, w) = splitAt (fromIntegral $ product t) u
in (Tensor t v, Tensor (pred (head d) `cons` t) w)
append sh (Tensor d x) (Tensor e y) =
Tensor (take i d G.++ f) (G.generate len gnr)
    where f ∷ U.Vector Word
f = (d G.! i + e G.! i) `cons` drop (succ i) d
          len, m, n, o, i ∷ Int
len = fromIntegral $ product (take i d G.++ f)
m = fromIntegral $ product $ drop i d
n = fromIntegral $ product $ drop i e
o = fromIntegral $ product f
i = pred $ fromPI sh
gnr k = let (q,r) = quotRem k o
in if r < m
                     then x G.! (q ⋅ m + r)
                     else y G.! (q ⋅ n + r - m)
split = split' appendSing
    where split' ∷ Append n is js ks
                 → SPI n → Tensor ks e → (Tensor is e, Tensor js e)
split' a _ t = (u1, u2)
where (i, f1S) = splitDims a
                  f1, f2 ∷ U.Vector Word
                  f1 = imap (\n → if n ≡ i then const f1S else id) $
form t
                  f2 = imap (\n → if n ≡ i then (\x → x - f1S) else id) $
form t
u1 = unsafeTensorGen f1 (unsafeTensorGet t)
                  u2 = unsafeTensorGen f2 (unsafeTensorGet t ∘ fun)
                  fun = imap (\n → if n ≡ i then (+) f1S else id)
          splitDims ∷ Append n is js ks
                    → (Int, Word) -- ( position of split (from 0)
-- , first form vector at split position)
splitDims A1 = (0,1)
splitDims (A1S s) = second succ $ splitDims s
splitDims (An s) = first succ $ splitDims s
----------------------------------- IsList -----------------------------------
-- | The list representation of @'Tensor'@ uses [row-major
-- order](http://en.wikipedia.org/wiki/Row-major_order).
instance SingI is ⇒ IsList (Tensor is e) where
type Item (Tensor is e) = e
  fromList l = let s = (sing ∷ Shape is)
ds = fromList $ fromShape s
               in if P.length l ≡ (fromIntegral $ product ds)
then Tensor ds $ fromList l
else throw WrongListLength
  toList = toList ∘ content
----------------------------------- NFData -----------------------------------
-- | Evaluate the underlying @'V.Vector'@.
instance NFData e ⇒ NFData (Tensor is e) where
rnf (Tensor d u) = rnf d `seq` rnf u
----------------------------------- Random -----------------------------------
-- | Random @'Tensor'@ with independent and identically distributed components.
instance (Random e, SingI is) ⇒ Random (Tensor is e) where
randomR (l, h) =
let l' = toList $ content l
h' = toList $ content h
    in first (Tensor (form l) ∘ fromList) ∘ randomListR (l', h')
    where randomListR ∷ (Random e, RandomGen g) ⇒ ([e], [e]) → g → ([e], g)
randomListR ([] , _ ) = (,) []
randomListR (_ , [] ) = (,) []
          randomListR (a:as, b:bs) = (\(x, (y, z)) → (x : y, z)) ∘
                                     second (randomListR (as, bs)) ∘
randomR (a, b)
  random = let s = (sing ∷ Shape is)
ds = fromList $ fromShape s
l = product ds
           in first (Tensor ds ∘ fromList) ∘ randomsWithLength l
    where randomsWithLength ∷ (Random e, RandomGen g) ⇒ Word → g → ([e], g)
randomsWithLength 0 = (,) []
          randomsWithLength d = (\(x, (y, z)) → (x : y, z)) ∘
                                second (randomsWithLength (d-1)) ∘ random
---------------------------------- Sliceable ----------------------------------
sliceV ∷ V.Vector a → U.Vector Word → V.Vector (Maybe Word) → V.Vector a
sliceV v sh sl = G.generate (sliceSize sh sl) (\x → v G.! f sh ks sl 0 x)
  where ks ∷ U.Vector Word
        ks = prescanr' (⋅) 1 sh
        f ∷ U.Vector Word → U.Vector Word → V.Vector (Maybe Word) → Int
          → Int → Int
f zh js zl acc x
| null zh = acc
| otherwise = case head zl of
                        Just i → f (tail zh) (tail js) (tail zl)
                                   (acc + fromIntegral (pred i ⋅ head js))
x
                        Nothing → let (q,r) = quotRem x
(sliceSize (tail zh)
(tail zl))
in f (tail zh) (tail js) (tail zl)
                                       (acc + q ⋅ (fromIntegral $ head js))
r
sliceSize ∷ U.Vector Word → V.Vector (Maybe Word) → Int
sliceSize sh = fromIntegral ∘ ifoldr' (\i m acc → case m of
                                                  Nothing → sh G.! i ⋅ acc
                                                  _       → acc
) 1
sliceSh ∷ V.Vector (Maybe Word) → U.Vector Word → U.Vector Word
sliceSh sl = ifilter (\i _ → case sl G.! i of
                               Nothing → True
                               _       → False
)
-- | An @'IsSlicer'@ type optimized for slicing @'Tensor'@. It is internally
-- represented as a @'V.Vector'@ of @'Maybe' 'Word'@.
newtype Slicer (i ∷ [PI]) (j ∷ [Maybe PI]) (k ∷ [PI]) = Slicer
  { unSl ∷ V.Vector (Maybe Word) }
deriving Eq
instance IsSlicer Slicer where
nilS = Slicer G.empty
  allCons = Slicer ∘ cons Nothing ∘ unSl
  (&) i = Slicer ∘ cons (Just $ head $ unMultiIndex $ fromMultiIndex i) ∘ unSl
toSlicer = toSlicer' slicerShape
    where toSlicer' ∷ SlicerShape is js ks
                    → Slicer is js ks → S.Slicer is js ks
toSlicer' NilSh = const NilS
toSlicer' (AllConsSh sh) =
            AllCons ∘ toSlicer' sh ∘ Slicer ∘ tail ∘ unSl
toSlicer' (SOne :$ ssh) =
            (:&) (OneCons Nil) ∘ toSlicer' ssh ∘ Slicer ∘ tail ∘ unSl
toSlicer' (SS n :$ ssh) =
            bumpSl ∘ toSlicer' (n :$ ssh) ∘ Slicer ∘ predJHead ∘ unSl
          bumpSl ∷ S.Slicer (i ': is) ('Just i ': js) ks
                 → S.Slicer ('S i ': is) ('Just ('S i) ': js) ks
bumpSl (i :& s) = HeadSucc i :& s
          predJHead ∷ V.Vector (Maybe Word) → V.Vector (Maybe Word)
          predJHead = imap (\i → if i ≡ 0
                                  then Just ∘ pred ∘ fromJust
else id)
-- | Slice a @'Tensor'@ using @'G.generate'@ with an appropriate index selection
-- function on the underlying @'V.Vector'@.
instance Sliceable Tensor where
type Sl Tensor = Slicer
slice (Tensor sh v) (Slicer sl) = Tensor (sliceSh sl sh) (sliceV v sh sl)
--------------------------------------------------------------------------------
| tensor5/tensor | src/Data/Tensor/Vector.hs | bsd-3-clause | 20,137 | 0 | 19 | 7,629 | 6,010 | 3,147 | 2,863 | -1 | -1 |
module Main where
import Haste
import Haste.HPlay.View hiding (head)
import Radio.Application
import Control.Monad.IO.Class
import Prelude hiding (div)
import Radio.Util
main :: IO (Maybe ())
main = do
addCss "./bootstrap.min.css"
addCss "./bootstrap-theme.min.css"
embedCss myCss
addJs "./jquery-1.11.2.min.js"
addJs "./bootstrap.min.js"
embedJs myJs
runBody $ at "main-content" Insert $ timeout 1000 (runApplication initialState)
addCss :: String -> IO ()
addCss s = addHeader $
link ! atr "rel" "stylesheet"
! href s
embedCss :: String -> IO ()
embedCss s = addHeader $ styleBlock s
addJs :: String -> IO ()
addJs s = addHeader $ script noHtml ! src s
embedJs :: String -> IO ()
embedJs s = addHeader $ script s
myCss :: String
myCss = ".vertical-align {\n" ++
" display: flex;\n" ++
" flex-direction: row;\n" ++
"}\n" ++
".vertical-align > [class^=\"col-\"],\n" ++
".vertical-align > [class*=\" col-\"] {\n" ++
" display: flex;\n" ++
" align-items: center;\n" ++
" justify-content: center; \n" ++
"}"
myJs :: String
myJs = "var mouse = {x: 0, y: 0};\n" ++
"document.addEventListener('mousemove', function(e){\n" ++
"mouse.x = e.clientX || e.pageX;\n" ++
"mouse.y = e.clientY || e.pageY\n" ++
"}, false);" | Teaspot-Studio/bmstu-radio-problem-haste | Main.hs | bsd-3-clause | 1,348 | 0 | 13 | 321 | 337 | 168 | 169 | 43 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module      : Data.BEncode.Parser
-- Copyright : (c) 2005 Lemmih <[email protected]>
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- A parsec style parser for BEncoded data
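--
-- A small usage sketch (the dictionary key is hypothetical):
--
-- > nameP :: BParser String
-- > nameP = bstring (dict "name")
-- >
-- > -- runParser nameP (BDict (Map.fromList [("name", BString (L.pack "Lemmih"))]))
-- > --   == Right "Lemmih"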
-----------------------------------------------------------------------------
module Data.BEncode.Parser {-#
DEPRECATED "Use \"Data.BEncode.Reader\" instead" #-}
( BParser
, runParser
, token
, dict
, list
, optional
, bstring
, bbytestring
, bint
, setInput
, (<|>)
) where
import Control.Applicative hiding (optional)
import Control.Monad
import Data.BEncode
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.Map as Map
data BParser a
= BParser (BEncode -> Reply a)
instance Alternative BParser where
(<|>) = mplus
empty = mzero
instance MonadPlus BParser where
mzero = BParser $ \_ -> Error "mzero"
mplus (BParser a) (BParser b) = BParser $ \st -> case a st of
Error _err -> b st
ok -> ok
runB :: BParser a -> BEncode -> Reply a
runB (BParser b) = b
data Reply a
= Ok a BEncode
| Error String
instance Applicative BParser where
pure = return
(<*>) = ap
instance Monad BParser where
(BParser p) >>= f = BParser $ \b -> case p b of
Ok a b' -> runB (f a) b'
Error str -> Error str
return val = BParser $ Ok val
fail str = BParser $ \_ -> Error str
instance Functor BParser where
fmap = liftM
runParser :: BParser a -> BEncode -> Either String a
runParser parser b = case runB parser b of
Ok a _ -> Right a
Error str -> Left str
token :: BParser BEncode
token = BParser $ \b -> Ok b b
dict :: String -> BParser BEncode
dict name = BParser $ \b -> case b of
BDict bmap | Just code <- Map.lookup name bmap
-> Ok code b
BDict _ -> Error $ "Name not found in dictionary: " ++ name
_ -> Error $ "Not a dictionary: " ++ name
list :: String -> BParser a -> BParser [a]
list name p
= dict name >>= \lst ->
BParser $ \b -> case lst of
BList bs -> foldr (cat . runB p) (Ok [] b) bs
_ -> Error $ "Not a list: " ++ name
where cat (Ok v _) (Ok vs b) = Ok (v:vs) b
cat (Ok _ _) (Error str) = Error str
cat (Error str) _ = Error str
optional :: BParser a -> BParser (Maybe a)
optional p = liftM Just p <|> return Nothing
bstring :: BParser BEncode -> BParser String
bstring p = do b <- p
case b of
BString str -> return (L.unpack str)
_ -> fail $ "Expected BString, found: " ++ show b
bbytestring :: BParser BEncode -> BParser L.ByteString
bbytestring p = do b <- p
case b of
BString str -> return str
_ -> fail $ "Expected BString, found: " ++ show b
bint :: BParser BEncode -> BParser Integer
bint p = do b <- p
case b of
BInt int -> return int
_ -> fail $ "Expected BInt, found: " ++ show b
setInput :: BEncode -> BParser ()
setInput b = BParser $ \_ -> Ok () b
| matthewleon/bencode | src/Data/BEncode/Parser.hs | bsd-3-clause | 3,569 | 0 | 15 | 1,334 | 1,080 | 547 | 533 | 83 | 4 |
{-# LANGUAGE OverloadedStrings, NoMonomorphismRestriction #-}
module Network.Torrent.Client (
StorageConfig(..),
StorageCmd(..),
StorageEvent(..),
StoragePiece,
PieceInfo,
storage) where
import Prelude hiding (FilePath)
import Filesystem.Path.CurrentOS
import Control.Monad
import Control.Exception as X
import Control.Monad.IO.Class
import qualified Control.Concurrent.Lifted as CC
import Data.Conduit
import Data.Conduit.Filesystem
import Data.Conduit.List as CL
import Data.Torrent.MetaInfo
import Data.Torrent.Types
import Data.Torrent
import Data.ByteString (ByteString)
import Control.Monad.STM
import Data.Conduit.TMChan
import Control.Concurrent (forkIO, killThread)
data StorageConfig = StorageConfig {
dataDir :: FilePath,
torrentDir :: FilePath
} deriving (Eq, Show)
data StorageState = StorageSt {
storageCfg :: StorageConfig
} deriving (Eq, Show)
type PieceInfo = (SHA1, Integer)
type StoragePiece = (PieceInfo, ByteString)
data StorageCmd =
StorageCmdPiece !StoragePiece |
StorageCmdShutdown
data StorageEvent =
StorageNewPiece !PieceInfo |
StorageShutdown
tryReadTorrent :: (MonadResource m) => Conduit FilePath m BEncodedT
tryReadTorrent = awaitForever $ \fp -> do
t <- runExceptionT $ (sourceFile fp $$ sinkBencoded)
case t of
Left _ -> tryReadTorrent
Right t' -> yield t'
tryReadTorrent
t :: IO ()
t = do
_ <- runExceptionT . runResourceT $ st $$ await
_ <- runExceptionT . runResourceT $ st $$ await
_ <- runExceptionT . runResourceT $ st $$ await
_ <- runExceptionT . runResourceT $ st $$ await
return ()
where baseDir = "/home/alios/tmp/torrent"
st = storage (StorageConfig (baseDir </> "d/")(baseDir </> "t/"))
storage :: MonadResource m => StorageConfig -> Source m ()
storage cfg = bracketP (storageThreadInit cfg) storageThreadRelease storageThreadMain
where storageThreadInit cfg =
let mkChans = atomically $ do
cmdChan <- newTBMChan 16
eventChan <- newTBMChan 16
return (cmdChan, eventChan)
storageThread (cmdChan, eventChan) = do
r <- runExceptionT . runResourceT $
sourceTBMChan cmdChan =$= storage' cfg $$ sinkTBMChan eventChan
case r of
Left err -> fail $ show err
Right () -> return ()
in do
liftIO . print $ "storageThreadInit with: " ++ show cfg
chans <- mkChans
tid <- forkIO $ storageThread chans
return (tid, chans)
storageThreadRelease (tid, (cmdChan, eventChan)) = do
liftIO . print $ "storageThreadRelease with TID: " ++ show tid
cl <- atomically . isClosedTBMChan $ cmdChan
if (not cl)
then do
-- sent shutdown
atomically $ unGetTBMChan cmdChan StorageCmdShutdown
-- wait for shutdown notice
let waitForShutdown = do
e <- (sourceTBMChan eventChan) $$ await
case e of
Just StorageShutdown -> return ()
_ -> waitForShutdown
runResourceT waitForShutdown
else return ()
killThread tid
storageThreadMain args@(tid, chans) = do
liftIO . print $ "storageThreadMain"
yield ()
storageThreadMain args
storage' :: MonadResource m =>
StorageConfig -> Conduit StorageCmd m StorageEvent
storage' cfg = stConduit
where stConduit = bracketP storageInit storageRelease storageMain
storageInit :: IO StorageState
storageInit = do
liftIO . print $ "storageInit with: " ++ show cfg
tfs <- runResourceT $ readTorrentsConduit
tpvs <- liftIO $ Prelude.sequence [metaInit t $ dataDir cfg | t <- tfs]
return $ StorageSt {
storageCfg = cfg
}
readTorrentsConduit =
(traverse True $ torrentDir cfg)
$= CL.filter (\fp -> fp `hasExtension` "torrent")
$= tryReadTorrent
$$ consume
storageRelease :: StorageState -> IO ()
storageRelease st = do
liftIO . print $ "storageRelease with: " ++ show st
storageMain :: (MonadIO m) => StorageState -> Conduit StorageCmd m StorageEvent
storageMain st = awaitForever $ \cmd -> do
liftIO . print $ "storageMain with: " ++ show st
case cmd of
StorageCmdShutdown -> return ()
StorageCmdPiece ((ihash, pid), bs) -> do
storageMain st
{-
initStorage :: (MonadIO m, MetaInfo meta) =>
StorageConfig -> Maybe [meta] -> m ()
initStorage cfg ts = do
fpProd = map (
return ()
-}
| alios/lcars2 | Network/Torrent/Client.hs | bsd-3-clause | 4,764 | 0 | 21 | 1,409 | 1,252 | 644 | 608 | 121 | 4 |
{-# LANGUAGE ConstraintKinds
, ScopedTypeVariables
, TypeOperators #-}
module Rad.QL.Internal.GUnion where
import Data.Monoid ((<>))
import Data.String (fromString)
import GHC.Generics
import Rad.QL.Internal.Types
import Rad.QL.AST
import Rad.QL.Types
type IsUnion m a = (Generic a, GUnion m (Rep a), Monad m)
unionName :: forall m a. (IsUnion m a) => m a -> Name
unionName _ = gUnionName (undefined :: m ())
$ from (undefined :: a )
unionTypes :: forall m a. (IsUnion m a) => m a -> [ObjectTypeDef]
unionTypes _ = gUnionTypes (undefined :: m ())
$ from (undefined :: a)
unionResolve :: forall m a. (IsUnion m a) => Resolver m a
unionResolve = castResolve from gUnionResolve
class (Monad m) => GUnion m f where
gUnionName :: m () -> f a -> Name
gUnionName _ _ = "someFreshName"
gUnionTypes :: m () -> f a -> [ObjectTypeDef]
gUnionResolve :: Resolver m (f a)
-- handles DataType
instance (GUnion m a, Datatype c) => GUnion m (M1 D c a) where
gUnionName _ _ = fromString $ datatypeName (undefined :: M1 i c a x)
gUnionTypes _ _ = gUnionTypes (undefined :: m ()) (undefined :: a x)
gUnionResolve = castResolve unM1 gUnionResolve
-- handles Constructor
instance (GUnion m a, Constructor c) => GUnion m (M1 C c a) where
gUnionName _ _ = "UNDEFINED"
gUnionTypes _ _ = gUnionTypes (undefined :: m ()) (undefined :: a x)
gUnionResolve = castResolve unM1 gUnionResolve
-- handles Selector
instance (GUnion m a, Selector c) => GUnion m (M1 S c a) where
gUnionName _ _ = "UNDEFINED"
gUnionTypes _ _ = gUnionTypes (undefined :: m ()) (undefined :: a x)
gUnionResolve = castResolve unM1 gUnionResolve
-- handles ADT unions
instance (GUnion m a, GUnion m b) => GUnion m (a :+: b) where
gUnionName _ _ = "UNDEFINED"
gUnionTypes _ _ = gUnionTypes (undefined :: m ()) (undefined :: a x)
<> gUnionTypes (undefined :: m ()) (undefined :: b y)
gUnionResolve args (L1 x) = gUnionResolve args x
gUnionResolve args (R1 x) = gUnionResolve args x
-- handles K1
instance (GraphQLType OBJECT m a) => GUnion m (K1 i a) where
gUnionName _ _ = "UNDEFINED"
gUnionTypes _ _ =
case (gqlTypeDef (def :: GraphQLTypeDef OBJECT m a)) of
TypeDefObject x -> [x]
_ -> [] -- this should never happen
gUnionResolve = castResolve unK1
$ gqlResolve (def :: GraphQLTypeDef OBJECT m a)
| jqyu/bustle-chi | src/Rad/QL/Internal/GUnion.hs | bsd-3-clause | 2,441 | 0 | 10 | 594 | 926 | 488 | 438 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
module AdjointFoldAndUnfold where
-- | ref.) http://www.cs.ox.ac.uk/ralf.hinze/SSGIP10/AdjointFolds.pdf
-- https://www.researchgate.net/publication/221440236_Adjoint_Folds_and_Unfolds
{--
import Numeric.Natural
data Stack s = Empty
| Push (Natural, s)
deriving Show
instance Functor Stack where
fmap f Empty = Empty
fmap f (Push (n, s)) = Push (n, f s)
newtype Fix f = In { unIn :: f (Fix f) }
newtype Cofix f = UnOut { out :: f (Cofix f) }
total :: Fix Stack -> Natural
total (In Empty) = 0
total (In (Push (n, s))) = n + total s
-- | Mendler-style
-- psi == total'
-- x == ttl
-- a == Empty and Pust (n, s)
total' :: (t -> Natural) -> Stack t -> Natural
total' ttl Empty = 0
total' ttl (Push (n, s)) = n + ttl s
ttl :: Fix Stack -> Natural
ttl (In s) = total' ttl s -- Mendler-style equation : x (In a) == psi x a
data Sequ s = Next (Natural, s) deriving Show
instance Functor Sequ where
fmap f (Next (n, s)) = Next (n, f s)
from :: Natural -> Cofix Sequ
from n = UnOut (Next (n, from (n+1)))
-- | Mendler-style
-- psi == from'
-- x == frm
-- a == n
from' :: (Natural -> s) -> Natural -> Sequ s
from' frm n = Next (n, frm (n+1))
frm :: Natural -> Cofix Sequ
frm n = UnOut (from' frm n) -- Mendler-style equation : x a = UnOut (psi x a)
data Pow a = Zero a
| Succ (Pow (Pair a))
deriving Show
type Pair a = (a, a)
data Base a b = Nil
| Cons (a, b)
deriving Show
type ListF x a = Base a (x a)
newtype List a = InList (ListF List a)
--}
{--
-- Example 3.1
--
-- ListF X = 1 + Id * X
--
data Base a b = Nil | Cons (a, b)
type ListF x a = Base a (x a)
newtype List a = In { out :: ListF List a }
--
-- data List a = Nil | Cons (a, List a)
--
base :: (a -> c) -> (b -> d) -> Base a b -> Base c d
base f g Nil = Nil
base f g (Cons (x, y)) = Cons (f x, g y)
hfold :: (forall a. ListF n a -> n a) -> List b -> n b
hfold f = f . listF (hfold f) . out
listF :: (forall a. x a -> y a) -> ListF x b -> ListF y b
listF f = base id f
--}
{--
-- Example 3.2
--
-- NestF X = 1 + Id * (X . Pair)
--
data Base a b = Nil | Cons (a, b)
type NestF x a = Base a (x (Pair a))
type Pair a = (a, a)
newtype Nest a = In { out :: NestF Nest a }
--
-- data Nest a = Nil | Cons (a, Nest (a, a))
--
base :: (a -> c) -> (b -> d) -> Base a b -> Base c d
base f g Nil = Nil
base f g (Cons (x, y)) = Cons (f x, g y)
hfold :: (forall a. Base a (n (Pair a)) -> n a) -> Nest b -> n b
hfold f = f . base id (hfold f) . out
nest :: (a -> b) -> Nest a -> Nest b
nest f = In . base f (nest (pair f)) . out
pair :: (a -> b) -> Pair a -> Pair b
pair f (x, y) = (f x, f y)
--}
{--
-- Example 3.3
--
-- HostF X = 1 + Id * (X . (Id * X))
--
data Base a b = Nil | Cons (a, b)
type HostF x a = Base a (x (a, x a))
newtype Host a = In { out :: HostF Host a }
--
-- data Host a = Nil | Cons (a, Host (a, Host a))
--
base :: (a -> c) -> (b -> d) -> Base a b -> Base c d
base f g Nil = Nil
base f g (Cons (x, y)) = Cons (f x, g y)
hfold :: (forall a. Base a (n (a, n a)) -> n a) -> Host b -> n b
hfold f = f . base id (hfold f . host (id *** hfold f)) . out
host :: (a -> b) -> Host a -> Host b
host f = In . base f (host (f *** host f)) . out
(***) :: (a -> c) -> (b -> d) -> (a, b) -> (c, d)
(f *** g) (a, b) = (f a, g b)
--}
{--
-- Example 4.1
-- Generalized
-- F X = B . <Id, X . F_1 X, X . F_2 X, ...>
--
-- ListF X = 1 + Id * X
--
-- Base X Y = 1 + X * Y
-- ListF X = Base . <Id, X . F_1 X> == ListF x a = Base a (x (f1 x a)) ==> Base a (x a)
-- F_1 X = Id == f1 x a = a
--
-- out
-- List A ------------------------> 1 + A x List A
-- | |
-- | (|f|) | 1 + 1_A x (|f|)
-- v v
-- n A ------------------------> 1 + A x n A
-- f
--
data Base a b = Nil | Cons (a, b)
type ListF x a = Base a (x a)
newtype List a = In { out :: ListF List a }
--
-- data List a = Nil | Cons (a, List a)
--
base :: (a -> c) -> (b -> d) -> Base a b -> Base c d
base f g Nil = Nil
base f g (Cons (x, y)) = Cons (f x, g y)
hfold :: (forall a. ListF n a -> n a) -> List b -> n b
hfold f = f . listF (hfold f) . out
listF :: (forall a. x a -> y a) -> ListF x b -> ListF y b
listF f = base id f
list :: (a -> b) -> List a -> List b
list f = In . base f (list f) . out
gfold :: (forall a. Base (m a) (n a) -> n a)
-> (forall a. m a -> m a)
-> List (m b) -> n b
gfold f g = f . base id (gfold f g . list g) . out
--}
{--
-- Example 4.2
-- Generalized
-- F X = B . <Id, X . F_1 X, X . F_2 X, ...>
--
-- NestF X = 1 + Id * (X . Pair)
--
-- Base X Y = 1 + X * Y
-- NestF X = Base . <Id, X . F_1 X> == HostF x a = Base a (x (f1 x a)) ==> Base a (x (Pair a))
-- F_1 X = Pair == f1 x a = Pair a
--
--
-- o hfold
-- out
-- Nest A ------------------------> 1 + A x Nest (Pair A)
-- | |
-- | (|f|) | 1 + 1_A x (|f|)
-- v v
-- n A <------------------------ 1 + A x n (Pair A)
-- ^
-- |
-- v
-- n a <------------------------ 1 + a x n a forall a.
-- f
-- * f :: forall a. 1 + a x n (Pair a) -> n a
--
-- o nest
-- out
-- A Nest A --------------------------------> 1 + A x Nest (Pair A)
-- | |
-- | nest f | 1 + 1_A x nest (pair f)
-- v v
-- B Nest B <------ 1 + B x n (Pair B) <------ 1 + A x Nest (Pair B)
-- In 1 + f x id
data Base a b = Nil | Cons (a, b)
type NestF x a = Base a (x (Pair a))
type Pair a = (a, a)
newtype Nest a = In { out :: NestF Nest a }
--
-- data Nest a = Nil | Cons (a, Nest (a, a))
--
base :: (a -> c) -> (b -> d) -> Base a b -> Base c d
base f g Nil = Nil
base f g (Cons (x, y)) = Cons (f x, g y)
hfold :: (forall a. Base a (n (Pair a)) -> n a) -> Nest b -> n b
hfold f = f . base id (hfold f) . out
nest :: (a -> b) -> Nest a -> Nest b
nest f = In . base f (nest (pair f)) . out
pair :: (a -> b) -> Pair a -> Pair b
pair f (x, y) = (f x, f y)
gfold :: (forall a. Base (m a) (n (Pair a)) -> n a)
-> (forall a. Pair (m a) -> m (Pair a))
-> Nest (m b) -> n b
gfold f g = f . base id (gfold f g . nest g) . out
--}
{--
-- Example 4.3
--
-- HostF X = 1 + Id * (X . (Id * X))
--
-- o hfold
-- out
-- Host A -------------------------------------> 1 + A x Host (A x Host A)
-- | |
-- | | 1 + 1_A x host (id_A x (|f|))
-- | v
-- | (|f|) 1 + A x Host (A x n A)
-- | |
-- | | 1 + 1_A x (|f|)
-- v v
-- n A <------------------------------------- 1 + A x n (A x n A)
-- f
--
-- out
-- Host A ----------------------------------> 1 + A x Host A
-- | |
-- | host f | 1 + 1_A x host f
-- v v
-- Host B <-------- 1 + B x Host B <--------- 1 + A x Host B
-- In 1 + f x id
--
data Base a b = Nil | Cons (a, b)
type HostF x a = Base a (x (a, x a))
newtype Host a = In { out :: HostF Host a }
--
-- data Host a = Nil | Cons (a, Host (a, Host a))
--
-- Base X Y = 1 + X * Y
-- HostF X = Base . <Id, X . F_1 X> == HostF x a = Baes a (x (f1 x a)) ==> Base a (x (a, x a))
-- F_1 X = * . <Id, X . F_2 X> == f1 x a = (a, x (f2 x a)) ==> (a, x a)
-- F_2 X = Id == f2 x a = a
--
base :: (a -> c) -> (b -> d) -> Base a b -> Base c d
base f g Nil = Nil
base f g (Cons (x, y)) = Cons (f x, g y)
hfold :: (forall a. Base a (n (a, n a)) -> n a) -> Host b -> n b
hfold f = f . base id (hfold f . host (id *** hfold f)) . out
host :: (a -> b) -> Host a -> Host b
host f = In . base f (host (f *** host f)) . out
(***) :: (a -> c) -> (b -> d) -> (a, b) -> (c, d)
(f *** g) (a, b) = (f a, g b)
gfold :: (forall a. Base (m a) (n (a, n a)) -> n a)
-> (forall a. (m a, n a) -> m (a, n a))
-> (forall a. m a -> m a)
-> Host (m b) -> n b
gfold f g1 g2 = f . base id (gfold f g1 g2 . host (g1 . (id *** (gfold f g1 g2 . host g2)))) . out
--}
| cutsea110/aop | src/AdjointFoldAndUnfold.hs | bsd-3-clause | 8,864 | 0 | 2 | 3,527 | 14 | 13 | 1 | 2 | 0 |