Column schema (13 columns):

  column               type     range / values
  code                 string   length 5 to 1.03M
  repo_name            string   length 5 to 90
  path                 string   length 4 to 158
  license              string   15 classes
  size                 int64    5 to 1.03M
  n_ast_errors         int64    0 to 53.9k
  ast_max_depth        int64    2 to 4.17k
  n_whitespaces        int64    0 to 365k
  n_ast_nodes          int64    3 to 317k
  n_ast_terminals      int64    1 to 171k
  n_ast_nonterminals   int64    1 to 146k
  loc                  int64    -1 to 37.3k
  cycloplexity         int64    -1 to 1.31k
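One row of this table maps naturally onto a flat record. The sketch below is illustrative only: the Row type is an assumption made here for clarity, not something shipped with the dataset; its field names simply mirror the columns above.

-- Illustrative sketch: one dataset row as a plain Haskell record.
-- The Row type is hypothetical; field names mirror the column schema.
import Data.Text (Text)

data Row = Row
  { code               :: Text  -- full source file contents
  , repo_name          :: Text  -- e.g. "richardlarocque/latin-db-builder"
  , path               :: Text  -- file path within the repository
  , license            :: Text  -- one of the 15 license classes
  , size               :: Int   -- source size
  , n_ast_errors       :: Int
  , ast_max_depth      :: Int
  , n_whitespaces      :: Int
  , n_ast_nodes        :: Int
  , n_ast_terminals    :: Int
  , n_ast_nonterminals :: Int
  , loc                :: Int   -- -1 in some sample rows below
  , cycloplexity       :: Int   -- -1 in some sample rows below
  } deriving (Show)

The sample rows below follow this schema: the code cell comes first, followed by the remaining fields one per line.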
{-
Copyright (C) 2014 Richard Larocque <[email protected]>

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}

{-# LANGUAGE OverloadedStrings #-}

module Wiki.PageParser(
  extractTemplates,
  getPOSSections,
  getSection,
  parsePage) where

import qualified Data.Text as T
import Data.Maybe
import Data.List
import Text.Regex.TDFA
import Text.Regex.TDFA.Text

import Wiki.Types
import Wiki.Markup
import Latin.PartsOfSpeech
import Latin.Types

getTemplates :: T.Text -> [TemplateRef]
getTemplates txt =
  let submatches = firstSubMatches $ txt =~ ("{{([^}]*)}}" :: String)
  in map (TemplateRef.(T.splitOn (T.singleton '|'))) submatches
  where
    firstSubMatches :: [[T.Text]] -> [T.Text]
    firstSubMatches ms = map (\x -> x !! 1) ms

-- We don't bother trying to extract templates from bullets or section headers.
extractTemplates :: [WikiLine] -> [TemplateRef]
extractTemplates ls = concat [ getTemplates txt | (Line txt) <- ls ]

parsePage :: T.Text -> [WikiLine]
parsePage page = map parseLine (T.lines page)

parseLine :: T.Text -> WikiLine
parseLine line = head $ catMaybes $ [ f line | f <- line_parsers ]

line_parsers :: [T.Text -> Maybe WikiLine]
line_parsers = [
  parseSectionHeader,
  parseBlankLine,
  parseBullet,
  parseNumBullet,
  Just . Line . unMarkup]

parseSectionHeader :: T.Text -> Maybe WikiLine
parseSectionHeader line = do
  matches <- (line =~~ ("^(=+)([^=]+)" :: String))
  let matches' = getAllTextSubmatches matches :: [T.Text]
  let equal_signs = matches' !! 1
  let section_level = T.length equal_signs
  let header_text = matches' !! 2
  return $ Section section_level header_text

parseBlankLine :: T.Text -> Maybe WikiLine
parseBlankLine txt = if txt == (T.empty) then Just BlankLine else Nothing

parseBullet :: T.Text -> Maybe WikiLine
parseBullet l = do
  unbullet <- T.stripPrefix (T.pack "* ") l
  return $ Bullet (unMarkup unbullet)

parseNumBullet :: T.Text -> Maybe WikiLine
parseNumBullet l = do
  unbullet <- T.stripPrefix (T.pack "# ") l
  return $ NumBullet (unMarkup unbullet)

getSection :: Int -> T.Text -> [WikiLine] -> [WikiLine]
getSection level name = (dropNonSectionSuffix.tryDropHead.dropNonSectionPrefix)
  where
    dropNonSectionPrefix = dropWhile (not.(isMatchingHeader level name))
    dropNonSectionSuffix = takeWhile (not.(isLevelNHeader level))
    tryDropHead ys = case ys of
      (_:rest) -> rest
      [] -> []
    isMatchingHeader l n h = case h of
      (Section l' n') -> l == l' && n == n'
      _ -> False
    isLevelNHeader l h = case h of
      (Section l' _) -> l == l'
      _ -> False

-- Mess of code to parse into sections. Could this be written better?
getPOSSections :: [WikiLine] -> [(PartOfSpeech, [WikiLine])]
getPOSSections ls = getPOSSections' [] ls

getPOSSections' :: [(PartOfSpeech, [WikiLine])] -> [WikiLine] -> [(PartOfSpeech, [WikiLine])]
getPOSSections' buf ls = case getPOSSection ls of
  Just (result, rest) -> getPOSSections' (result:buf) rest
  Nothing -> buf

getPOSSection :: [WikiLine] -> Maybe ((PartOfSpeech, [WikiLine]), [WikiLine])
getPOSSection ls = do
  (part, level, after_header) <- getPOSStart ls
  let (section, rest) = span (not.(isLevelNHeader level)) after_header
  return ((part, section), rest)
  where
    isLevelNHeader n h = case h of
      (Section l _) -> l == n
      _ -> False

getPOSStart :: [WikiLine] -> Maybe (PartOfSpeech, Int, [WikiLine])
getPOSStart ((Section l h):xs) = case lookup h header_part_map of
  Just part -> Just (part, l, xs)
  Nothing -> getPOSStart xs
getPOSStart (_:xs) = getPOSStart xs
getPOSStart [] = Nothing

header_part_map :: [(T.Text, PartOfSpeech)]
header_part_map = [
  ("Adjective", Adjective),
  ("Adverb", Adverb),
  ("Conjunction", Conjunction),
  ("Determiner", Determiner),
  ("Interjection", Interjection),
  ("Noun", Noun),
  ("Proper noun", Noun),
  ("Numeral", Numeral),
  ("Particle", Particle),
  ("Preposition", Preposition),
  ("Pronoun", Pronoun),
  ("Verb", Verb)]
repo_name: richardlarocque/latin-db-builder
path: Wiki/PageParser.hs
license: gpl-3.0
size: 4,513
n_ast_errors: 51
ast_max_depth: 14
n_whitespaces: 752
n_ast_nodes: 1,427
n_ast_terminals: 769
n_ast_nonterminals: 658
loc: 104
cycloplexity: 4
module Text.Lox
  ( loxVersion
  , formulaToHTML
  , formulaToLaTeX
  , preprocessDocument
  , processFormulasBetween
  ) where

import Text.Lox.Reader
import Text.Lox.Readers.Document
import Text.Lox.Writers.Latex
import Text.Lox.Writers.HTML
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Error
import Control.Applicative ((<$>))

loxVersion :: String
loxVersion = "0.0.1"

formulaToHTML :: String -> Either ParseError String
formulaToHTML fml = asHTML <$> parse statement "" fml

formulaToLaTeX :: String -> Either ParseError String
formulaToLaTeX fml = asLatex <$> parse statement "" fml

preprocessDocument :: String -> String
preprocessDocument doc = processFormulasBetween '§' formulaToLaTeX doc

processFormulasBetween :: Char -> (String -> Either ParseError String) -> String -> String
processFormulasBetween separator transformer doc =
  case parse (constituents separator) "" doc of
    Left err    -> doc
    Right parts -> concat (map (processPart transformer) parts)

processPart :: (String -> Either ParseError String) -> Either String String -> String
processPart transformer part =
  case transformer <$> part of
    -- TODO strict-mode
    Left x -> x
    Right (Left err) ->
      if "mode" == "strict"
        then error "err"
        else "§" ++ (plain part) ++ "§"
    Right (Right tex) -> "$" ++ tex ++ "$"
  where
    plain (Right x) = x
repo_name: knuton/lox
path: Text/Lox.hs
license: gpl-3.0
size: 1,499
n_ast_errors: 0
ast_max_depth: 12
n_whitespaces: 363
n_ast_nodes: 409
n_ast_terminals: 216
n_ast_nonterminals: 193
loc: 32
cycloplexity: 4
{-#LANGUAGE DeriveGeneric #-} module Handler.Admin where import Import import Util.Data import Util.Database import Handler.Instructor (dateDisplay) import Yesod.Form.Bootstrap3 import Yesod.Form.Jquery import Text.Blaze.Html (toMarkup) import Data.Aeson (decode,encode) import Data.Time import System.FilePath import System.Directory (getDirectoryContents,removeFile, doesFileExist) deleteAdminR :: Handler Value deleteAdminR = do msg <- requireJsonBody :: Handler AdminDelete case msg of DowngradeInstructor uid -> do mud <- runDB $ get uid case mud of Just ud -> case userDataInstructorId ud of Just iid -> do runDB $ do cids <- map entityKey <$> selectList [CourseInstructor ==. iid] [] students <- concat <$> mapM (\cid -> selectList [UserDataEnrolledIn ==. Just cid] []) cids mapM (\student -> update (entityKey student) [UserDataEnrolledIn =. Nothing]) students update uid [UserDataInstructorId =. Nothing] deleteCascade iid returnJson ("Downgraded!" :: Text) Nothing -> returnJson ("Not an instructor" :: Text) _ -> returnJson ("Bad Message" :: Text) postAdminR :: Handler Html postAdminR = do allUserData <- runDB $ selectList [] [] let allStudentsData = filter (\x -> userDataInstructorId (entityVal x) == Nothing) allUserData allStudentUids = map (userDataUserId . entityVal) allStudentsData students <- catMaybes <$> mapM (\x -> runDB (get x)) allStudentUids ((upgraderslt,upgradeWidget),enctypeUpgrade) <- runFormPost (upgradeToInstructor students) case upgraderslt of (FormSuccess ident) -> do success <- runDB $ do imd <- insert InstructorMetadata muent <- getBy $ UniqueUser ident mudent <- case entityKey <$> muent of Just uid -> getBy $ UniqueUserData uid Nothing -> return Nothing case entityKey <$> mudent of Nothing -> return False Just udid -> do update udid [UserDataInstructorId =. Just imd] return True if success then setMessage $ "user " ++ (toMarkup ident) ++ " upgraded to instructor" else setMessage $ "couldn't upgrade user " ++ (toMarkup ident) ++ " to instructor" (FormFailure s) -> setMessage $ "Something went wrong: " ++ toMarkup (show s) FormMissing -> setMessage "Submission data incomplete" redirect AdminR --XXX: redirect here to make sure changes are visually reflected getAdminR :: Handler Html getAdminR = do allUserData <- runDB $ selectList [] [] let allInstructorsData = filter (\x -> userDataInstructorId (entityVal x) /= Nothing) allUserData allStudentsData = filter (\x -> userDataInstructorId (entityVal x) == Nothing) allUserData allInstructorUids = map (userDataUserId .entityVal) allInstructorsData allStudentUids = map (userDataUserId . entityVal) allStudentsData allCoursesByInstructor <- mapM getCoursesWithEnrollment (map entityVal allInstructorsData) allInstructors <- catMaybes <$> mapM (\x -> runDB (get x)) allInstructorUids let instructorsPlus = zip3 allInstructors allInstructorsData allCoursesByInstructor students <- catMaybes <$> mapM (\x -> runDB (get x)) allStudentUids instructorW <- instructorWidget instructorsPlus emailW <- emailWidget allInstructors unenrolledW <- unenrolledWidget allStudentsData (concat allCoursesByInstructor) (upgradeWidget,enctypeUpgrade) <- generateFormPost (upgradeToInstructor students) defaultLayout $ do toWidgetHead [julius| function tryDelete (ident, json) { if (ident == prompt("Are you sure you want to downgrade this instructor?\nAll their data will be lost. 
Enter their ident to confirm.")) { adminDelete(json); } else { alert("Wrong Ident!"); } }; function adminDelete (json) { jQuery.ajax({ url: '@{AdminR}', type: 'DELETE', contentType: "application/json", data: json, success: function(data) { window.alert(data); location.reload() }, error: function(data) { window.alert("Error, couldn't delete") }, }); }; |] [whamlet| <div.container> <h1> Admin Portal ^{emailW} ^{instructorW} ^{unenrolledW} <form method=post enctype=#{enctypeUpgrade}> ^{upgradeWidget} <div.form-group> <input.btn.btn-primary type=submit value="upgrade"> |] upgradeToInstructor users = renderBootstrap3 BootstrapBasicForm $ areq (selectFieldList userIdents) (bfs ("Upgrade User to Instructor" :: Text)) Nothing where userIdents = let idents = map userIdent users in zip idents idents unenrolledWidget :: [Entity UserData] -> [(Entity Course,[Entity UserData])] -> HandlerT App IO Widget unenrolledWidget students courses = do time <- liftIO getCurrentTime let unenrolledData = filter (\x -> userDataEnrolledIn (entityVal x) == Nothing) students inactive = filter (\(c,e) -> courseEndDate (entityVal c) < time) expiredData = concat . map snd . inactive $ courses unenrolled <- catMaybes <$> mapM (\ud -> runDB $ get (userDataUserId (entityVal ud))) unenrolledData expired <- catMaybes <$> mapM (\ud -> runDB $ get (userDataUserId (entityVal ud))) expiredData return [whamlet| <div.card style="margin-bottom:20px"> <div.card-header> Unenrolled and Expired Students <div.card-block> <table.table.table-striped> <thead> <th> Ident <th> Name <tbody> $forall (User ident _, Entity _ (UserData fn ln _ _ _)) <- zip unenrolled unenrolledData <tr> <td> <a href=@{UserR ident}>#{ident} <td> #{ln}, #{fn} <tbody> $forall (User ident _, Entity _ (UserData fn ln _ _ _)) <- zip expired expiredData <tr> <td> <a href=@{UserR ident}>#{ident} <td> #{ln}, #{fn} |] emailWidget :: [User] -> HandlerT App IO Widget emailWidget insts = do let emails = intercalate "," (map userIdent insts) return [whamlet| <a href="mailto:[email protected]?bcc=#{emails}">Email Instructors |] instructorWidget :: [(User,Entity UserData,[(Entity Course,[Entity UserData])])] -> HandlerT App IO Widget instructorWidget instructorPlus = do time <- liftIO getCurrentTime let active = filter (\(c,e) -> courseEndDate (entityVal c) > time) inactive = filter (\(c,e) -> courseEndDate (entityVal c) < time) return [whamlet| $forall (instructor, Entity key (UserData fn ln _ _ _), courses) <- instructorPlus <div.card style="margin-bottom:20px"> <div.card-header> <a href=@{UserR (userIdent instructor)}>#{userIdent instructor} — #{fn} #{ln} <div.card-block> $forall (course, enrollment) <- active courses <h3> #{courseTitle (entityVal course)} <table.table.table-striped> <thead> <th> Name <tbody> $forall UserData sfn sln _ _ _ <- map entityVal enrollment <tr> <td> #{sln}, #{sfn} $if null $ inactive courses $else <h3>Inactive Classes <table.table.table-striped> <thead> <th> Name <th> End Date <tbody> $forall (course, _) <- inactive courses <tr> <td> #{courseTitle (entityVal course)} <td> #{dateDisplay (courseEndDate (entityVal course)) (entityVal course)} <button.btn.btn-sm.btn-danger type="button" onclick="tryDelete('#{userIdent instructor}', '#{decodeUtf8 $ encode $ DowngradeInstructor key}')"> Downgrade Instructor |] getCoursesWithEnrollment ud = case userDataInstructorId ud of Just iid -> do courseEnt <- runDB $ selectList [CourseInstructor ==. iid] [] enrollments <- mapM (\c -> runDB $ selectList [UserDataEnrolledIn ==. 
Just (entityKey c)] []) courseEnt return $ zip courseEnt enrollments Nothing -> return [] data AdminDelete = DowngradeInstructor UserDataId deriving Generic instance ToJSON AdminDelete instance FromJSON AdminDelete
repo_name: gleachkr/Carnap
path: Carnap-Server/Handler/Admin.hs
license: gpl-3.0
size: 11,896
n_ast_errors: 1
ast_max_depth: 31
n_whitespaces: 5,705
n_ast_nodes: 1,731
n_ast_terminals: 859
n_ast_nonterminals: 872
loc: -1
cycloplexity: -1
{-# LANGUAGE TemplateHaskell #-}

module Types (IssueFieldType(..)) where

import Database.Persist.TH

data IssueFieldType = Reference | Text
  deriving (Read, Show, Eq)

derivePersistField "IssueFieldType"
repo_name: sirius94/free-tracker
path: free-tracker-server/app/Types.hs
license: gpl-3.0
size: 217
n_ast_errors: 0
ast_max_depth: 6
n_whitespaces: 38
n_ast_nodes: 51
n_ast_terminals: 30
n_ast_nonterminals: 21
loc: 6
cycloplexity: 0
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Dataproc.Projects.Regions.Clusters.Diagnose -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Gets cluster diagnostic information. After the operation completes, the -- Operation.response field contains \`DiagnoseClusterOutputLocation\`. -- -- /See:/ <https://cloud.google.com/dataproc/ Google Cloud Dataproc API Reference> for @dataproc.projects.regions.clusters.diagnose@. module Network.Google.Resource.Dataproc.Projects.Regions.Clusters.Diagnose ( -- * REST Resource ProjectsRegionsClustersDiagnoseResource -- * Creating a Request , projectsRegionsClustersDiagnose , ProjectsRegionsClustersDiagnose -- * Request Lenses , pXgafv , pUploadProtocol , pPp , pAccessToken , pUploadType , pPayload , pBearerToken , pClusterName , pRegion , pProjectId , pCallback ) where import Network.Google.Dataproc.Types import Network.Google.Prelude -- | A resource alias for @dataproc.projects.regions.clusters.diagnose@ method which the -- 'ProjectsRegionsClustersDiagnose' request conforms to. type ProjectsRegionsClustersDiagnoseResource = "v1" :> "projects" :> Capture "projectId" Text :> "regions" :> Capture "region" Text :> "clusters" :> CaptureMode "clusterName" "diagnose" Text :> QueryParam "$.xgafv" Text :> QueryParam "upload_protocol" Text :> QueryParam "pp" Bool :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "bearer_token" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] DiagnoseClusterRequest :> Post '[JSON] Operation -- | Gets cluster diagnostic information. After the operation completes, the -- Operation.response field contains \`DiagnoseClusterOutputLocation\`. -- -- /See:/ 'projectsRegionsClustersDiagnose' smart constructor. data ProjectsRegionsClustersDiagnose = ProjectsRegionsClustersDiagnose' { _pXgafv :: !(Maybe Text) , _pUploadProtocol :: !(Maybe Text) , _pPp :: !Bool , _pAccessToken :: !(Maybe Text) , _pUploadType :: !(Maybe Text) , _pPayload :: !DiagnoseClusterRequest , _pBearerToken :: !(Maybe Text) , _pClusterName :: !Text , _pRegion :: !Text , _pProjectId :: !Text , _pCallback :: !(Maybe Text) } deriving (Eq,Show,Data,Typeable,Generic) -- | Creates a value of 'ProjectsRegionsClustersDiagnose' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pXgafv' -- -- * 'pUploadProtocol' -- -- * 'pPp' -- -- * 'pAccessToken' -- -- * 'pUploadType' -- -- * 'pPayload' -- -- * 'pBearerToken' -- -- * 'pClusterName' -- -- * 'pRegion' -- -- * 'pProjectId' -- -- * 'pCallback' projectsRegionsClustersDiagnose :: DiagnoseClusterRequest -- ^ 'pPayload' -> Text -- ^ 'pClusterName' -> Text -- ^ 'pRegion' -> Text -- ^ 'pProjectId' -> ProjectsRegionsClustersDiagnose projectsRegionsClustersDiagnose pPPayload_ pPClusterName_ pPRegion_ pPProjectId_ = ProjectsRegionsClustersDiagnose' { _pXgafv = Nothing , _pUploadProtocol = Nothing , _pPp = True , _pAccessToken = Nothing , _pUploadType = Nothing , _pPayload = pPPayload_ , _pBearerToken = Nothing , _pClusterName = pPClusterName_ , _pRegion = pPRegion_ , _pProjectId = pPProjectId_ , _pCallback = Nothing } -- | V1 error format. pXgafv :: Lens' ProjectsRegionsClustersDiagnose (Maybe Text) pXgafv = lens _pXgafv (\ s a -> s{_pXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). pUploadProtocol :: Lens' ProjectsRegionsClustersDiagnose (Maybe Text) pUploadProtocol = lens _pUploadProtocol (\ s a -> s{_pUploadProtocol = a}) -- | Pretty-print response. pPp :: Lens' ProjectsRegionsClustersDiagnose Bool pPp = lens _pPp (\ s a -> s{_pPp = a}) -- | OAuth access token. pAccessToken :: Lens' ProjectsRegionsClustersDiagnose (Maybe Text) pAccessToken = lens _pAccessToken (\ s a -> s{_pAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). pUploadType :: Lens' ProjectsRegionsClustersDiagnose (Maybe Text) pUploadType = lens _pUploadType (\ s a -> s{_pUploadType = a}) -- | Multipart request metadata. pPayload :: Lens' ProjectsRegionsClustersDiagnose DiagnoseClusterRequest pPayload = lens _pPayload (\ s a -> s{_pPayload = a}) -- | OAuth bearer token. pBearerToken :: Lens' ProjectsRegionsClustersDiagnose (Maybe Text) pBearerToken = lens _pBearerToken (\ s a -> s{_pBearerToken = a}) -- | [Required] The cluster name. pClusterName :: Lens' ProjectsRegionsClustersDiagnose Text pClusterName = lens _pClusterName (\ s a -> s{_pClusterName = a}) -- | [Required] The Cloud Dataproc region in which to handle the request. pRegion :: Lens' ProjectsRegionsClustersDiagnose Text pRegion = lens _pRegion (\ s a -> s{_pRegion = a}) -- | [Required] The ID of the Google Cloud Platform project that the cluster -- belongs to. pProjectId :: Lens' ProjectsRegionsClustersDiagnose Text pProjectId = lens _pProjectId (\ s a -> s{_pProjectId = a}) -- | JSONP pCallback :: Lens' ProjectsRegionsClustersDiagnose (Maybe Text) pCallback = lens _pCallback (\ s a -> s{_pCallback = a}) instance GoogleRequest ProjectsRegionsClustersDiagnose where type Rs ProjectsRegionsClustersDiagnose = Operation type Scopes ProjectsRegionsClustersDiagnose = '["https://www.googleapis.com/auth/cloud-platform"] requestClient ProjectsRegionsClustersDiagnose'{..} = go _pProjectId _pRegion _pClusterName _pXgafv _pUploadProtocol (Just _pPp) _pAccessToken _pUploadType _pBearerToken _pCallback (Just AltJSON) _pPayload dataprocService where go = buildClient (Proxy :: Proxy ProjectsRegionsClustersDiagnoseResource) mempty
repo_name: rueshyna/gogol
path: gogol-dataproc/gen/Network/Google/Resource/Dataproc/Projects/Regions/Clusters/Diagnose.hs
license: mpl-2.0
size: 7,099
n_ast_errors: 0
ast_max_depth: 23
n_whitespaces: 1,780
n_ast_nodes: 1,097
n_ast_terminals: 636
n_ast_nonterminals: 461
loc: 153
cycloplexity: 1
-- This file is part of purebred
-- Copyright (C) 2019 Róman Joost
--
-- purebred is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program.  If not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE RankNTypes #-}

{- | Asynchronous validation for input from widgets -}
module Purebred.UI.Validation
  ( dispatchValidation
  ) where

import Control.Concurrent (forkIO, killThread, threadDelay)
import Control.Lens (set, view)

import Brick.BChan (writeBChan)

import Purebred.Types

-- | Schedules validation by sending a PurebredEvent.
--
-- We fork a thread to send an event to the application which carries
-- the lens to set the error as well as the error itself. To avoid
-- setting an error on every key stroke, the thread is killed if we
-- find a thread id already set and a new thread is scheduled.
--
dispatchValidation ::
     (a -> Maybe UserMessage) -- ^ validation function
  -> a
  -> AppState
  -> IO AppState
dispatchValidation fx a s =
  let go = maybe schedule (\t -> killThread t *> schedule) . view (asAsync . aValidation)
      chan = view bChan s
      schedule = forkIO (sleepMs 500 >> writeBChan chan (InputValidated (fx a)))
  in do
       tid <- go s
       pure $ set (asAsync . aValidation) (Just tid) s

sleepMs :: Int -> IO ()
sleepMs n = threadDelay (n * 1000)
repo_name: purebred-mua/purebred
path: src/Purebred/UI/Validation.hs
license: agpl-3.0
size: 1,847
n_ast_errors: 0
ast_max_depth: 16
n_whitespaces: 367
n_ast_nodes: 282
n_ast_terminals: 158
n_ast_nonterminals: 124
loc: 21
cycloplexity: 1
module Moonbase.Panel.Items.Expand
  ( expandL, expandR
  , (<--), (-->)
  ) where

import qualified Graphics.UI.Gtk as Gtk

import Moonbase.Panel
import Moonbase.Util

expandR :: Maybe String -> PanelItems
expandR mlabel = item $ do
  label <- liftIO $ Gtk.labelNew mlabel
  return $ PanelItem "spacer-right" (Gtk.toWidget label) Gtk.PackGrow

(-->) :: PanelItems -> PanelItems -> PanelItems
(PanelItems a) --> (PanelItems b) = PanelItems $ a ++ spacer ++ b
  where
    (PanelItems spacer) = expandR Nothing

expandL :: Maybe String -> PanelItems
expandL mlabel = item $ do
  label <- liftIO $ Gtk.labelNew mlabel
  return $ PanelItem "spacer-left" (Gtk.toWidget label) Gtk.PackGrow

(<--) :: PanelItems -> PanelItems -> PanelItems
(PanelItems a) <-- (PanelItems b) = PanelItems $ a ++ spacer ++ b
  where
    (PanelItems spacer) = expandL Nothing
repo_name: felixsch/moonbase-ng
path: src/Moonbase/Panel/Items/Expand.hs
license: lgpl-2.1
size: 850
n_ast_errors: 0
ast_max_depth: 12
n_whitespaces: 154
n_ast_nodes: 307
n_ast_terminals: 160
n_ast_nonterminals: 147
loc: 20
cycloplexity: 1
{-# language FlexibleInstances, DeriveDataTypeable #-} -- | provides a data type for pixmaps that saves the size and the internal offset (padding) -- of the image. -- Padding is the outer part of the image that should not be considered as part of -- the physical object depictured by the image, e.g. an outer glow. module Base.Pixmap ( Offset, Pixmap(..), pixmapOffset, loadPixmap, loadSymmetricPixmap, copyPixmap, mapColors, renderPixmapSimple, renderPixmap, RenderPixmap(RenderPixmap, RenderCommand, RenderOnTop), renderPosition, doRenderPixmaps, ) where import Data.Abelian import Data.Data import Data.Accessor import Data.Maybe import Data.IORef import Data.Map (Map, empty, insert, lookup) import Control.Monad.IO.Class import Control.DeepSeq import System.IO.Unsafe import Foreign.Ptr (nullPtr) import Foreign.ForeignPtr.Unsafe (unsafeForeignPtrToPtr) import Graphics.Qt import Physics.Chipmunk (Angle, rad2deg) import Utils type Offset a = Position a data Pixmap = Pixmap { pixmap :: ForeignPtr QPixmap, pixmapOffset_ :: Position Double, pixmapSize :: Size Double, pixmapImageSize :: Size Double } deriving (Show, Eq, Typeable, Data) instance NFData (ForeignPtr a) where rnf ptr = unsafeForeignPtrToPtr ptr == nullPtr `seq` () instance NFData Pixmap where rnf (Pixmap a b c d) = rnf a `seq` rnf b `seq` rnf c `seq` rnf d pixmapOffset :: Accessor Pixmap (Position Double) pixmapOffset = accessor pixmapOffset_ (\ a r -> r{pixmapOffset_ = a}) -- | Loads a pixmap with a symmetric offset (right == left && above == below). loadSymmetricPixmap :: MonadIO m => Position Double -- ^ Offset of the object in the image -> FilePath -> m Pixmap loadSymmetricPixmap padding path = io $ do pix <- newQPixmap path size <- fmap fromIntegral <$> sizeQPixmap pix return $ Pixmap pix (fmap negate padding) (size -~ fmap (* 2) (position2size padding)) size -- | Loads a pixmap. -- The offset and size define the offset and size of the object in the picture. loadPixmap :: MonadIO m => Offset Double -> Size Double -> FilePath -> m Pixmap loadPixmap offset size file = io $ do pixmap <- newQPixmap file imageSize <- fmap fromIntegral <$> sizeQPixmap pixmap return $ Pixmap pixmap (fmap negate offset) size imageSize -- | copy a pixmap copyPixmap :: Pixmap -> IO Pixmap copyPixmap (Pixmap pix size offset imageSize) = do pixCopy <- copyQPixmap pix return $ Pixmap pixCopy size offset imageSize -- | Iterates over the colors in the color table of the image. mapColors :: (QRgb -> QRgb) -> Pixmap -> IO Pixmap mapColors f (Pixmap pix size offset realSize) = do image <- toImageQPixmap pix True imageSize <- sizeQImage image colorN <- colorCountQImage image forM_ [0 .. pred colorN] $ \ i -> setColorQImage image i . f =<< colorQImage image i newPix <- fromImageQPixmap image destroyQImage image return $ Pixmap newPix size offset realSize -- * rendering -- | renders the pixmap renderPixmap :: MonadIO m => Ptr QPainter -- ^ painter to be rendered to -> Offset Double -- ^ global (camera) offset -> Position Double -- ^ position of pixmap -> Maybe Angle -- ^ rotation -> Pixmap -- ^ pixmap to be rendered -> m () renderPixmap ptr offset position mAngle pix = io $ do resetMatrix ptr translate ptr offset translate ptr position forM_ mAngle $ \ angle -> rotate ptr (rad2deg angle) translate ptr (pix ^. pixmapOffset) drawPixmap ptr zero (pixmap pix) -- | renders a Pixmap without altering the painter matrix renderPixmapSimple :: MonadIO m => Ptr QPainter -> Pixmap -> m () renderPixmapSimple ptr pix = io $ drawPixmap ptr (pix ^. 
pixmapOffset) (pixmap pix) -- | pixmap with rendering information (position and angle) data RenderPixmap = RenderPixmap { getRenderPixmap :: Pixmap, renderPosition_ :: Position Double, renderAngle :: Maybe Angle } | RenderCommand { renderPosition_ :: Position Double, renderCommand :: (Ptr QPainter -> IO ()) } | RenderOnTop { -- to be rendered on top. (After all other RenderPixmaps) renderInnerPixmap :: RenderPixmap } deriving (Show, Typeable) instance Show (Ptr QPainter -> IO ()) where show = const "<Ptr QPainter -> IO ()>" renderPosition :: Accessor RenderPixmap (Position Double) renderPosition = accessor getter setter where getter (RenderPixmap _ p _) = p getter (RenderCommand p _) = p getter (RenderOnTop inner) = getter inner setter p (RenderPixmap a _ c) = RenderPixmap a p c setter p (RenderCommand _ a) = RenderCommand p a setter p (RenderOnTop inner) = RenderOnTop (setter p inner) -- | renders a list of RenderPixmaps. Renders the top layers after that. doRenderPixmaps :: Ptr QPainter -> [RenderPixmap] -> IO () doRenderPixmaps ptr pixmaps = do onTop <- catMaybes <$> fmapM (doRenderPixmap ptr) pixmaps when (not $ null onTop) $ doRenderPixmaps ptr onTop -- | renders a pixmap and returns the layer to be rendered on top of that doRenderPixmap :: Ptr QPainter -> RenderPixmap -> IO (Maybe RenderPixmap) doRenderPixmap ptr (RenderPixmap pix position mAngle) = do resetMatrix ptr translate ptr position forM_ mAngle $ \ angle -> rotate ptr (rad2deg angle) translate ptr (pix ^. pixmapOffset) drawPixmap ptr zero (pixmap pix) when debugMode $ debugPixmaps ptr pix position mAngle return Nothing doRenderPixmap ptr (RenderCommand position command) = do resetMatrix ptr translate ptr position command ptr return Nothing doRenderPixmap ptr r@(RenderOnTop x) = do return $ Just x -- * debugging debugMode = False debugPixmaps :: Ptr QPainter -> Pixmap -> Position Double -> Maybe Angle -> IO () -- rotated Pixmaps aren't displayed debugPixmaps ptr pix position (Just _) = return () debugPixmaps ptr pix position Nothing = do key <- uniqueKey (pixmap pix) let upperLeft = position +~ (pix ^. pixmapOffset) resetMatrix ptr fillRect ptr upperLeft (pixmapImageSize pix) (alpha ^= 0.2 $ red) drawText ptr upperLeft True key -- | Returns a unique, preferrebly short key for the given argument. -- (Unique per run of the program.) uniqueKey :: ForeignPtr QPixmap -> IO String uniqueKey x = do (map, newKey : restKeys) <- readIORef _uniqueKeys case Data.Map.lookup x map of Just x -> return x Nothing -> do writeIORef _uniqueKeys (insert x (show newKey) map, restKeys) return (show newKey) {-# noinline _uniqueKeys #-} _uniqueKeys :: IORef (Map (ForeignPtr QPixmap) String, [Int]) _uniqueKeys = unsafePerformIO $ newIORef (empty, [0 ..])
repo_name: changlinli/nikki
path: src/Base/Pixmap.hs
license: lgpl-3.0
size: 6,798
n_ast_errors: 0
ast_max_depth: 16
n_whitespaces: 1,533
n_ast_nodes: 1,933
n_ast_terminals: 970
n_ast_nonterminals: 963
loc: 155
cycloplexity: 5
import Data.Char
import Data.Set
import System.IO
import Data.Tree

-- create a transition table based on the value of the divisor
getDFAtrans :: Int -> [Int] -> [Int] -> Set (Int, Int, Set Char)
getDFAtrans k [a] [c] =
  singleton ((a, (a * 10 + c) `mod` k, singleton (intToDigit c))) `union`
  singleton ((a + k, ((a * 10 + c) `mod` k) + k, singleton (intToDigit c)))
getDFAtrans k [a] (c:cs) =
  singleton ((a, (a * 10 + c) `mod` k, singleton (intToDigit c))) `union`
  singleton ((a + k, ((a * 10 + c) `mod` k) + k, singleton (intToDigit c))) `union`
  (getDFAtrans k [a] cs)
getDFAtrans k (a:as) (c:cs) =
  (getDFAtrans k [a] (c:cs)) `union` (getDFAtrans k as (c:cs))

-- create a list of tuples for the dropped digit transitions
getDropTrans :: Int -> Set ((Int, Int, [Int]))
getDropTrans a = fromList $ zip3 [0..(a-1)] [a..(2*a-1)] (rep [0..9] a)

-- replicate list a n number of times
rep :: [a] -> Int -> [[a]]
rep a 0 = []
rep a n = a : rep a (n-1)

-- convert a list of Ints to a list of Chars
intsToDigits :: [Int] -> Set Char
intsToDigits [a] = singleton (intToDigit a)
intsToDigits (a:as) = singleton (intToDigit a) `union` (intsToDigits as)

-- convert the transition table from the dropped-digit tuples
convertTransSet :: Set (Int, Int, [Int]) -> Set (Int, Int, Set Char)
convertTransSet a =
  if Data.Set.null a
    then empty
    else singleton (convertTrans (findMin a)) `union` convertTransSet (deleteMin a)
  where
    convertTrans (a, b, c) = (a, b, intsToDigits c)

-- equality operator for transitions
eq :: (Eq a, Eq b) => (a, b, c) -> (a, b, c) -> Bool
eq (a1, b1, _) (a2, b2, _) = if (a1 == a2) && (b1 == b2) then True else False

-- merge two transitions from and to the same states on differing inputs
merge :: Ord c => (a, b, Set c) -> (a, b, Set c) -> (a, b, Set c)
merge (a1, b1, c1) (a2, b2, c2) = (a1, b1, c1 `union` c2)

-- merge all the transitions from q_i to q_j
mergeTransSet :: Set (Int, Int, Set Char) -> Set (Int, Int, Set Char)
mergeTransSet a =
  if size a == 1
    then a
    else Data.Set.fold merge q fst p `union` mergeTransSet (snd p)
  where
    q = findMin a
    p = partition (eq q) a

-- create a list of final accepting states
getFinals :: Int -> Set Int
getFinals k = singleton 0 `union` singleton k

main :: IO()
main = putStrLn $ showTreeWith False True $
  (getDFAtrans 7 [0..6] [0..9]) `union` convertTransSet (getDropTrans 7)
repo_name: SonomaStatist/CS454_NFA
path: haskellDFA/fsaTest.hs
license: unlicense
size: 2,457
n_ast_errors: 0
ast_max_depth: 15
n_whitespaces: 570
n_ast_nodes: 1,153
n_ast_terminals: 637
n_ast_nonterminals: 516
loc: 46
cycloplexity: 2
-- | Popular transformation functions for the 'String' observation type.
module Control.Monad.Ox.String
  ( prefix
  , suffix
  , substr
  , shape
  , pack
  ) where

import qualified Data.Char as C
import qualified Data.List as L

-- | Prefix of the given size or 'Nothing' if the size exceeds the
-- length of the string.
prefix :: Int -> String -> Maybe String
prefix k xs
  | k > 0 && k <= n     = Just $ take k xs
  | k <= 0 && n + k > 0 = Just $ take (n + k) xs
  | otherwise           = Nothing
  where n = length xs

-- | Suffix of the given size or 'Nothing' if the size exceeds the
-- length of the string.
suffix :: Int -> String -> Maybe String
suffix k xs
  | k > 0 && k <= n     = Just . reverse . take k . reverse $ xs
  | k <= 0 && n + k > 0 = Just . reverse . take (n + k) . reverse $ xs
  | otherwise           = Nothing
  where n = length xs

-- | All substrings of the given size.
substr :: Int -> String -> [String]
substr k xs
  | k > 0 && k <= n = relevant $ map (take k) (L.tails xs)
  | otherwise       = []
  where
    n = length xs
    relevant = reverse . dropWhile ((<k).length) . reverse

-- | Shape of the string. All lower-case characters are mapped to 'l',
-- upper-case characters to 'u', digits to 'd' and rest of characters
-- to 'x'.
shape :: String -> String
shape = map translate
  where
    translate char
      | C.isLower char = 'l'
      | C.isUpper char = 'u'
      | C.isDigit char = 'd'
      | otherwise      = 'x'

-- | Pack the string, that is remove all adjacent repetitions,
-- for example /aabcccdde -> abcde/.
pack :: String -> String
pack = map head . L.group
repo_name: kawu/monad-ox
path: Control/Monad/Ox/String.hs
license: bsd-2-clause
size: 1,676
n_ast_errors: 0
ast_max_depth: 12
n_whitespaces: 503
n_ast_nodes: 509
n_ast_terminals: 261
n_ast_nonterminals: 248
loc: 38
cycloplexity: 1
module Unit
  ( tests
  ) where

import Test.Tasty

import qualified Unit.CommitQueue

tests :: TestTree
tests = testGroup "Unit tests"
  [ Unit.CommitQueue.tests
  ]
repo_name: sgraf812/feed-gipeda
path: tests/Unit.hs
license: bsd-3-clause
size: 178
n_ast_errors: 0
ast_max_depth: 7
n_whitespaces: 44
n_ast_nodes: 41
n_ast_terminals: 25
n_ast_nonterminals: 16
loc: 7
cycloplexity: 1
{-# LANGUAGE MultiParamTypeClasses, UndecidableInstances, FlexibleInstances #-}

module Database.MetaStorage.Types (MetaStorageT (..), mkMetaStorageDefault) where

import Prelude hiding (FilePath)
import Filesystem.Path.CurrentOS
import Crypto.Hash
import Filesystem
import Control.Monad.IO.Class
import Database.MetaStorage.Classes

newtype MetaStorageT = MetaStorage FilePath
  deriving (Show, Eq)

instance (MonadIO m, HashAlgorithm h) => MetaStorage MetaStorageT m h where
  ms_basedir (MetaStorage fp) = fp
  mkMetaStorage fp = liftIO $ do
    createDirectory True fp
    return (MetaStorage fp, [])

mkMetaStorageDefault :: (MonadIO m) => FilePath -> m (MetaStorageT, [Digest SHA1])
mkMetaStorageDefault = mkMetaStorage
repo_name: alios/metastorage
path: Database/MetaStorage/Types.hs
license: bsd-3-clause
size: 788
n_ast_errors: 0
ast_max_depth: 11
n_whitespaces: 158
n_ast_nodes: 191
n_ast_terminals: 108
n_ast_nonterminals: 83
loc: 17
cycloplexity: 1
{-# LANGUAGE FlexibleContexts #-}
module Charter(singleWeightChangeChart) where

import GHC.TypeLits
import Graphics.Rendering.Chart.Easy
import Graphics.Rendering.Chart.Gtk
import Minimizer
import Control.Lens
import Data.Proxy
import Brain
import Simulator
import Convenience
import SizedL
import Control.Monad.Random

rangedReplace :: _ => [a] -> Sized _ a -> Proxy _ -> [Sized _ a]
rangedReplace rep xs index = rep &> replaceAt index xs

neuralChart :: forall (m :: Nat) (t :: Nat) b (n1 :: Nat). (KnownNat m, Simulator b) =>
               Proxy (m + 1) -> Int -> NeuralSim b ((m + n1) + 1) t Double -> [(Double, Double)]
neuralChart weightIndex numIters (NeuralSim _ startWeights _ nSet) = undefined

singleWeightChangeChart :: _ => NeuralSim a _ d Double -> IO ()
singleWeightChangeChart neuralSim = toWindow 500 500 $ do
  layout_title .= "Amplitude Modulation"
  setColors [opaque blue, opaque red, opaque green, opaque purple]
  -- plot (line "10" ([neuralChart (Proxy @1) 10 neuralSim]))
  -- plot (line "20" ([neuralChart (Proxy @1) 20 neuralSim]))
  plot (line "30" ([neuralChart (Proxy @1) 100 neuralSim]))
  -- plot (line "50" ([neuralChart (Proxy @2) 100 neuralSim]))
repo_name: bmabsout/neural-swarm
path: src/Charter.hs
license: bsd-3-clause
size: 1,273
n_ast_errors: 0
ast_max_depth: 16
n_whitespaces: 290
n_ast_nodes: 366
n_ast_terminals: 198
n_ast_nonterminals: 168
loc: -1
cycloplexity: -1
{-# LANGUAGE FlexibleContexts      #-}
{-# LANGUAGE FlexibleInstances     #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables   #-}
{-# LANGUAGE TypeFamilies          #-}
{-# LANGUAGE TypeOperators         #-}

module Servant.ContentType.Processable where

import Data.ByteString.Lazy (ByteString)
import Data.Proxy
import Data.Typeable (Typeable)
import Network.HTTP.Media (MediaType)
import Servant.API (Accept (..), MimeRender (..))

data to :<- from
  deriving Typeable

class Processable method from where
  type Processed method :: *
  process :: Proxy method -> from -> (Processed method)

instance Accept to => Accept (to :<- from) where
  contentType _ = contentType (Proxy :: Proxy to)

instance {-# OVERLAPPABLE #-}
  ( MimeRender to (Processed from)
  , Processable from a
  ) => MimeRender (to :<- from) a where
  mimeRender _ = mimeRender' Proxy . process' Proxy
    where
      mimeRender' :: Proxy to -> Processed from -> ByteString
      mimeRender' = mimeRender
      process' :: Proxy from -> a -> Processed from
      process' = process
repo_name: notcome/liu-ms-adult
path: src/Servant/ContentType/Processable.hs
license: bsd-3-clause
size: 1,111
n_ast_errors: 0
ast_max_depth: 10
n_whitespaces: 248
n_ast_nodes: 273
n_ast_terminals: 152
n_ast_nonterminals: 121
loc: -1
cycloplexity: -1
module Options.Language where import Types languageOptions :: [Flag] languageOptions = [ flag { flagName = "-fconstraint-solver-iterations=⟨n⟩" , flagDescription = "*default: 4.* Set the iteration limit for the type-constraint "++ "solver. Typically one iteration suffices; so please "++ "yell if you find you need to set it higher than the default. "++ "Zero means infinity." , flagType = DynamicFlag } , flag { flagName = "-freduction-depth=⟨n⟩" , flagDescription = "*default: 200.* Set the :ref:`limit for type simplification "++ "<undecidable-instances>`. Zero means infinity." , flagType = DynamicFlag } , flag { flagName = "-fcontext-stack=⟨n⟩" , flagDescription = "Deprecated. Use ``-freduction-depth=⟨n⟩`` instead." , flagType = DynamicFlag } , flag { flagName = "-fglasgow-exts" , flagDescription = "Deprecated. Enable most language extensions; "++ "see :ref:`options-language` for exactly which ones." , flagType = DynamicFlag , flagReverse = "-fno-glasgow-exts" } , flag { flagName = "-firrefutable-tuples" , flagDescription = "Make tuple pattern matching irrefutable" , flagType = DynamicFlag , flagReverse = "-fno-irrefutable-tuples" } , flag { flagName = "-fpackage-trust" , flagDescription = "Enable :ref:`Safe Haskell <safe-haskell>` trusted package "++ "requirement for trustworthy modules." , flagType = DynamicFlag } , flag { flagName = "-ftype-function-depth=⟨n⟩" , flagDescription = "Deprecated. Use ``-freduction-depth=⟨n⟩`` instead." , flagType = DynamicFlag } , flag { flagName = "-XAllowAmbiguousTypes" , flagDescription = "Allow the user to write :ref:`ambiguous types <ambiguity>`, and "++ "the type inference engine to infer them." , flagType = DynamicFlag , flagReverse = "-XNoAllowAmbiguousTypes" , flagSince = "7.8.1" } , flag { flagName = "-XArrows" , flagDescription = "Enable :ref:`arrow notation <arrow-notation>` extension" , flagType = DynamicFlag , flagReverse = "-XNoArrows" , flagSince = "6.8.1" } , flag { flagName = "-XApplicativeDo" , flagDescription = "Enable :ref:`Applicative do-notation desugaring <applicative-do>`" , flagType = DynamicFlag , flagReverse = "-XNoApplicativeDo" , flagSince = "8.0.1" } , flag { flagName = "-XAutoDeriveTypeable" , flagDescription = "As of GHC 7.10, this option is not needed, and should not be "++ "used. Previously this would automatically :ref:`derive Typeable "++ "instances for every datatype and type class declaration "++ "<deriving-typeable>`. Implies ``-XDeriveDataTypeable``." , flagType = DynamicFlag , flagReverse = "-XNoAutoDeriveTypeable" , flagSince = "7.8.1" } , flag { flagName = "-XBangPatterns" , flagDescription = "Enable :ref:`bang patterns <bang-patterns>`." , flagType = DynamicFlag , flagReverse = "-XNoBangPatterns" , flagSince = "6.8.1" } , flag { flagName = "-XBinaryLiterals" , flagDescription = "Enable support for :ref:`binary literals <binary-literals>`." , flagType = DynamicFlag , flagReverse = "-XNoBinaryLiterals" , flagSince = "7.10.1" } , flag { flagName = "-XCApiFFI" , flagDescription = "Enable :ref:`the CAPI calling convention <ffi-capi>`." , flagType = DynamicFlag , flagReverse = "-XNoCAPIFFI" , flagSince = "7.10.1" } , flag { flagName = "-XConstrainedClassMethods" , flagDescription = "Enable :ref:`constrained class methods <class-method-types>`." , flagType = DynamicFlag , flagReverse = "-XNoConstrainedClassMethods" , flagSince = "6.8.1" } , flag { flagName = "-XConstraintKinds" , flagDescription = "Enable a :ref:`kind of constraints <constraint-kind>`." 
, flagType = DynamicFlag , flagReverse = "-XNoConstraintKinds" , flagSince = "7.4.1" } , flag { flagName = "-XCPP" , flagDescription = "Enable the :ref:`C preprocessor <c-pre-processor>`." , flagType = DynamicFlag , flagReverse = "-XNoCPP" , flagSince = "6.8.1" } , flag { flagName = "-XDataKinds" , flagDescription = "Enable :ref:`datatype promotion <promotion>`." , flagType = DynamicFlag , flagReverse = "-XNoDataKinds" , flagSince = "7.4.1" } , flag { flagName = "-XDefaultSignatures" , flagDescription = "Enable :ref:`default signatures <class-default-signatures>`." , flagType = DynamicFlag , flagReverse = "-XNoDefaultSignatures" , flagSince = "7.2.1" } , flag { flagName = "-XDeriveAnyClass" , flagDescription = "Enable :ref:`deriving for any class <derive-any-class>`." , flagType = DynamicFlag , flagReverse = "-XNoDeriveAnyClass" , flagSince = "7.10.1" } , flag { flagName = "-XDeriveDataTypeable" , flagDescription = "Enable ``deriving`` for the :ref:`Data class "++ "<deriving-typeable>`. Implied by ``-XAutoDeriveTypeable``." , flagType = DynamicFlag , flagReverse = "-XNoDeriveDataTypeable" , flagSince = "6.8.1" } , flag { flagName = "-XDeriveFunctor" , flagDescription = "Enable :ref:`deriving for the Functor class <deriving-extra>`. "++ "Implied by ``-XDeriveTraversable``." , flagType = DynamicFlag , flagReverse = "-XNoDeriveFunctor" , flagSince = "7.10.1" } , flag { flagName = "-XDeriveFoldable" , flagDescription = "Enable :ref:`deriving for the Foldable class <deriving-extra>`. "++ "Implied by ``-XDeriveTraversable``." , flagType = DynamicFlag , flagReverse = "-XNoDeriveFoldable" , flagSince = "7.10.1" } , flag { flagName = "-XDeriveGeneric" , flagDescription = "Enable :ref:`deriving for the Generic class <deriving-typeable>`." , flagType = DynamicFlag , flagReverse = "-XNoDeriveGeneric" , flagSince = "7.2.1" } , flag { flagName = "-XDeriveGeneric" , flagDescription = "Enable :ref:`deriving for the Generic class <deriving-typeable>`." , flagType = DynamicFlag , flagReverse = "-XNoDeriveGeneric" , flagSince = "7.2.1" } , flag { flagName = "-XDeriveLift" , flagDescription = "Enable :ref:`deriving for the Lift class <deriving-lift>`" , flagType = DynamicFlag , flagReverse = "-XNoDeriveLift" , flagSince = "7.2.1" } , flag { flagName = "-XDeriveTraversable" , flagDescription = "Enable :ref:`deriving for the Traversable class <deriving-extra>`. "++ "Implies ``-XDeriveFunctor`` and ``-XDeriveFoldable``." , flagType = DynamicFlag , flagReverse = "-XNoDeriveTraversable" , flagSince = "7.10.1" } , flag { flagName = "-XDisambiguateRecordFields" , flagDescription = "Enable :ref:`record field disambiguation <disambiguate-fields>`. "++ "Implied by ``-XRecordWildCards``." , flagType = DynamicFlag , flagReverse = "-XNoDisambiguateRecordFields" , flagSince = "6.8.1" } , flag { flagName = "-XEmptyCase" , flagDescription = "Allow :ref:`empty case alternatives <empty-case>`." , flagType = DynamicFlag , flagReverse = "-XNoEmptyCase" , flagSince = "7.8.1" } , flag { flagName = "-XEmptyDataDecls" , flagDescription = "Enable empty data declarations." , flagType = DynamicFlag , flagReverse = "-XNoEmptyDataDecls" , flagSince = "6.8.1" } , flag { flagName = "-XExistentialQuantification" , flagDescription = "Enable :ref:`existential quantification <existential-quantification>`." 
, flagType = DynamicFlag , flagReverse = "-XNoExistentialQuantification" , flagSince = "6.8.1" } , flag { flagName = "-XExplicitForAll" , flagDescription = "Enable :ref:`explicit universal quantification <explicit-foralls>`."++ " Implied by ``-XScopedTypeVariables``, ``-XLiberalTypeSynonyms``,"++ " ``-XRankNTypes`` and ``-XExistentialQuantification``." , flagType = DynamicFlag , flagReverse = "-XNoExplicitForAll" , flagSince = "6.12.1" } , flag { flagName = "-XExplicitNamespaces" , flagDescription = "Enable using the keyword ``type`` to specify the namespace of "++ "entries in imports and exports (:ref:`explicit-namespaces`). "++ "Implied by ``-XTypeOperators`` and ``-XTypeFamilies``." , flagType = DynamicFlag , flagReverse = "-XNoExplicitNamespaces" , flagSince = "7.6.1" } , flag { flagName = "-XExtendedDefaultRules" , flagDescription = "Use GHCi's :ref:`extended default rules <extended-default-rules>` "++ "in a normal module." , flagType = DynamicFlag , flagReverse = "-XNoExtendedDefaultRules" , flagSince = "6.8.1" } , flag { flagName = "-XFlexibleContexts" , flagDescription = "Enable :ref:`flexible contexts <flexible-contexts>`. Implied by "++ "``-XImplicitParams``." , flagType = DynamicFlag , flagReverse = "-XNoFlexibleContexts" , flagSince = "6.8.1" } , flag { flagName = "-XFlexibleInstances" , flagDescription = "Enable :ref:`flexible instances <instance-rules>`. "++ "Implies ``-XTypeSynonymInstances``. "++ "Implied by ``-XImplicitParams``." , flagType = DynamicFlag , flagReverse = "-XNoFlexibleInstances" , flagSince = "6.8.1" } , flag { flagName = "-XForeignFunctionInterface" , flagDescription = "Enable :ref:`foreign function interface <ffi>`." , flagType = DynamicFlag , flagReverse = "-XNoForeignFunctionInterface" , flagSince = "6.8.1" } , flag { flagName = "-XFunctionalDependencies" , flagDescription = "Enable :ref:`functional dependencies <functional-dependencies>`. "++ "Implies ``-XMultiParamTypeClasses``." , flagType = DynamicFlag , flagReverse = "-XNoFunctionalDependencies" , flagSince = "6.8.1" } , flag { flagName = "-XGADTs" , flagDescription = "Enable :ref:`generalised algebraic data types <gadt>`. "++ "Implies ``-XGADTSyntax`` and ``-XMonoLocalBinds``." , flagType = DynamicFlag , flagReverse = "-XNoGADTs" , flagSince = "6.8.1" } , flag { flagName = "-XGADTSyntax" , flagDescription = "Enable :ref:`generalised algebraic data type syntax <gadt-style>`." , flagType = DynamicFlag , flagReverse = "-XNoGADTSyntax" , flagSince = "7.2.1" } , flag { flagName = "-XGeneralizedNewtypeDeriving" , flagDescription = "Enable :ref:`newtype deriving <newtype-deriving>`." , flagType = DynamicFlag , flagReverse = "-XNoGeneralizedNewtypeDeriving" , flagSince = "6.8.1" } , flag { flagName = "-XGenerics" , flagDescription = "Deprecated, does nothing. No longer enables "++ ":ref:`generic classes <generic-classes>`. See also GHC's support "++ "for :ref:`generic programming <generic-programming>`." , flagType = DynamicFlag , flagReverse = "-XNoGenerics" , flagSince = "6.8.1" } , flag { flagName = "-XImplicitParams" , flagDescription = "Enable :ref:`Implicit Parameters <implicit-parameters>`. "++ "Implies ``-XFlexibleContexts`` and ``-XFlexibleInstances``." , flagType = DynamicFlag , flagReverse = "-XNoImplicitParams" , flagSince = "6.8.1" } , flag { flagName = "-XNoImplicitPrelude" , flagDescription = "Don't implicitly ``import Prelude``. "++ "Implied by ``-XRebindableSyntax``." 
, flagType = DynamicFlag , flagReverse = "-XImplicitPrelude" , flagSince = "6.8.1" } , flag { flagName = "-XImpredicativeTypes" , flagDescription = "Enable :ref:`impredicative types <impredicative-polymorphism>`. "++ "Implies ``-XRankNTypes``." , flagType = DynamicFlag , flagReverse = "-XNoImpredicativeTypes" , flagSince = "6.10.1" } , flag { flagName = "-XIncoherentInstances" , flagDescription = "Enable :ref:`incoherent instances <instance-overlap>`. "++ "Implies ``-XOverlappingInstances``." , flagType = DynamicFlag , flagReverse = "-XNoIncoherentInstances" , flagSince = "6.8.1" } , flag { flagName = "-XTypeFamilyDependencies" , flagDescription = "Enable :ref:`injective type families <injective-ty-fams>`. "++ "Implies ``-XTypeFamilies``." , flagType = DynamicFlag , flagReverse = "-XNoTypeFamilyDependencies" , flagSince = "8.0.1" } , flag { flagName = "-XInstanceSigs" , flagDescription = "Enable :ref:`instance signatures <instance-sigs>`." , flagType = DynamicFlag , flagReverse = "-XNoInstanceSigs" , flagSince = "7.10.1" } , flag { flagName = "-XInterruptibleFFI" , flagDescription = "Enable interruptible FFI." , flagType = DynamicFlag , flagReverse = "-XNoInterruptibleFFI" , flagSince = "7.2.1" } , flag { flagName = "-XKindSignatures" , flagDescription = "Enable :ref:`kind signatures <kinding>`. "++ "Implied by ``-XTypeFamilies`` and ``-XPolyKinds``." , flagType = DynamicFlag , flagReverse = "-XNoKindSignatures" , flagSince = "6.8.1" } , flag { flagName = "-XLambdaCase" , flagDescription = "Enable :ref:`lambda-case expressions <lambda-case>`." , flagType = DynamicFlag , flagReverse = "-XNoLambdaCase" , flagSince = "7.6.1" } , flag { flagName = "-XLiberalTypeSynonyms" , flagDescription = "Enable :ref:`liberalised type synonyms <type-synonyms>`." , flagType = DynamicFlag , flagReverse = "-XNoLiberalTypeSynonyms" , flagSince = "6.8.1" } , flag { flagName = "-XMagicHash" , flagDescription = "Allow ``#`` as a :ref:`postfix modifier on identifiers <magic-hash>`." , flagType = DynamicFlag , flagReverse = "-XNoMagicHash" , flagSince = "6.8.1" } , flag { flagName = "-XMonadComprehensions" , flagDescription = "Enable :ref:`monad comprehensions <monad-comprehensions>`." , flagType = DynamicFlag , flagReverse = "-XNoMonadComprehensions" , flagSince = "7.2.1" } , flag { flagName = "-XMonoLocalBinds" , flagDescription = "Enable :ref:`do not generalise local bindings <mono-local-binds>`. "++ "Implied by ``-XTypeFamilies`` and ``-XGADTs``." , flagType = DynamicFlag , flagReverse = "-XNoMonoLocalBinds" , flagSince = "6.12.1" } , flag { flagName = "-XNoMonomorphismRestriction" , flagDescription = "Disable the :ref:`monomorphism restriction <monomorphism>`." , flagType = DynamicFlag , flagReverse = "-XMonomorphismRestriction" , flagSince = "6.8.1" } , flag { flagName = "-XMultiParamTypeClasses" , flagDescription = "Enable :ref:`multi parameter type classes "++ "<multi-param-type-classes>`. Implied by "++ "``-XFunctionalDependencies``." , flagType = DynamicFlag , flagReverse = "-XNoMultiParamTypeClasses" , flagSince = "6.8.1" } , flag { flagName = "-XMultiWayIf" , flagDescription = "Enable :ref:`multi-way if-expressions <multi-way-if>`." , flagType = DynamicFlag , flagReverse = "-XNoMultiWayIf" , flagSince = "7.6.1" } , flag { flagName = "-XNamedFieldPuns" , flagDescription = "Enable :ref:`record puns <record-puns>`." 
, flagType = DynamicFlag , flagReverse = "-XNoNamedFieldPuns" , flagSince = "6.10.1" } , flag { flagName = "-XNamedWildCards" , flagDescription = "Enable :ref:`named wildcards <named-wildcards>`." , flagType = DynamicFlag , flagReverse = "-XNoNamedWildCards" , flagSince = "7.10.1" } , flag { flagName = "-XNegativeLiterals" , flagDescription = "Enable support for :ref:`negative literals <negative-literals>`." , flagType = DynamicFlag , flagReverse = "-XNoNegativeLiterals" , flagSince = "7.8.1" } , flag { flagName = "-XNoNPlusKPatterns" , flagDescription = "Disable support for ``n+k`` patterns." , flagType = DynamicFlag , flagReverse = "-XNPlusKPatterns" , flagSince = "6.12.1" } , flag { flagName = "-XNullaryTypeClasses" , flagDescription = "Deprecated, does nothing. :ref:`nullary (no parameter) type "++ "classes <nullary-type-classes>` are now enabled using "++ "``-XMultiParamTypeClasses``." , flagType = DynamicFlag , flagReverse = "-XNoNullaryTypeClasses" , flagSince = "7.8.1" } , flag { flagName = "-XNumDecimals" , flagDescription = "Enable support for 'fractional' integer literals." , flagType = DynamicFlag , flagReverse = "-XNoNumDecimals" , flagSince = "7.8.1" } , flag { flagName = "-XOverlappingInstances" , flagDescription = "Enable :ref:`overlapping instances <instance-overlap>`." , flagType = DynamicFlag , flagReverse = "-XNoOverlappingInstances" , flagSince = "6.8.1" } , flag { flagName = "-XOverloadedLists" , flagDescription = "Enable :ref:`overloaded lists <overloaded-lists>`." , flagType = DynamicFlag , flagReverse = "-XNoOverloadedLists" , flagSince = "7.8.1" } , flag { flagName = "-XOverloadedStrings" , flagDescription = "Enable :ref:`overloaded string literals <overloaded-strings>`." , flagType = DynamicFlag , flagReverse = "-XNoOverloadedStrings" , flagSince = "6.8.1" } , flag { flagName = "-XPackageImports" , flagDescription = "Enable :ref:`package-qualified imports <package-imports>`." , flagType = DynamicFlag , flagReverse = "-XNoPackageImports" , flagSince = "6.10.1" } , flag { flagName = "-XParallelArrays" , flagDescription = "Enable parallel arrays. Implies ``-XParallelListComp``." , flagType = DynamicFlag , flagReverse = "-XNoParallelArrays" , flagSince = "7.4.1" } , flag { flagName = "-XParallelListComp" , flagDescription = "Enable :ref:`parallel list comprehensions "++ "<parallel-list-comprehensions>`. "++ "Implied by ``-XParallelArrays``." , flagType = DynamicFlag , flagReverse = "-XNoParallelListComp" , flagSince = "6.8.1" } , flag { flagName = "-XPartialTypeSignatures" , flagDescription = "Enable :ref:`partial type signatures <partial-type-signatures>`." , flagType = DynamicFlag , flagReverse = "-XNoPartialTypeSignatures" , flagSince = "7.10.1" } , flag { flagName = "-XPatternGuards" , flagDescription = "Enable :ref:`pattern guards <pattern-guards>`." , flagType = DynamicFlag , flagReverse = "-XNoPatternGuards" , flagSince = "6.8.1" } , flag { flagName = "-XPatternSynonyms" , flagDescription = "Enable :ref:`pattern synonyms <pattern-synonyms>`." , flagType = DynamicFlag , flagReverse = "-XNoPatternSynonyms" , flagSince = "7.10.1" } , flag { flagName = "-XPolyKinds" , flagDescription = "Enable :ref:`kind polymorphism <kind-polymorphism>`. "++ "Implies ``-XKindSignatures``." , flagType = DynamicFlag , flagReverse = "-XNoPolyKinds" , flagSince = "7.4.1" } , flag { flagName = "-XPolymorphicComponents" , flagDescription = "Enable :ref:`polymorphic components for data constructors "++ "<universal-quantification>`. Synonym for ``-XRankNTypes``." 
, flagType = DynamicFlag , flagReverse = "-XNoPolymorphicComponents" , flagSince = "6.8.1" } , flag { flagName = "-XPostfixOperators" , flagDescription = "Enable :ref:`postfix operators <postfix-operators>`." , flagType = DynamicFlag , flagReverse = "-XNoPostfixOperators" , flagSince = "7.10.1" } , flag { flagName = "-XQuasiQuotes" , flagDescription = "Enable :ref:`quasiquotation <th-quasiquotation>`." , flagType = DynamicFlag , flagReverse = "-XNoQuasiQuotes" , flagSince = "6.10.1" } , flag { flagName = "-XRank2Types" , flagDescription = "Enable :ref:`rank-2 types <universal-quantification>`. "++ "Synonym for ``-XRankNTypes``." , flagType = DynamicFlag , flagReverse = "-XNoRank2Types" , flagSince = "6.8.1" } , flag { flagName = "-XRankNTypes" , flagDescription = "Enable :ref:`rank-N types <universal-quantification>`. "++ "Implied by ``-XImpredicativeTypes``." , flagType = DynamicFlag , flagReverse = "-XNoRankNTypes" , flagSince = "6.8.1" } , flag { flagName = "-XRebindableSyntax" , flagDescription = "Employ :ref:`rebindable syntax <rebindable-syntax>`. "++ "Implies ``-XNoImplicitPrelude``." , flagType = DynamicFlag , flagReverse = "-XNoRebindableSyntax" , flagSince = "7.0.1" } , flag { flagName = "-XRecordWildCards" , flagDescription = "Enable :ref:`record wildcards <record-wildcards>`. "++ "Implies ``-XDisambiguateRecordFields``." , flagType = DynamicFlag , flagReverse = "-XNoRecordWildCards" , flagSince = "6.8.1" } , flag { flagName = "-XRecursiveDo" , flagDescription = "Enable :ref:`recursive do (mdo) notation <recursive-do-notation>`." , flagType = DynamicFlag , flagReverse = "-XNoRecursiveDo" , flagSince = "6.8.1" } , flag { flagName = "-XRelaxedPolyRec" , flagDescription = "*(deprecated)* Relaxed checking for :ref:`mutually-recursive "++ "polymorphic functions <typing-binds>`." , flagType = DynamicFlag , flagReverse = "-XNoRelaxedPolyRec" , flagSince = "6.8.1" } , flag { flagName = "-XRoleAnnotations" , flagDescription = "Enable :ref:`role annotations <role-annotations>`." , flagType = DynamicFlag , flagReverse = "-XNoRoleAnnotations" , flagSince = "7.10.1" } , flag { flagName = "-XSafe" , flagDescription = "Enable the :ref:`Safe Haskell <safe-haskell>` Safe mode." , flagType = DynamicFlag , flagSince = "7.2.1" } , flag { flagName = "-XScopedTypeVariables" , flagDescription = "Enable :ref:`lexically-scoped type variables "++ "<scoped-type-variables>`." , flagType = DynamicFlag , flagReverse = "-XNoScopedTypeVariables" , flagSince = "6.8.1" } , flag { flagName = "-XStandaloneDeriving" , flagDescription = "Enable :ref:`standalone deriving <stand-alone-deriving>`." , flagType = DynamicFlag , flagReverse = "-XNoStandaloneDeriving" , flagSince = "6.8.1" } , flag { flagName = "-XStrictData" , flagDescription = "Enable :ref:`default strict datatype fields <strict-data>`." , flagType = DynamicFlag , flagReverse = "-XNoStrictData" } , flag { flagName = "-XTemplateHaskell" , flagDescription = "Enable :ref:`Template Haskell <template-haskell>`." , flagType = DynamicFlag , flagReverse = "-XNoTemplateHaskell" , flagSince = "6.8.1" } , flag { flagName = "-XTemplateHaskellQuotes" , flagDescription = "Enable quotation subset of "++ ":ref:`Template Haskell <template-haskell>`." 
, flagType = DynamicFlag , flagReverse = "-XNoTemplateHaskellQuotes" , flagSince = "8.0.1" } , flag { flagName = "-XNoTraditionalRecordSyntax" , flagDescription = "Disable support for traditional record syntax "++ "(as supported by Haskell 98) ``C {f = x}``" , flagType = DynamicFlag , flagReverse = "-XTraditionalRecordSyntax" , flagSince = "7.4.1" } , flag { flagName = "-XTransformListComp" , flagDescription = "Enable :ref:`generalised list comprehensions "++ "<generalised-list-comprehensions>`." , flagType = DynamicFlag , flagReverse = "-XNoTransformListComp" , flagSince = "6.10.1" } , flag { flagName = "-XTrustworthy" , flagDescription = "Enable the :ref:`Safe Haskell <safe-haskell>` Trustworthy mode." , flagType = DynamicFlag , flagSince = "7.2.1" } , flag { flagName = "-XTupleSections" , flagDescription = "Enable :ref:`tuple sections <tuple-sections>`." , flagType = DynamicFlag , flagReverse = "-XNoTupleSections" , flagSince = "7.10.1" } , flag { flagName = "-XTypeFamilies" , flagDescription = "Enable :ref:`type families <type-families>`. "++ "Implies ``-XExplicitNamespaces``, ``-XKindSignatures``, "++ "and ``-XMonoLocalBinds``." , flagType = DynamicFlag , flagReverse = "-XNoTypeFamilies" , flagSince = "6.8.1" } , flag { flagName = "-XTypeOperators" , flagDescription = "Enable :ref:`type operators <type-operators>`. "++ "Implies ``-XExplicitNamespaces``." , flagType = DynamicFlag , flagReverse = "-XNoTypeOperators" , flagSince = "6.8.1" } , flag { flagName = "-XTypeSynonymInstances" , flagDescription = "Enable :ref:`type synonyms in instance heads "++ "<flexible-instance-head>`. Implied by ``-XFlexibleInstances``." , flagType = DynamicFlag , flagReverse = "-XNoTypeSynonymInstances" , flagSince = "6.8.1" } , flag { flagName = "-XUnboxedTuples" , flagDescription = "Enable :ref:`unboxed tuples <unboxed-tuples>`." , flagType = DynamicFlag , flagReverse = "-XNoUnboxedTuples" , flagSince = "6.8.1" } , flag { flagName = "-XUndecidableInstances" , flagDescription = "Enable :ref:`undecidable instances <undecidable-instances>`." , flagType = DynamicFlag , flagReverse = "-XNoUndecidableInstances" , flagSince = "6.8.1" } , flag { flagName = "-XUnicodeSyntax" , flagDescription = "Enable :ref:`unicode syntax <unicode-syntax>`." , flagType = DynamicFlag , flagReverse = "-XNoUnicodeSyntax" , flagSince = "6.8.1" } , flag { flagName = "-XUnliftedFFITypes" , flagDescription = "Enable unlifted FFI types." , flagType = DynamicFlag , flagReverse = "-XNoUnliftedFFITypes" , flagSince = "6.8.1" } , flag { flagName = "-XUnsafe" , flagDescription = "Enable :ref:`Safe Haskell <safe-haskell>` Unsafe mode." , flagType = DynamicFlag , flagSince = "7.4.1" } , flag { flagName = "-XViewPatterns" , flagDescription = "Enable :ref:`view patterns <view-patterns>`." , flagType = DynamicFlag , flagReverse = "-XNoViewPatterns" , flagSince = "6.10.1" } ]
gridaphobe/ghc
utils/mkUserGuidePart/Options/Language.hs
bsd-3-clause
29,509
0
10
9,276
3,751
2,429
1,322
644
1
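The entries above all follow the same record-update pattern. As a hedged illustration only, a hypothetical additional entry would look like the following; the extension name is invented, and the flag default record and the Flag type are defined elsewhere in the mkUserGuidePart sources, which are not shown here.

-- hypothetical entry, shown only to illustrate the record pattern used above
someExtensionEntry :: Flag
someExtensionEntry = flag
  { flagName = "-XSomeExtension"
  , flagDescription = "Enable :ref:`some extension <some-extension>`."
  , flagType = DynamicFlag
  , flagReverse = "-XNoSomeExtension"
  , flagSince = "8.0.1"
  }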
{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE DataKinds #-} module Main where import SubHask import SubHask.Algebra.Array import SubHask.Algebra.Group import SubHask.Algebra.Container import SubHask.Algebra.Logic import SubHask.Algebra.Metric import SubHask.Algebra.Parallel import SubHask.Algebra.Vector import SubHask.Compatibility.ByteString import SubHask.Compatibility.Containers import SubHask.TemplateHaskell.Deriving import SubHask.TemplateHaskell.Test import Test.Framework (defaultMain, testGroup) import Test.Framework.Providers.QuickCheck2 (testProperty) import Test.Framework.Runners.Console import Test.Framework.Runners.Options -------------------------------------------------------------------------------- main = defaultMainWithOpts [ testGroup "simple" [ testGroup "numeric" [ $( mkSpecializedClassTests [t| Int |] [''Enum,''Ring, ''Bounded, ''Metric] ) , $( mkSpecializedClassTests [t| Integer |] [''Enum,''Ring, ''Lattice, ''Metric] ) , $( mkSpecializedClassTests [t| Rational |] [''Ord,''Ring, ''Lattice, ''Metric] ) , $( mkSpecializedClassTests [t| Float |] [''Bounded] ) , $( mkSpecializedClassTests [t| Double |] [''Bounded] ) , testGroup "transformers" [ $( mkSpecializedClassTests [t| NonNegative Int |] [''Enum,''Rig, ''Bounded, ''Metric] ) , $( mkSpecializedClassTests [t| Z 57 |] [''Ring] ) , $( mkSpecializedClassTests [t| NonNegative (Z 57) |] [''Rig] ) ] ] , testGroup "vector" [ $( mkSpecializedClassTests [t| SVector 0 Int |] [ ''Module ] ) , $( mkSpecializedClassTests [t| SVector 1 Int |] [ ''Module ] ) , $( mkSpecializedClassTests [t| SVector 2 Int |] [ ''Module ] ) , $( mkSpecializedClassTests [t| SVector 19 Int |] [ ''Module ] ) , $( mkSpecializedClassTests [t| SVector 1001 Int |] [ ''Module ] ) , $( mkSpecializedClassTests [t| SVector "dyn" Int |] [ ''Module ] ) , $( mkSpecializedClassTests [t| UVector "dyn" Int |] [ ''Module ] ) ] , testGroup "non-numeric" [ $( mkSpecializedClassTests [t| Bool |] [''Enum,''Boolean] ) , $( mkSpecializedClassTests [t| Char |] [''Enum,''Bounded] ) , $( mkSpecializedClassTests [t| Goedel |] [''Heyting] ) , $( mkSpecializedClassTests [t| H3 |] [''Heyting] ) , $( mkSpecializedClassTests [t| K3 |] [''Bounded] ) , testGroup "transformers" [ $( mkSpecializedClassTests [t| Boolean2Ring Bool |] [''Ring] ) ] ] ] , testGroup "objects" [ $( mkSpecializedClassTests [t| Labeled' Int Int |] [ ''Action,''Ord,''Metric ] ) ] , testGroup "arrays" [ $( mkSpecializedClassTests [t| BArray Char |] [ ''Foldable,''MinBound,''IxContainer ] ) , $( mkSpecializedClassTests [t| UArray Char |] [ ''Foldable,''MinBound,''IxContainer ] ) , $( mkSpecializedClassTests [t| UArray (UVector "dyn" Float) |] [ ''Foldable,''IxContainer ] ) , $( mkSpecializedClassTests [t| UArray (Labeled' (UVector "dyn" Float) Int) |] [ ''Foldable,''IxContainer ] ) ] , testGroup "containers" [ $( mkSpecializedClassTests [t| [] Char |] [ ''Foldable,''MinBound,''Partitionable ] ) , $( mkSpecializedClassTests [t| Set Char |] [ ''Foldable,''MinBound ] ) , $( mkSpecializedClassTests [t| Seq Char |] [ ''Foldable,''MinBound,''Partitionable ] ) , $( mkSpecializedClassTests [t| Map Int Int |] [ ''MinBound, ''IxConstructible ] ) , $( mkSpecializedClassTests [t| Map' Int Int |] [ ''MinBound, ''IxContainer ] ) , $( mkSpecializedClassTests [t| IntMap Int |] [ ''MinBound, ''IxContainer ] ) , $( mkSpecializedClassTests [t| IntMap' Int |] [ ''MinBound, ''IxContainer ] ) , $( mkSpecializedClassTests [t| ByteString Char |] [ ''Foldable,''MinBound,''Partitionable ] ) , testGroup 
"transformers" [ $( mkSpecializedClassTests [t| Lexical [Char] |] [''Ord,''MinBound] ) , $( mkSpecializedClassTests [t| ComponentWise [Char] |] [''Lattice,''MinBound] ) , $( mkSpecializedClassTests [t| Hamming [Char] |] [''Metric] ) , $( mkSpecializedClassTests [t| Levenshtein [Char] |] [''Metric] ) ] , testGroup "metric" -- [ $( mkSpecializedClassTests [t| Ball Int |] [''Eq,''Container] ) -- , $( mkSpecializedClassTests [t| Ball (Hamming [Char]) |] [''Eq,''Container] ) [ $( mkSpecializedClassTests [t| Box Int |] [''Eq,''Container] ) , $( mkSpecializedClassTests [t| Box (ComponentWise [Char]) |] [''Eq,''Container] ) ] ] ] $ RunnerOptions { ropt_threads = Nothing , ropt_test_options = Nothing , ropt_test_patterns = Nothing , ropt_xml_output = Nothing , ropt_xml_nested = Nothing , ropt_color_mode = Just ColorAlways , ropt_hide_successes = Just True , ropt_list_only = Just True } -------------------------------------------------------------------------------- -- orphan instances needed for compilation instance (Show a, Show b) => Show (a -> b) where show _ = "function"
cdepillabout/subhask
test/TestSuite.hs
bsd-3-clause
5,624
0
18
1,591
1,342
820
522
83
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE TypeOperators #-} module HipChat.Types.API where import Data.Text (Text) import Servant.API import HipChat.Types.Auth import HipChat.Types.Common import HipChat.Types.Extensions import HipChat.Types.RoomAddonUIUpdateRequest import HipChat.Types.Rooms import HipChat.Types.User type HipChatAPI = TokenAuth( SendMessage :<|> SendNotification :<|> CreateRoom :<|> GetRoomStatistics :<|> ViewUser :<|> CreateWebhook :<|> GetAllMembers :<|> RoomAddonUIUpdate :<|> GenerateToken) type GenerateToken = "v2" :> "oauth" :> "token" :> ReqBody '[FormUrlEncoded] TokenRequest :> BasicAuth "oauth" Int :> Post '[JSON] TokenResponse type ViewUser = "v2" :> "user" :> Capture "id_or_email" Text :> Get '[JSON] User -------------------------------------------------------------------------------- -- -- Rooms type CreateRoom = "v2" :> "room" :> ReqBody '[JSON] CreateRoomRequest :> Post '[JSON] CreateRoomResponse type CreateWebhook = "v2" :> "room" :> Capture "room_id_or_name" IdOrName :> "extension" :> "webhook" :> Capture "key" Text :> ReqBody '[JSON] Webhook :> Put '[JSON] CreateWebhookResponse type SendMessage = "v2" :> "room" :> Capture "room" Text :> "message" :> ReqBody '[JSON] Message :> Post '[JSON] SendMessageResponse type SendNotification = "v2" :> "room" :> Capture "room" Text :> "notification" :> ReqBody '[JSON] SendNotificationRequest :> PostNoContent '[JSON] NoContent type GetRoomStatistics = "v2" :> "room" :> Capture "room" Text :> "statistics" :> Get '[JSON] RoomStatistics type GetAllMembers = "v2" :> "room" :> Capture "room_id_or_name" IdOrName :> "member" :> QueryParam "start-index" Int :> QueryParam "max-results" Int :> Get '[JSON] GetAllMembersResponse type GetAllRooms = "v2" :> "room" :> QueryParam "start-index" Int :> QueryParam "max-results" Int :> QueryParam "include-private" Bool :> QueryParam "include-archived" Bool :> Get '[JSON] GetAllRoomsResponse -------------------------------------------------------------------------------- -- Addons type RoomAddonUIUpdate = "v2" :> "addon" :> "ui" :> "room" :> Capture "room_id" Int :> ReqBody '[JSON] RoomAddonUIUpdateRequest :> PostNoContent '[JSON] NoContent
oswynb/hipchat-hs
lib/HipChat/Types/API.hs
bsd-3-clause
2,422
0
14
505
604
318
286
78
0
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE TypeSynonymInstances #-} module DataFlow.Graphviz.Renderer ( renderGraphviz ) where import Data.Char import Data.List.Utils import Text.Printf import DataFlow.PrettyRenderer import DataFlow.Graphviz convertNewline :: String -> String convertNewline = replace "\n" "<br/>" class Renderable t where render :: t -> Renderer () instance Renderable Attr where render (Attr i1 i2) = writeln $ printf "%s = %s;" i1 (convertNewline i2) instance Renderable AttrList where render = mapM_ render instance Renderable Port where render (Port (Just id') c) = write $ printf "%s:%s" (show id') (map toLower $ show c) render (Port Nothing c) = write $ map toLower $ show c instance Renderable NodeID where render (NodeID id' (Just port)) = do write id' write ":" render port render (NodeID id' Nothing) = write id' instance Renderable Subgraph where render (Subgraph id' []) = writeln $ printf "subgraph %s {}" id' render (Subgraph id' stmts) = do writeln $ printf "subgraph %s {" id' withIndent $ render stmts writeln "}" instance Renderable EdgeOperator where render Arrow = write " -> " render Line = write " -- " instance Renderable EdgeOperand where render (IDOperand nodeId) = render nodeId render (SubgraphOperand sg) = render sg instance Renderable EdgeExpr where render (EdgeExpr o1 operator o2) = do render o1 render operator render o2 instance Renderable AttrStmtType where render = write . map toLower . show inBrackets :: Renderer () -> Renderer () inBrackets r = do writeln " [" withIndent r writeln "]" instance Renderable Stmt where render (NodeStmt id' []) = do write id' writeln "" render (NodeStmt id' attrs) = do write id' inBrackets $ render attrs render (EdgeStmt expr []) = do render expr writeln ";" render (EdgeStmt expr attrs) = do render expr inBrackets $ render attrs render (AttrStmt t []) = do render t writeln " []" render (AttrStmt t attrs) = do render t inBrackets $ render attrs render (EqualsStmt i1 i2) = do write i1 write " = " write i2 writeln ";" render (SubgraphStmt sg) = render sg instance Renderable StmtList where render = mapM_ render instance Renderable Graph where render (Digraph id' stmts) = do writeln $ printf "digraph %s {" id' withIndent $ render stmts writeln "}" renderGraphviz :: Graph -> String renderGraphviz = renderWithIndent . render
sonyxperiadev/dataflow
src/DataFlow/Graphviz/Renderer.hs
bsd-3-clause
2,530
0
10
580
897
418
479
87
1
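A hedged usage sketch for the renderer above: it builds a two-node digraph and prints its Graphviz source. The constructor shapes (Digraph, NodeStmt, EdgeStmt, EdgeExpr, IDOperand, NodeID, Arrow, Attr) and their String identifiers are inferred from the Renderable instances above; the DataFlow.Graphviz AST module itself is not shown, so treat the exact types as assumptions.

module RendererDemo where

import DataFlow.Graphviz
import DataFlow.Graphviz.Renderer (renderGraphviz)

-- a digraph with two nodes and one edge: a -> b
demo :: Graph
demo = Digraph "g"
  [ NodeStmt "a" [Attr "label" "start"]
  , NodeStmt "b" []
  , EdgeStmt (EdgeExpr (IDOperand (NodeID "a" Nothing))
                       Arrow
                       (IDOperand (NodeID "b" Nothing)))
             []
  ]

main :: IO ()
main = putStrLn (renderGraphviz demo)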
module Day5 where

import Data.List (group, isInfixOf, tails)

threeVowels :: String -> Bool
threeVowels = (>= 3) . length . filter (`elem` "aeiouAEIOU")

doubleLetter :: String -> Bool
doubleLetter = any ((>1) . length) . group

noBadStrings :: String -> Bool
noBadStrings input = all (not . flip isInfixOf input) badstrings
  where badstrings = ["ab", "cd", "pq", "xy"]

hasPalindromeTriplet :: String -> Bool
hasPalindromeTriplet = any isPalindrome . triplets
  where isPalindrome = (==) <*> reverse
        triplets input = take (length input - 3 + 1) $ map (take 3) $ tails input

repeatedPair :: String -> Bool
repeatedPair = any repeatedPrefixPair . takeWhile ((>=4) . length) . tails
  where repeatedPrefixPair input = take 2 input `isInfixOf` drop 2 input

applyRules :: [String -> Bool] -> String -> Bool
applyRules rules string = and $ sequence rules string

isNice :: String -> Bool
isNice = applyRules [threeVowels, doubleLetter, noBadStrings]

isNicer :: String -> Bool
isNicer = applyRules [hasPalindromeTriplet, repeatedPair]

solution :: String -> IO ()
solution input = do
  print $ length $ filter isNice (lines input)
  print $ length $ filter isNicer (lines input)
yarbroughw/advent
src/Day5.hs
bsd-3-clause
1,186
0
13
212
438
234
204
26
1
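A minimal check of the Day5 predicates above against the well-known Advent of Code 2015 examples; it assumes the Day5 module above is on the load path, and the expected results in the comments follow directly from the definitions shown.

module Day5Demo where

import Day5 (isNice, isNicer)

main :: IO ()
main = do
  print (isNice  "ugknbfddgicrmopn")  -- True: three vowels, a "dd" double, no bad pairs
  print (isNice  "jchzalrnumimnmhp")  -- False: no doubled letter
  print (isNicer "xxyxx")             -- True: "xyx" triplet, and the "xx" pair repeats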
module Main where

import Network.C10kServer
import Network.Salvia.Handler.ColorLog
import Network.Salvia.Handler.ExtendedFileSystem
import Network.Salvia.Handlers
import Network.Salvia.Impl.C10k
import System.IO

main :: IO ()
main = start "127.0.0.1" "root@localhost"
  C10kConfig
    { initHook               = return ()
    , exitHook               = \s -> putStrLn ("C10kServer error:" ++ s)
    , parentStartedHook      = return ()
    , startedHook            = return ()
    , sleepTimer             = 10
    , preforkProcessNumber   = 200
    , threadNumberPerProcess = 200
    , portName               = "8080"
    , pidFile                = "pid"
    , user                   = "sebas"
    , group                  = "wheel"
    }
  (hDefaultEnv (hExtendedFileSystem "." >> hColorLog stdout))
  ()
sebastiaanvisser/salvia-demo
src/ServeDemo.hs
bsd-3-clause
813
0
11
268
186
110
76
24
1
{-# LANGUAGE CPP #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE OverloadedStrings #-} module Main where import Network.SSH.LoadKeys import Network.SSH.Messages import Network.SSH.Server import Control.Concurrent import Control.Exception import Control.Monad import qualified Data.ByteString as S import qualified Data.ByteString.Char8 as S8 import qualified Graphics.Vty as Vty import Network ( PortID(..), withSocketsDo, listenOn , accept, Socket ) import System.Directory (getHomeDirectory) import System.Environment import System.FilePath import System.IO ( hClose ) import System.IO.Error ( isIllegalOperation ) import System.Posix.IO ( fdToHandle, closeFd ) import Text.Read (readMaybe) import qualified SetGame import Openpty import UnixTerminalFlags #if MIN_VERSION_base(4,8,0) import System.Exit ( die ) #else import System.Exit ( exitFailure ) die :: String -> IO a die err = hPutStrLn stderr err >> exitFailure #endif main :: IO () main = withSocketsDo $ do args <- getArgs progName <- getProgName let usage = die $ "usage: "++progName++" LISTEN_PORT" port <- case args of [portString] -> do let mPort = readMaybe portString maybe usage return mPort _ -> usage sock <- listenOn (PortNumber $ fromInteger port) sAuth <- loadPrivateKeys "server_keys" home <- getHomeDirectory user <- getEnv "USER" let pubKeys = [home </> ".ssh" </> "authorized_keys"] let creds = [(S8.pack user,pubKeys)] let debugLevel = 1 sshServer (mkServer debugLevel sAuth creds sock) mkServer :: Int -> [ServerCredential] -> [ClientCredential] -> Socket -> Server mkServer debugLevel auths creds sock = Server { sAccept = do (handle',_,_) <- accept sock let h = handle2HandleLike handle' let sh = mkSessionHandlers creds return (sh, h) , sAuthenticationAlgs = auths , sVersion = "SSH_HaLVM_2.0" , sDebugLevel = debugLevel } convertWindowSize :: SshWindowSize -> Winsize convertWindowSize winsize = Winsize { wsRow = fromIntegral $ sshWsRows winsize , wsCol = fromIntegral $ sshWsCols winsize , wsXPixel = fromIntegral $ sshWsX winsize , wsYPixel = fromIntegral $ sshWsY winsize } type ClientCredential = (S.ByteString, [FilePath]) mkSessionHandlers :: [ClientCredential] -> SessionHandlers mkSessionHandlers creds = SessionHandlers { .. } where cDirectTcp _host _port _events _writeback = return False cRequestExec "echo" events writeback = do void (forkIO (echoServer events writeback)) return True cRequestExec _command _events _writeback = return False -- Same as 'exec echo' above, which you access in OpenSSH by running -- @ssh <host> echo@. To access this "echo" subsystem in OpenSSH, -- use @ssh <host> -s echo@. 
cRequestSubsystem "echo" readEvent writeback = do void (forkIO (echoServer readEvent writeback)) return True cRequestSubsystem _ _ _ = return False cOpenShell term winsize termflags readEvent writeBytes = do (masterFd, slaveFd) <- openpty Nothing (Just (convertWindowSize winsize)) (Just (foldl (\t (key,val) -> setTerminalFlag key val t) defaultTermios termflags)) masterH <- fdToHandle masterFd void $ forkIO $ forever (do out <- S.hGetSome masterH 1024 writeBytes (Just out) ) `finally` writeBytes Nothing `catch` \e -> unless (isIllegalOperation e) (throwIO e) void $ forkIO $ let loop = do event <- readEvent case event of SessionEof -> loop SessionClose -> closeFd slaveFd SessionWinsize winsize' -> do changePtyWinsize masterFd (convertWindowSize winsize') loop SessionData bs -> do S.hPut masterH bs loop SessionRequestResponse{} -> loop in loop let config = Vty.Config { Vty.vmin = Just 1 , Vty.vtime = Just 0 , Vty.mouseMode = Nothing , Vty.bracketedPasteMode = Nothing , Vty.debugLog = Nothing , Vty.inputMap = [] , Vty.inputFd = Just slaveFd , Vty.outputFd = Just slaveFd , Vty.termName = Just (S8.unpack term) } void $ forkIO $ do SetGame.gameMain config hClose masterH return True cAuthHandler = defaultAuthHandler (defaultCheckPw (const $ Just "god")) (defaultLookupPubKeys (\user -> return $ maybe [] id $ lookup user creds)) echoServer :: IO SessionEvent -> (Maybe S.ByteString -> IO ()) -> IO () echoServer readEvent write = loop where loop = do event <- readEvent case event of SessionData xs -> write (Just xs) >> loop SessionEof -> loop SessionClose -> write Nothing SessionWinsize{} -> loop SessionRequestResponse{} -> loop
glguy/ssh-hans
server/Main.hs
bsd-3-clause
5,609
0
23
1,969
1,372
707
665
131
7
-------------------------------------------------------------------------------
-- |
-- Module      : Generator.Printer
-- Copyright   : (c) 2016 Michael Carpenter
-- License     : BSD3
-- Maintainer  : Michael Carpenter <[email protected]>
-- Stability   : experimental
-- Portability : portable
--
-------------------------------------------------------------------------------
module Generator.Printer
  ( module Generator.Printer.Classic
  , module Generator.Printer.Modern
  , module Generator.Printer.Version
  ) where

import Generator.Printer.Classic
import Generator.Printer.Modern
import Generator.Printer.Version
oldmanmike/hs-minecraft-protocol
generate/src/Generator/Printer.hs
bsd-3-clause
637
0
5
82
57
42
15
7
0
module Evaluator.FreeVars ( renamingFreeVars, inFreeVars, pureHeapFreeVars, pureHeapOpenFreeVars, stackFreeVars, stackFrameFreeVars, stateFreeVars ) where import Evaluator.Syntax import Core.FreeVars import Core.Renaming import Renaming import Utilities import qualified Data.Map as M import qualified Data.Set as S renamingFreeVars :: Renaming -> FreeVars -> FreeVars renamingFreeVars rn fvs = S.map (rename rn) fvs inFreeVars :: (a -> FreeVars) -> In a -> FreeVars inFreeVars thing_fvs (rn, thing) = renamingFreeVars rn (thing_fvs thing) pureHeapFreeVars :: PureHeap -> (BoundVars, FreeVars) -> FreeVars pureHeapFreeVars h (bvs, fvs) = fvs' S.\\ bvs' where (bvs', fvs') = pureHeapOpenFreeVars h (bvs, fvs) pureHeapOpenFreeVars :: PureHeap -> (BoundVars, FreeVars) -> (BoundVars, FreeVars) pureHeapOpenFreeVars = flip $ M.foldWithKey (\x' in_e (bvs, fvs) -> (S.insert x' bvs, fvs `S.union` inFreeVars taggedTermFreeVars in_e)) stackFreeVars :: Stack -> FreeVars -> (BoundVars, FreeVars) stackFreeVars k fvs = (S.unions *** (S.union fvs . S.unions)) . unzip . map (stackFrameFreeVars . tagee) $ k stackFrameFreeVars :: StackFrame -> (BoundVars, FreeVars) stackFrameFreeVars kf = case kf of Apply x' -> (S.empty, S.singleton x') Scrutinise in_alts -> (S.empty, inFreeVars taggedAltsFreeVars in_alts) PrimApply _ in_vs in_es -> (S.empty, S.unions (map (inFreeVars taggedValueFreeVars) in_vs) `S.union` S.unions (map (inFreeVars taggedTermFreeVars) in_es)) Update x' -> (S.singleton x', S.empty) stateFreeVars :: State -> FreeVars stateFreeVars (Heap h _, k, in_e) = pureHeapFreeVars h (stackFreeVars k (inFreeVars taggedTermFreeVars in_e))
batterseapower/supercompilation-by-evaluation
Evaluator/FreeVars.hs
bsd-3-clause
1,724
0
14
297
587
320
267
32
4
module Process.PeerManager
    ( start
    ) where

{-
import Control.Applicative
import Control.Concurrent
import Control.Concurrent.STM
import Control.DeepSeq
import Control.Monad.State
import Control.Monad.Reader

import Data.Array
import qualified Data.Map as M
import qualified Network.Socket as Sock

import System.Log.Logger

import Channels
import Process
import Process.Peer as Peer
import Process.ChokeMgr hiding (start)
import Process.FS hiding (start)
import Process.PieceMgr hiding (start)
import Process.Status hiding (start)
import Protocol.Wire

import Torrent hiding (infoHash)
-}

numPeers :: Int
numPeers = 40

data PConf = PConf
    { cRateTVar         :: RateTVar
    , cPeerManagerChan  :: PeerManagerChannel
    , cChokeManagerChan :: ChokeMgrChannel
    , cPeerEventChannel :: PeerEventChannel
    }

data PState = PState
    { sPeerId        :: PeerId
    , sPeerQueue     :: [(InfoHash, Peer)]
    , sActivePeers   :: M.Map ThreadId PeerChannel
    , sChanManageMap :: M.Map InfoHash TorrentLocal
    }

instance Logging PConf where
    logName _ = "Process.PeerManager"

start :: PeerId -> RateTVar -> PeerManagerChannel -> IO ThreadId
start peerId rateTV peerChan = do
    mgrC <- newTChanIO
    let conf = PConf rateTV peerChan chokeChan
        state = PState peerId [] M.empty M.empty
    spawnProcess conf state (catchProcess loop (return ()))
  where
    loop = do
        peerEventChan <- asks cPeerEventChannel
        peerManagerChan <- asks cPeerManagerChan
        event <- liftIO . atomically $
            (readTChan peerEventChan >>= return . Left) `orElse`
            (readTChan peerManagerChan >>= return . Right)
        case event of
            Left msg  -> peerEvent msg
            Right msg -> incomingPeers msg
        fillPeers
        loop

fillPeers :: Process PConf PState ()
fillPeers = do
    count <- M.size `fmap` gets sActivePeers
    when (count < numPeers) $ do
        let addCount = numPeers - count
        debugP $ "Connecting " ++ show addCount ++ " new peers"
        queue <- gets sPeerQueue
        let (peers, rest) = splitAt addCount queue
        mapM_ addPeer peers
        modify (\state -> state { sPeerQueue = rest })

addPeer :: (InfoHash, Peer) -> Process PConf PState ThreadId
addPeer (infoHash, Peer addr) = do
    peerId <- gets sPeerId
    -- pool <- asks cPeerPool
    mgrC <- asks mgrCh
    cm <- gets sChanManageMap
    rateTV <- asks cRateTVar
    liftIO $ connect (addr, peerId, infoHash) pool mgrC rateTV cm

incomingPeers :: PeerManagerMessage -> Process PConf PState ()
incomingPeers msg = case msg of
    PeersFromTracker infoHash peers -> do
        debugP "Adding new peers to the queue"
        modify (\s -> s { sPeerQueue = (map (infoHash,) peers) ++ sPeerQueue s })
    NewIncoming conn@(sock, _) -> do
        size <- M.size `fmap` gets sActivePeers
        if size < numPeers
            then do
                debugP "Connecting new peers"
                _ <- addIncoming conn
                return ()
            else do
                debugP "Too many active peers, closing the connection"
                liftIO $ Sock.sClose sock
    NewTorrent infoHash torrent -> do
        modify (\s -> s { sChanManageMap = M.insert infoHash torrent (sChanManageMap s) })
    StopTorrent _ih -> do
        errorP "Stopping a torrent is not implemented"

peerEvent :: PeerMessage -> Process PConf PState ()
peerEvent msg = case msg of
    Connect infoHash tid chan -> newPeer infoHash tid chan
    Disconnect tid -> removePeer tid
  where
    newPeer infoHash tid chan = do
        debugP $ "Connecting peer " ++ show tid
        chockChan <- asks cChokeManagerChan
        liftIO . atomically $ writeTChan chockChan (AddPeer infoHash tid chan)
        peers <- M.insert tid chan <$> gets sActivePeers
        modify (\s -> s { sActivePeers = peers })
    removePeer threadId = do
        debugP $ "Disconnecting peer " ++ show threadId
        chockChan <- asks cChokeManagerChan
        liftIO . atomically $ writeTChan chockChan (RemovePeer threadId)
        peers <- M.delete threadId <$> gets sActivePeers
        modify (\s -> s { sActivePeers = peers })

addIncoming :: (Sock.Socket, Sock.SockAddr) -> Process CF ST ThreadId
addIncoming conn = do
    ppid <- gets peerId
    pool <- asks peerPool
    mgrC <- asks mgrCh
    v <- asks chokeRTV
    cm <- gets cmMap
    liftIO $ acceptor conn pool ppid mgrC v cm

type ConnectRecord = (SockAddr, PeerId, InfoHash)

connect :: ConnectRecord -> PeerChannel -> RateTVar -> ChanManageMap -> IO ThreadId
connect (addr, peerId, infoHash) peeChan rateTV cmap = forkIO connector
  where
    connector = do
        sock <- socket AF_INET Stream defaultProtocol
        debugP $ "Connecting to " ++ show addr
        connect sock addr
        debugP "Connection established, starting handshake"
        r <- initiateHandshake sock peerId infoHash
        case r of
            Left err -> do
                debugP $ "Could not connect to " ++ show addr ++ ". Error: " ++ err
                return ()
            Right (caps, himPeerId, himInfoHash) -> do
                debugP $ "Entering working mode with " ++ show addr
                let tc = case M.lookup himInfoHash cmap of
                        Just x  -> x
                        Nothing -> error "Impossible"
                children <- Peer.start sock caps mgrC rtv
                                (tcPcMgrCh tc) (tcFSCh tc) (tcStatTV tc)
                                (tcPM tc) (succ . snd . bounds $ tcPM tc) ihsh
                return ()

acceptor :: (Sock.Socket, Sock.SockAddr) -> SupervisorChannel -> PeerId
         -> MgrChannel -> RateTVar -> ChanManageMap -> IO ThreadId
acceptor (s,sa) pool pid mgrC rtv cmmap = forkIO (connector >> return ())
  where
    ihTst k = M.member k cmmap
    connector = {-# SCC "acceptor" #-} do
        debugLog "Handling incoming connection"
        r <- receiveHandshake s pid ihTst
        debugLog "RecvHandshake run"
        case r of
            Left err -> do
                debugLog ("Incoming Peer handshake failure with " ++ show sa ++ ", error: " ++ err)
                return ()
            Right (caps, _rpid, ih) -> do
                debugLog "entering peerP loop code"
                let tc = case M.lookup ih cmmap of
                        Nothing -> error "Impossible, I hope"
                        Just x  -> x
                children <- Peer.start s caps mgrC rtv
                                (tcPcMgrCh tc) (tcFSCh tc) (tcStatTV tc)
                                (tcPM tc) (succ . snd . bounds $ tcPM tc) ih
                atomically $ writeTChan pool $
                    SpawnNew (Supervisor $ allForOne "PeerSup" children)
                return ()
    debugLog = debugM "Process.PeerMgr.acceptor"
artems/htorr
src/Process/PeerManager.hs
bsd-3-clause
7,426
14
20
2,500
1,843
893
950
-1
-1
{-# LANGUAGE StandaloneDeriving #-} module Web.Rest.HTTP ( runRestT,Hostname,Port,RestError(..) ) where import Web.Rest.Internal ( RestT,RestF(Rest),Method(..), Request(..),Response(..)) import Network.HTTP ( simpleHTTP,mkHeader,lookupHeader, HeaderName(HdrAccept,HdrContentType,HdrContentLength)) import qualified Network.HTTP as HTTP ( Request(Request),RequestMethod(..), Response(rspCode,rspHeaders,rspBody)) import Network.URI ( URI(URI),URIAuth(URIAuth)) import Network.Stream (ConnError) import Control.Error ( EitherT,runEitherT,scriptIO, hoistEither,fmapLT) import Control.Monad.Trans.Free (FreeT,FreeF(Pure,Free),runFreeT) import Control.Monad.Trans (lift) import Control.Monad.IO.Class (MonadIO) import Data.Text (Text,unpack,pack) import qualified Data.ByteString as ByteString (length) -- | The host url. For example "example.com". type Hostname = Text -- | The host port. For example 7474. type Port = Int -- | Possible errors when running a 'RestT' with the HTTP backend. data RestError = SimpleHTTPError String | ConnError ConnError deriving instance Show RestError -- | Run the given rest calls against the given hostname and port. Return a 'Left' on -- error. runRestT :: (MonadIO m) => Hostname -> Port -> RestT m a -> m (Either RestError a) runRestT hostname port = runEitherT . interpretRestT hostname port -- | Run the given rest calls against the given hostname and port and fail in the -- 'EitherT' monad. interpretRestT :: (MonadIO m) => Hostname -> Port -> RestT m a -> EitherT RestError m a interpretRestT hostname port restt = do next <- lift (runFreeT restt) case next of Pure result -> return result Free (Rest request continue) -> do let httprequest = HTTP.Request httpuri httpmethod httpheaders httpbody httpuri = URI "http:" (Just uriauth) (unpack (location request)) "" "" uriauth = URIAuth "" (unpack hostname) (":" ++ (show port)) httpmethod = (methodToMethod (method request)) httpheaders = [ mkHeader HdrAccept (unpack (accept request)), mkHeader HdrContentType (unpack (requestType request)), mkHeader HdrContentLength (show (ByteString.length (httpbody)))] httpbody = (requestBody request) resultresponse <- scriptIO (simpleHTTP httprequest) `onFailure` SimpleHTTPError httpresponse <- hoistEither resultresponse `onFailure` ConnError let mayberesponsetype = lookupHeader HdrContentType (HTTP.rspHeaders httpresponse) response = Response (HTTP.rspCode httpresponse) (fmap pack mayberesponsetype) (HTTP.rspBody httpresponse) interpretRestT hostname port (continue response) -- | Convert the package local definition of the http verb to the one used by io-streams. methodToMethod :: Method -> HTTP.RequestMethod methodToMethod GET = HTTP.GET methodToMethod POST = HTTP.POST methodToMethod PUT = HTTP.PUT methodToMethod DELETE = HTTP.DELETE -- | Annotate an error. onFailure :: Monad m => EitherT a m r -> (a -> b) -> EitherT b m r onFailure = flip fmapLT
phischu/haskell-rest
src/Web/Rest/HTTP.hs
bsd-3-clause
3,350
0
21
827
857
479
378
62
2
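A hedged sketch of issuing one GET through the interpreter above. It assumes that RestT is the FreeT over RestF which the interpreter's use of runFreeT suggests (so liftF can wrap the exported Rest constructor), that RestF has a Functor instance, and that Request is a plain record whose only fields are the five accessors used above; all of these are assumptions, not confirmed by the source.

{-# LANGUAGE OverloadedStrings #-}
module RestDemo where

import Control.Monad.Trans.Free (liftF)
import Web.Rest.Internal (RestT, RestF (Rest), Method (GET), Request (..), Response)
import Web.Rest.HTTP (runRestT)

-- one GET against the root resource, returning the raw Response
getRoot :: Monad m => RestT m Response
getRoot = liftF (Rest request id)
  where
    request = Request
      { method      = GET
      , location    = "/"
      , accept      = "application/json"
      , requestType = "application/json"
      , requestBody = ""
      }

main :: IO ()
main = do
  result <- runRestT "example.com" 80 getRoot
  either print (const (putStrLn "got a response")) result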
{-# LANGUAGE DataKinds #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE CPP #-} module Dixi.API where import Control.Lens hiding ((.=)) import Data.Aeson import Data.Aeson.Types import Data.Foldable import Data.Text (Text) import Data.Patch (Hunks, HunkStatus (..)) import Data.Proxy import Data.Vector (Vector) import Servant.API import Servant.HTML.Blaze import Text.Blaze.Html.Renderer.Text import Text.Hamlet (Html) #ifdef OLDBASE import Control.Applicative #endif import Dixi.Config import Dixi.Common import Dixi.Page import Dixi.PatchUtils type a :| b = a :<|> b infixr 8 :| infixr 8 |: (|:) :: a -> b -> a :| b (|:) = (:<|>) data PrettyPage = PP Renders Key Version (Page Html) data RawPage = RP Renders Key Version (Page Text) data DiffPage = DP Renders Key Version Version (Page (Hunks Char)) data History = H Renders Key [Page PatchSummary] data NewBody = NB Text (Maybe Text) data RevReq = DR Version Version (Maybe Text) type HistoryAPI = Get '[HTML, JSON] History :| Capture "version" Version :> VersionAPI :| "diff" :> QueryParam "from" Version :> QueryParam "to" Version :> Get '[HTML, JSON] DiffPage :| "revert" :> ReqBody '[FormUrlEncoded, JSON] RevReq :> Post '[HTML, JSON] PrettyPage type VersionAPI = PageViewAPI :| ReqBody '[FormUrlEncoded, JSON] NewBody :> Post '[HTML, JSON] PrettyPage type PageAPI = PageViewAPI :| "history" :> HistoryAPI type PageViewAPI = Get '[HTML, JSON] PrettyPage :| "raw" :> Get '[HTML, JSON] RawPage type Dixi = Capture "page" Key :> PageAPI :| PageAPI instance FromJSON RevReq where parseJSON (Object o) = DR <$> o .: "from" <*> o .: "to" <*> o .:? "comment" parseJSON wat = typeMismatch "Revert" wat instance ToJSON RevReq where toJSON (DR v1 v2 Nothing) = object ["from" .= v1, "to" .= v2] toJSON (DR v1 v2 (Just c)) = object ["from" .= v1, "to" .= v2, "comment" .= c] instance FromJSON NewBody where parseJSON (Object o) = NB <$> o .: "content" <*> o .:? "comment" parseJSON wat = typeMismatch "NewBody" wat instance ToJSON NewBody where toJSON (NB cn Nothing) = object ["content" .= cn ] toJSON (NB cn (Just c)) = object ["content" .= cn, "comment" .= c ] instance ToJSON DiffPage where toJSON (DP (Renders {..}) k v1 v2 p) = object [ "title" .= k , "versions" .= object [ "from" .= v1 , "to" .= v2 ] , "diff" .= map (uncurry hunkToJSON) (p ^. body) ] where hunkToJSON :: Vector Char -> HunkStatus -> Value hunkToJSON v s = object [ "text" .= toList v , "status" .= case s of Inserted -> '+' Deleted -> '-' Replaced -> '~' Unchanged -> ' ' ] instance ToJSON RawPage where toJSON (RP (Renders {..}) k v p) = let tim = renderTime $ p ^. time com = p ^. comment . traverse in object [ "title" .= k , "version" .= v , "time" .= tim , "comment" .= com , "content" .= (p ^. body) ] instance ToJSON PrettyPage where toJSON (PP (Renders {..}) k v p) = let tim = renderTime $ p ^. time com = p ^. comment . traverse in object [ "title" .= k , "version" .= v , "time" .= tim , "comment" .= com , "content" .= renderHtml (p ^. body) ] instance ToJSON History where toJSON (H (Renders {..}) k cs) = object [ "title" .= k , "history" .= zipWith versionToJSON [1 :: Version ..] cs] where versionToJSON v p = let tim = renderTime $ p ^. time com = p ^. comment . traverse (a,b,c) = p ^. 
body in object [ "version" .= v , "time" .= tim , "comment" .= com , "changes" .= object [ "insertions" .= a , "deletions" .= b, "modifications" .= c] ] dixi :: Proxy Dixi dixi = Proxy
liamoc/dixi
Dixi/API.hs
bsd-3-clause
4,474
0
16
1,503
1,416
763
653
-1
-1
module Genotype.Processor.KeepColumnNumbers
  ( process
  ) where

import Control.Applicative (many)
import Data.Attoparsec.Text (Parser)
import Genotype.Processor (Processor)

import qualified Data.Attoparsec.Text as P
import qualified Data.Text as T
import qualified Data.Text.IO as T

import Genotype.Types

getColumnList :: T.Text -> IO [Int]
getColumnList filename = do
  cols <- T.readFile $ T.unpack filename
  either fail return $ P.parseOnly parseColumnList cols

parseColumnList :: Parser [Int]
parseColumnList = many $ do
  d <- P.decimal
  P.skipSpace
  return d

keepColumns :: [Int] -> [a] -> [a]
keepColumns = go 0
  where
    go _ _ [] = []
    go _ [] remaining = remaining
    go index (c:cs) (r:rs)
      | index == c = r : go (succ index) cs rs
    go index cs (_:rs) = go (succ index) cs rs

process :: T.Text -> Processor
process filename genos = do
  cols <- getColumnList filename
  return . flip map genos $ \g ->
    g { geno_datums = keepColumns cols $ geno_datums g }
Jonplussed/genotype-parser
src/Genotype/Processor/KeepColumnNumbers.hs
bsd-3-clause
998
0
12
207
393
207
186
30
4
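The keepColumns helper above is not exported by the module, so here is a standalone copy, kept only to illustrate its behaviour: column indices are zero-based, and once the list of wanted columns is exhausted the remaining items pass through unchanged.

-- standalone copy of the unexported helper above, for illustration only
keepColumns :: [Int] -> [a] -> [a]
keepColumns = go 0
  where
    go _ _ [] = []
    go _ [] remaining = remaining
    go index (c:cs) (r:rs)
      | index == c = r : go (succ index) cs rs
    go index cs (_:rs) = go (succ index) cs rs

main :: IO ()
main = putStrLn (keepColumns [0, 2] "abcde")  -- prints "acde": 'a', 'c', then the leftover "de"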
------------------------------------------------------------------------------ -- | -- Maintainer : Ralf Laemmel, Joost Visser -- Stability : experimental -- Portability : portable -- -- This module is part of 'StrategyLib', a library of functional strategy -- combinators, including combinators for generic traversal. This module -- overloads basic combinators to enable uniform treatment of TU and TP -- strategies. The overloading scheme is motivated in the -- "... Polymorphic Symphony" paper. The names in the present module -- deviate from the paper in that they are postfixed by an "...S" -- in order to rule out name clashes and to avoid labour-intensive -- resolution. The class constraints in this module seem to be outrageous -- but this has to do with a type inferencing bug for class hierarchies -- in hugs. This bug is removed in the October 2002 release. ------------------------------------------------------------------------------ module OverloadingTheme where import Control.Monad import Data.Monoid import StrategyPrelude ------------------------------------------------------------------------------ -- * Unconstrained -- | Overload completely unconstrained strategy combinators class Monad m => Strategy s m where voidS :: s m -> TU () m -- | Sequential composition seqS :: TP m -> s m -> s m -- | Sequential composition with value passing passS :: TU a m -> (a -> s m) -> s m instance Monad m => Strategy TP m where voidS = voidTP seqS = seqTP passS = passTP instance Monad m => Strategy (TU a) m where voidS = voidTU seqS = seqTU passS = passTU -- | Overload apply and adhoc combinators class (Strategy s m, Monad m, Term t) => StrategyApply s m t x | s t -> x where -- | Strategy application applyS :: s m -> t -> m x -- | Dynamic type case adhocS :: s m -> (t -> m x) -> s m instance (Monad m, Term t) => StrategyApply TP m t t where applyS = applyTP adhocS = adhocTP instance (Monad m, Term t) => StrategyApply (TU a) m t a where applyS = applyTU adhocS = adhocTU ------------------------------------------------------------------------------ -- * Involving Monoid, MonadPlus, -- | Overload basic combinators which might involve a monoid class (Monad m, Strategy s m) => StrategyMonoid s m where -- | Identity (success) skipS :: s m -- | Push down to all children allS :: s m -> s m -- | Combine sequentially combS :: s m -> s m -> s m instance (Monad m, Strategy TP m) => StrategyMonoid TP m where skipS = idTP allS = allTP combS = seqTP instance (Monad m, Monoid u, Strategy (TU u) m) => StrategyMonoid (TU u) m where skipS = constTU mempty allS = allTU' combS = op2TU mappend -- | Overload basic combinators which involve MonadPlus class (Strategy s m, Monad m, MonadPlus m) => StrategyPlus s m where -- | Failure failS :: s m -- | Choice choiceS :: s m -> s m -> s m -- | Push down to a single child oneS :: s m -> s m instance (Monad m, MonadPlus m, Strategy TP m) => StrategyPlus TP m where failS = failTP choiceS = choiceTP oneS = oneTP instance (Monad m, MonadPlus m, Strategy (TU u) m) => StrategyPlus (TU u) m where failS = failTU choiceS = choiceTU oneS = oneTU -- | Overloaded lifting with failure monoS :: (StrategyApply s m t x, StrategyPlus s m) => (t -> m x) -> s m monoS f = adhocS failS f ------------------------------------------------------------------------------ -- * Effect substitution (see "EffectTheme"). -- | Overload msubst combinator (Experimental) class StrategyMSubst s where -- | Substitute one monad for another msubstS :: (Monad m, Monad m') => (forall t . 
m t -> m' t) -> s m -> s m' instance StrategyMSubst TP where msubstS f = msubstTP f instance StrategyMSubst (TU a) where msubstS f = msubstTU f ------------------------------------------------------------------------------
forste/haReFork
StrategyLib-4.0-beta/library/OverloadingTheme.hs
bsd-3-clause
4,178
6
12
1,077
932
494
438
-1
-1
module CommandLine ( optDescrs, cmdLineArgsMap, cmdFlavour, lookupFreeze1, cmdIntegerSimple, cmdProgressInfo, cmdConfigure, cmdCompleteSetting, cmdDocsArgs, lookupBuildRoot, TestArgs(..), TestSpeed(..), defaultTestArgs ) where import Data.Either import qualified Data.HashMap.Strict as Map import Data.List.Extra import Development.Shake hiding (Normal) import Flavour (DocTargets, DocTarget(..)) import Hadrian.Utilities hiding (buildRoot) import Settings.Parser import System.Console.GetOpt import System.Environment import qualified System.Directory as Directory import qualified Data.Set as Set data TestSpeed = TestSlow | TestNormal | TestFast deriving (Show, Eq) -- | All arguments that can be passed to Hadrian via the command line. data CommandLineArgs = CommandLineArgs { configure :: Bool , flavour :: Maybe String , freeze1 :: Bool , integerSimple :: Bool , progressInfo :: ProgressInfo , buildRoot :: BuildRoot , testArgs :: TestArgs , docTargets :: DocTargets , completeStg :: Maybe String } deriving (Eq, Show) -- | Default values for 'CommandLineArgs'. defaultCommandLineArgs :: CommandLineArgs defaultCommandLineArgs = CommandLineArgs { configure = False , flavour = Nothing , freeze1 = False , integerSimple = False , progressInfo = Brief , buildRoot = BuildRoot "_build" , testArgs = defaultTestArgs , docTargets = Set.fromList [minBound..maxBound] , completeStg = Nothing } -- | These arguments are used by the `test` target. data TestArgs = TestArgs { testKeepFiles :: Bool , testCompiler :: String , testConfigFile :: String , testConfigs :: [String] , testJUnit :: Maybe FilePath , testOnly :: [String] , testOnlyPerf :: Bool , testSkipPerf :: Bool , testRootDirs :: [FilePath] , testSpeed :: TestSpeed , testSummary :: Maybe FilePath , testVerbosity :: Maybe String , testWays :: [String] , testAccept :: Bool} deriving (Eq, Show) -- | Default value for `TestArgs`. defaultTestArgs :: TestArgs defaultTestArgs = TestArgs { testKeepFiles = False , testCompiler = "stage2" , testConfigFile = "testsuite/config/ghc" , testConfigs = [] , testJUnit = Nothing , testOnly = [] , testOnlyPerf = False , testSkipPerf = False , testRootDirs = [] , testSpeed = TestNormal , testSummary = Nothing , testVerbosity = Nothing , testWays = [] , testAccept = False } readConfigure :: Either String (CommandLineArgs -> CommandLineArgs) readConfigure = Right $ \flags -> flags { configure = True } readFlavour :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readFlavour ms = Right $ \flags -> flags { flavour = lower <$> ms } readBuildRoot :: Maybe FilePath -> Either String (CommandLineArgs -> CommandLineArgs) readBuildRoot ms = maybe (Left "Cannot parse build-root") (Right . set) (go =<< ms) where go :: String -> Maybe BuildRoot go = Just . BuildRoot set :: BuildRoot -> CommandLineArgs -> CommandLineArgs set flag flags = flags { buildRoot = flag } readFreeze1 :: Either String (CommandLineArgs -> CommandLineArgs) readFreeze1 = Right $ \flags -> flags { freeze1 = True } readIntegerSimple :: Either String (CommandLineArgs -> CommandLineArgs) readIntegerSimple = Right $ \flags -> flags { integerSimple = True } readProgressInfo :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readProgressInfo ms = maybe (Left "Cannot parse progress-info") (Right . 
set) (go =<< lower <$> ms) where go :: String -> Maybe ProgressInfo go "none" = Just None go "brief" = Just Brief go "normal" = Just Normal go "unicorn" = Just Unicorn go _ = Nothing set :: ProgressInfo -> CommandLineArgs -> CommandLineArgs set flag flags = flags { progressInfo = flag } readTestKeepFiles :: Either String (CommandLineArgs -> CommandLineArgs) readTestKeepFiles = Right $ \flags -> flags { testArgs = (testArgs flags) { testKeepFiles = True } } readTestAccept :: Either String (CommandLineArgs -> CommandLineArgs) readTestAccept = Right $ \flags -> flags { testArgs = (testArgs flags) { testAccept = True } } readTestCompiler :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readTestCompiler compiler = maybe (Left "Cannot parse compiler") (Right . set) compiler where set compiler = \flags -> flags { testArgs = (testArgs flags) { testCompiler = compiler } } readTestConfig :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readTestConfig config = case config of Nothing -> Right id Just conf -> Right $ \flags -> let configs = conf : testConfigs (testArgs flags) in flags { testArgs = (testArgs flags) { testConfigs = configs } } readTestConfigFile :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readTestConfigFile filepath = maybe (Left "Cannot parse test-config-file") (Right . set) filepath where set filepath flags = flags { testArgs = (testArgs flags) { testConfigFile = filepath } } readTestJUnit :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readTestJUnit filepath = Right $ \flags -> flags { testArgs = (testArgs flags) { testJUnit = filepath } } readTestOnly :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readTestOnly tests = Right $ \flags -> flags { testArgs = (testArgs flags) { testOnly = tests'' flags } } where tests' = maybe [] words tests tests'' flags = testOnly (testArgs flags) ++ tests' readTestOnlyPerf :: Either String (CommandLineArgs -> CommandLineArgs) readTestOnlyPerf = Right $ \flags -> flags { testArgs = (testArgs flags) { testOnlyPerf = True } } readTestSkipPerf :: Either String (CommandLineArgs -> CommandLineArgs) readTestSkipPerf = Right $ \flags -> flags { testArgs = (testArgs flags) { testSkipPerf = True } } readTestRootDirs :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readTestRootDirs rootdirs = Right $ \flags -> flags { testArgs = (testArgs flags) { testRootDirs = rootdirs'' flags } } where rootdirs' = maybe [] (splitOn ":") rootdirs rootdirs'' flags = testRootDirs (testArgs flags) ++ rootdirs' readTestSpeed :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readTestSpeed ms = maybe (Left "Cannot parse test-speed") (Right . 
set) (go =<< lower <$> ms) where go :: String -> Maybe TestSpeed go "fast" = Just TestFast go "slow" = Just TestSlow go "normal" = Just TestNormal go _ = Nothing set :: TestSpeed -> CommandLineArgs -> CommandLineArgs set flag flags = flags { testArgs = (testArgs flags) {testSpeed = flag} } readTestSummary :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readTestSummary filepath = Right $ \flags -> flags { testArgs = (testArgs flags) { testJUnit = filepath } } readTestVerbose :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readTestVerbose verbose = Right $ \flags -> flags { testArgs = (testArgs flags) { testVerbosity = verbose } } readTestWay :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readTestWay way = case way of Nothing -> Right id Just way -> Right $ \flags -> let newWays = way : testWays (testArgs flags) in flags { testArgs = (testArgs flags) {testWays = newWays} } readCompleteStg :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readCompleteStg ms = Right $ \flags -> flags { completeStg = ms } readDocsArg :: Maybe String -> Either String (CommandLineArgs -> CommandLineArgs) readDocsArg ms = maybe (Left "Cannot parse docs argument") (Right . set) (go =<< ms) where go :: String -> Maybe (DocTargets -> DocTargets) go "none" = Just (const Set.empty) go "no-haddocks" = Just (Set.delete Haddocks) go "no-sphinx-html" = Just (Set.delete SphinxHTML) go "no-sphinx-pdfs" = Just (Set.delete SphinxPDFs) go "no-sphinx-man" = Just (Set.delete SphinxMan) go "no-sphinx-info" = Just (Set.delete SphinxInfo) go "no-sphinx" = Just (Set.delete SphinxHTML . Set.delete SphinxPDFs . Set.delete SphinxMan . Set.delete SphinxInfo) go _ = Nothing set :: (DocTargets -> DocTargets) -> CommandLineArgs -> CommandLineArgs set tweakTargets flags = flags { docTargets = tweakTargets (docTargets flags) } -- | Standard 'OptDescr' descriptions of Hadrian's command line arguments. optDescrs :: [OptDescr (Either String (CommandLineArgs -> CommandLineArgs))] optDescrs = [ Option ['c'] ["configure"] (NoArg readConfigure) "Run the boot and configure scripts (if you do not want to run them manually)." , Option ['o'] ["build-root"] (OptArg readBuildRoot "BUILD_ROOT") "Where to store build artifacts. (Default _build)." , Option [] ["flavour"] (OptArg readFlavour "FLAVOUR") "Build flavour (Default, Devel1, Devel2, Perf, Prof, Quick or Quickest)." , Option [] ["freeze1"] (NoArg readFreeze1) "Freeze Stage1 GHC." , Option [] ["integer-simple"] (NoArg readIntegerSimple) "Build GHC with integer-simple library." , Option [] ["progress-info"] (OptArg readProgressInfo "STYLE") "Progress info style (None, Brief, Normal or Unicorn)." , Option [] ["docs"] (OptArg readDocsArg "TARGET") "Strip down docs targets (none, no-haddocks, no-sphinx[-{html, pdfs, man}]." , Option ['k'] ["keep-test-files"] (NoArg readTestKeepFiles) "Keep all the files generated when running the testsuite." , Option [] ["test-compiler"] (OptArg readTestCompiler "TEST_COMPILER") "Use given compiler [Default=stage2]." , Option [] ["test-config-file"] (OptArg readTestConfigFile "CONFIG_FILE") "configuration file for testsuite. Default=testsuite/config/ghc" , Option [] ["config"] (OptArg readTestConfig "EXTRA_TEST_CONFIG") "Configurations to run test, in key=value format." , Option [] ["summary-junit"] (OptArg readTestJUnit "TEST_SUMMARY_JUNIT") "Output testsuite summary in JUnit format." , Option [] ["only"] (OptArg readTestOnly "TESTS") "Test cases to run." 
, Option [] ["only-perf"] (NoArg readTestOnlyPerf) "Only run performance tests." , Option [] ["skip-perf"] (NoArg readTestSkipPerf) "Skip performance tests." , Option [] ["test-root-dirs"] (OptArg readTestRootDirs "DIR1:[DIR2:...:DIRn]") "Test root directories to look at (all by default)." , Option [] ["test-speed"] (OptArg readTestSpeed "SPEED") "fast, slow or normal. Normal by default" , Option [] ["summary"] (OptArg readTestSummary "TEST_SUMMARY") "Where to output the test summary file." , Option [] ["test-verbose"] (OptArg readTestVerbose "TEST_VERBOSE") "A verbosity value between 0 and 5. 0 is silent, 4 and higher activates extra output." , Option [] ["test-way"] (OptArg readTestWay "TEST_WAY") "only run these ways" , Option ['a'] ["test-accept"] (NoArg readTestAccept) "Accept new output of tests" , Option [] ["complete-setting"] (OptArg readCompleteStg "SETTING") "Setting key to autocomplete, for the 'autocomplete' target." ] -- | A type-indexed map containing Hadrian command line arguments to be passed -- to Shake via 'shakeExtra'. cmdLineArgsMap :: IO (Map.HashMap TypeRep Dynamic) cmdLineArgsMap = do xs <- getArgs let -- We split the arguments between the ones that look like -- "k = v" or "k += v", in cliSettings, and the rest in -- optArgs. (optsArgs, cliSettings) = partitionKVs xs -- We only use the arguments that don't look like setting -- updates for parsing Hadrian and Shake flags/options. (opts, _, _) = getOpt Permute optDescrs optsArgs args = foldl (flip id) defaultCommandLineArgs (rights opts) BuildRoot root = buildRoot args settingsFile = root -/- "hadrian.settings" -- We try to look at <root>/hadrian.settings, and if it exists -- we read as many settings as we can from it, combining -- them with the ones we got on the command line, in allSettings. -- We then insert all those settings in the dynamic map, so that -- the 'Settings.flavour' action can look them up and apply -- all the relevant updates to the flavour that Hadrian is set -- to run with. settingsFileExists <- Directory.doesFileExist settingsFile fileSettings <- if settingsFileExists then parseJustKVs . lines <$> readFile settingsFile else return [] let allSettings = cliSettings ++ fileSettings return $ insertExtra (progressInfo args) -- Accessed by Hadrian.Utilities $ insertExtra (buildRoot args) -- Accessed by Hadrian.Utilities $ insertExtra (testArgs args) -- Accessed by Settings.Builders.RunTest $ insertExtra allSettings -- Accessed by Settings $ insertExtra args Map.empty cmdLineArgs :: Action CommandLineArgs cmdLineArgs = userSetting defaultCommandLineArgs cmdConfigure :: Action Bool cmdConfigure = configure <$> cmdLineArgs cmdFlavour :: Action (Maybe String) cmdFlavour = flavour <$> cmdLineArgs cmdCompleteSetting :: Action (Maybe String) cmdCompleteSetting = completeStg <$> cmdLineArgs lookupBuildRoot :: Map.HashMap TypeRep Dynamic -> BuildRoot lookupBuildRoot = buildRoot . lookupExtra defaultCommandLineArgs lookupFreeze1 :: Map.HashMap TypeRep Dynamic -> Bool lookupFreeze1 = freeze1 . lookupExtra defaultCommandLineArgs cmdIntegerSimple :: Action Bool cmdIntegerSimple = integerSimple <$> cmdLineArgs cmdProgressInfo :: Action ProgressInfo cmdProgressInfo = progressInfo <$> cmdLineArgs cmdDocsArgs :: Action DocTargets cmdDocsArgs = docTargets <$> cmdLineArgs
sdiehl/ghc
hadrian/src/CommandLine.hs
bsd-3-clause
14,186
0
17
3,153
3,522
1,903
1,619
253
8
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} module Hchain.BlockChain (BlockChain, Hash, Block (..), content, BContent (..), isValidChain, addBlock, mkInitialChain, addValidBlock, mineBlock) where import qualified Data.ByteString.Lazy.Char8 as C8 import Control.Lens import Data.Digest.Pure.SHA (sha256, showDigest) import Data.String.Utils (startswith) import Data.Maybe (fromJust) import Data.Binary import Data.Typeable import GHC.Generics type Hash = String type BNum = Int type BNonce = Int data Block a = Block { _num :: BNum, _nonce :: BNonce, _content :: a, _prevH :: Hash, _bHash :: Hash } deriving (Show, Generic, Typeable, Eq) $(makeLenses ''Block) type BlockChain a = [a] class BContent a where serial :: a -> String mApply :: a -> [a] -> (a -> b) -> Maybe b instance (Typeable a, Binary a) => Binary (Block a) mkInitialChain :: BContent a => a -> BlockChain (Block a) mkInitialChain c = [mine (mkInitialBlock c)] mkInitialBlock :: BContent a => a -> Block a mkInitialBlock c = Block 1 1 c emptyHash emptyHash where emptyHash = "0000000000000000000000000000000000000000000000000000000000000000" isValidChain :: BContent a => BlockChain (Block a) -> Bool isValidChain = all $ checkSignature . view bHash mineBlock :: BContent a => a -> BlockChain (Block a) -> Maybe (BlockChain (Block a)) mineBlock = addBlock mine addValidBlock :: BContent a => Block a -> BlockChain (Block a) -> Maybe (BlockChain (Block a)) addValidBlock block chain | checkSignature (block ^. bHash) && not (null (addBlock id (block ^. content) chain)) = Just $ block : chain | otherwise = Nothing addBlock :: BContent a => (Block a -> Block a) -> a -> BlockChain (Block a) -> Maybe (BlockChain (Block a)) addBlock op c chain@(x:_xs) | null contentBlock = Nothing | otherwise = Just $ op (fromJust contentBlock) : chain where contentBlock = mApply c (reverse contents) cont cont = mkBlock (x ^. num + 1) (x ^. bHash) contents = map _content chain mkBlock :: BContent a => BNum -> Hash -> a -> Block a mkBlock n prevh c = Block n 1 c prevh "" hash :: BContent a => Block a -> Hash hash block = showDigest $ sha256 (C8.pack (serialize block)) serialize :: BContent a => Block a -> String serialize block = show (block ^. num) ++ show (block ^. nonce) ++ serial (block ^. content) ++ block ^. prevH checkSignature :: Hash -> Bool checkSignature = startswith "0000" mine :: BContent a => Block a -> Block a mine block | checkSignature computedHash = block & bHash .~ computedHash | otherwise = mine (block & nonce +~ 1) where computedHash = hash block
jesuspc/hchain
src/hchain/BlockChain.hs
bsd-3-clause
2,682
0
15
551
1,036
536
500
-1
-1
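A hedged sketch of plugging a minimal content type into the chain above. The Note type and its BContent instance are invented for illustration; mApply here accepts every candidate unconditionally, the simplest behaviour the class signature permits, and mining loops until a hash with the required "0000" prefix is found.

module BlockChainDemo where

import Hchain.BlockChain

newtype Note = Note String

instance BContent Note where
  serial (Note s)    = s
  mApply c _chain mk = Just (mk c)

main :: IO ()
main = do
  let chain = mkInitialChain (Note "genesis")          -- mines the genesis block
  print (isValidChain chain)                           -- True once mined
  print (fmap length (mineBlock (Note "hello") chain)) -- Just 2 on success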
{-# LANGUAGE ImplicitParams #-} ---------------------------------------------------------------------------- -- | -- Module : DART.Run -- Copyright : (c) Carlos López-Camey, University of Freiburg -- License : BSD-3 -- -- Maintainer : [email protected] -- Stability : stable -- -- -- Runs the interpreter and the dart tester ----------------------------------------------------------------------------- module DART.Run where -------------------------------------------------------------------------------- -- DART import DART.DARTSettings import DART.FileIO import DART.FunctionFeeder import DART.MkRandomValue import DART.CmdLine import qualified DART.ModuleTester as T -------------------------------------------------------------------------------- -- Data structures import qualified Data.HashTable.IO as H import Data.Maybe -------------------------------------------------------------------------------- -- Core import Language.Core.Core import Language.Core.Interp import qualified Language.Core.Interpreter as I import Language.Core.Interpreter.Acknowledge(acknowledgeModule) import qualified Language.Core.Interpreter.Libraries as Libs import Language.Core.Interpreter.Structures import Language.Core.Interpreter.Util(showValue) import Language.Core.Util(showType) import Language.Core.Vdefg -------------------------------------------------------------------------------- -- Prelude import System.Directory(getCurrentDirectory) import System.Environment -------------------------------------------------------------------------------- -- import Text.Encoding.Z -------------------------------------------------------------------------------- -- system import Data.Time.Clock(getCurrentTime) -- | Creates an initial state given the arguments given -- in the command line and parsed by CmdArgs initDART :: DARTSettings -> IO DARTState initDART settings' = do h <- io H.new -- create a fresh new heap current_dir <- getCurrentDirectory let prependCurrentDir = (++) (current_dir ++ "/") let user_includes = include settings' let absolute_includes = map prependCurrentDir $ prelude_files ++ user_includes now <- io getCurrentTime return $ DState { benchmarks = [] , pbranches_record = [] , libraries_env = [] , heap = h , heap_count = 0 , number_of_reductions = 0 , number_of_reductions_part = 0 , tab_indentation = 1 , settings = settings' { include = (absolute_includes) } , start_time = now , test_name = Nothing , samplerStatus = UnitializedSampler , samplerDataSize = 0 } -- | Returns a list of *relative* paths pointing to default included libraries e.g. base -- Use case: if we always want the function split to be in scope for programs that we are testing, then we should load Data.List in the base package -- The file paths are path to .hcr files, since these modules sometimes need different arguments to be compiled with -fext-core prelude_files :: [FilePath] prelude_files = [ "/lib/base/GHC/Base.hcr" , "/lib/base/GHC/Base.hcr" , "/lib/base/Data/Tuple.hcr" , "/lib/base/GHC/Show.hcr" , "/lib/base/GHC/Enum.hcr" , "/lib/base/Data/Maybe.hcr" , "/lib/base/GHC/List.hcr" , "/lib/base/Data/List.hcr" ] -- | Assumming no library has been loaded, this function looks for the settings (often coming from the command line and loads: -- * the includes, -- * the base library -- * the builtin functions (coming from the Interpreter/Libraries module family) -- for the interpreter to work. 
loadLibraries :: IM () loadLibraries = do debugMStep ("Loading includes ") settings <- gets settings -- get the list of includes and acknowledge definitions in heap let includes = include settings lib_envs <- mapM loadFilePath includes -- :: [Env] -- builtin funs, e.g. GHC.Num.+ ghc_builtin_funs <- I.loadLibrary Libs.ghc_base modify $ \st -> st { libraries_env = ghc_builtin_funs ++ concat lib_envs } -- | After an initial state is created, evaluates according to the settings runDART :: IM () runDART = do settgs <- gets settings -- Evaluate specified module let pathToModule = file settgs let ?be_verbose = verbose settgs debugMStep $ "Reading module " ++ pathToModule ++ " .." m@(Module mdlname tdefs vdefgs) <- io . readModule $ file settgs module_env <- acknowledgeModule m loadLibraries let eval_funname = evaluate_function settgs test_funname = test_function settgs -- What should we eval? a function or the whole module? unless (not $ null test_funname) $ evaluate m module_env eval_funname -- What should we test? a function or the whole module? unless (not $ null eval_funname) $ test m module_env test_funname where test :: Module -> Env -> String -> IM () -- | No function specified test m env [] = do tested_funs <- T.testModule m env -- let prettyPrint :: TestedFun -> IM String -- prettyPrint (id,test_result) = T.showTestedFun test_result >>= return . (++) (id ++ ": \n") io . putStrLn $ "**************************************************" io . putStrLn $ "Module test results " io . putStrLn $ "**************************************************" mapM T.showTestedFun tested_funs >>= io . mapM_ putStrLn h <- gets heap whenFlag show_heap $ io . printHeap $ h -- | test specified function test m env fun_name = -- let prettyPrint :: Maybe (Id,T.TestResult) -> IM String -- prettyPrint Nothing = return $ "No test result " -- prettyPrint (Just (id,test_result)) = T.showTest test_result >>= return . (++) (id ++ ": \n") do fun_test_str <- T.testHaskellExpression m fun_name env >>= T.showFunTest io . putStrLn $ "**************************************************" io . putStrLn $ "Test results of " ++ fun_name io . putStrLn $ "**************************************************" io . putStrLn $ fun_test_str (gets heap >>= \h -> whenFlag show_heap $ io . printHeap $ h) evaluate :: Module -> Env -> String -> IM () -- | no function specified evaluate m env [] = do vals <- I.evalModule m env -- interpret values -- funt ion to pretty print let prettyPrint :: (Id,Value) -> IM String prettyPrint (id,val) = do pp <- showValue val setts <- gets settings case (benchmark setts) of False -> return $ id ++ " => " ++ pp True -> do bs <- gets benchmarks let time = fromJust $ lookup id bs return $ id ++ " => " ++ pp ++ " .. done in " ++ show time io . putStrLn $ "**************************************************" io . putStrLn $ "Module definitions evaluation: " io . putStrLn $ "**************************************************" mapM prettyPrint vals >>= io . mapM_ putStrLn h <- gets heap st <- gets settings when (show_heap st) $ io . printHeap $ h -- | eval fun_name evaluate m env fun_name = do debugM $ "evaluate fun_name; env.size == " ++ (show . length $ env) result <- I.evalHaskellExpression m fun_name env -- do we print the heap? h <- gets heap st <- gets settings when (show_heap $ st) $ io . printHeap $ h -- output computed result io . putStrLn $ "**************************************************" io . putStrLn $ "Evaluation of " ++ fun_name io . 
putStrLn $ "**************************************************" showValue result >>= io . putStrLn
kmels/dart-haskell
src/DART/Run.hs
bsd-3-clause
7,988
0
23
1,970
1,432
748
684
127
4
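A note on the control flow in DART.Run above: runDART reads and acknowledges the module, loads the libraries, then evaluates only when no test function was requested and tests only when no evaluate function was requested. The sketch below shows how initDART and runDART are meant to be wired together from an entry point; parseSettings and runIM are hypothetical stand-ins for the CmdArgs-based settings parser and the IM-over-DARTState runner, neither of which is defined in this file.

-- Hypothetical wiring of the pieces above. 'parseSettings' (an IO action
-- producing DARTSettings) and 'runIM' (a runner for IM actions over a
-- DARTState) are assumed helpers, not functions from this module.
main :: IO ()
main = do
  settings <- parseSettings         -- assumed :: IO DARTSettings
  st       <- initDART settings     -- fresh heap, absolute include paths
  _        <- runIM runDART st      -- evaluate or test, per the settings
  return ()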
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}

module Lib
  ( getKey
  , startAPI
  , verboseLog
  ) where

import ClassyPrelude

import Types.API.Base
import Types.General
import Types.Options

getKey :: APICaller APIKey
getKey = asks apiKey

isVerbose :: APICaller Bool
isVerbose = do
  verbosity <- asks apiVerbosity
  return $ verbosity == Verbose

verboseLog :: Text -> APICaller ()
verboseLog = whenM isVerbose . putStrLn

startAPI = flip runReaderT
JoeMShanahan/blizzard-haskell-api
src/Lib.hs
bsd-3-clause
478
0
8
83
120
65
55
19
1
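Since startAPI above is flip runReaderT, it takes the options environment first and the APICaller action second. A minimal usage sketch follows, assuming APICaller is a ReaderT over an options record (consistent with the asks and runReaderT usage in the module); defaultOptions is a hypothetical value of that record, and the Text literal assumes the same OverloadedStrings pragma the module enables.

-- Sketch only: 'defaultOptions' is an assumed options value, and APICaller
-- is assumed to be a ReaderT over it, matching the module above.
demo :: IO ()
demo = startAPI defaultOptions $ do
  verboseLog "starting"   -- printed only when apiVerbosity == Verbose
  _key <- getKey          -- read the configured API key
  return ()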
{-# LANGUAGE CPP, NondecreasingIndentation, ScopedTypeVariables #-} -- ----------------------------------------------------------------------------- -- -- (c) The University of Glasgow, 2005-2012 -- -- The GHC API -- -- ----------------------------------------------------------------------------- module GHC ( -- * Initialisation defaultErrorHandler, defaultCleanupHandler, prettyPrintGhcErrors, -- * GHC Monad Ghc, GhcT, GhcMonad(..), HscEnv, runGhc, runGhcT, initGhcMonad, gcatch, gbracket, gfinally, printException, handleSourceError, needsTemplateHaskell, -- * Flags and settings DynFlags(..), GeneralFlag(..), Severity(..), HscTarget(..), gopt, GhcMode(..), GhcLink(..), defaultObjectTarget, parseDynamicFlags, getSessionDynFlags, setSessionDynFlags, getProgramDynFlags, setProgramDynFlags, getInteractiveDynFlags, setInteractiveDynFlags, parseStaticFlags, -- * Targets Target(..), TargetId(..), Phase, setTargets, getTargets, addTarget, removeTarget, guessTarget, -- * Loading\/compiling the program depanal, load, LoadHowMuch(..), InteractiveImport(..), SuccessFlag(..), succeeded, failed, defaultWarnErrLogger, WarnErrLogger, workingDirectoryChanged, parseModule, typecheckModule, desugarModule, loadModule, ParsedModule(..), TypecheckedModule(..), DesugaredModule(..), TypecheckedSource, ParsedSource, RenamedSource, -- ditto TypecheckedMod, ParsedMod, moduleInfo, renamedSource, typecheckedSource, parsedSource, coreModule, -- ** Compiling to Core CoreModule(..), compileToCoreModule, compileToCoreSimplified, -- * Inspecting the module structure of the program ModuleGraph, ModSummary(..), ms_mod_name, ModLocation(..), getModSummary, getModuleGraph, isLoaded, topSortModuleGraph, -- * Inspecting modules ModuleInfo, getModuleInfo, modInfoTyThings, modInfoTopLevelScope, modInfoExports, modInfoInstances, modInfoIsExportedName, modInfoLookupName, modInfoIface, modInfoSafe, lookupGlobalName, findGlobalAnns, mkPrintUnqualifiedForModule, ModIface(..), SafeHaskellMode(..), -- * Querying the environment packageDbModules, -- * Printing PrintUnqualified, alwaysQualify, -- * Interactive evaluation getBindings, getInsts, getPrintUnqual, findModule, lookupModule, #ifdef GHCI isModuleTrusted, moduleTrustReqs, setContext, getContext, getNamesInScope, getRdrNamesInScope, getGRE, moduleIsInterpreted, getInfo, exprType, typeKind, parseName, RunResult(..), runStmt, runStmtWithLocation, runDecls, runDeclsWithLocation, runTcInteractive, -- Desired by some clients (Trac #8878) parseImportDecl, SingleStep(..), resume, Resume(resumeStmt, resumeThreadId, resumeBreakInfo, resumeSpan, resumeHistory, resumeHistoryIx), History(historyBreakInfo, historyEnclosingDecls), GHC.getHistorySpan, getHistoryModule, getResumeContext, abandon, abandonAll, InteractiveEval.back, InteractiveEval.forward, showModule, isModuleInterpreted, InteractiveEval.compileExpr, HValue, dynCompileExpr, GHC.obtainTermFromId, GHC.obtainTermFromVal, reconstructType, modInfoModBreaks, ModBreaks(..), BreakIndex, BreakInfo(breakInfo_number, breakInfo_module), BreakArray, setBreakOn, setBreakOff, getBreak, #endif lookupName, #ifdef GHCI -- ** EXPERIMENTAL setGHCiMonad, #endif -- * Abstract syntax elements -- ** Packages PackageId, -- ** Modules Module, mkModule, pprModule, moduleName, modulePackageId, ModuleName, mkModuleName, moduleNameString, -- ** Names Name, isExternalName, nameModule, pprParenSymName, nameSrcSpan, NamedThing(..), RdrName(Qual,Unqual), -- ** Identifiers Id, idType, isImplicitId, isDeadBinder, isExportedId, isLocalId, isGlobalId, isRecordSelector, 
isPrimOpId, isFCallId, isClassOpId_maybe, isDataConWorkId, idDataCon, isBottomingId, isDictonaryId, recordSelectorFieldLabel, -- ** Type constructors TyCon, tyConTyVars, tyConDataCons, tyConArity, isClassTyCon, isSynTyCon, isNewTyCon, isPrimTyCon, isFunTyCon, isFamilyTyCon, isOpenFamilyTyCon, tyConClass_maybe, synTyConRhs_maybe, synTyConDefn_maybe, synTyConResKind, -- ** Type variables TyVar, alphaTyVars, -- ** Data constructors DataCon, dataConSig, dataConType, dataConTyCon, dataConFieldLabels, dataConIsInfix, isVanillaDataCon, dataConUserType, dataConStrictMarks, StrictnessMark(..), isMarkedStrict, -- ** Classes Class, classMethods, classSCTheta, classTvsFds, classATs, pprFundeps, -- ** Instances ClsInst, instanceDFunId, pprInstance, pprInstanceHdr, pprFamInst, FamInst, -- ** Types and Kinds Type, splitForAllTys, funResultTy, pprParendType, pprTypeApp, Kind, PredType, ThetaType, pprForAll, pprThetaArrowTy, -- ** Entities TyThing(..), -- ** Syntax module HsSyn, -- ToDo: remove extraneous bits -- ** Fixities FixityDirection(..), defaultFixity, maxPrecedence, negateFixity, compareFixity, -- ** Source locations SrcLoc(..), RealSrcLoc, mkSrcLoc, noSrcLoc, srcLocFile, srcLocLine, srcLocCol, SrcSpan(..), RealSrcSpan, mkSrcSpan, srcLocSpan, isGoodSrcSpan, noSrcSpan, srcSpanStart, srcSpanEnd, srcSpanFile, srcSpanStartLine, srcSpanEndLine, srcSpanStartCol, srcSpanEndCol, -- ** Located GenLocated(..), Located, -- *** Constructing Located noLoc, mkGeneralLocated, -- *** Deconstructing Located getLoc, unLoc, -- *** Combining and comparing Located values eqLocated, cmpLocated, combineLocs, addCLoc, leftmost_smallest, leftmost_largest, rightmost, spans, isSubspanOf, -- * Exceptions GhcException(..), showGhcException, -- * Token stream manipulations Token, getTokenStream, getRichTokenStream, showRichTokenStream, addSourceToTokens, -- * Pure interface to the parser parser, -- * Miscellaneous --sessionHscEnv, cyclicModuleErr, ) where {- ToDo: * inline bits of HscMain here to simplify layering: hscTcExpr, hscStmt. * what StaticFlags should we expose, if any? -} #include "HsVersions.h" #ifdef GHCI import ByteCodeInstr import BreakArray import InteractiveEval import TcRnDriver ( runTcInteractive ) #endif import HscMain import GhcMake import DriverPipeline ( compileOne' ) import GhcMonad import TcRnMonad ( finalSafeMode ) import TcRnTypes import Packages import NameSet import RdrName import qualified HsSyn -- hack as we want to reexport the whole module import HsSyn import Type hiding( typeKind ) import Kind ( synTyConResKind ) import TcType hiding( typeKind ) import Id import TysPrim ( alphaTyVars ) import TyCon import Class import DataCon import Name hiding ( varName ) import Avail import InstEnv import FamInstEnv import SrcLoc import CoreSyn import TidyPgm import DriverPhases ( Phase(..), isHaskellSrcFilename ) import Finder import HscTypes import DynFlags import StaticFlags import SysTools import Annotations import Module import UniqFM import Panic import Platform import Bag ( unitBag ) import ErrUtils import MonadUtils import Util import StringBuffer import Outputable import BasicTypes import Maybes ( expectJust ) import FastString import qualified Parser import Lexer import System.Directory ( doesFileExist ) import Data.Maybe import Data.List ( find ) import Data.Time import Data.Typeable ( Typeable ) import Data.Word ( Word8 ) import Control.Monad import System.Exit ( exitWith, ExitCode(..) 
) import Exception import Data.IORef import System.FilePath import System.IO import Prelude hiding (init) -- %************************************************************************ -- %* * -- Initialisation: exception handlers -- %* * -- %************************************************************************ -- | Install some default exception handlers and run the inner computation. -- Unless you want to handle exceptions yourself, you should wrap this around -- the top level of your program. The default handlers output the error -- message(s) to stderr and exit cleanly. defaultErrorHandler :: (ExceptionMonad m, MonadIO m) => FatalMessager -> FlushOut -> m a -> m a defaultErrorHandler fm (FlushOut flushOut) inner = -- top-level exception handler: any unrecognised exception is a compiler bug. ghandle (\exception -> liftIO $ do flushOut case fromException exception of -- an IO exception probably isn't our fault, so don't panic Just (ioe :: IOException) -> fatalErrorMsg'' fm (show ioe) _ -> case fromException exception of Just UserInterrupt -> -- Important to let this one propagate out so our -- calling process knows we were interrupted by ^C liftIO $ throwIO UserInterrupt Just StackOverflow -> fatalErrorMsg'' fm "stack overflow: use +RTS -K<size> to increase it" _ -> case fromException exception of Just (ex :: ExitCode) -> liftIO $ throwIO ex _ -> fatalErrorMsg'' fm (show (Panic (show exception))) exitWith (ExitFailure 1) ) $ -- error messages propagated as exceptions handleGhcException (\ge -> liftIO $ do flushOut case ge of PhaseFailed _ code -> exitWith code Signal _ -> exitWith (ExitFailure 1) _ -> do fatalErrorMsg'' fm (show ge) exitWith (ExitFailure 1) ) $ inner -- | Install a default cleanup handler to remove temporary files deposited by -- a GHC run. This is separate from 'defaultErrorHandler', because you might -- want to override the error handling, but still get the ordinary cleanup -- behaviour. defaultCleanupHandler :: (ExceptionMonad m, MonadIO m) => DynFlags -> m a -> m a defaultCleanupHandler dflags inner = -- make sure we clean up after ourselves inner `gfinally` (liftIO $ do cleanTempFiles dflags cleanTempDirs dflags ) -- exceptions will be blocked while we clean the temporary files, -- so there shouldn't be any difficulty if we receive further -- signals. -- %************************************************************************ -- %* * -- The Ghc Monad -- %* * -- %************************************************************************ -- | Run function for the 'Ghc' monad. -- -- It initialises the GHC session and warnings via 'initGhcMonad'. Each call -- to this function will create a new session which should not be shared among -- several threads. -- -- Any errors not handled inside the 'Ghc' action are propagated as IO -- exceptions. runGhc :: Maybe FilePath -- ^ See argument to 'initGhcMonad'. -> Ghc a -- ^ The action to perform. -> IO a runGhc mb_top_dir ghc = do ref <- newIORef (panic "empty session") let session = Session ref flip unGhc session $ do initGhcMonad mb_top_dir ghc -- XXX: unregister interrupt handlers here? -- | Run function for 'GhcT' monad transformer. -- -- It initialises the GHC session and warnings via 'initGhcMonad'. Each call -- to this function will create a new session which should not be shared among -- several threads. runGhcT :: (ExceptionMonad m, Functor m, MonadIO m) => Maybe FilePath -- ^ See argument to 'initGhcMonad'. -> GhcT m a -- ^ The action to perform. 
-> m a runGhcT mb_top_dir ghct = do ref <- liftIO $ newIORef (panic "empty session") let session = Session ref flip unGhcT session $ do initGhcMonad mb_top_dir ghct -- | Initialise a GHC session. -- -- If you implement a custom 'GhcMonad' you must call this function in the -- monad run function. It will initialise the session variable and clear all -- warnings. -- -- The first argument should point to the directory where GHC's library files -- reside. More precisely, this should be the output of @ghc --print-libdir@ -- of the version of GHC the module using this API is compiled with. For -- portability, you should use the @ghc-paths@ package, available at -- <http://hackage.haskell.org/package/ghc-paths>. initGhcMonad :: GhcMonad m => Maybe FilePath -> m () initGhcMonad mb_top_dir = do { env <- liftIO $ do { installSignalHandlers -- catch ^C ; initStaticOpts ; mySettings <- initSysTools mb_top_dir ; dflags <- initDynFlags (defaultDynFlags mySettings) ; checkBrokenTablesNextToCode dflags ; setUnsafeGlobalDynFlags dflags -- c.f. DynFlags.parseDynamicFlagsFull, which -- creates DynFlags and sets the UnsafeGlobalDynFlags ; newHscEnv dflags } ; setSession env } -- | The binutils linker on ARM emits unnecessary R_ARM_COPY relocations which -- breaks tables-next-to-code in dynamically linked modules. This -- check should be more selective but there is currently no released -- version where this bug is fixed. -- See https://sourceware.org/bugzilla/show_bug.cgi?id=16177 and -- https://ghc.haskell.org/trac/ghc/ticket/4210#comment:29 checkBrokenTablesNextToCode :: MonadIO m => DynFlags -> m () checkBrokenTablesNextToCode dflags = do { broken <- checkBrokenTablesNextToCode' dflags ; when broken $ do { _ <- liftIO $ throwIO $ mkApiErr dflags invalidLdErr ; fail "unsupported linker" } } where invalidLdErr = text "Tables-next-to-code not supported on ARM" <+> text "when using binutils ld (please see:" <+> text "https://sourceware.org/bugzilla/show_bug.cgi?id=16177)" checkBrokenTablesNextToCode' :: MonadIO m => DynFlags -> m Bool checkBrokenTablesNextToCode' dflags | not (isARM arch) = return False | WayDyn `notElem` ways dflags = return False | not (tablesNextToCode dflags) = return False | otherwise = do linkerInfo <- liftIO $ getLinkerInfo dflags case linkerInfo of GnuLD _ -> return True _ -> return False where platform = targetPlatform dflags arch = platformArch platform -- %************************************************************************ -- %* * -- Flags & settings -- %* * -- %************************************************************************ -- $DynFlags -- -- The GHC session maintains two sets of 'DynFlags': -- -- * The "interactive" @DynFlags@, which are used for everything -- related to interactive evaluation, including 'runStmt', -- 'runDecls', 'exprType', 'lookupName' and so on (everything -- under \"Interactive evaluation\" in this module). -- -- * The "program" @DynFlags@, which are used when loading -- whole modules with 'load' -- -- 'setInteractiveDynFlags', 'getInteractiveDynFlags' work with the -- interactive @DynFlags@. -- -- 'setProgramDynFlags', 'getProgramDynFlags' work with the -- program @DynFlags@. -- -- 'setSessionDynFlags' sets both @DynFlags@, and 'getSessionDynFlags' -- retrieves the program @DynFlags@ (for backwards compatibility). -- | Updates both the interactive and program DynFlags in a Session. -- This also reads the package database (unless it has already been -- read), and prepares the compilers knowledge about packages. 
It can -- be called again to load new packages: just add new package flags to -- (packageFlags dflags). -- -- Returns a list of new packages that may need to be linked in using -- the dynamic linker (see 'linkPackages') as a result of new package -- flags. If you are not doing linking or doing static linking, you -- can ignore the list of packages returned. -- setSessionDynFlags :: GhcMonad m => DynFlags -> m [PackageId] setSessionDynFlags dflags = do (dflags', preload) <- liftIO $ initPackages dflags modifySession $ \h -> h{ hsc_dflags = dflags' , hsc_IC = (hsc_IC h){ ic_dflags = dflags' } } invalidateModSummaryCache return preload -- | Sets the program 'DynFlags'. setProgramDynFlags :: GhcMonad m => DynFlags -> m [PackageId] setProgramDynFlags dflags = do (dflags', preload) <- liftIO $ initPackages dflags modifySession $ \h -> h{ hsc_dflags = dflags' } invalidateModSummaryCache return preload -- When changing the DynFlags, we want the changes to apply to future -- loads, but without completely discarding the program. But the -- DynFlags are cached in each ModSummary in the hsc_mod_graph, so -- after a change to DynFlags, the changes would apply to new modules -- but not existing modules; this seems undesirable. -- -- Furthermore, the GHC API client might expect that changing -- log_action would affect future compilation messages, but for those -- modules we have cached ModSummaries for, we'll continue to use the -- old log_action. This is definitely wrong (#7478). -- -- Hence, we invalidate the ModSummary cache after changing the -- DynFlags. We do this by tweaking the date on each ModSummary, so -- that the next downsweep will think that all the files have changed -- and preprocess them again. This won't necessarily cause everything -- to be recompiled, because by the time we check whether we need to -- recopmile a module, we'll have re-summarised the module and have a -- correct ModSummary. -- invalidateModSummaryCache :: GhcMonad m => m () invalidateModSummaryCache = modifySession $ \h -> h { hsc_mod_graph = map inval (hsc_mod_graph h) } where inval ms = ms { ms_hs_date = addUTCTime (-1) (ms_hs_date ms) } -- | Returns the program 'DynFlags'. getProgramDynFlags :: GhcMonad m => m DynFlags getProgramDynFlags = getSessionDynFlags -- | Set the 'DynFlags' used to evaluate interactive expressions. -- Note: this cannot be used for changes to packages. Use -- 'setSessionDynFlags', or 'setProgramDynFlags' and then copy the -- 'pkgState' into the interactive @DynFlags@. setInteractiveDynFlags :: GhcMonad m => DynFlags -> m () setInteractiveDynFlags dflags = do modifySession $ \h -> h{ hsc_IC = (hsc_IC h) { ic_dflags = dflags }} -- | Get the 'DynFlags' used to evaluate interactive expressions. getInteractiveDynFlags :: GhcMonad m => m DynFlags getInteractiveDynFlags = withSession $ \h -> return (ic_dflags (hsc_IC h)) parseDynamicFlags :: MonadIO m => DynFlags -> [Located String] -> m (DynFlags, [Located String], [Located String]) parseDynamicFlags = parseDynamicFlagsCmdLine -- %************************************************************************ -- %* * -- Setting, getting, and modifying the targets -- %* * -- %************************************************************************ -- ToDo: think about relative vs. absolute file paths. And what -- happens when the current directory changes. -- | Sets the targets for this session. Each target may be a module name -- or a filename. The targets correspond to the set of root modules for -- the program\/library. 
Unloading the current program is achieved by -- setting the current set of targets to be empty, followed by 'load'. setTargets :: GhcMonad m => [Target] -> m () setTargets targets = modifySession (\h -> h{ hsc_targets = targets }) -- | Returns the current set of targets getTargets :: GhcMonad m => m [Target] getTargets = withSession (return . hsc_targets) -- | Add another target. addTarget :: GhcMonad m => Target -> m () addTarget target = modifySession (\h -> h{ hsc_targets = target : hsc_targets h }) -- | Remove a target removeTarget :: GhcMonad m => TargetId -> m () removeTarget target_id = modifySession (\h -> h{ hsc_targets = filter (hsc_targets h) }) where filter targets = [ t | t@(Target id _ _) <- targets, id /= target_id ] -- | Attempts to guess what Target a string refers to. This function -- implements the @--make@/GHCi command-line syntax for filenames: -- -- - if the string looks like a Haskell source filename, then interpret it -- as such -- -- - if adding a .hs or .lhs suffix yields the name of an existing file, -- then use that -- -- - otherwise interpret the string as a module name -- guessTarget :: GhcMonad m => String -> Maybe Phase -> m Target guessTarget str (Just phase) = return (Target (TargetFile str (Just phase)) True Nothing) guessTarget str Nothing | isHaskellSrcFilename file = return (target (TargetFile file Nothing)) | otherwise = do exists <- liftIO $ doesFileExist hs_file if exists then return (target (TargetFile hs_file Nothing)) else do exists <- liftIO $ doesFileExist lhs_file if exists then return (target (TargetFile lhs_file Nothing)) else do if looksLikeModuleName file then return (target (TargetModule (mkModuleName file))) else do dflags <- getDynFlags liftIO $ throwGhcExceptionIO (ProgramError (showSDoc dflags $ text "target" <+> quotes (text file) <+> text "is not a module name or a source file")) where (file,obj_allowed) | '*':rest <- str = (rest, False) | otherwise = (str, True) hs_file = file <.> "hs" lhs_file = file <.> "lhs" target tid = Target tid obj_allowed Nothing -- | Inform GHC that the working directory has changed. GHC will flush -- its cache of module locations, since it may no longer be valid. -- -- Note: Before changing the working directory make sure all threads running -- in the same session have stopped. If you change the working directory, -- you should also unload the current program (set targets to empty, -- followed by load). workingDirectoryChanged :: GhcMonad m => m () workingDirectoryChanged = withSession $ (liftIO . flushFinderCaches) -- %************************************************************************ -- %* * -- Running phases one at a time -- %* * -- %************************************************************************ class ParsedMod m where modSummary :: m -> ModSummary parsedSource :: m -> ParsedSource class ParsedMod m => TypecheckedMod m where renamedSource :: m -> Maybe RenamedSource typecheckedSource :: m -> TypecheckedSource moduleInfo :: m -> ModuleInfo tm_internals :: m -> (TcGblEnv, ModDetails) -- ToDo: improvements that could be made here: -- if the module succeeded renaming but not typechecking, -- we can still get back the GlobalRdrEnv and exports, so -- perhaps the ModuleInfo should be split up into separate -- fields. class TypecheckedMod m => DesugaredMod m where coreModule :: m -> ModGuts -- | The result of successful parsing. 
data ParsedModule = ParsedModule { pm_mod_summary :: ModSummary , pm_parsed_source :: ParsedSource , pm_extra_src_files :: [FilePath] } instance ParsedMod ParsedModule where modSummary m = pm_mod_summary m parsedSource m = pm_parsed_source m -- | The result of successful typechecking. It also contains the parser -- result. data TypecheckedModule = TypecheckedModule { tm_parsed_module :: ParsedModule , tm_renamed_source :: Maybe RenamedSource , tm_typechecked_source :: TypecheckedSource , tm_checked_module_info :: ModuleInfo , tm_internals_ :: (TcGblEnv, ModDetails) } instance ParsedMod TypecheckedModule where modSummary m = modSummary (tm_parsed_module m) parsedSource m = parsedSource (tm_parsed_module m) instance TypecheckedMod TypecheckedModule where renamedSource m = tm_renamed_source m typecheckedSource m = tm_typechecked_source m moduleInfo m = tm_checked_module_info m tm_internals m = tm_internals_ m -- | The result of successful desugaring (i.e., translation to core). Also -- contains all the information of a typechecked module. data DesugaredModule = DesugaredModule { dm_typechecked_module :: TypecheckedModule , dm_core_module :: ModGuts } instance ParsedMod DesugaredModule where modSummary m = modSummary (dm_typechecked_module m) parsedSource m = parsedSource (dm_typechecked_module m) instance TypecheckedMod DesugaredModule where renamedSource m = renamedSource (dm_typechecked_module m) typecheckedSource m = typecheckedSource (dm_typechecked_module m) moduleInfo m = moduleInfo (dm_typechecked_module m) tm_internals m = tm_internals_ (dm_typechecked_module m) instance DesugaredMod DesugaredModule where coreModule m = dm_core_module m type ParsedSource = Located (HsModule RdrName) type RenamedSource = (HsGroup Name, [LImportDecl Name], Maybe [LIE Name], Maybe LHsDocString) type TypecheckedSource = LHsBinds Id -- NOTE: -- - things that aren't in the output of the typechecker right now: -- - the export list -- - the imports -- - type signatures -- - type/data/newtype declarations -- - class declarations -- - instances -- - extra things in the typechecker's output: -- - default methods are turned into top-level decls. -- - dictionary bindings -- | Return the 'ModSummary' of a module with the given name. -- -- The module must be part of the module graph (see 'hsc_mod_graph' and -- 'ModuleGraph'). If this is not the case, this function will throw a -- 'GhcApiError'. -- -- This function ignores boot modules and requires that there is only one -- non-boot module with the given name. getModSummary :: GhcMonad m => ModuleName -> m ModSummary getModSummary mod = do mg <- liftM hsc_mod_graph getSession case [ ms | ms <- mg, ms_mod_name ms == mod, not (isBootSummary ms) ] of [] -> do dflags <- getDynFlags liftIO $ throwIO $ mkApiErr dflags (text "Module not part of module graph") [ms] -> return ms multiple -> do dflags <- getDynFlags liftIO $ throwIO $ mkApiErr dflags (text "getModSummary is ambiguous: " <+> ppr multiple) -- | Parse a module. -- -- Throws a 'SourceError' on parse error. parseModule :: GhcMonad m => ModSummary -> m ParsedModule parseModule ms = do hsc_env <- getSession let hsc_env_tmp = hsc_env { hsc_dflags = ms_hspp_opts ms } hpm <- liftIO $ hscParse hsc_env_tmp ms return (ParsedModule ms (hpm_module hpm) (hpm_src_files hpm)) -- | Typecheck and rename a parsed module. -- -- Throws a 'SourceError' if either fails. 
typecheckModule :: GhcMonad m => ParsedModule -> m TypecheckedModule typecheckModule pmod = do let ms = modSummary pmod hsc_env <- getSession let hsc_env_tmp = hsc_env { hsc_dflags = ms_hspp_opts ms } (tc_gbl_env, rn_info) <- liftIO $ hscTypecheckRename hsc_env_tmp ms $ HsParsedModule { hpm_module = parsedSource pmod, hpm_src_files = pm_extra_src_files pmod } details <- liftIO $ makeSimpleDetails hsc_env_tmp tc_gbl_env safe <- liftIO $ finalSafeMode (ms_hspp_opts ms) tc_gbl_env return $ TypecheckedModule { tm_internals_ = (tc_gbl_env, details), tm_parsed_module = pmod, tm_renamed_source = rn_info, tm_typechecked_source = tcg_binds tc_gbl_env, tm_checked_module_info = ModuleInfo { minf_type_env = md_types details, minf_exports = availsToNameSet $ md_exports details, minf_rdr_env = Just (tcg_rdr_env tc_gbl_env), minf_instances = md_insts details, minf_iface = Nothing, minf_safe = safe #ifdef GHCI ,minf_modBreaks = emptyModBreaks #endif }} -- | Desugar a typechecked module. desugarModule :: GhcMonad m => TypecheckedModule -> m DesugaredModule desugarModule tcm = do let ms = modSummary tcm let (tcg, _) = tm_internals tcm hsc_env <- getSession let hsc_env_tmp = hsc_env { hsc_dflags = ms_hspp_opts ms } guts <- liftIO $ hscDesugar hsc_env_tmp ms tcg return $ DesugaredModule { dm_typechecked_module = tcm, dm_core_module = guts } -- | Load a module. Input doesn't need to be desugared. -- -- A module must be loaded before dependent modules can be typechecked. This -- always includes generating a 'ModIface' and, depending on the -- 'DynFlags.hscTarget', may also include code generation. -- -- This function will always cause recompilation and will always overwrite -- previous compilation results (potentially files on disk). -- loadModule :: (TypecheckedMod mod, GhcMonad m) => mod -> m mod loadModule tcm = do let ms = modSummary tcm let mod = ms_mod_name ms let loc = ms_location ms let (tcg, _details) = tm_internals tcm mb_linkable <- case ms_obj_date ms of Just t | t > ms_hs_date ms -> do l <- liftIO $ findObjectLinkable (ms_mod ms) (ml_obj_file loc) t return (Just l) _otherwise -> return Nothing let source_modified | isNothing mb_linkable = SourceModified | otherwise = SourceUnmodified -- we can't determine stability here -- compile doesn't change the session hsc_env <- getSession mod_info <- liftIO $ compileOne' (Just tcg) Nothing hsc_env ms 1 1 Nothing mb_linkable source_modified modifySession $ \e -> e{ hsc_HPT = addToUFM (hsc_HPT e) mod mod_info } return tcm -- %************************************************************************ -- %* * -- Dealing with Core -- %* * -- %************************************************************************ -- | A CoreModule consists of just the fields of a 'ModGuts' that are needed for -- the 'GHC.compileToCoreModule' interface. data CoreModule = CoreModule { -- | Module name cm_module :: !Module, -- | Type environment for types declared in this module cm_types :: !TypeEnv, -- | Declarations cm_binds :: CoreProgram, -- | Safe Haskell mode cm_safe :: SafeHaskellMode } instance Outputable CoreModule where ppr (CoreModule {cm_module = mn, cm_types = te, cm_binds = cb, cm_safe = sf}) = text "%module" <+> ppr mn <+> parens (ppr sf) <+> ppr te $$ vcat (map ppr cb) -- | This is the way to get access to the Core bindings corresponding -- to a module. 'compileToCore' parses, typechecks, and -- desugars the module, then returns the resulting Core module (consisting of -- the module name, type declarations, and function declarations) if -- successful. 
compileToCoreModule :: GhcMonad m => FilePath -> m CoreModule compileToCoreModule = compileCore False -- | Like compileToCoreModule, but invokes the simplifier, so -- as to return simplified and tidied Core. compileToCoreSimplified :: GhcMonad m => FilePath -> m CoreModule compileToCoreSimplified = compileCore True compileCore :: GhcMonad m => Bool -> FilePath -> m CoreModule compileCore simplify fn = do -- First, set the target to the desired filename target <- guessTarget fn Nothing addTarget target _ <- load LoadAllTargets -- Then find dependencies modGraph <- depanal [] True case find ((== fn) . msHsFilePath) modGraph of Just modSummary -> do -- Now we have the module name; -- parse, typecheck and desugar the module mod_guts <- coreModule `fmap` -- TODO: space leaky: call hsc* directly? (desugarModule =<< typecheckModule =<< parseModule modSummary) liftM (gutsToCoreModule (mg_safe_haskell mod_guts)) $ if simplify then do -- If simplify is true: simplify (hscSimplify), then tidy -- (tidyProgram). hsc_env <- getSession simpl_guts <- liftIO $ hscSimplify hsc_env mod_guts tidy_guts <- liftIO $ tidyProgram hsc_env simpl_guts return $ Left tidy_guts else return $ Right mod_guts Nothing -> panic "compileToCoreModule: target FilePath not found in\ module dependency graph" where -- two versions, based on whether we simplify (thus run tidyProgram, -- which returns a (CgGuts, ModDetails) pair, or not (in which case -- we just have a ModGuts. gutsToCoreModule :: SafeHaskellMode -> Either (CgGuts, ModDetails) ModGuts -> CoreModule gutsToCoreModule safe_mode (Left (cg, md)) = CoreModule { cm_module = cg_module cg, cm_types = md_types md, cm_binds = cg_binds cg, cm_safe = safe_mode } gutsToCoreModule safe_mode (Right mg) = CoreModule { cm_module = mg_module mg, cm_types = typeEnvFromEntities (bindersOfBinds (mg_binds mg)) (mg_tcs mg) (mg_fam_insts mg), cm_binds = mg_binds mg, cm_safe = safe_mode } -- %************************************************************************ -- %* * -- Inspecting the session -- %* * -- %************************************************************************ -- | Get the module dependency graph. getModuleGraph :: GhcMonad m => m ModuleGraph -- ToDo: DiGraph ModSummary getModuleGraph = liftM hsc_mod_graph getSession -- | Determines whether a set of modules requires Template Haskell. -- -- Note that if the session's 'DynFlags' enabled Template Haskell when -- 'depanal' was called, then each module in the returned module graph will -- have Template Haskell enabled whether it is actually needed or not. needsTemplateHaskell :: ModuleGraph -> Bool needsTemplateHaskell ms = any (xopt Opt_TemplateHaskell . ms_hspp_opts) ms -- | Return @True@ <==> module is loaded. isLoaded :: GhcMonad m => ModuleName -> m Bool isLoaded m = withSession $ \hsc_env -> return $! isJust (lookupUFM (hsc_HPT hsc_env) m) -- | Return the bindings for the current interactive session. getBindings :: GhcMonad m => m [TyThing] getBindings = withSession $ \hsc_env -> return $ icInScopeTTs $ hsc_IC hsc_env -- | Return the instances for the current interactive session. getInsts :: GhcMonad m => m ([ClsInst], [FamInst]) getInsts = withSession $ \hsc_env -> return $ ic_instances (hsc_IC hsc_env) getPrintUnqual :: GhcMonad m => m PrintUnqualified getPrintUnqual = withSession $ \hsc_env -> return (icPrintUnqual (hsc_dflags hsc_env) (hsc_IC hsc_env)) -- | Container for information about a 'Module'. 
data ModuleInfo = ModuleInfo { minf_type_env :: TypeEnv, minf_exports :: NameSet, -- ToDo, [AvailInfo] like ModDetails? minf_rdr_env :: Maybe GlobalRdrEnv, -- Nothing for a compiled/package mod minf_instances :: [ClsInst], minf_iface :: Maybe ModIface, minf_safe :: SafeHaskellMode #ifdef GHCI ,minf_modBreaks :: ModBreaks #endif } -- We don't want HomeModInfo here, because a ModuleInfo applies -- to package modules too. -- | Request information about a loaded 'Module' getModuleInfo :: GhcMonad m => Module -> m (Maybe ModuleInfo) -- XXX: Maybe X getModuleInfo mdl = withSession $ \hsc_env -> do let mg = hsc_mod_graph hsc_env if mdl `elem` map ms_mod mg then liftIO $ getHomeModuleInfo hsc_env mdl else do {- if isHomeModule (hsc_dflags hsc_env) mdl then return Nothing else -} liftIO $ getPackageModuleInfo hsc_env mdl -- ToDo: we don't understand what the following comment means. -- (SDM, 19/7/2011) -- getPackageModuleInfo will attempt to find the interface, so -- we don't want to call it for a home module, just in case there -- was a problem loading the module and the interface doesn't -- exist... hence the isHomeModule test here. (ToDo: reinstate) getPackageModuleInfo :: HscEnv -> Module -> IO (Maybe ModuleInfo) #ifdef GHCI getPackageModuleInfo hsc_env mdl = do eps <- hscEPS hsc_env iface <- hscGetModuleInterface hsc_env mdl let avails = mi_exports iface names = availsToNameSet avails pte = eps_PTE eps tys = [ ty | name <- concatMap availNames avails, Just ty <- [lookupTypeEnv pte name] ] -- return (Just (ModuleInfo { minf_type_env = mkTypeEnv tys, minf_exports = names, minf_rdr_env = Just $! availsToGlobalRdrEnv (moduleName mdl) avails, minf_instances = error "getModuleInfo: instances for package module unimplemented", minf_iface = Just iface, minf_safe = getSafeMode $ mi_trust iface, minf_modBreaks = emptyModBreaks })) #else -- bogusly different for non-GHCI (ToDo) getPackageModuleInfo _hsc_env _mdl = do return Nothing #endif getHomeModuleInfo :: HscEnv -> Module -> IO (Maybe ModuleInfo) getHomeModuleInfo hsc_env mdl = case lookupUFM (hsc_HPT hsc_env) (moduleName mdl) of Nothing -> return Nothing Just hmi -> do let details = hm_details hmi iface = hm_iface hmi return (Just (ModuleInfo { minf_type_env = md_types details, minf_exports = availsToNameSet (md_exports details), minf_rdr_env = mi_globals $! hm_iface hmi, minf_instances = md_insts details, minf_iface = Just iface, minf_safe = getSafeMode $ mi_trust iface #ifdef GHCI ,minf_modBreaks = getModBreaks hmi #endif })) -- | The list of top-level entities defined in a module modInfoTyThings :: ModuleInfo -> [TyThing] modInfoTyThings minf = typeEnvElts (minf_type_env minf) modInfoTopLevelScope :: ModuleInfo -> Maybe [Name] modInfoTopLevelScope minf = fmap (map gre_name . globalRdrEnvElts) (minf_rdr_env minf) modInfoExports :: ModuleInfo -> [Name] modInfoExports minf = nameSetToList $! minf_exports minf -- | Returns the instances defined by the specified module. -- Warning: currently unimplemented for package modules. 
modInfoInstances :: ModuleInfo -> [ClsInst] modInfoInstances = minf_instances modInfoIsExportedName :: ModuleInfo -> Name -> Bool modInfoIsExportedName minf name = elemNameSet name (minf_exports minf) mkPrintUnqualifiedForModule :: GhcMonad m => ModuleInfo -> m (Maybe PrintUnqualified) -- XXX: returns a Maybe X mkPrintUnqualifiedForModule minf = withSession $ \hsc_env -> do return (fmap (mkPrintUnqualified (hsc_dflags hsc_env)) (minf_rdr_env minf)) modInfoLookupName :: GhcMonad m => ModuleInfo -> Name -> m (Maybe TyThing) -- XXX: returns a Maybe X modInfoLookupName minf name = withSession $ \hsc_env -> do case lookupTypeEnv (minf_type_env minf) name of Just tyThing -> return (Just tyThing) Nothing -> do eps <- liftIO $ readIORef (hsc_EPS hsc_env) return $! lookupType (hsc_dflags hsc_env) (hsc_HPT hsc_env) (eps_PTE eps) name modInfoIface :: ModuleInfo -> Maybe ModIface modInfoIface = minf_iface -- | Retrieve module safe haskell mode modInfoSafe :: ModuleInfo -> SafeHaskellMode modInfoSafe = minf_safe #ifdef GHCI modInfoModBreaks :: ModuleInfo -> ModBreaks modInfoModBreaks = minf_modBreaks #endif isDictonaryId :: Id -> Bool isDictonaryId id = case tcSplitSigmaTy (idType id) of { (_tvs, _theta, tau) -> isDictTy tau } -- | Looks up a global name: that is, any top-level name in any -- visible module. Unlike 'lookupName', lookupGlobalName does not use -- the interactive context, and therefore does not require a preceding -- 'setContext'. lookupGlobalName :: GhcMonad m => Name -> m (Maybe TyThing) lookupGlobalName name = withSession $ \hsc_env -> do liftIO $ lookupTypeHscEnv hsc_env name findGlobalAnns :: (GhcMonad m, Typeable a) => ([Word8] -> a) -> AnnTarget Name -> m [a] findGlobalAnns deserialize target = withSession $ \hsc_env -> do ann_env <- liftIO $ prepareAnnotations hsc_env Nothing return (findAnns deserialize ann_env target) #ifdef GHCI -- | get the GlobalRdrEnv for a session getGRE :: GhcMonad m => m GlobalRdrEnv getGRE = withSession $ \hsc_env-> return $ ic_rn_gbl_env (hsc_IC hsc_env) #endif -- ----------------------------------------------------------------------------- -- | Return all /external/ modules available in the package database. -- Modules from the current session (i.e., from the 'HomePackageTable') are -- not included. packageDbModules :: GhcMonad m => Bool -- ^ Only consider exposed packages. -> m [Module] packageDbModules only_exposed = do dflags <- getSessionDynFlags let pkgs = eltsUFM (pkgIdMap (pkgState dflags)) return $ [ mkModule pid modname | p <- pkgs , not only_exposed || exposed p , let pid = packageConfigId p , modname <- exposedModules p ] -- ----------------------------------------------------------------------------- -- Misc exported utils dataConType :: DataCon -> Type dataConType dc = idType (dataConWrapId dc) -- | print a 'NamedThing', adding parentheses if the name is an operator. pprParenSymName :: NamedThing a => a -> SDoc pprParenSymName a = parenSymOcc (getOccName a) (ppr (getName a)) -- ---------------------------------------------------------------------------- #if 0 -- ToDo: -- - Data and Typeable instances for HsSyn. -- ToDo: check for small transformations that happen to the syntax in -- the typechecker (eg. -e ==> negate e, perhaps for fromIntegral) -- ToDo: maybe use TH syntax instead of IfaceSyn? There's already a way -- to get from TyCons, Ids etc. to TH syntax (reify). -- :browse will use either lm_toplev or inspect lm_interface, depending -- on whether the module is interpreted or not. 
#endif -- Extract the filename, stringbuffer content and dynflags associed to a module -- -- XXX: Explain pre-conditions getModuleSourceAndFlags :: GhcMonad m => Module -> m (String, StringBuffer, DynFlags) getModuleSourceAndFlags mod = do m <- getModSummary (moduleName mod) case ml_hs_file $ ms_location m of Nothing -> do dflags <- getDynFlags liftIO $ throwIO $ mkApiErr dflags (text "No source available for module " <+> ppr mod) Just sourceFile -> do source <- liftIO $ hGetStringBuffer sourceFile return (sourceFile, source, ms_hspp_opts m) -- | Return module source as token stream, including comments. -- -- The module must be in the module graph and its source must be available. -- Throws a 'HscTypes.SourceError' on parse error. getTokenStream :: GhcMonad m => Module -> m [Located Token] getTokenStream mod = do (sourceFile, source, flags) <- getModuleSourceAndFlags mod let startLoc = mkRealSrcLoc (mkFastString sourceFile) 1 1 case lexTokenStream source startLoc flags of POk _ ts -> return ts PFailed span err -> do dflags <- getDynFlags liftIO $ throwIO $ mkSrcErr (unitBag $ mkPlainErrMsg dflags span err) -- | Give even more information on the source than 'getTokenStream' -- This function allows reconstructing the source completely with -- 'showRichTokenStream'. getRichTokenStream :: GhcMonad m => Module -> m [(Located Token, String)] getRichTokenStream mod = do (sourceFile, source, flags) <- getModuleSourceAndFlags mod let startLoc = mkRealSrcLoc (mkFastString sourceFile) 1 1 case lexTokenStream source startLoc flags of POk _ ts -> return $ addSourceToTokens startLoc source ts PFailed span err -> do dflags <- getDynFlags liftIO $ throwIO $ mkSrcErr (unitBag $ mkPlainErrMsg dflags span err) -- | Given a source location and a StringBuffer corresponding to this -- location, return a rich token stream with the source associated to the -- tokens. addSourceToTokens :: RealSrcLoc -> StringBuffer -> [Located Token] -> [(Located Token, String)] addSourceToTokens _ _ [] = [] addSourceToTokens loc buf (t@(L span _) : ts) = case span of UnhelpfulSpan _ -> (t,"") : addSourceToTokens loc buf ts RealSrcSpan s -> (t,str) : addSourceToTokens newLoc newBuf ts where (newLoc, newBuf, str) = go "" loc buf start = realSrcSpanStart s end = realSrcSpanEnd s go acc loc buf | loc < start = go acc nLoc nBuf | start <= loc && loc < end = go (ch:acc) nLoc nBuf | otherwise = (loc, buf, reverse acc) where (ch, nBuf) = nextChar buf nLoc = advanceSrcLoc loc ch -- | Take a rich token stream such as produced from 'getRichTokenStream' and -- return source code almost identical to the original code (except for -- insignificant whitespace.) showRichTokenStream :: [(Located Token, String)] -> String showRichTokenStream ts = go startLoc ts "" where sourceFile = getFile $ map (getLoc . fst) ts getFile [] = panic "showRichTokenStream: No source file found" getFile (UnhelpfulSpan _ : xs) = getFile xs getFile (RealSrcSpan s : _) = srcSpanFile s startLoc = mkRealSrcLoc sourceFile 1 1 go _ [] = id go loc ((L span _, str):ts) = case span of UnhelpfulSpan _ -> go loc ts RealSrcSpan s | locLine == tokLine -> ((replicate (tokCol - locCol) ' ') ++) . (str ++) . go tokEnd ts | otherwise -> ((replicate (tokLine - locLine) '\n') ++) . ((replicate (tokCol - 1) ' ') ++) . (str ++) . 
go tokEnd ts where (locLine, locCol) = (srcLocLine loc, srcLocCol loc) (tokLine, tokCol) = (srcSpanStartLine s, srcSpanStartCol s) tokEnd = realSrcSpanEnd s -- ----------------------------------------------------------------------------- -- Interactive evaluation -- | Takes a 'ModuleName' and possibly a 'PackageId', and consults the -- filesystem and package database to find the corresponding 'Module', -- using the algorithm that is used for an @import@ declaration. findModule :: GhcMonad m => ModuleName -> Maybe FastString -> m Module findModule mod_name maybe_pkg = withSession $ \hsc_env -> do let dflags = hsc_dflags hsc_env this_pkg = thisPackage dflags -- case maybe_pkg of Just pkg | fsToPackageId pkg /= this_pkg && pkg /= fsLit "this" -> liftIO $ do res <- findImportedModule hsc_env mod_name maybe_pkg case res of Found _ m -> return m err -> throwOneError $ noModError dflags noSrcSpan mod_name err _otherwise -> do home <- lookupLoadedHomeModule mod_name case home of Just m -> return m Nothing -> liftIO $ do res <- findImportedModule hsc_env mod_name maybe_pkg case res of Found loc m | modulePackageId m /= this_pkg -> return m | otherwise -> modNotLoadedError dflags m loc err -> throwOneError $ noModError dflags noSrcSpan mod_name err modNotLoadedError :: DynFlags -> Module -> ModLocation -> IO a modNotLoadedError dflags m loc = throwGhcExceptionIO $ CmdLineError $ showSDoc dflags $ text "module is not loaded:" <+> quotes (ppr (moduleName m)) <+> parens (text (expectJust "modNotLoadedError" (ml_hs_file loc))) -- | Like 'findModule', but differs slightly when the module refers to -- a source file, and the file has not been loaded via 'load'. In -- this case, 'findModule' will throw an error (module not loaded), -- but 'lookupModule' will check to see whether the module can also be -- found in a package, and if so, that package 'Module' will be -- returned. If not, the usual module-not-found error will be thrown. -- lookupModule :: GhcMonad m => ModuleName -> Maybe FastString -> m Module lookupModule mod_name (Just pkg) = findModule mod_name (Just pkg) lookupModule mod_name Nothing = withSession $ \hsc_env -> do home <- lookupLoadedHomeModule mod_name case home of Just m -> return m Nothing -> liftIO $ do res <- findExposedPackageModule hsc_env mod_name Nothing case res of Found _ m -> return m err -> throwOneError $ noModError (hsc_dflags hsc_env) noSrcSpan mod_name err lookupLoadedHomeModule :: GhcMonad m => ModuleName -> m (Maybe Module) lookupLoadedHomeModule mod_name = withSession $ \hsc_env -> case lookupUFM (hsc_HPT hsc_env) mod_name of Just mod_info -> return (Just (mi_module (hm_iface mod_info))) _not_a_home_module -> return Nothing #ifdef GHCI -- | Check that a module is safe to import (according to Safe Haskell). -- -- We return True to indicate the import is safe and False otherwise -- although in the False case an error may be thrown first. isModuleTrusted :: GhcMonad m => Module -> m Bool isModuleTrusted m = withSession $ \hsc_env -> liftIO $ hscCheckSafe hsc_env m noSrcSpan -- | Return if a module is trusted and the pkgs it depends on to be trusted. moduleTrustReqs :: GhcMonad m => Module -> m (Bool, [PackageId]) moduleTrustReqs m = withSession $ \hsc_env -> liftIO $ hscGetSafe hsc_env m noSrcSpan -- | EXPERIMENTAL: DO NOT USE. -- -- Set the monad GHCi lifts user statements into. -- -- Checks that a type (in string form) is an instance of the -- @GHC.GHCi.GHCiSandboxIO@ type class. Sets it to be the GHCi monad if it is, -- throws an error otherwise. 
{-# WARNING setGHCiMonad "This is experimental! Don't use." #-} setGHCiMonad :: GhcMonad m => String -> m () setGHCiMonad name = withSession $ \hsc_env -> do ty <- liftIO $ hscIsGHCiMonad hsc_env name modifySession $ \s -> let ic = (hsc_IC s) { ic_monad = ty } in s { hsc_IC = ic } getHistorySpan :: GhcMonad m => History -> m SrcSpan getHistorySpan h = withSession $ \hsc_env -> return $ InteractiveEval.getHistorySpan hsc_env h obtainTermFromVal :: GhcMonad m => Int -> Bool -> Type -> a -> m Term obtainTermFromVal bound force ty a = withSession $ \hsc_env -> liftIO $ InteractiveEval.obtainTermFromVal hsc_env bound force ty a obtainTermFromId :: GhcMonad m => Int -> Bool -> Id -> m Term obtainTermFromId bound force id = withSession $ \hsc_env -> liftIO $ InteractiveEval.obtainTermFromId hsc_env bound force id #endif -- | Returns the 'TyThing' for a 'Name'. The 'Name' may refer to any -- entity known to GHC, including 'Name's defined using 'runStmt'. lookupName :: GhcMonad m => Name -> m (Maybe TyThing) lookupName name = withSession $ \hsc_env -> liftIO $ hscTcRcLookupName hsc_env name -- ----------------------------------------------------------------------------- -- Pure API -- | A pure interface to the module parser. -- parser :: String -- ^ Haskell module source text (full Unicode is supported) -> DynFlags -- ^ the flags -> FilePath -- ^ the filename (for source locations) -> Either ErrorMessages (WarningMessages, Located (HsModule RdrName)) parser str dflags filename = let loc = mkRealSrcLoc (mkFastString filename) 1 1 buf = stringToStringBuffer str in case unP Parser.parseModule (mkPState dflags buf loc) of PFailed span err -> Left (unitBag (mkPlainErrMsg dflags span err)) POk pst rdr_module -> let (warns,_) = getMessages pst in Right (warns, rdr_module)
ryantm/ghc
compiler/main/GHC.hs
bsd-3-clause
54,766
4
28
15,034
9,910
5,331
4,579
-1
-1
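The Haddocks in the GHC API module above describe a load-then-inspect workflow: set the session DynFlags, register a target, load it, and then run the parseModule / typecheckModule / desugarModule pipeline. The sketch below compresses that workflow into one main; it assumes the ghc-paths package for libdir (as the initGhcMonad comment recommends) and uses "Example.hs" and "Example" as placeholder target and module names.

-- Minimal sketch of the documented workflow; "Example.hs" and "Example"
-- are placeholders, and GHC.Paths is an assumed dependency.
import GHC
import GHC.Paths (libdir)

main :: IO ()
main = runGhc (Just libdir) $ do
  dflags <- getSessionDynFlags
  _pkgs  <- setSessionDynFlags dflags        -- also reads the package db
  target <- guessTarget "Example.hs" Nothing
  setTargets [target]
  _flag  <- load LoadAllTargets
  ms     <- getModSummary (mkModuleName "Example")
  pm     <- parseModule ms                   -- throws SourceError on failure
  tm     <- typecheckModule pm
  dm     <- desugarModule tm
  let _guts = coreModule dm                  -- the Core (ModGuts) of the module
  return ()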
{-# LANGUAGE GADTs, RankNTypes, ScopedTypeVariables #-} {-# LANGUAGE GeneralizedNewtypeDeriving, FlexibleContexts, FlexibleInstances #-} -- | 'ProgramAlt' example: simple applicative parsers module Parser where import Control.Applicative import Control.Alternative.Operational import Control.Operational.Instruction import Control.Monad import Control.Monad.Trans.State import Data.Functor.Compose import Data.Traversable import Data.Maybe (listToMaybe) import Data.List (find, stripPrefix) --------------------------------------------------------------- --------------------------------------------------------------- -- -- Parser combinators -- data ParserI a where Symbol :: Char -> ParserI Char char :: Operational ParserI f => Char -> f Char char = singleton . Symbol string :: (Operational ParserI f, Applicative f) => String -> f String string = traverse char oneOf :: (Operational ParserI f, Alternative f) => String -> f Char oneOf = foldr (<|>) empty . map char -- | Interpret a parser program syntactically by pattern matching on -- its view. runParser :: ProgramAlt ParserI a -> String -> Maybe a runParser = fmap listToMaybe . eval . viewAlt where eval :: ProgramViewAlt ParserI a -> String -> [a] eval (Pure a) [] = pure a eval (Pure a) _ = empty eval (Symbol c :<**> k) [] = empty eval (Symbol c :<**> k) (x:xs) | c == x = pure c <**> eval k xs | otherwise = empty eval (Many ps) str = fmap asum (sequenceA (map eval ps)) str asum :: Alternative f => [f a] -> f a asum = foldr (<|>) empty -- | This version is maybe more conventional than 'runParser'. runConventional :: ProgramAlt ParserI a -> String -> Maybe a runConventional = (succeed .) . eval . viewAlt where eval :: ProgramViewAlt ParserI a -> String -> [(a, String)] eval (Pure a) str = [(a, str)] eval (Symbol c :<**> k) "" = [] eval (Symbol c :<**> k) (x:xs) | c == x = [(k' c, str') | (k', str') <- eval k xs] | otherwise = [] eval (Many ps) str = asum $ map (flip eval str) ps -- | Example parser: match parentheses and count depth. parens :: ProgramAlt ParserI Int parens = pure 0 <|> fmap (+1) (char '(' *> parens <* char ')') -- | Interpret a parser program denotationally, by evaluating each -- 'ParserI' instruction to an 'Alternative' action. runParser' :: ProgramAlt ParserI a -> String -> Maybe a runParser' = (succeed .) . runStateT . interpretAlt evalParserI succeed :: [(a, String)] -> Maybe a succeed = fmap fst . find step where step (a, "") = True step (_, _) = False evalParserI :: ParserI a -> StateT String [] a evalParserI (Symbol c) = do str <- get case str of x:xs | c == x -> put xs >> return c otherwise -> mzero -- | Static analysis example: enumerate the strings accepted by a parser. -- Not all parsers can be enumerate; for example, this one doesn't: -- -- > -- diverges: -- > let a = char 'a' *> a -- > in enumerate a -- -- The problem in this example is that there is no string such that -- the parser accepts it in finitely many steps. enumerate :: ProgramAlt ParserI a -> [String] enumerate = go [showString ""] . viewAlt where go :: [ShowS] -> ProgramViewAlt ParserI a -> [String] go strs (Pure a) = map ($"") strs go strs (Symbol c :<**> k) = go (map (.(showChar c)) strs) k go strs (Many ps) = interleave $ map (go strs) ps interleave :: [[a]] -> [a] interleave = foldr interleave2 [] where interleave2 :: [a] -> [a] -> [a] interleave2 [] ys = ys interleave2 (x:xs) ys = x : interleave2 ys xs -- | Static analysis example: optimize a parser by merging shared -- prefixes. This works by reducing a parser to this normal form: -- -- > 1. 
Pure a -- > 2. Symbol c :<**> nf -- nf is in normal form -- > 3. Many [nf, ...] -- nf is Pure _ or Symbol c :<**> nf' -- -- In addition, we order the branches by Symbol order to ensure -- merging. optimize :: ProgramAlt ParserI a -> ProgramAlt ParserI a optimize = compileAlt . merge . viewAlt merge :: ProgramViewAlt ParserI a -> ProgramViewAlt ParserI a merge p@(Pure _) = p merge (Symbol a :<**> k) = Symbol a :<**> merge k merge (Many ps) = Many (mergeMany ps) mergeMany :: [ProgramViewAlt ParserI a] -> [ProgramViewAlt ParserI a] mergeMany = foldr step [] . map merge where step (Pure a) ps = Pure a : ps step (Symbol a :<**> l) ((Symbol b :<**> r) : ps) = case a `compare` b of EQ -> (Symbol a :<**> Many (mergeMany [l, r])) : ps LT -> (Symbol a :<**> l) : (Symbol b :<**> r) : ps GT -> (Symbol b :<**> r) : (Symbol a :<**> l) : ps step (Symbol a :<**> l) ps = (Symbol a :<**> l) : ps step (Many ps) ps' = mergeMany (mergeMany ps ++ ps') tokens :: [String] -> ProgramAlt ParserI String tokens = asum . map string example = ["abactor", "abacus", "abaft", "abaisance", "abaissed", "abalone"] describe :: forall a. ProgramAlt ParserI a -> Description describe = eval . viewAlt where eval :: forall x. ProgramViewAlt ParserI x -> Description eval (Pure _) = Ok eval (Symbol c :<**> k) = c :> (eval k) eval (Many ps) = OneOf (map eval ps) data Description = Ok | Char :> Description | OneOf [Description] deriving Show data StringI a where String :: String -> StringI String evalStringI :: StringI a -> StateT String [] a evalStringI (String "") = return "" evalStringI (String str) = do str' <- get case str `stripPrefix` str' of Nothing -> mzero Just suffix -> put suffix >> return str runStringP :: ProgramAlt (Coproduct ParserI StringI) a -> String -> [(a, String)] runStringP = runStateT . interpretAlt (coproduct evalParserI evalStringI)
sacundim/free-operational
examples/Parser.hs
bsd-3-clause
5,927
1
16
1,501
1,964
1,025
939
108
6
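The free-applicative parser module above already defines the parens and example values; the driver below exercises the different interpreters on them. It is a sketch that would sit in the same module (so no extra imports are needed), and the expected results are stated loosely in comments rather than asserted.

-- Exercise the interpreters defined above on the module's own examples.
demo :: IO ()
demo = do
  print (runParser parens "(())")              -- Just 2: two nested pairs
  print (runParser parens "(()")               -- Nothing: unbalanced input
  mapM_ putStrLn (enumerate (tokens example))  -- the six example words
  print (describe (optimize (tokens example))) -- shared "aba" prefix merged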
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE Rank2Types #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeSynonymInstances #-} {-# LANGUAGE StandaloneDeriving #-} module Snap.Internal.Types where ------------------------------------------------------------------------------ import Blaze.ByteString.Builder import Blaze.ByteString.Builder.Char.Utf8 import Control.Applicative import Control.Exception (SomeException, throwIO, ErrorCall(..)) import Control.Monad import Control.Monad.CatchIO import qualified Control.Monad.Error.Class as EC import Control.Monad.State import Data.ByteString.Char8 (ByteString) import qualified Data.ByteString.Char8 as S import qualified Data.ByteString.Lazy.Char8 as L import Data.CaseInsensitive (CI) import Data.Int import Data.IORef import Data.Maybe import Data.Monoid import Data.Time import qualified Data.Text as T import qualified Data.Text.Lazy as LT import Data.Typeable #if MIN_VERSION_base(4,6,0) import Prelude hiding (take) #else import Prelude hiding (catch, take) #endif import System.PosixCompat.Files hiding (setFileSize) import System.Posix.Types (FileOffset) ------------------------------------------------------------------------------ import Snap.Internal.Exceptions import Snap.Internal.Http.Types import Snap.Internal.Iteratee.Debug import Snap.Iteratee hiding (map) import qualified Snap.Types.Headers as H import Snap.Util.Readable ------------------------------------------------------------------------------ -------------------- -- The Snap Monad -- -------------------- {-| 'Snap' is the 'Monad' that user web handlers run in. 'Snap' gives you: 1. stateful access to fetch or modify an HTTP 'Request' 2. stateful access to fetch or modify an HTTP 'Response' 3. failure \/ 'Alternative' \/ 'MonadPlus' semantics: a 'Snap' handler can choose not to handle a given request, using 'empty' or its synonym 'pass', and you can try alternative handlers with the '<|>' operator: > a :: Snap String > a = pass > > b :: Snap String > b = return "foo" > > c :: Snap String > c = a <|> b -- try running a, if it fails then try b 4. convenience functions ('writeBS', 'writeLBS', 'writeText', 'writeLazyText', 'addToOutput') for queueing output to be written to the 'Response': > a :: (forall a . Enumerator a) -> Snap () > a someEnumerator = do > writeBS "I'm a strict bytestring" > writeLBS "I'm a lazy bytestring" > writeText "I'm strict text" > addToOutput someEnumerator 5. early termination: if you call 'finishWith': > a :: Snap () > a = do > modifyResponse $ setResponseStatus 500 "Internal Server Error" > writeBS "500 error" > r <- getResponse > finishWith r then any subsequent processing will be skipped and supplied 'Response' value will be returned from 'runSnap' as-is. 6. access to the 'IO' monad through a 'MonadIO' instance: > a :: Snap () > a = liftIO fireTheMissiles 7. the ability to set or extend a timeout which will kill the handler thread after @N@ seconds of inactivity (the default is 20 seconds): > a :: Snap () > a = setTimeout 30 8. throw and catch exceptions using a 'MonadCatchIO' instance: > foo :: Snap () > foo = bar `catch` \(e::SomeException) -> baz > where > bar = throw FooException 9. log a message to the error log: > foo :: Snap () > foo = logError "grumble." 
You may notice that most of the type signatures in this module contain a @(MonadSnap m) => ...@ typeclass constraint. 'MonadSnap' is a typeclass which, in essence, says \"you can get back to the 'Snap' monad from here\". Using 'MonadSnap' you can extend the 'Snap' monad with additional functionality and still have access to most of the 'Snap' functions without writing 'lift' everywhere. Instances are already provided for most of the common monad transformers ('ReaderT', 'WriterT', 'StateT', etc.). -} ------------------------------------------------------------------------------ -- | 'MonadSnap' is a type class, analogous to 'MonadIO' for 'IO', that makes -- it easy to wrap 'Snap' inside monad transformers. class (Monad m, MonadIO m, MonadCatchIO m, MonadPlus m, Functor m, Applicative m, Alternative m) => MonadSnap m where liftSnap :: Snap a -> m a ------------------------------------------------------------------------------ data SnapResult a = SnapValue a | PassOnProcessing String | EarlyTermination Response ------------------------------------------------------------------------------ newtype Snap a = Snap { unSnap :: StateT SnapState (Iteratee ByteString IO) (SnapResult a) } ------------------------------------------------------------------------------ data SnapState = SnapState { _snapRequest :: Request , _snapResponse :: Response , _snapLogError :: ByteString -> IO () , _snapModifyTimeout :: (Int -> Int) -> IO () } ------------------------------------------------------------------------------ instance Monad Snap where (>>=) = snapBind return = snapReturn fail = snapFail ------------------------------------------------------------------------------ snapBind :: Snap a -> (a -> Snap b) -> Snap b snapBind (Snap m) f = Snap $ do res <- m case res of SnapValue a -> unSnap $! f a PassOnProcessing r -> return $! PassOnProcessing r EarlyTermination r -> return $! EarlyTermination r {-# INLINE snapBind #-} snapReturn :: a -> Snap a snapReturn = Snap . return . SnapValue {-# INLINE snapReturn #-} snapFail :: String -> Snap a snapFail !m = Snap $! return $! PassOnProcessing m {-# INLINE snapFail #-} ------------------------------------------------------------------------------ instance MonadIO Snap where liftIO m = Snap $! liftM SnapValue $! liftIO m ------------------------------------------------------------------------------ instance MonadCatchIO Snap where catch (Snap m) handler = Snap $! m `catch` h where h e = do rethrowIfUncatchable $ fromException e maybe (throw e) (\e' -> let (Snap z) = handler e' in z) (fromException e) block (Snap m) = Snap $ block m unblock (Snap m) = Snap $ unblock m ------------------------------------------------------------------------------ rethrowIfUncatchable :: (MonadCatchIO m) => Maybe UncatchableException -> m () rethrowIfUncatchable Nothing = return () rethrowIfUncatchable (Just e) = throw e ------------------------------------------------------------------------------ instance MonadPlus Snap where mzero = Snap $! return $! PassOnProcessing "" a `mplus` b = Snap $! do r <- unSnap a -- redundant just in case ordering by frequency helps here. case r of SnapValue _ -> return r PassOnProcessing _ -> unSnap b _ -> return r ------------------------------------------------------------------------------ instance (EC.MonadError String) Snap where throwError = fail catchError act hndl = Snap $ do r <- unSnap act -- redundant just in case ordering by frequency helps here. 
case r of SnapValue _ -> return r PassOnProcessing m -> unSnap $ hndl m _ -> return r ------------------------------------------------------------------------------ instance Functor Snap where fmap = liftM ------------------------------------------------------------------------------ instance Applicative Snap where pure = return (<*>) = ap ------------------------------------------------------------------------------ instance Alternative Snap where empty = mzero (<|>) = mplus ------------------------------------------------------------------------------ instance MonadSnap Snap where liftSnap = id ------------------------------------------------------------------------------ -- | The Typeable instance is here so Snap can be dynamically executed with -- Hint. snapTyCon :: TyCon #if MIN_VERSION_base(4,4,0) snapTyCon = mkTyCon3 "snap-core" "Snap.Core" "Snap" #else snapTyCon = mkTyCon "Snap.Core.Snap" #endif {-# NOINLINE snapTyCon #-} #if __GLASGOW_HASKELL__ < 708 instance Typeable1 Snap where typeOf1 _ = mkTyConApp snapTyCon [] #else deriving instance Typeable Snap #endif ------------------------------------------------------------------------------ liftIter :: MonadSnap m => Iteratee ByteString IO a -> m a liftIter i = liftSnap $ Snap (lift i >>= return . SnapValue) ------------------------------------------------------------------------------ -- | Sends the request body through an iteratee (data consumer) and -- returns the result. -- -- If the iteratee you pass in here throws an exception, Snap will attempt to -- clear the rest of the unread request body before rethrowing the exception. -- If your iteratee used 'terminateConnection', however, Snap will give up and -- immediately close the socket. runRequestBody :: MonadSnap m => Iteratee ByteString IO a -> m a runRequestBody iter = do bumpTimeout <- liftM ($ max 5) getTimeoutModifier req <- getRequest senum <- liftIO $ readIORef $ rqBody req let (SomeEnumerator enum) = senum -- make sure the iteratee consumes all of the output let iter' = handle bumpTimeout req (iter >>= \a -> skipToEnd bumpTimeout >> return a) -- run the iteratee step <- liftIO $ runIteratee iter' result <- liftIter $ enum step -- stuff a new dummy enumerator into the request, so you can only try to -- read the request body from the socket once resetEnum req return result where resetEnum req = liftIO $ writeIORef (rqBody req) $ SomeEnumerator $ joinI . take 0 skipToEnd bump = killIfTooSlow bump 500 5 skipToEof `catchError` \e -> throwError $ ConnectionTerminatedException e handle bump req = (`catches` [ Handler $ \(e :: ConnectionTerminatedException) -> do let en = SomeEnumerator $ const $ throwError e liftIO $ writeIORef (rqBody req) en throwError e , Handler $ \(e :: SomeException) -> do resetEnum req skipToEnd bump throwError e ]) ------------------------------------------------------------------------------ -- | Returns the request body as a lazy bytestring. -- -- This function is deprecated as of 0.6; it places no limits on the size of -- the request being read, and as such, if used, can result in a -- denial-of-service attack on your server. Please use 'readRequestBody' -- instead. getRequestBody :: MonadSnap m => m L.ByteString getRequestBody = liftM L.fromChunks $ runRequestBody consume {-# INLINE getRequestBody #-} {-# DEPRECATED getRequestBody "As of 0.6, please use 'readRequestBody' instead" #-} ------------------------------------------------------------------------------ -- | Returns the request body as a lazy bytestring. 
/New in 0.6./ readRequestBody :: MonadSnap m => Int64 -- ^ size of the largest request body we're willing -- to accept. If a request body longer than this is -- received, a 'TooManyBytesReadException' is -- thrown. See 'takeNoMoreThan'. -> m L.ByteString readRequestBody sz = liftM L.fromChunks $ runRequestBody $ joinI $ takeNoMoreThan sz $$ consume ------------------------------------------------------------------------------ -- | Normally Snap is careful to ensure that the request body is fully -- consumed after your web handler runs, but before the 'Response' enumerator -- is streamed out the socket. If you want to transform the request body into -- some output in O(1) space, you should use this function. -- -- Note that upon calling this function, response processing finishes early as -- if you called 'finishWith'. Make sure you set any content types, headers, -- cookies, etc. before you call this function. -- transformRequestBody :: (forall a . Enumerator Builder IO a) -- ^ the output 'Iteratee' is passed to this -- 'Enumerator', and then the resulting 'Iteratee' is -- fed the request body stream. Your 'Enumerator' is -- responsible for transforming the input. -> Snap () transformRequestBody trans = do req <- getRequest let ioref = rqBody req senum <- liftIO $ readIORef ioref let (SomeEnumerator enum') = senum let enum = mapEnum toByteString fromByteString enum' liftIO $ writeIORef ioref (SomeEnumerator enumEOF) origRsp <- getResponse let rsp = setResponseBody (\writeEnd -> do let i = iterateeDebugWrapperWith showBuilder "transformRequestBody" $ trans writeEnd st <- liftIO $ runIteratee i enum st) $ origRsp { rspTransformingRqBody = True } finishWith rsp ------------------------------------------------------------------------------ -- | Short-circuits a 'Snap' monad action early, storing the given -- 'Response' value in its state. finishWith :: MonadSnap m => Response -> m a finishWith = liftSnap . Snap . return . EarlyTermination {-# INLINE finishWith #-} ------------------------------------------------------------------------------ -- | Capture the flow of control in case a handler calls 'finishWith'. -- -- /WARNING/: in the event of a call to 'transformRequestBody' it is possible -- to violate HTTP protocol safety when using this function. If you call -- 'catchFinishWith' it is suggested that you do not modify the body of the -- 'Response' which was passed to the 'finishWith' call. catchFinishWith :: Snap a -> Snap (Either Response a) catchFinishWith (Snap m) = Snap $ do r <- m case r of SnapValue a -> return $! SnapValue $! Right a PassOnProcessing e -> return $! PassOnProcessing e EarlyTermination resp -> return $! SnapValue $! Left resp {-# INLINE catchFinishWith #-} ------------------------------------------------------------------------------ -- | Fails out of a 'Snap' monad action. This is used to indicate -- that you choose not to handle the given request within the given -- handler. pass :: MonadSnap m => m a pass = empty ------------------------------------------------------------------------------ -- | Runs a 'Snap' monad action only if the request's HTTP method matches -- the given method. method :: MonadSnap m => Method -> m a -> m a method m action = do req <- getRequest unless (rqMethod req == m) pass action {-# INLINE method #-} ------------------------------------------------------------------------------ -- | Runs a 'Snap' monad action only if the request's HTTP method matches -- one of the given methods. 
methods :: MonadSnap m => [Method] -> m a -> m a methods ms action = do req <- getRequest unless (rqMethod req `elem` ms) pass action {-# INLINE methods #-} ------------------------------------------------------------------------------ -- Appends n bytes of the path info to the context path with a -- trailing slash. updateContextPath :: Int -> Request -> Request updateContextPath n req | n > 0 = req { rqContextPath = ctx , rqPathInfo = pinfo } | otherwise = req where ctx' = S.take n (rqPathInfo req) ctx = S.concat [rqContextPath req, ctx', "/"] pinfo = S.drop (n+1) (rqPathInfo req) ------------------------------------------------------------------------------ -- Runs a 'Snap' monad action only if the 'rqPathInfo' matches the given -- predicate. pathWith :: MonadSnap m => (ByteString -> ByteString -> Bool) -> ByteString -> m a -> m a pathWith c p action = do req <- getRequest unless (c p (rqPathInfo req)) pass localRequest (updateContextPath $ S.length p) action ------------------------------------------------------------------------------ -- | Runs a 'Snap' monad action only when the 'rqPathInfo' of the request -- starts with the given path. For example, -- -- > dir "foo" handler -- -- Will fail if 'rqPathInfo' is not \"@\/foo@\" or \"@\/foo\/...@\", and will -- add @\"foo\/\"@ to the handler's local 'rqContextPath'. dir :: MonadSnap m => ByteString -- ^ path component to match -> m a -- ^ handler to run -> m a dir = pathWith f where f dr pinfo = dr == x where (x,_) = S.break (=='/') pinfo {-# INLINE dir #-} ------------------------------------------------------------------------------ -- | Runs a 'Snap' monad action only for requests where 'rqPathInfo' is -- exactly equal to the given string. If the path matches, locally sets -- 'rqContextPath' to the old value of 'rqPathInfo', sets 'rqPathInfo'=\"\", -- and runs the given handler. path :: MonadSnap m => ByteString -- ^ path to match against -> m a -- ^ handler to run -> m a path = pathWith (==) {-# INLINE path #-} ------------------------------------------------------------------------------ -- | Runs a 'Snap' monad action only when the first path component is -- successfully parsed as the argument to the supplied handler function. pathArg :: (Readable a, MonadSnap m) => (a -> m b) -> m b pathArg f = do req <- getRequest let (p,_) = S.break (=='/') (rqPathInfo req) a <- fromBS p localRequest (updateContextPath $ S.length p) (f a) ------------------------------------------------------------------------------ -- | Runs a 'Snap' monad action only when 'rqPathInfo' is empty. ifTop :: MonadSnap m => m a -> m a ifTop = path "" {-# INLINE ifTop #-} ------------------------------------------------------------------------------ -- | Local Snap version of 'get'. sget :: Snap SnapState sget = Snap $ liftM SnapValue get {-# INLINE sget #-} ------------------------------------------------------------------------------ -- | Local Snap monad version of 'modify'. smodify :: (SnapState -> SnapState) -> Snap () smodify f = Snap $ modify f >> return (SnapValue ()) {-# INLINE smodify #-} ------------------------------------------------------------------------------ -- | Grabs the 'Request' object out of the 'Snap' monad. getRequest :: MonadSnap m => m Request getRequest = liftSnap $ liftM _snapRequest sget {-# INLINE getRequest #-} ------------------------------------------------------------------------------ -- | Grabs something out of the 'Request' object, using the given projection -- function. See 'gets'. 
getsRequest :: MonadSnap m => (Request -> a) -> m a getsRequest f = liftSnap $ liftM (f . _snapRequest) sget {-# INLINE getsRequest #-} ------------------------------------------------------------------------------ -- | Grabs the 'Response' object out of the 'Snap' monad. getResponse :: MonadSnap m => m Response getResponse = liftSnap $ liftM _snapResponse sget {-# INLINE getResponse #-} ------------------------------------------------------------------------------ -- | Grabs something out of the 'Response' object, using the given projection -- function. See 'gets'. getsResponse :: MonadSnap m => (Response -> a) -> m a getsResponse f = liftSnap $ liftM (f . _snapResponse) sget {-# INLINE getsResponse #-} ------------------------------------------------------------------------------ -- | Puts a new 'Response' object into the 'Snap' monad. putResponse :: MonadSnap m => Response -> m () putResponse r = liftSnap $ smodify $ \ss -> ss { _snapResponse = r } {-# INLINE putResponse #-} ------------------------------------------------------------------------------ -- | Puts a new 'Request' object into the 'Snap' monad. putRequest :: MonadSnap m => Request -> m () putRequest r = liftSnap $ smodify $ \ss -> ss { _snapRequest = r } {-# INLINE putRequest #-} ------------------------------------------------------------------------------ -- | Modifies the 'Request' object stored in a 'Snap' monad. modifyRequest :: MonadSnap m => (Request -> Request) -> m () modifyRequest f = liftSnap $ smodify $ \ss -> ss { _snapRequest = f $ _snapRequest ss } {-# INLINE modifyRequest #-} ------------------------------------------------------------------------------ -- | Modifes the 'Response' object stored in a 'Snap' monad. modifyResponse :: MonadSnap m => (Response -> Response) -> m () modifyResponse f = liftSnap $ smodify $ \ss -> ss { _snapResponse = f $ _snapResponse ss } {-# INLINE modifyResponse #-} ------------------------------------------------------------------------------ -- | Performs a redirect by setting the @Location@ header to the given target -- URL/path and the status code to 302 in the 'Response' object stored in a -- 'Snap' monad. Note that the target URL is not validated in any way. -- Consider using 'redirect\'' instead, which allows you to choose the correct -- status code. redirect :: MonadSnap m => ByteString -> m a redirect target = redirect' target 302 {-# INLINE redirect #-} ------------------------------------------------------------------------------ -- | Performs a redirect by setting the @Location@ header to the given target -- URL/path and the status code (should be one of 301, 302, 303 or 307) in the -- 'Response' object stored in a 'Snap' monad. Note that the target URL is not -- validated in any way. redirect' :: MonadSnap m => ByteString -> Int -> m a redirect' target status = do r <- getResponse finishWith $ setResponseCode status $ setContentLength 0 $ modifyResponseBody (const $ enumBuilder mempty) $ setHeader "Location" target r {-# INLINE redirect' #-} ------------------------------------------------------------------------------ -- | Log an error message in the 'Snap' monad logError :: MonadSnap m => ByteString -> m () logError s = liftSnap $ Snap $ gets _snapLogError >>= (\l -> liftIO $ l s) >> return (SnapValue ()) {-# INLINE logError #-} ------------------------------------------------------------------------------ -- | Adds the output from the given enumerator to the 'Response' -- stored in the 'Snap' monad state. addToOutput :: MonadSnap m => (forall a . 
Enumerator Builder IO a) -- ^ output to add -> m () addToOutput enum = modifyResponse $ modifyResponseBody (>==> enum) ------------------------------------------------------------------------------ -- | Adds the given 'Builder' to the body of the 'Response' stored in the -- | 'Snap' monad state. writeBuilder :: MonadSnap m => Builder -> m () writeBuilder b = addToOutput $ enumBuilder b {-# INLINE writeBuilder #-} ------------------------------------------------------------------------------ -- | Adds the given strict 'ByteString' to the body of the 'Response' stored -- in the 'Snap' monad state. -- -- Warning: This function is intentionally non-strict. If any pure -- exceptions are raised by the expression creating the 'ByteString', -- the exception won't actually be raised within the Snap handler. writeBS :: MonadSnap m => ByteString -> m () writeBS s = writeBuilder $ fromByteString s ------------------------------------------------------------------------------ -- | Adds the given lazy 'L.ByteString' to the body of the 'Response' stored -- in the 'Snap' monad state. -- -- Warning: This function is intentionally non-strict. If any pure -- exceptions are raised by the expression creating the 'ByteString', -- the exception won't actually be raised within the Snap handler. writeLBS :: MonadSnap m => L.ByteString -> m () writeLBS s = writeBuilder $ fromLazyByteString s ------------------------------------------------------------------------------ -- | Adds the given strict 'T.Text' to the body of the 'Response' stored in -- the 'Snap' monad state. -- -- Warning: This function is intentionally non-strict. If any pure -- exceptions are raised by the expression creating the 'ByteString', -- the exception won't actually be raised within the Snap handler. writeText :: MonadSnap m => T.Text -> m () writeText s = writeBuilder $ fromText s ------------------------------------------------------------------------------ -- | Adds the given lazy 'LT.Text' to the body of the 'Response' stored in the -- 'Snap' monad state. -- -- Warning: This function is intentionally non-strict. If any pure -- exceptions are raised by the expression creating the 'ByteString', -- the exception won't actually be raised within the Snap handler. writeLazyText :: MonadSnap m => LT.Text -> m () writeLazyText s = writeBuilder $ fromLazyText s ------------------------------------------------------------------------------ -- | Sets the output to be the contents of the specified file. -- -- Calling 'sendFile' will overwrite any output queued to be sent in the -- 'Response'. If the response body is not modified after the call to -- 'sendFile', Snap will use the efficient @sendfile()@ system call on -- platforms that support it. -- -- If the response body is modified (using 'modifyResponseBody'), the file -- will be read using @mmap()@. sendFile :: (MonadSnap m) => FilePath -> m () sendFile f = modifyResponse $ \r -> r { rspBody = SendFile f Nothing } ------------------------------------------------------------------------------ -- | Sets the output to be the contents of the specified file, within the -- given (start,end) range. -- -- Calling 'sendFilePartial' will overwrite any output queued to be sent in -- the 'Response'. If the response body is not modified after the call to -- 'sendFilePartial', Snap will use the efficient @sendfile()@ system call on -- platforms that support it. -- -- If the response body is modified (using 'modifyResponseBody'), the file -- will be read using @mmap()@. 
sendFilePartial :: (MonadSnap m) => FilePath -> (Int64,Int64) -> m () sendFilePartial f rng = modifyResponse $ \r -> r { rspBody = SendFile f (Just rng) } ------------------------------------------------------------------------------ -- | Runs a 'Snap' action with a locally-modified 'Request' state -- object. The 'Request' object in the Snap monad state after the call -- to localRequest will be unchanged. localRequest :: MonadSnap m => (Request -> Request) -> m a -> m a localRequest f m = do req <- getRequest runAct req <|> (putRequest req >> pass) where runAct req = do modifyRequest f result <- m putRequest req return result {-# INLINE localRequest #-} ------------------------------------------------------------------------------ -- | Fetches the 'Request' from state and hands it to the given action. withRequest :: MonadSnap m => (Request -> m a) -> m a withRequest = (getRequest >>=) {-# INLINE withRequest #-} ------------------------------------------------------------------------------ -- | Fetches the 'Response' from state and hands it to the given action. withResponse :: MonadSnap m => (Response -> m a) -> m a withResponse = (getResponse >>=) {-# INLINE withResponse #-} ------------------------------------------------------------------------------ -- | Modifies the 'Request' in the state to set the 'rqRemoteAddr' -- field to the value in the X-Forwarded-For header. If the header is -- not present, this action has no effect. -- -- This action should be used only when working behind a reverse http -- proxy that sets the X-Forwarded-For header. This is the only way to -- ensure the value in the X-Forwarded-For header can be trusted. -- -- This is provided as a filter so actions that require the remote -- address can get it in a uniform manner. It has specifically limited -- functionality to ensure that its transformation can be trusted, -- when used correctly. ipHeaderFilter :: MonadSnap m => m () ipHeaderFilter = ipHeaderFilter' "x-forwarded-for" ------------------------------------------------------------------------------ -- | Modifies the 'Request' in the state to set the 'rqRemoteAddr' -- field to the value from the header specified. If the header -- specified is not present, this action has no effect. -- -- This action should be used only when working behind a reverse http -- proxy that sets the header being looked at. This is the only way to -- ensure the value in the header can be trusted. -- -- This is provided as a filter so actions that require the remote -- address can get it in a uniform manner. It has specifically limited -- functionality to ensure that its transformation can be trusted, -- when used correctly. ipHeaderFilter' :: MonadSnap m => CI ByteString -> m () ipHeaderFilter' header = do headerContents <- getHeader header <$> getRequest let whitespace = [ ' ', '\t', '\r', '\n' ] ipChrs = [ '.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' ] trim f s = f (`elem` s) clean = trim S.takeWhile ipChrs . trim S.dropWhile whitespace setIP ip = modifyRequest $ \rq -> rq { rqRemoteAddr = clean ip } maybe (return ()) setIP headerContents ------------------------------------------------------------------------------ -- | This function brackets a Snap action in resource acquisition and -- release. This is provided because MonadCatchIO's 'bracket' function -- doesn't work properly in the case of a short-circuit return from -- the action being bracketed. 
-- -- In order to prevent confusion regarding the effects of the -- aquisition and release actions on the Snap state, this function -- doesn't accept Snap actions for the acquire or release actions. -- -- This function will run the release action in all cases where the -- acquire action succeeded. This includes the following behaviors -- from the bracketed Snap action. -- -- 1. Normal completion -- -- 2. Short-circuit completion, either from calling 'fail' or 'finishWith' -- -- 3. An exception being thrown. bracketSnap :: IO a -> (a -> IO b) -> (a -> Snap c) -> Snap c bracketSnap before after thing = block . Snap $ do a <- liftIO before let after' = liftIO $ after a (Snap thing') = thing a r <- unblock thing' `onException` after' _ <- after' return r ------------------------------------------------------------------------------ -- | This exception is thrown if the handler you supply to 'runSnap' fails. data NoHandlerException = NoHandlerException String deriving (Eq, Typeable) ------------------------------------------------------------------------------ instance Show NoHandlerException where show (NoHandlerException e) = "No handler for request: failure was " ++ e ------------------------------------------------------------------------------ instance Exception NoHandlerException ------------------------------------------------------------------------------ -- | Terminate the HTTP session with the given exception. terminateConnection :: (Exception e, MonadCatchIO m) => e -> m a terminateConnection = throw . ConnectionTerminatedException . toException ------------------------------------------------------------------------------ -- | Terminate the HTTP session and hand control to some external handler, -- escaping all further HTTP traffic. -- -- The external handler takes two arguments: a function to modify the thread's -- timeout, and a write end to the socket. escapeHttp :: MonadCatchIO m => EscapeHttpHandler -> m () escapeHttp = throw . EscapeHttpException ------------------------------------------------------------------------------ -- | Runs a 'Snap' monad action in the 'Iteratee IO' monad. 
runSnap :: Snap a -> (ByteString -> IO ()) -> ((Int -> Int) -> IO ()) -> Request -> Iteratee ByteString IO (Request,Response) runSnap (Snap m) logerr timeoutAction req = do (r, ss') <- runStateT m ss let resp = case r of SnapValue _ -> _snapResponse ss' PassOnProcessing _ -> fourohfour EarlyTermination x -> x let req' = _snapRequest ss' resp' <- liftIO $ fixupResponse req' resp return (req', resp') where -------------------------------------------------------------------------- fourohfour = do clearContentLength $ setResponseStatus 404 "Not Found" $ setResponseBody enum404 $ emptyResponse -------------------------------------------------------------------------- enum404 = enumBuilder $ mconcat $ map fromByteString html -------------------------------------------------------------------------- html = [ S.concat [ "<!DOCTYPE html>\n" , "<html>\n" , "<head>\n" , "<title>Not found</title>\n" , "</head>\n" , "<body>\n" , "<code>No handler accepted \"" ] , rqURI req , "\"</code>\n</body></html>" ] -------------------------------------------------------------------------- dresp = emptyResponse { rspHttpVersion = rqVersion req } -------------------------------------------------------------------------- ss = SnapState req dresp logerr timeoutAction {-# INLINE runSnap #-} -------------------------------------------------------------------------- -- | Post-process a finalized HTTP response: -- -- * fixup content-length header -- * properly handle 204/304 responses -- * if request was HEAD, remove response body -- -- Note that we do NOT deal with transfer-encoding: chunked or "connection: -- close" here. fixupResponse :: Request -> Response -> IO Response fixupResponse req rsp = {-# SCC "fixupResponse" #-} do let code = rspStatus rsp let rsp' = if code == 204 || code == 304 then handle304 rsp else rsp rsp'' <- do z <- case rspBody rsp' of (Enum _) -> return rsp' (SendFile f Nothing) -> setFileSize f rsp' (SendFile _ (Just (s,e))) -> return $! setContentLength (e-s) rsp' return $! case rspContentLength z of Nothing -> deleteHeader "Content-Length" z (Just sz) -> setHeader "Content-Length" (toByteString $ fromShow sz) z -- HEAD requests cannot have bodies per RFC 2616 sec. 9.4 if rqMethod req == HEAD then return $! deleteHeader "Transfer-Encoding" $ rsp'' { rspBody = Enum $ enumBuilder mempty } else return $! rsp'' where -------------------------------------------------------------------------- setFileSize :: FilePath -> Response -> IO Response setFileSize fp r = {-# SCC "setFileSize" #-} do fs <- liftM fromIntegral $ getFileSize fp return $! 
r { rspContentLength = Just fs } ------------------------------------------------------------------------------ getFileSize :: FilePath -> IO FileOffset getFileSize fp = liftM fileSize $ getFileStatus fp -------------------------------------------------------------------------- handle304 :: Response -> Response handle304 r = setResponseBody (enumBuilder mempty) $ updateHeaders (H.delete "Transfer-Encoding") $ clearContentLength r {-# INLINE fixupResponse #-} ------------------------------------------------------------------------------ evalSnap :: Snap a -> (ByteString -> IO ()) -> ((Int -> Int) -> IO ()) -> Request -> Iteratee ByteString IO a evalSnap (Snap m) logerr timeoutAction req = do (r, _) <- runStateT m ss case r of SnapValue x -> return x PassOnProcessing e -> liftIO $ throwIO $ NoHandlerException e EarlyTermination _ -> liftIO $ throwIO $ ErrorCall "no value" where dresp = emptyResponse { rspHttpVersion = rqVersion req } ss = SnapState req dresp logerr timeoutAction {-# INLINE evalSnap #-} ------------------------------------------------------------------------------ getParamFrom :: MonadSnap m => (ByteString -> Request -> Maybe [ByteString]) -> ByteString -> m (Maybe ByteString) getParamFrom f k = do rq <- getRequest return $! liftM (S.intercalate " ") $ f k rq {-# INLINE getParamFrom #-} ------------------------------------------------------------------------------ -- | See 'rqParam'. Looks up a value for the given named parameter in the -- 'Request'. If more than one value was entered for the given parameter name, -- 'getParam' gloms the values together with: -- -- @ 'S.intercalate' \" \"@ -- getParam :: MonadSnap m => ByteString -- ^ parameter name to look up -> m (Maybe ByteString) getParam = getParamFrom rqParam {-# INLINE getParam #-} ------------------------------------------------------------------------------ -- | See 'rqPostParam'. Looks up a value for the given named parameter in the -- POST form parameters mapping in 'Request'. If more than one value was -- entered for the given parameter name, 'getPostParam' gloms the values -- together with: -- -- @ 'S.intercalate' \" \"@ -- getPostParam :: MonadSnap m => ByteString -- ^ parameter name to look up -> m (Maybe ByteString) getPostParam = getParamFrom rqPostParam {-# INLINE getPostParam #-} ------------------------------------------------------------------------------ -- | See 'rqQueryParam'. Looks up a value for the given named parameter in the -- query string parameters mapping in 'Request'. If more than one value was -- entered for the given parameter name, 'getQueryParam' gloms the values -- together with: -- -- @ 'S.intercalate' \" \"@ -- getQueryParam :: MonadSnap m => ByteString -- ^ parameter name to look up -> m (Maybe ByteString) getQueryParam = getParamFrom rqQueryParam {-# INLINE getQueryParam #-} ------------------------------------------------------------------------------ -- | See 'rqParams'. Convenience function to return 'Params' from the -- 'Request' inside of a 'MonadSnap' instance. getParams :: MonadSnap m => m Params getParams = getRequest >>= return . rqParams ------------------------------------------------------------------------------ -- | See 'rqParams'. Convenience function to return 'Params' from the -- 'Request' inside of a 'MonadSnap' instance. getPostParams :: MonadSnap m => m Params getPostParams = getRequest >>= return . rqPostParams ------------------------------------------------------------------------------ -- | See 'rqParams'. 
Convenience function to return 'Params' from the -- 'Request' inside of a 'MonadSnap' instance. getQueryParams :: MonadSnap m => m Params getQueryParams = getRequest >>= return . rqQueryParams ------------------------------------------------------------------------------ -- | Gets the HTTP 'Cookie' with the specified name. getCookie :: MonadSnap m => ByteString -> m (Maybe Cookie) getCookie name = withRequest $ return . listToMaybe . filter (\c -> cookieName c == name) . rqCookies ------------------------------------------------------------------------------ -- | Gets the HTTP 'Cookie' with the specified name and decodes it. If the -- decoding fails, the handler calls pass. readCookie :: (MonadSnap m, Readable a) => ByteString -> m a readCookie name = maybe pass (fromBS . cookieValue) =<< getCookie name ------------------------------------------------------------------------------ -- | Expire the given 'Cookie' in client's browser. expireCookie :: (MonadSnap m) => ByteString -- ^ Cookie name -> Maybe ByteString -- ^ Cookie domain -> m () expireCookie nm dm = do let old = UTCTime (ModifiedJulianDay 0) 0 modifyResponse $ addResponseCookie $ Cookie nm "" (Just old) Nothing dm False False ------------------------------------------------------------------------------ -- | Causes the handler thread to be killed @n@ seconds from now. setTimeout :: MonadSnap m => Int -> m () setTimeout = modifyTimeout . const ------------------------------------------------------------------------------ -- | Causes the handler thread to be killed at least @n@ seconds from now. extendTimeout :: MonadSnap m => Int -> m () extendTimeout = modifyTimeout . max ------------------------------------------------------------------------------ -- | Modifies the amount of time remaining before the request times out. modifyTimeout :: MonadSnap m => (Int -> Int) -> m () modifyTimeout f = do m <- getTimeoutModifier liftIO $ m f ------------------------------------------------------------------------------ -- | Returns an 'IO' action which you can use to set the handling thread's -- timeout value. getTimeoutAction :: MonadSnap m => m (Int -> IO ()) getTimeoutAction = do modifier <- liftSnap $ liftM _snapModifyTimeout sget return $! modifier . const {-# DEPRECATED getTimeoutAction "use getTimeoutModifier instead. Since 0.8." #-} ------------------------------------------------------------------------------ -- | Returns an 'IO' action which you can use to modify the timeout value. getTimeoutModifier :: MonadSnap m => m ((Int -> Int) -> IO ()) getTimeoutModifier = liftSnap $ liftM _snapModifyTimeout sget
f-me/snap-core
src/Snap/Internal/Types.hs
bsd-3-clause
42,614
3
20
9,349
6,317
3,348
2,969
497
6
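Note on the Snap.Internal.Types row above: it defines the user-facing routing and output combinators (ifTop, method, dir, path, getParam, pass, writeBS). The following is a minimal, hedged sketch of how those combinators are typically composed in a user handler. It assumes the public Snap.Core module of the snap-core package re-exports these names and that quickHttpServe from the separate snap-server package is available; the handler names and routes are illustrative only and do not come from the row itself.

{-# LANGUAGE OverloadedStrings #-}
module Main where

import Control.Applicative ((<|>))
import Snap.Core
import Snap.Http.Server (quickHttpServe)  -- assumed: provided by snap-server

-- Route "/" to a greeting, "POST /echo/..." to a parameter echo,
-- and exactly "/ping" to a fixed response; anything else falls through.
site :: Snap ()
site = ifTop (writeBS "hello")
   <|> method POST (dir "echo" echoHandler)
   <|> path "ping" (writeBS "pong")

-- Echo the "msg" parameter back, or 'pass' to the next handler when absent.
echoHandler :: Snap ()
echoHandler = getParam "msg" >>= maybe pass writeBS

main :: IO ()
main = quickHttpServe site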
module Fibon.Run.BenchmarkRunner ( RunResult(..) , RunFailure(..) , Fibon.Run.BenchmarkRunner.run ) where import Control.Concurrent import Control.Monad import Control.Exception import qualified Data.ByteString as B import Data.Maybe import Data.Time.Clock import qualified Data.Vector.Unboxed as Vector import Fibon.BenchmarkInstance import Fibon.Result import Fibon.Run.BenchmarkBundle import Fibon.Run.Log as Log import qualified Fibon.Run.SysTools as SysTools import Statistics.Sample import System.Directory import System.FilePath import System.IO import System.Process import Text.Printf data RunResult = Success {runSummary :: RunSummary, runDetails :: [RunDetail]} | Failure [RunFailure] deriving (Read, Show) data RunFailure = MissingOutput FilePath | DiffError String | Timeout | ExitError {exitExpected :: ExitCode, exitActual :: ExitCode} deriving (Read, Show) run :: BenchmarkBundle -> IO RunResult run bb = do let bmk = (bundleName bb) pwd = (pathToExeBuildDir bb) cmd = (prettyRunCommand bb) Log.info $ "Running Benchmark " Log.info $ " BMK: " ++ bmk Log.info $ " PWD: " ++ pwd Log.info $ " CMD: " ++ cmd Log.info $ printf "\n@%s|%s|%s" bmk pwd cmd runDirect bb checkResult :: BenchmarkBundle -> ExitCode -> IO (Maybe [RunFailure]) checkResult bb exitCode = do outputs <- mapM (checkOutput bb) (output . benchDetails $ bb) let results = checkExit bb exitCode : outputs errs = filter isJust results case errs of [] -> return $ Nothing es -> return $ Just (catMaybes es) checkExit :: BenchmarkBundle -> ExitCode -> Maybe RunFailure checkExit bb actual = if actual == expected then Nothing else Just ee where expected = expectedExit . benchDetails $ bb ee = ExitError {exitExpected = expected, exitActual = actual} checkOutput :: BenchmarkBundle -> OutputDescription -> IO (Maybe RunFailure) checkOutput bb (o, Exists) = do let f = (destinationToRealFile bb o) e <- doesFileExist f if e then return Nothing else return $ Just $ MissingOutput ("File "++f++" does not exist") checkOutput bb (o, Diff diffFile) = do e1 <- checkOutput bb (o, Exists) e2 <- checkOutput bb (d, Exists) e3 <- runDiff f1 f2 return $ msum [e1, e2, e3] where d = OutputFile diffFile f1 = (destinationToRealFile bb o) f2 = (destinationToRealFile bb d) runDiff :: FilePath -> FilePath -> IO (Maybe RunFailure) runDiff f1 f2 = do Log.info $ "Diffing files: "++f1++" "++f2 (r, o, _) <- readProcessWithExitCode (SysTools.diff) [f1, f2] "" if r == ExitSuccess then Log.info "No diff error" >> return Nothing else Log.info "Diff error" >> (return $ Just $ DiffError o) destinationToRealFile :: BenchmarkBundle -> OutputDestination -> FilePath destinationToRealFile bb (OutputFile f) = (pathToExeRunDir bb) </> f destinationToRealFile bb Stdout = (pathToStdoutFile bb) destinationToRealFile bb Stderr = (pathToStderrFile bb) readExtraStats :: BenchmarkBundle -> IO ExtraStats readExtraStats bb = do let mbStatsFile = extraStats bb statsFile = fromJust mbStatsFile logReadE :: IOException -> IO ExtraStats logReadE e = Log.warn ("Error reading stats file: "++statsFile++"\n "++show e) >> return B.empty case mbStatsFile of Nothing -> return B.empty Just f -> do handle logReadE $ bracket (openFile ((pathToExeRunDir bb) </> f) ReadMode) (hClose) (\h -> B.hGetContents h >>= \s -> B.length s `seq` return s) type RunStepResult = IO (Either [RunFailure] RunDetail) runDirect :: BenchmarkBundle -> IO RunResult runDirect bb = do mbDetails <- go count [] case mbDetails of Left e -> return $ Failure e Right ds -> return $ Success (summarize ds) ds where go 0 ds = return $ Right 
(reverse ds) go n ds = do res <- runB bb case res of Right d -> go (n-1) (d:ds) Left e -> return $ Left e runB = maybe runBenchmarkWithoutTimeout runBenchmarkWithTimeout limit limit = timeout bb count = (iters bb) summarize :: [RunDetail] -> RunSummary summarize ds = RunSummary { meanTime = mean times , stdDevTime = stdDev times , statsSummary = stats } where times = (Vector.fromList $ map runTime ds) stats = case ds of (x:_) -> runStats x; _ -> B.empty type TimeoutLength = Int runBenchmarkWithTimeout :: TimeoutLength -> BenchmarkBundle -> RunStepResult runBenchmarkWithTimeout us bb = do resMVar <- newEmptyMVar pidMVar <- newEmptyMVar tid1 <- forkIO $ (putMVar resMVar . Just) =<< timeBenchmarkExe bb (Just pidMVar) _ <- forkIO $ threadDelay us >> putMVar resMVar Nothing res <- takeMVar resMVar case res of Nothing -> do Log.info $ "benchmark timed out after "++(show us)++" us" -- try to kill the subprocess pid <- tryTakeMVar pidMVar maybe pass terminateProcess pid -- kill the haskell thread killThread tid1 return $ Left [Timeout] Just (runDetail, exitCode) -> do maybe (Right runDetail) Left `liftM` checkResult bb exitCode runBenchmarkWithoutTimeout :: BenchmarkBundle -> RunStepResult runBenchmarkWithoutTimeout bb = do (runDetail, exitCode) <- timeBenchmarkExe bb Nothing maybe (Right runDetail) Left `liftM` checkResult bb exitCode timeBenchmarkExe :: BenchmarkBundle -- benchmark to run -> Maybe (MVar ProcessHandle) -- in case we need to kill it -> IO (RunDetail, ExitCode) timeBenchmarkExe bb pidMVar = do p <- bundleProcessSpec bb start <- getCurrentTime (_, _, _, pid) <- createProcess p maybe pass (flip putMVar pid) pidMVar exit <- waitForProcess pid end <- getCurrentTime mapM_ closeStdIO [std_in p, std_out p, std_err p] stats <- readExtraStats bb return $ (RunDetail (realToFrac (diffUTCTime end start)) stats, exit) closeStdIO :: StdStream -> IO () closeStdIO (UseHandle h) = hClose h closeStdIO _ = return () pass :: IO () pass = return()
dmpots/fibon
tools/fibon-run/Fibon/Run/BenchmarkRunner.hs
bsd-3-clause
6,130
0
20
1,449
2,052
1,043
1,009
158
4
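Note on the BenchmarkRunner row above: runBenchmarkWithTimeout implements its timeout by racing the timed benchmark against a delay thread through a shared MVar. The standalone sketch below isolates just that pattern using only Control.Concurrent from base; withTimeout is a hypothetical helper name, and the real runner additionally terminates the spawned benchmark process through its ProcessHandle, which this sketch omits.

import Control.Concurrent

-- Run an action with a time limit given in microseconds.
-- Whichever thread fills the MVar first decides the outcome.
withTimeout :: Int -> IO a -> IO (Maybe a)
withTimeout us act = do
  box    <- newEmptyMVar
  worker <- forkIO (act >>= putMVar box . Just)
  _      <- forkIO (threadDelay us >> putMVar box Nothing)
  result <- takeMVar box
  case result of
    Nothing -> killThread worker >> return Nothing   -- timed out: stop the worker
    found   -> return found                          -- finished within the limit

main :: IO ()
main = withTimeout 1000000 (threadDelay 500000 >> return "done") >>= print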
{-# LANGUAGE OverloadedStrings #-} module Tests.Readers.Markdown (tests) where import Text.Pandoc.Definition import Test.Framework import Tests.Helpers import Tests.Arbitrary() import Text.Pandoc.Builder import qualified Data.Set as Set -- import Text.Pandoc.Shared ( normalize ) import Text.Pandoc markdown :: String -> Pandoc markdown = readMarkdown def markdownSmart :: String -> Pandoc markdownSmart = readMarkdown def { readerSmart = True } markdownCDL :: String -> Pandoc markdownCDL = readMarkdown def { readerExtensions = Set.insert Ext_compact_definition_lists $ readerExtensions def } markdownGH :: String -> Pandoc markdownGH = readMarkdown def { readerExtensions = githubMarkdownExtensions } infix 4 =: (=:) :: ToString c => String -> (String, c) -> Test (=:) = test markdown testBareLink :: (String, Inlines) -> Test testBareLink (inp, ils) = test (readMarkdown def{ readerExtensions = Set.fromList [Ext_autolink_bare_uris, Ext_raw_html] }) inp (inp, doc $ para ils) autolink :: String -> Inlines autolink s = link s "" (str s) bareLinkTests :: [(String, Inlines)] bareLinkTests = [ ("http://google.com is a search engine.", autolink "http://google.com" <> " is a search engine.") , ("<a href=\"http://foo.bar.baz\">http://foo.bar.baz</a>", rawInline "html" "<a href=\"http://foo.bar.baz\">" <> "http://foo.bar.baz" <> rawInline "html" "</a>") , ("Try this query: http://google.com?search=fish&time=hour.", "Try this query: " <> autolink "http://google.com?search=fish&time=hour" <> ".") , ("HTTPS://GOOGLE.COM,", autolink "HTTPS://GOOGLE.COM" <> ",") , ("http://el.wikipedia.org/wiki/Τεχνολογία,", autolink "http://el.wikipedia.org/wiki/Τεχνολογία" <> ",") , ("doi:10.1000/182,", autolink "doi:10.1000/182" <> ",") , ("git://github.com/foo/bar.git,", autolink "git://github.com/foo/bar.git" <> ",") , ("file:///Users/joe/joe.txt, and", autolink "file:///Users/joe/joe.txt" <> ", and") , ("mailto:[email protected].", autolink "mailto:[email protected]" <> ".") , ("Use http: this is not a link!", "Use http: this is not a link!") , ("(http://google.com).", "(" <> autolink "http://google.com" <> ").") , ("http://en.wikipedia.org/wiki/Sprite_(computer_graphics)", autolink "http://en.wikipedia.org/wiki/Sprite_(computer_graphics)") , ("http://en.wikipedia.org/wiki/Sprite_[computer_graphics]", autolink "http://en.wikipedia.org/wiki/Sprite_[computer_graphics]") , ("http://en.wikipedia.org/wiki/Sprite_{computer_graphics}", autolink "http://en.wikipedia.org/wiki/Sprite_{computer_graphics}") , ("http://example.com/Notification_Center-GitHub-20101108-140050.jpg", autolink "http://example.com/Notification_Center-GitHub-20101108-140050.jpg") , ("https://github.com/github/hubot/blob/master/scripts/cream.js#L20-20", autolink "https://github.com/github/hubot/blob/master/scripts/cream.js#L20-20") , ("http://www.rubyonrails.com", autolink "http://www.rubyonrails.com") , ("http://www.rubyonrails.com:80", autolink "http://www.rubyonrails.com:80") , ("http://www.rubyonrails.com/~minam", autolink "http://www.rubyonrails.com/~minam") , ("https://www.rubyonrails.com/~minam", autolink "https://www.rubyonrails.com/~minam") , ("http://www.rubyonrails.com/~minam/url%20with%20spaces", autolink "http://www.rubyonrails.com/~minam/url%20with%20spaces") , ("http://www.rubyonrails.com/foo.cgi?something=here", autolink "http://www.rubyonrails.com/foo.cgi?something=here") , ("http://www.rubyonrails.com/foo.cgi?something=here&and=here", autolink "http://www.rubyonrails.com/foo.cgi?something=here&and=here") , 
("http://www.rubyonrails.com/contact;new", autolink "http://www.rubyonrails.com/contact;new") , ("http://www.rubyonrails.com/contact;new%20with%20spaces", autolink "http://www.rubyonrails.com/contact;new%20with%20spaces") , ("http://www.rubyonrails.com/contact;new?with=query&string=params", autolink "http://www.rubyonrails.com/contact;new?with=query&string=params") , ("http://www.rubyonrails.com/~minam/contact;new?with=query&string=params", autolink "http://www.rubyonrails.com/~minam/contact;new?with=query&string=params") , ("http://en.wikipedia.org/wiki/Wikipedia:Today%27s_featured_picture_%28animation%29/January_20%2C_2007", autolink "http://en.wikipedia.org/wiki/Wikipedia:Today%27s_featured_picture_%28animation%29/January_20%2C_2007") , ("http://www.mail-archive.com/[email protected]/", autolink "http://www.mail-archive.com/[email protected]/") , ("http://www.amazon.com/Testing-Equal-Sign-In-Path/ref=pd_bbs_sr_1?ie=UTF8&s=books&qid=1198861734&sr=8-1", autolink "http://www.amazon.com/Testing-Equal-Sign-In-Path/ref=pd_bbs_sr_1?ie=UTF8&s=books&qid=1198861734&sr=8-1") , ("http://en.wikipedia.org/wiki/Texas_hold%27em", autolink "http://en.wikipedia.org/wiki/Texas_hold%27em") , ("https://www.google.com/doku.php?id=gps:resource:scs:start", autolink "https://www.google.com/doku.php?id=gps:resource:scs:start") , ("http://www.rubyonrails.com", autolink "http://www.rubyonrails.com") , ("http://manuals.ruby-on-rails.com/read/chapter.need_a-period/103#page281", autolink "http://manuals.ruby-on-rails.com/read/chapter.need_a-period/103#page281") , ("http://foo.example.com/controller/action?parm=value&p2=v2#anchor123", autolink "http://foo.example.com/controller/action?parm=value&p2=v2#anchor123") , ("http://foo.example.com:3000/controller/action", autolink "http://foo.example.com:3000/controller/action") , ("http://foo.example.com:3000/controller/action+pack", autolink "http://foo.example.com:3000/controller/action+pack") , ("http://business.timesonline.co.uk/article/0,,9065-2473189,00.html", autolink "http://business.timesonline.co.uk/article/0,,9065-2473189,00.html") , ("http://www.mail-archive.com/[email protected]/", autolink "http://www.mail-archive.com/[email protected]/") ] {- p_markdown_round_trip :: Block -> Bool p_markdown_round_trip b = matches d' d'' where d' = normalize $ Pandoc (Meta [] [] []) [b] d'' = normalize $ readMarkdown def { readerSmart = True } $ writeMarkdown def d' matches (Pandoc _ [Plain []]) (Pandoc _ []) = True matches (Pandoc _ [Para []]) (Pandoc _ []) = True matches (Pandoc _ [Plain xs]) (Pandoc _ [Para xs']) = xs == xs' matches x y = x == y -} tests :: [Test] tests = [ testGroup "inline code" [ "with attribute" =: "`document.write(\"Hello\");`{.javascript}" =?> para (codeWith ("",["javascript"],[]) "document.write(\"Hello\");") , "with attribute space" =: "`*` {.haskell .special x=\"7\"}" =?> para (codeWith ("",["haskell","special"],[("x","7")]) "*") ] , testGroup "emph and strong" [ "two strongs in emph" =: "***a**b **c**d*" =?> para (emph (strong (str "a") <> str "b" <> space <> strong (str "c") <> str "d")) , "emph and strong emph alternating" =: "*xxx* ***xxx*** xxx\n*xxx* ***xxx*** xxx" =?> para (emph "xxx" <> space <> strong (emph "xxx") <> space <> "xxx" <> space <> emph "xxx" <> space <> strong (emph "xxx") <> space <> "xxx") , "emph with spaced strong" =: "*x **xx** x*" =?> para (emph ("x" <> space <> strong "xx" <> space <> "x")) , "intraword underscore with opening underscore (#1121)" =: "_foot_ball_" =?> para (emph (text "foot_ball")) ] , testGroup "raw 
LaTeX" [ "in URL" =: "\\begin\n" =?> para (text "\\begin") ] , testGroup "raw HTML" [ "nesting (issue #1330)" =: "<del>test</del>" =?> rawBlock "html" "<del>" <> plain (str "test") <> rawBlock "html" "</del>" ] , "unbalanced brackets" =: "[[[[[[[[[[[[[[[hi" =?> para (text "[[[[[[[[[[[[[[[hi") , testGroup "backslash escapes" [ "in URL" =: "[hi](/there\\))" =?> para (link "/there)" "" "hi") , "in title" =: "[hi](/there \"a\\\"a\")" =?> para (link "/there" "a\"a" "hi") , "in reference link title" =: "[hi]\n\n[hi]: /there (a\\)a)" =?> para (link "/there" "a)a" "hi") , "in reference link URL" =: "[hi]\n\n[hi]: /there\\.0" =?> para (link "/there.0" "" "hi") ] , testGroup "bare URIs" (map testBareLink bareLinkTests) , testGroup "autolinks" [ "with unicode dash following" =: "<http://foo.bar>\8212" =?> para (autolink "http://foo.bar" <> str "\8212") ] , testGroup "Headers" [ "blank line before header" =: "\n# Header\n" =?> headerWith ("header",[],[]) 1 "Header" ] , testGroup "smart punctuation" [ test markdownSmart "quote before ellipses" ("'...hi'" =?> para (singleQuoted "…hi")) , test markdownSmart "apostrophe before emph" ("D'oh! A l'*aide*!" =?> para ("D’oh! A l’" <> emph "aide" <> "!")) , test markdownSmart "apostrophe in French" ("À l'arrivée de la guerre, le thème de l'«impossibilité du socialisme»" =?> para "À l’arrivée de la guerre, le thème de l’«impossibilité du socialisme»") ] , testGroup "footnotes" [ "indent followed by newline and flush-left text" =: "[^1]\n\n[^1]: my note\n\n \nnot in note\n" =?> para (note (para "my note")) <> para "not in note" , "indent followed by newline and indented text" =: "[^1]\n\n[^1]: my note\n \n in note\n" =?> para (note (para "my note" <> para "in note")) , "recursive note" =: "[^1]\n\n[^1]: See [^1]\n" =?> para (note (para "See [^1]")) ] , testGroup "lhs" [ test (readMarkdown def{ readerExtensions = Set.insert Ext_literate_haskell $ readerExtensions def }) "inverse bird tracks and html" $ "> a\n\n< b\n\n<div>\n" =?> codeBlockWith ("",["sourceCode","literate","haskell"],[]) "a" <> codeBlockWith ("",["sourceCode","haskell"],[]) "b" <> rawBlock "html" "<div>\n\n" ] -- the round-trip properties frequently fail -- , testGroup "round trip" -- [ property "p_markdown_round_trip" p_markdown_round_trip -- ] , testGroup "definition lists" [ "no blank space" =: "foo1\n : bar\n\nfoo2\n : bar2\n : bar3\n" =?> definitionList [ (text "foo1", [plain (text "bar")]) , (text "foo2", [plain (text "bar2"), plain (text "bar3")]) ] , "blank space before first def" =: "foo1\n\n : bar\n\nfoo2\n\n : bar2\n : bar3\n" =?> definitionList [ (text "foo1", [para (text "bar")]) , (text "foo2", [para (text "bar2"), plain (text "bar3")]) ] , "blank space before second def" =: "foo1\n : bar\n\nfoo2\n : bar2\n\n : bar3\n" =?> definitionList [ (text "foo1", [plain (text "bar")]) , (text "foo2", [plain (text "bar2"), para (text "bar3")]) ] , "laziness" =: "foo1\n : bar\nbaz\n : bar2\n" =?> definitionList [ (text "foo1", [plain (text "bar baz"), plain (text "bar2")]) ] , "no blank space before first of two paragraphs" =: "foo1\n : bar\n\n baz\n" =?> definitionList [ (text "foo1", [para (text "bar") <> para (text "baz")]) ] ] , testGroup "+compact_definition_lists" [ test markdownCDL "basic compact list" $ "foo1\n: bar\n baz\nfoo2\n: bar2\n" =?> definitionList [ (text "foo1", [plain (text "bar baz")]) , (text "foo2", [plain (text "bar2")]) ] ] , testGroup "lists" [ "issue #1154" =: " - <div>\n first div breaks\n </div>\n\n <button>if this button exists</button>\n\n <div>\n with this div 
too.\n </div>\n" =?> bulletList [divWith nullAttr (para $ text "first div breaks") <> rawBlock "html" "<button>" <> plain (text "if this button exists") <> rawBlock "html" "</button>" <> divWith nullAttr (para $ text "with this div too.")] , test markdownGH "issue #1636" $ unlines [ "* a" , "* b" , "* c" , " * d" ] =?> bulletList [ plain "a" , plain "b" , plain "c" <> bulletList [plain "d"] ] ] ]
sapek/pandoc
tests/Tests/Readers/Markdown.hs
gpl-2.0
13,875
0
22
3,968
2,326
1,243
1,083
243
1
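Note on the Markdown reader tests above: they drive readMarkdown with various ReaderOptions (plain, smart punctuation, extra extensions) and compare the result against ASTs built with Text.Pandoc.Builder. As a rough illustration of the same older, pure pandoc 1.x API outside the test harness, the sketch below parses a Markdown string with the smart-punctuation option the tests use and renders it to HTML; writeHtmlString is assumed from that API generation and does not appear in the test file.

import Text.Pandoc

main :: IO ()
main = do
  -- Parse with smart punctuation enabled, as markdownSmart does in the tests.
  let doc = readMarkdown def { readerSmart = True } "Some 'quoted' *emphasis*..."
  -- Render the parsed AST as an HTML fragment (assumed 1.x writer API).
  putStrLn (writeHtmlString def doc)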
{-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-orphans #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Test.AWS.Gen.CognitoIdentity -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Test.AWS.Gen.CognitoIdentity where import Data.Proxy import Test.AWS.Fixture import Test.AWS.Prelude import Test.Tasty import Network.AWS.CognitoIdentity import Test.AWS.CognitoIdentity.Internal -- Auto-generated: the actual test selection needs to be manually placed into -- the top-level so that real test data can be incrementally added. -- -- This commented snippet is what the entire set should look like: -- fixtures :: TestTree -- fixtures = -- [ testGroup "request" -- [ testGetOpenIdToken $ -- getOpenIdToken -- -- , testGetOpenIdTokenForDeveloperIdentity $ -- getOpenIdTokenForDeveloperIdentity -- -- , testDescribeIdentityPool $ -- describeIdentityPool -- -- , testGetId $ -- getId -- -- , testDeleteIdentityPool $ -- deleteIdentityPool -- -- , testUpdateIdentityPool $ -- updateIdentityPool -- -- , testUnlinkDeveloperIdentity $ -- unlinkDeveloperIdentity -- -- , testGetIdentityPoolRoles $ -- getIdentityPoolRoles -- -- , testListIdentityPools $ -- listIdentityPools -- -- , testGetCredentialsForIdentity $ -- getCredentialsForIdentity -- -- , testDeleteIdentities $ -- deleteIdentities -- -- , testSetIdentityPoolRoles $ -- setIdentityPoolRoles -- -- , testListIdentities $ -- listIdentities -- -- , testLookupDeveloperIdentity $ -- lookupDeveloperIdentity -- -- , testUnlinkIdentity $ -- unlinkIdentity -- -- , testDescribeIdentity $ -- describeIdentity -- -- , testCreateIdentityPool $ -- createIdentityPool -- -- , testMergeDeveloperIdentities $ -- mergeDeveloperIdentities -- -- ] -- , testGroup "response" -- [ testGetOpenIdTokenResponse $ -- getOpenIdTokenResponse -- -- , testGetOpenIdTokenForDeveloperIdentityResponse $ -- getOpenIdTokenForDeveloperIdentityResponse -- -- , testDescribeIdentityPoolResponse $ -- identityPool -- -- , testGetIdResponse $ -- getIdResponse -- -- , testDeleteIdentityPoolResponse $ -- deleteIdentityPoolResponse -- -- , testUpdateIdentityPoolResponse $ -- identityPool -- -- , testUnlinkDeveloperIdentityResponse $ -- unlinkDeveloperIdentityResponse -- -- , testGetIdentityPoolRolesResponse $ -- getIdentityPoolRolesResponse -- -- , testListIdentityPoolsResponse $ -- listIdentityPoolsResponse -- -- , testGetCredentialsForIdentityResponse $ -- getCredentialsForIdentityResponse -- -- , testDeleteIdentitiesResponse $ -- deleteIdentitiesResponse -- -- , testSetIdentityPoolRolesResponse $ -- setIdentityPoolRolesResponse -- -- , testListIdentitiesResponse $ -- listIdentitiesResponse -- -- , testLookupDeveloperIdentityResponse $ -- lookupDeveloperIdentityResponse -- -- , testUnlinkIdentityResponse $ -- unlinkIdentityResponse -- -- , testDescribeIdentityResponse $ -- identityDescription -- -- , testCreateIdentityPoolResponse $ -- identityPool -- -- , testMergeDeveloperIdentitiesResponse $ -- mergeDeveloperIdentitiesResponse -- -- ] -- ] -- Requests testGetOpenIdToken :: GetOpenIdToken -> TestTree testGetOpenIdToken = req "GetOpenIdToken" "fixture/GetOpenIdToken.yaml" testGetOpenIdTokenForDeveloperIdentity :: GetOpenIdTokenForDeveloperIdentity -> TestTree testGetOpenIdTokenForDeveloperIdentity = req "GetOpenIdTokenForDeveloperIdentity" 
"fixture/GetOpenIdTokenForDeveloperIdentity.yaml" testDescribeIdentityPool :: DescribeIdentityPool -> TestTree testDescribeIdentityPool = req "DescribeIdentityPool" "fixture/DescribeIdentityPool.yaml" testGetId :: GetId -> TestTree testGetId = req "GetId" "fixture/GetId.yaml" testDeleteIdentityPool :: DeleteIdentityPool -> TestTree testDeleteIdentityPool = req "DeleteIdentityPool" "fixture/DeleteIdentityPool.yaml" testUpdateIdentityPool :: UpdateIdentityPool -> TestTree testUpdateIdentityPool = req "UpdateIdentityPool" "fixture/UpdateIdentityPool.yaml" testUnlinkDeveloperIdentity :: UnlinkDeveloperIdentity -> TestTree testUnlinkDeveloperIdentity = req "UnlinkDeveloperIdentity" "fixture/UnlinkDeveloperIdentity.yaml" testGetIdentityPoolRoles :: GetIdentityPoolRoles -> TestTree testGetIdentityPoolRoles = req "GetIdentityPoolRoles" "fixture/GetIdentityPoolRoles.yaml" testListIdentityPools :: ListIdentityPools -> TestTree testListIdentityPools = req "ListIdentityPools" "fixture/ListIdentityPools.yaml" testGetCredentialsForIdentity :: GetCredentialsForIdentity -> TestTree testGetCredentialsForIdentity = req "GetCredentialsForIdentity" "fixture/GetCredentialsForIdentity.yaml" testDeleteIdentities :: DeleteIdentities -> TestTree testDeleteIdentities = req "DeleteIdentities" "fixture/DeleteIdentities.yaml" testSetIdentityPoolRoles :: SetIdentityPoolRoles -> TestTree testSetIdentityPoolRoles = req "SetIdentityPoolRoles" "fixture/SetIdentityPoolRoles.yaml" testListIdentities :: ListIdentities -> TestTree testListIdentities = req "ListIdentities" "fixture/ListIdentities.yaml" testLookupDeveloperIdentity :: LookupDeveloperIdentity -> TestTree testLookupDeveloperIdentity = req "LookupDeveloperIdentity" "fixture/LookupDeveloperIdentity.yaml" testUnlinkIdentity :: UnlinkIdentity -> TestTree testUnlinkIdentity = req "UnlinkIdentity" "fixture/UnlinkIdentity.yaml" testDescribeIdentity :: DescribeIdentity -> TestTree testDescribeIdentity = req "DescribeIdentity" "fixture/DescribeIdentity.yaml" testCreateIdentityPool :: CreateIdentityPool -> TestTree testCreateIdentityPool = req "CreateIdentityPool" "fixture/CreateIdentityPool.yaml" testMergeDeveloperIdentities :: MergeDeveloperIdentities -> TestTree testMergeDeveloperIdentities = req "MergeDeveloperIdentities" "fixture/MergeDeveloperIdentities.yaml" -- Responses testGetOpenIdTokenResponse :: GetOpenIdTokenResponse -> TestTree testGetOpenIdTokenResponse = res "GetOpenIdTokenResponse" "fixture/GetOpenIdTokenResponse.proto" cognitoIdentity (Proxy :: Proxy GetOpenIdToken) testGetOpenIdTokenForDeveloperIdentityResponse :: GetOpenIdTokenForDeveloperIdentityResponse -> TestTree testGetOpenIdTokenForDeveloperIdentityResponse = res "GetOpenIdTokenForDeveloperIdentityResponse" "fixture/GetOpenIdTokenForDeveloperIdentityResponse.proto" cognitoIdentity (Proxy :: Proxy GetOpenIdTokenForDeveloperIdentity) testDescribeIdentityPoolResponse :: IdentityPool -> TestTree testDescribeIdentityPoolResponse = res "DescribeIdentityPoolResponse" "fixture/DescribeIdentityPoolResponse.proto" cognitoIdentity (Proxy :: Proxy DescribeIdentityPool) testGetIdResponse :: GetIdResponse -> TestTree testGetIdResponse = res "GetIdResponse" "fixture/GetIdResponse.proto" cognitoIdentity (Proxy :: Proxy GetId) testDeleteIdentityPoolResponse :: DeleteIdentityPoolResponse -> TestTree testDeleteIdentityPoolResponse = res "DeleteIdentityPoolResponse" "fixture/DeleteIdentityPoolResponse.proto" cognitoIdentity (Proxy :: Proxy DeleteIdentityPool) testUpdateIdentityPoolResponse :: IdentityPool -> TestTree 
testUpdateIdentityPoolResponse = res "UpdateIdentityPoolResponse" "fixture/UpdateIdentityPoolResponse.proto" cognitoIdentity (Proxy :: Proxy UpdateIdentityPool) testUnlinkDeveloperIdentityResponse :: UnlinkDeveloperIdentityResponse -> TestTree testUnlinkDeveloperIdentityResponse = res "UnlinkDeveloperIdentityResponse" "fixture/UnlinkDeveloperIdentityResponse.proto" cognitoIdentity (Proxy :: Proxy UnlinkDeveloperIdentity) testGetIdentityPoolRolesResponse :: GetIdentityPoolRolesResponse -> TestTree testGetIdentityPoolRolesResponse = res "GetIdentityPoolRolesResponse" "fixture/GetIdentityPoolRolesResponse.proto" cognitoIdentity (Proxy :: Proxy GetIdentityPoolRoles) testListIdentityPoolsResponse :: ListIdentityPoolsResponse -> TestTree testListIdentityPoolsResponse = res "ListIdentityPoolsResponse" "fixture/ListIdentityPoolsResponse.proto" cognitoIdentity (Proxy :: Proxy ListIdentityPools) testGetCredentialsForIdentityResponse :: GetCredentialsForIdentityResponse -> TestTree testGetCredentialsForIdentityResponse = res "GetCredentialsForIdentityResponse" "fixture/GetCredentialsForIdentityResponse.proto" cognitoIdentity (Proxy :: Proxy GetCredentialsForIdentity) testDeleteIdentitiesResponse :: DeleteIdentitiesResponse -> TestTree testDeleteIdentitiesResponse = res "DeleteIdentitiesResponse" "fixture/DeleteIdentitiesResponse.proto" cognitoIdentity (Proxy :: Proxy DeleteIdentities) testSetIdentityPoolRolesResponse :: SetIdentityPoolRolesResponse -> TestTree testSetIdentityPoolRolesResponse = res "SetIdentityPoolRolesResponse" "fixture/SetIdentityPoolRolesResponse.proto" cognitoIdentity (Proxy :: Proxy SetIdentityPoolRoles) testListIdentitiesResponse :: ListIdentitiesResponse -> TestTree testListIdentitiesResponse = res "ListIdentitiesResponse" "fixture/ListIdentitiesResponse.proto" cognitoIdentity (Proxy :: Proxy ListIdentities) testLookupDeveloperIdentityResponse :: LookupDeveloperIdentityResponse -> TestTree testLookupDeveloperIdentityResponse = res "LookupDeveloperIdentityResponse" "fixture/LookupDeveloperIdentityResponse.proto" cognitoIdentity (Proxy :: Proxy LookupDeveloperIdentity) testUnlinkIdentityResponse :: UnlinkIdentityResponse -> TestTree testUnlinkIdentityResponse = res "UnlinkIdentityResponse" "fixture/UnlinkIdentityResponse.proto" cognitoIdentity (Proxy :: Proxy UnlinkIdentity) testDescribeIdentityResponse :: IdentityDescription -> TestTree testDescribeIdentityResponse = res "DescribeIdentityResponse" "fixture/DescribeIdentityResponse.proto" cognitoIdentity (Proxy :: Proxy DescribeIdentity) testCreateIdentityPoolResponse :: IdentityPool -> TestTree testCreateIdentityPoolResponse = res "CreateIdentityPoolResponse" "fixture/CreateIdentityPoolResponse.proto" cognitoIdentity (Proxy :: Proxy CreateIdentityPool) testMergeDeveloperIdentitiesResponse :: MergeDeveloperIdentitiesResponse -> TestTree testMergeDeveloperIdentitiesResponse = res "MergeDeveloperIdentitiesResponse" "fixture/MergeDeveloperIdentitiesResponse.proto" cognitoIdentity (Proxy :: Proxy MergeDeveloperIdentities)
fmapfmapfmap/amazonka
amazonka-cognito-identity/test/Test/AWS/Gen/CognitoIdentity.hs
mpl-2.0
11,359
0
7
2,084
1,093
647
446
189
1
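The request/response tests above lean on the `req` and `res` golden-fixture helpers from the amazonka test framework. As a rough, self-contained sketch of the same golden-fixture idea: every name below (`renderRequest`, `fixtureTest`) is invented for illustration and is not part of amazonka; only the tasty API is real.

import Test.Tasty        (TestTree, defaultMain, testGroup)
import Test.Tasty.HUnit   (assertEqual, testCase)
import qualified Data.ByteString.Char8 as BS

-- Stand-in: the real suite renders the request from the operation's request
-- type; here we just wrap the operation name so the example type-checks.
renderRequest :: String -> BS.ByteString
renderRequest = BS.pack

-- Compare the rendered request against a recorded fixture file on disk.
fixtureTest :: String -> FilePath -> TestTree
fixtureTest name fixture = testCase name $ do
    expected <- BS.readFile fixture
    assertEqual "matches recorded fixture" expected (renderRequest name)

main :: IO ()
main = defaultMain $ testGroup "CognitoIdentity fixtures"
    [ fixtureTest "GetId"             "fixture/GetId.yaml"
    , fixtureTest "ListIdentityPools" "fixture/ListIdentityPools.yaml"
    ]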
{- Copyright 2012-2013 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -} {-# LANGUAGE GeneralizedNewtypeDeriving #-} module Plush.Run.BuiltIns.Syntax ( -- * Standard Utiilty Syntax stdSyntax, perArg, -- * Option Specification flag, flagAlt, toggle, argOpt, ) where import Control.Monad.Exception (catchAll) import Data.List ((\\)) import Data.Monoid import Plush.ArgParser import Plush.Run.Posix import Plush.Run.Posix.Return import Plush.Run.Posix.Utilities import Plush.Run.Types import Plush.Types.CommandSummary -- | Create a 'Utility' that can parse options on execution, as well as -- provide annotations and completions of candidate command lines. -- -- Option parsing proceeds according to the \"Utility Conventions\" in §12. -- This is essentially the common conventions for single letter options, and -- using "--" to end argument processing. -- -- The supplied utility function takes the resulting option state and the -- arguments to produce an 'ExitCode'. -- -- The 'CommandSummary' argument is last because it is supplied by the -- machinery in "Plush.Run.BuiltIns", since that is where a utility's -- implementation is associated with a name, which is used there to find -- the summary info. Hence, typical usage is: -- -- @ -- example = BuiltInUtility $ stdSyntax options initialFlags exec -- where -- exec flags args = ... -- ... -- @ stdSyntax :: (PosixLike m, Returnable r) => [OptionSpec a] -- ^ options -> a -- ^ the initial option state -> (a -> Args -> m r) -- ^ utility function -> CommandSummary -- ^ summary information for error help messages -> Utility m r stdSyntax options opt0 action summary = Utility exec anno where exec cmdLineArgs = case mconcat $ processArgs options cmdLineArgs of OA (Right (optF, args)) -> reportError $ action (optF opt0) args OA (Left errs) -> do errStr errs errStrLn $ ciSynopsis summary errStr $ formatOptions summary failure anno cmdLineArgs = return $ map (argAnnotations summary) $ processArgs options cmdLineArgs -- | When a utility simply applies to each of the arguments in turn, this -- transforms a function of option state and single arg into the function -- of option state and all args needed by 'stdUtility'. perArg :: (PosixLike m) => (a -> String -> m ExitCode) -> a -> [String] -> m ExitCode perArg cmd opts args = mapM (reportError . cmd opts) args >>= return . maximum -- | Catch any errors, report them, and return an error exit code. reportError :: (PosixLike m, Returnable r) => m r -> m r reportError act = act `catchAll` (exitMsg 1 . show) -- | Declare a flag option. The option state is a list of flag characters, -- last flag from the command line, first. If a flag is repeated, it will -- only be included in the state once. flag :: Char -> OptionSpec String flag f = OptionSpec [f] [] (NoArg (\fs -> f : (fs \\ [f]))) -- | Declare a flag option, with possible alternatives. The first flag -- listed will be used as the canonical flag added to the option state. 
flagAlt :: String -> OptionSpec String
flagAlt fa@(f0:_) = OptionSpec fa [] (NoArg (\fs -> f0 : (fs \\ fa)))
flagAlt [] = error "Plush.Run.BuiltIns.Syntax.flagAlt: no flags supplied"

-- | Declare a flag that is among an exclusive set of flags. Only one of
-- the flags will appear in the final option state. In the initial option
-- state supplied to 'stdSyntax', either include the default flag, or
-- include none if you want to detect that the user didn't specify a choice.
toggle :: Char      -- ^ the flag character
       -> String    -- ^ the mutually exclusive set of flags
       -> OptionSpec String
toggle f excl = OptionSpec [f] [] (NoArg (\fs -> f : (fs \\ excl)))

-- | Declare a string option. The option state is the value of this option.
-- Supply the default as the initial option state to 'stdSyntax'.
argOpt :: Char      -- ^ the flag character
       -> OptionSpec String
argOpt f = OptionSpec [f] [] (ReqArg const)
mzero/plush
src/Plush/Run/BuiltIns/Syntax.hs
apache-2.0
4,587
0
13
962
727
404
323
48
2
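The flag-state bookkeeping in the combinators above is easy to exercise in isolation. A minimal pure sketch of the functions that `flag` and `toggle` wrap in `NoArg` (the names `addFlag` and `pickToggle` are mine; only the `(\\)`-based logic is taken from the module):

import Data.List ((\\))

-- What 'flag f' does to the accumulated flag state: most recent flag first,
-- with any earlier occurrence of the same flag dropped.
addFlag :: Char -> String -> String
addFlag f fs = f : (fs \\ [f])

-- What 'toggle f excl' does: adding one flag evicts the rest of the
-- mutually exclusive set.
pickToggle :: Char -> String -> String -> String
pickToggle f excl fs = f : (fs \\ excl)

-- ghci> addFlag 'v' (addFlag 'x' (addFlag 'v' ""))
-- "vx"          -- a repeated -v collapses to a single occurrence
-- ghci> pickToggle 'u' "ulc" (pickToggle 'l' "ulc" "")
-- "u"           -- -l then -u: only the last of the exclusive set survives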
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE OverloadedStrings #-} module Path.CheckInstall where import Control.Monad (unless) import Control.Monad.Extra (anyM, (&&^)) import Control.Monad.IO.Class import Control.Monad.Logger import Data.Foldable (forM_) import Data.Text (Text) import qualified Data.Text as T import qualified System.Directory as D import qualified System.FilePath as FP -- | Checks if the installed executable will be available on the user's -- PATH. This doesn't use @envSearchPath menv@ because it includes paths -- only visible when running in the stack environment. warnInstallSearchPathIssues :: (MonadIO m, MonadLogger m) => FilePath -> [Text] -> m () warnInstallSearchPathIssues destDir installed = do searchPath <- liftIO FP.getSearchPath destDirIsInPATH <- liftIO $ anyM (\dir -> D.doesDirectoryExist dir &&^ fmap (FP.equalFilePath destDir) (D.canonicalizePath dir)) searchPath if destDirIsInPATH then forM_ installed $ \exe -> do mexePath <- (liftIO . D.findExecutable . T.unpack) exe case mexePath of Just exePath -> do exeDir <- (liftIO . fmap FP.takeDirectory . D.canonicalizePath) exePath unless (exeDir `FP.equalFilePath` destDir) $ do $logWarn "" $logWarn $ T.concat [ "WARNING: The \"" , exe , "\" executable found on the PATH environment variable is " , T.pack exePath , ", and not the version that was just installed." ] $logWarn $ T.concat [ "This means that \"" , exe , "\" calls on the command line will not use this version." ] Nothing -> do $logWarn "" $logWarn $ T.concat [ "WARNING: Installation path " , T.pack destDir , " is on the PATH but the \"" , exe , "\" executable that was just installed could not be found on the PATH." ] else do $logWarn "" $logWarn $ T.concat [ "WARNING: Installation path " , T.pack destDir , " not found on the PATH environment variable" ]
mrkkrp/stack
src/Path/CheckInstall.hs
bsd-3-clause
2,672
0
24
1,102
462
243
219
50
3
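The core of the check above is the PATH-membership test built from `anyM` and `(&&^)`. Pulled out as a standalone sketch (the wrapper `isOnPath` and the `main` are mine; the predicate is the one used in `warnInstallSearchPathIssues`):

import Control.Monad.Extra (anyM, (&&^))
import qualified System.Directory as D
import qualified System.FilePath as FP

-- A directory counts as being on the PATH only if some PATH entry both
-- exists and canonicalises to the same location.
isOnPath :: FilePath -> IO Bool
isOnPath destDir = do
    searchPath <- FP.getSearchPath
    anyM (\dir -> D.doesDirectoryExist dir
             &&^ fmap (FP.equalFilePath destDir) (D.canonicalizePath dir))
         searchPath

main :: IO ()
main = isOnPath "/usr/local/bin" >>= print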
-- this file illustrates several uses of `zoom` -- one of them is quadratic in the length of the file -- since it has to decode and encode repeatedly, -- and is thus no good on long files. {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE BangPatterns#-} {-# LANGUAGE RankNTypes #-} import Blaze.ByteString.Builder (Builder, fromByteString, toByteString) import Control.Exception (Exception) import Control.Monad.Trans.Class (lift) import Data.ByteString (ByteString) import qualified Data.ByteString as S import qualified Data.ByteString.Lazy as L import Data.Monoid import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Encoding as TEE import qualified Data.Text.Lazy as TL import qualified Data.Text.Lazy.Encoding as TLE import Pipes import Pipes.Parse import qualified Pipes.Prelude as PP import qualified Pipes.ByteString as Bytes import qualified Pipes.Text as Txt import Pipes.Text.Encoding (utf8) import Control.Lens -- we use 'zoom' with MonadState, not just StateT import Control.Monad import qualified System.IO as IO import Control.Monad.Trans.Maybe import Control.Monad.State.Class main :: IO () main = do S.writeFile fp $ contents 10000 -- 10000 cannot be handled fileParser0 and 1 -- parse_file fileParser0 -- pathological -- parse_file fileParser1 -- programs parse_file fileParser2 -- good program where parse_file parser = IO.withBinaryFile fp IO.ReadMode $ \h -> do p' <- runEffect $ parseWith parser ( Bytes.fromHandle h ) >-> PP.print runEffect $ p' >-> PP.print parseWith parser = loop where loop p = do (m,p') <- lift (runStateT (runMaybeT parser) p) case m of Nothing -> return p' Just file -> do yield file loop p' fp = "encoded.fileformat" contents n = (toByteString . mconcat . replicate n . encodeFiles) input <> S.pack (replicate 10 250) fileParser0, fileParser1, fileParser2 :: Monad m => MaybeT (StateT (Producer ByteString m x) m) File fileParser0 = do (name, len) <- zoom utf8 parseText contents <- zoom (Bytes.splitAt len) (lift drawAll) return (File name (S.concat contents)) where -- this parser aggregates all Text parsing into one preliminary parser -- which is then applied with `zoom utf8` -- we cannot tell in advance how long, e.g. the file name will be parseText :: Monad m => MaybeT (StateT (Producer Text m x) m) (Text, Int) parseText = do nameLength <- parseNumber names <- zoom (Txt.splitAt nameLength) $ (lift drawAll) contentLength <- parseNumber return $! (T.concat names, contentLength) -- here we disaggregate the little Text parsers but still apply them with `zoom utf8` -- this makes no difference fileParser1 = do nameLength <- zoom utf8 parseNumber names <- zoom (utf8 . Txt.splitAt nameLength) (lift drawAll) contentLength <- zoom utf8 parseNumber contents <- zoom (Bytes.splitAt contentLength) (lift drawAll) return (File (T.concat names) (S.concat contents)) -- This is the good program; by reflecting on the fact that file names -- should not be a 1000 bytes long, and binary files longer than e.g. 10 ^ 10 -- we can restrict the length of the byte stream to which we apply `zoom utf8` fileParser2 = do nameLength <- zoom (Bytes.splitAt 3 . utf8) parseNumber names <- zoom (Bytes.splitAt nameLength . utf8) (lift drawAll) len <- zoom (Bytes.splitAt 10 . 
utf8) parseNumber contents <- zoom (Bytes.splitAt len) (lift drawAll) return (File (T.concat names) (S.concat contents)) parseNumber :: Monad m => MaybeT (StateT (Producer Text m x) m) Int parseNumber = loop 0 where loop !n = do c <- MaybeT Txt.drawChar case c of ':' -> return n _ -> do guard ('0' <= c && c <= '9') loop $! n * 10 + (fromEnum c - fromEnum '0') -- --- Michael S's `File` type and its binary encoding, etc. data File = File { fileName :: !Text , fileContents :: !ByteString } deriving Show encodeFile :: File -> Builder encodeFile (File name contents) = tellLength (S.length bytesname) <> fromByteString bytesname <> tellLength (S.length contents) <> fromByteString contents where tellLength i = fromByteString $ TEE.encodeUtf8 (T.pack (shows i ":")) bytesname = TEE.encodeUtf8 name encodeFiles :: [File] -> Builder encodeFiles = mconcat . map encodeFile input :: [File] input = [ File "utf8.txt" $ TEE.encodeUtf8 "This file is in UTF-8" , File "utf16.txt" $ TEE.encodeUtf16LE "This file is in UTF-16" , File "binary.dat" "we'll pretend to be binary" ]
bitemyapp/text-pipes
examples/zoom.hs
bsd-3-clause
5,180
0
18
1,539
1,277
665
612
88
2
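The example above streams the `<len>:<name><len>:<contents>` format produced by `encodeFile`; a small non-streaming reference decoder is handy for checking the pipes parsers against short inputs. All names below (`readLen`, `decodeOne`) are mine:

import           Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BC
import           Data.Text (Text)
import qualified Data.Text.Encoding as TE

-- Read a decimal length followed by ':' (the prefix emitted by encodeFile).
readLen :: ByteString -> Maybe (Int, ByteString)
readLen bs = case BC.readInt bs of
    Just (n, rest) | Just (':', rest') <- BC.uncons rest -> Just (n, rest')
    _                                                    -> Nothing

-- Decode one File record: name length, name, content length, contents.
-- Note: decodeUtf8 is partial on invalid UTF-8; this is only a test oracle.
decodeOne :: ByteString -> Maybe ((Text, ByteString), ByteString)
decodeOne bs = do
    (nameLen, r1) <- readLen bs
    let (name, r2) = BC.splitAt nameLen r1
    (len, r3)     <- readLen r2
    let (contents, r4) = BC.splitAt len r3
    pure ((TE.decodeUtf8 name, contents), r4)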
module RunCommand (runCommandStrWait) where

import System.Process
import System.Exit
import System.IO
import Control.Concurrent
import Control.Concurrent.Chan
-- NOTE: Data.Either is deliberately not imported: modern 'base' exports
-- isLeft and fromLeft, which would clash with the local definitions below.

type Pipe = Chan (Either Char ())

pipeGetContents :: Pipe -> IO String
pipeGetContents p = do
  s <- getChanContents p
  return $ map fromLeft $ takeWhile isLeft s

pipeWrite :: Pipe -> String -> IO ()
pipeWrite p s = writeList2Chan p (map Left s)

-- close the pipe for writing
pipeClose :: Pipe -> IO ()
pipeClose p = writeChan p (Right ())

--
-- * Either utilities
--

isLeft :: Either a b -> Bool
isLeft = either (const True) (const False)

fromLeft :: Either a b -> a
fromLeft = either id (error "fromLeft: Right")

--
-- * Various versions of runCommand
--

runCommandChan :: String -- ^ command
               -> IO (Pipe,Pipe,Pipe,ProcessHandle) -- ^ stdin, stdout, stderr, process
runCommandChan c = do
  inC <- newChan
  outC <- newChan
  errC <- newChan
  (pin,pout,perr,p) <- runInteractiveCommand c
  forkIO (pipeGetContents inC >>= hPutStr pin >> hClose pin)
  forkIO (hGetContents pout >>= pipeWrite outC >> pipeClose outC)
  forkIO (hGetContents perr >>= pipeWrite errC >> pipeClose errC)
  return (inC,outC,errC,p)

runCommandStr :: String -- ^ command
              -> String -- ^ stdin data
              -> IO (String,String,ProcessHandle) -- ^ stdout, stderr, process
runCommandStr c inStr = do
  (inC,outC,errC,p) <- runCommandChan c
  forkIO (pipeWrite inC inStr >> pipeClose inC)
  out <- pipeGetContents outC
  err <- pipeGetContents errC
  return (out,err,p)

runCommandStrWait :: String -- ^ command
                  -> String -- ^ stdin data
                  -> IO (String,String,ExitCode) -- ^ stdout, stderr, process exit status
runCommandStrWait c inStr = do
  (out,err,p) <- runCommandStr c inStr
  s <- waitForProcess p
  return (out,err,s)
bvdelft/parac2
simpletestsuite/RunCommand.hs
bsd-3-clause
1,866
10
11
403
640
327
313
51
1
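A minimal usage sketch for the module above: feed a string to a command's stdin and collect stdout, stderr, and the exit status. The shell command is only illustrative.

import RunCommand (runCommandStrWait)

main :: IO ()
main = do
    (out, err, code) <- runCommandStrWait "tr 'a-z' 'A-Z'" "hello pipes\n"
    putStrLn ("exit:   " ++ show code)
    putStr   ("stdout: " ++ out)
    putStr   ("stderr: " ++ err)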
module Program.List.Operation where import qualified Program.List.Value as V import qualified Program.List.Store as S import Autolib.Reporter hiding ( result ) import Autolib.ToDoc import Control.Monad.State import Data.Ix ( inRange ) data Type = Void | Index -- ^ 0 to length - 1 | Index' -- ^ 0 to length (used to List.add) | Element -- ^ the element type of the collection deriving ( Eq, Show, Enum, Bounded ) instance ToDoc Type where toDoc = text . show data Operation = Operation { object :: V.Name -- ^ the type of "this" , method :: String , args :: [ Type ] , result :: Type , semantics :: S.Key -> [ S.Key ] -> S.ReporterST S.Key , toplevel :: Bool } ops :: [ Operation ] ops = list_ops ++ stack_ops ++ queue_ops add = Operation { object = V.List , method = "add" , args = [ Index', Element ], result = Void , semantics = \ self [ a1, a2 ] -> do sc <- S.access self i <- S.access a1 let k = fromIntegral $ S.scontents i c = S.contents sc bnd = (0, length c) when ( not $ inRange bnd k ) $ lift $ reject $ text "index" <+> toDoc k <+> text "not in range" <+> toDoc bnd let ( pre, post ) = splitAt k c S.update self $ sc { S.contents = pre ++ a2 : post } S.void , toplevel = True } remove = Operation { object = V.List , method = "remove" , args = [ Index ], result = Element , semantics = \ self [ a1 ] -> do sc <- S.access self i <- S.access a1 let k = fromIntegral $ S.scontents i c = S.contents sc bnd = (0,length c - 1) when ( not $ inRange bnd k ) $ lift $ reject $ text "index" <+> toDoc k <+> text "not in range" <+> toDoc bnd let ( pre, a2 : post ) = splitAt k c S.update self $ sc { S.contents = pre ++ post } return a2 , toplevel = True } list_ops = [ add , remove , Operation { object = V.List , method = "get" , args = [ Index ], result = Element , semantics = \ self [ a1 ] -> do sc <- S.access self i <- S.access a1 let k = fromIntegral $ S.scontents i c = S.contents sc bnd = (0,length c - 1) when ( not $ inRange bnd k ) $ lift $ reject $ text "index" <+> toDoc k <+> text "not in range" <+> toDoc bnd let a2 = c !! 
k return a2 , toplevel = False } , Operation { object = V.List , method = "size" , args = [ ], result = Index , semantics = \ self [ ] -> do sc <- S.access self S.scalar $ fromIntegral $ length $ S.contents sc , toplevel = False } ] stack_ops = [ Operation { object = V.Stack , method = "push" , args = [ Element ], result = Void , semantics = \ self [ a2 ] -> do sc <- S.access self S.update self $ sc { S.contents = S.contents sc ++ [ a2 ] } S.void , toplevel = True } , Operation { object = V.Stack , method = "pop" , args = [ ], result = Element , semantics = \ self [ ] -> do sc <- S.access self lift $ when ( null $ S.contents sc ) $ reject $ text "pop: Stack darf nicht leer sein" S.update self $ sc { S.contents = init $ S.contents sc } return $ last $ S.contents sc , toplevel = True } , Operation { object = V.Stack , method = "peek" , args = [ ], result = Element , semantics = \ self [ ] -> do sc <- S.access self sc <- S.access self lift $ when ( null $ S.contents sc ) $ reject $ text "peek: Stack darf nicht leer sein" return $ last $ S.contents sc , toplevel = False } , Operation { object = V.Stack , method = "size" , args = [ ], result = Index , semantics = \ self [ ] -> do sc <- S.access self S.scalar $ fromIntegral $ length $ S.contents sc , toplevel = False } ] queue_ops = [ Operation { object = V.Queue , method = "add" , args = [ Element ], result = Void , semantics = \ self [ a2 ] -> do sc <- S.access self S.update self $ sc { S.contents = S.contents sc ++ [ a2 ] } S.void , toplevel = True } , Operation { object = V.Queue , method = "remove" , args = [ ], result = Element , semantics = \ self [ ] -> do sc <- S.access self lift $ when ( null $ S.contents sc ) $ reject $ text "remove: Queue darf nicht leer sein" S.update self $ sc { S.contents = tail $ S.contents sc } return $ head $ S.contents sc , toplevel = True } , Operation { object = V.Queue , method = "size" , args = [ ], result = Index , semantics = \ self [ ] -> do sc <- S.access self S.scalar $ fromIntegral $ length $ S.contents sc , toplevel = False } ]
Erdwolf/autotool-bonn
src/Program/List/Operation.hs
gpl-2.0
6,322
0
20
3,135
1,802
970
832
128
1
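The `add`/`remove` semantics above reduce to list surgery guarded by `inRange`, with `Index'` allowing insertion at the end (positions `0 .. length`) while `Index` does not (`0 .. length - 1`). A pure model of just that part, leaving out the ReporterST plumbing; the names `insertAt` and `removeAt` are mine:

import Data.Ix (inRange)

-- Insertion: valid positions are 0 .. length xs (the Index' range).
insertAt :: Int -> a -> [a] -> Either String [a]
insertAt k x xs
    | inRange (0, length xs) k = let (pre, post) = splitAt k xs
                                 in  Right (pre ++ x : post)
    | otherwise                = Left ("index " ++ show k ++ " not in range")

-- Removal: valid positions are 0 .. length xs - 1 (the Index range).
removeAt :: Int -> [a] -> Either String (a, [a])
removeAt k xs
    | inRange (0, length xs - 1) k = let (pre, y:post) = splitAt k xs
                                     in  Right (y, pre ++ post)
    | otherwise                    = Left ("index " ++ show k ++ " not in range")

-- ghci> insertAt 2 'x' "abcd"
-- Right "abxcd"
-- ghci> removeAt 4 "abcd"
-- Left "index 4 not in range"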
{- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 \section{SetLevels} *************************** Overview *************************** 1. We attach binding levels to Core bindings, in preparation for floating outwards (@FloatOut@). 2. We also let-ify many expressions (notably case scrutinees), so they will have a fighting chance of being floated sensible. 3. Note [Need for cloning during float-out] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We clone the binders of any floatable let-binding, so that when it is floated out it will be unique. Example (let x=2 in x) + (let x=3 in x) we must clone before floating so we get let x1=2 in let x2=3 in x1+x2 NOTE: this can't be done using the uniqAway idea, because the variable must be unique in the whole program, not just its current scope, because two variables in different scopes may float out to the same top level place NOTE: Very tiresomely, we must apply this substitution to the rules stored inside a variable too. We do *not* clone top-level bindings, because some of them must not change, but we *do* clone bindings that are heading for the top level 4. Note [Binder-swap during float-out] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In the expression case x of wild { p -> ...wild... } we substitute x for wild in the RHS of the case alternatives: case x of wild { p -> ...x... } This means that a sub-expression involving x is not "trapped" inside the RHS. And it's not inconvenient because we already have a substitution. Note that this is EXACTLY BACKWARDS from the what the simplifier does. The simplifier tries to get rid of occurrences of x, in favour of wild, in the hope that there will only be one remaining occurrence of x, namely the scrutinee of the case, and we can inline it. -} {-# LANGUAGE CPP, MultiWayIf #-} module SetLevels ( setLevels, Level(..), LevelType(..), tOP_LEVEL, isJoinCeilLvl, asJoinCeilLvl, LevelledBind, LevelledExpr, LevelledBndr, FloatSpec(..), floatSpecLevel, incMinorLvl, ltMajLvl, ltLvl, isTopLvl ) where #include "HsVersions.h" import GhcPrelude import CoreSyn import CoreMonad ( FloatOutSwitches(..) ) import CoreUtils ( exprType, exprIsHNF , exprOkForSpeculation , exprIsTopLevelBindable , isExprLevPoly , collectMakeStaticArgs ) import CoreArity ( exprBotStrictness_maybe ) import CoreFVs -- all of it import CoreSubst import MkCore ( sortQuantVars ) import Id import IdInfo import Var import VarSet import UniqSet ( nonDetFoldUniqSet ) import VarEnv import Literal ( litIsTrivial ) import Demand ( StrictSig, Demand, isStrictDmd, splitStrictSig, increaseStrictSigArity ) import Name ( getOccName, mkSystemVarName ) import OccName ( occNameString ) import Type ( Type, mkLamTypes, splitTyConApp_maybe, tyCoVarsOfType ) import BasicTypes ( Arity, RecFlag(..), isRec ) import DataCon ( dataConOrigResTy ) import TysWiredIn import UniqSupply import Util import Outputable import FastString import UniqDFM import FV import Data.Maybe import MonadUtils ( mapAccumLM ) {- ************************************************************************ * * \subsection{Level numbers} * * ************************************************************************ -} type LevelledExpr = TaggedExpr FloatSpec type LevelledBind = TaggedBind FloatSpec type LevelledBndr = TaggedBndr FloatSpec data Level = Level Int -- Level number of enclosing lambdas Int -- Number of big-lambda and/or case expressions and/or -- context boundaries between -- here and the nearest enclosing lambda LevelType -- Binder or join ceiling? 
data LevelType = BndrLvl | JoinCeilLvl deriving (Eq) data FloatSpec = FloatMe Level -- Float to just inside the binding -- tagged with this level | StayPut Level -- Stay where it is; binding is -- tagged with this level floatSpecLevel :: FloatSpec -> Level floatSpecLevel (FloatMe l) = l floatSpecLevel (StayPut l) = l {- The {\em level number} on a (type-)lambda-bound variable is the nesting depth of the (type-)lambda which binds it. The outermost lambda has level 1, so (Level 0 0) means that the variable is bound outside any lambda. On an expression, it's the maximum level number of its free (type-)variables. On a let(rec)-bound variable, it's the level of its RHS. On a case-bound variable, it's the number of enclosing lambdas. Top-level variables: level~0. Those bound on the RHS of a top-level definition but ``before'' a lambda; e.g., the \tr{x} in (levels shown as ``subscripts'')... \begin{verbatim} a_0 = let b_? = ... in x_1 = ... b ... in ... \end{verbatim} The main function @lvlExpr@ carries a ``context level'' (@le_ctxt_lvl@). That's meant to be the level number of the enclosing binder in the final (floated) program. If the level number of a sub-expression is less than that of the context, then it might be worth let-binding the sub-expression so that it will indeed float. If you can float to level @Level 0 0@ worth doing so because then your allocation becomes static instead of dynamic. We always start with context @Level 0 0@. Note [FloatOut inside INLINE] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @InlineCtxt@ very similar to @Level 0 0@, but is used for one purpose: to say "don't float anything out of here". That's exactly what we want for the body of an INLINE, where we don't want to float anything out at all. See notes with lvlMFE below. But, check this out: -- At one time I tried the effect of not float anything out of an InlineMe, -- but it sometimes works badly. For example, consider PrelArr.done. It -- has the form __inline (\d. e) -- where e doesn't mention d. If we float this to -- __inline (let x = e in \d. x) -- things are bad. The inliner doesn't even inline it because it doesn't look -- like a head-normal form. So it seems a lesser evil to let things float. -- In SetLevels we do set the context to (Level 0 0) when we get to an InlineMe -- which discourages floating out. So the conclusion is: don't do any floating at all inside an InlineMe. (In the above example, don't float the {x=e} out of the \d.) One particular case is that of workers: we don't want to float the call to the worker outside the wrapper, otherwise the worker might get inlined into the floated expression, and an importing module won't see the worker at all. Note [Join ceiling] ~~~~~~~~~~~~~~~~~~~ Join points can't float very far; too far, and they can't remain join points So, suppose we have: f x = (joinrec j y = ... x ... in jump j x) + 1 One may be tempted to float j out to the top of f's RHS, but then the jump would not be a tail call. Thus we keep track of a level called the *join ceiling* past which join points are not allowed to float. The troublesome thing is that, unlike most levels to which something might float, there is not necessarily an identifier to which the join ceiling is attached. Fortunately, if something is to be floated to a join ceiling, it must be dropped at the *nearest* join ceiling. Thus each level is marked as to whether it is a join ceiling, so that FloatOut can tell which binders are being floated to the nearest join ceiling and which to a particular binder (or set of binders). 
-} instance Outputable FloatSpec where ppr (FloatMe l) = char 'F' <> ppr l ppr (StayPut l) = ppr l tOP_LEVEL :: Level tOP_LEVEL = Level 0 0 BndrLvl incMajorLvl :: Level -> Level incMajorLvl (Level major _ _) = Level (major + 1) 0 BndrLvl incMinorLvl :: Level -> Level incMinorLvl (Level major minor _) = Level major (minor+1) BndrLvl asJoinCeilLvl :: Level -> Level asJoinCeilLvl (Level major minor _) = Level major minor JoinCeilLvl maxLvl :: Level -> Level -> Level maxLvl l1@(Level maj1 min1 _) l2@(Level maj2 min2 _) | (maj1 > maj2) || (maj1 == maj2 && min1 > min2) = l1 | otherwise = l2 ltLvl :: Level -> Level -> Bool ltLvl (Level maj1 min1 _) (Level maj2 min2 _) = (maj1 < maj2) || (maj1 == maj2 && min1 < min2) ltMajLvl :: Level -> Level -> Bool -- Tells if one level belongs to a difft *lambda* level to another ltMajLvl (Level maj1 _ _) (Level maj2 _ _) = maj1 < maj2 isTopLvl :: Level -> Bool isTopLvl (Level 0 0 _) = True isTopLvl _ = False isJoinCeilLvl :: Level -> Bool isJoinCeilLvl (Level _ _ t) = t == JoinCeilLvl instance Outputable Level where ppr (Level maj min typ) = hcat [ char '<', int maj, char ',', int min, char '>' , ppWhen (typ == JoinCeilLvl) (char 'C') ] instance Eq Level where (Level maj1 min1 _) == (Level maj2 min2 _) = maj1 == maj2 && min1 == min2 {- ************************************************************************ * * \subsection{Main level-setting code} * * ************************************************************************ -} setLevels :: FloatOutSwitches -> CoreProgram -> UniqSupply -> [LevelledBind] setLevels float_lams binds us = initLvl us (do_them init_env binds) where init_env = initialEnv float_lams do_them :: LevelEnv -> [CoreBind] -> LvlM [LevelledBind] do_them _ [] = return [] do_them env (b:bs) = do { (lvld_bind, env') <- lvlTopBind env b ; lvld_binds <- do_them env' bs ; return (lvld_bind : lvld_binds) } lvlTopBind :: LevelEnv -> Bind Id -> LvlM (LevelledBind, LevelEnv) lvlTopBind env (NonRec bndr rhs) = do { rhs' <- lvl_top env NonRecursive bndr rhs ; let (env', [bndr']) = substAndLvlBndrs NonRecursive env tOP_LEVEL [bndr] ; return (NonRec bndr' rhs', env') } lvlTopBind env (Rec pairs) = do { let (env', bndrs') = substAndLvlBndrs Recursive env tOP_LEVEL (map fst pairs) ; rhss' <- mapM (\(b,r) -> lvl_top env' Recursive b r) pairs ; return (Rec (bndrs' `zip` rhss'), env') } lvl_top :: LevelEnv -> RecFlag -> Id -> CoreExpr -> LvlM LevelledExpr lvl_top env is_rec bndr rhs = lvlRhs env is_rec (isBottomingId bndr) Nothing -- Not a join point (freeVars rhs) {- ************************************************************************ * * \subsection{Setting expression levels} * * ************************************************************************ Note [Floating over-saturated applications] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If we see (f x y), and (f x) is a redex (ie f's arity is 1), we call (f x) an "over-saturated application" Should we float out an over-sat app, if can escape a value lambda? It is sometimes very beneficial (-7% runtime -4% alloc over nofib -O2). But we don't want to do it for class selectors, because the work saved is minimal, and the extra local thunks allocated cost money. Arguably we could float even class-op applications if they were going to top level -- but then they must be applied to a constant dictionary and will almost certainly be optimised away anyway. 
-} lvlExpr :: LevelEnv -- Context -> CoreExprWithFVs -- Input expression -> LvlM LevelledExpr -- Result expression {- The @le_ctxt_lvl@ is, roughly, the level of the innermost enclosing binder. Here's an example v = \x -> ...\y -> let r = case (..x..) of ..x.. in .. When looking at the rhs of @r@, @le_ctxt_lvl@ will be 1 because that's the level of @r@, even though it's inside a level-2 @\y@. It's important that @le_ctxt_lvl@ is 1 and not 2 in @r@'s rhs, because we don't want @lvlExpr@ to turn the scrutinee of the @case@ into an MFE --- because it isn't a *maximal* free expression. If there were another lambda in @r@'s rhs, it would get level-2 as well. -} lvlExpr env (_, AnnType ty) = return (Type (CoreSubst.substTy (le_subst env) ty)) lvlExpr env (_, AnnCoercion co) = return (Coercion (substCo (le_subst env) co)) lvlExpr env (_, AnnVar v) = return (lookupVar env v) lvlExpr _ (_, AnnLit lit) = return (Lit lit) lvlExpr env (_, AnnCast expr (_, co)) = do expr' <- lvlNonTailExpr env expr return (Cast expr' (substCo (le_subst env) co)) lvlExpr env (_, AnnTick tickish expr) = do expr' <- lvlNonTailExpr env expr let tickish' = substTickish (le_subst env) tickish return (Tick tickish' expr') lvlExpr env expr@(_, AnnApp _ _) = lvlApp env expr (collectAnnArgs expr) -- We don't split adjacent lambdas. That is, given -- \x y -> (x+1,y) -- we don't float to give -- \x -> let v = x+1 in \y -> (v,y) -- Why not? Because partial applications are fairly rare, and splitting -- lambdas makes them more expensive. lvlExpr env expr@(_, AnnLam {}) = do { new_body <- lvlNonTailMFE new_env True body ; return (mkLams new_bndrs new_body) } where (bndrs, body) = collectAnnBndrs expr (env1, bndrs1) = substBndrsSL NonRecursive env bndrs (new_env, new_bndrs) = lvlLamBndrs env1 (le_ctxt_lvl env) bndrs1 -- At one time we called a special verion of collectBinders, -- which ignored coercions, because we don't want to split -- a lambda like this (\x -> coerce t (\s -> ...)) -- This used to happen quite a bit in state-transformer programs, -- but not nearly so much now non-recursive newtypes are transparent. -- [See SetLevels rev 1.50 for a version with this approach.] lvlExpr env (_, AnnLet bind body) = do { (bind', new_env) <- lvlBind env bind ; body' <- lvlExpr new_env body -- No point in going via lvlMFE here. If the binding is alive -- (mentioned in body), and the whole let-expression doesn't -- float, then neither will the body ; return (Let bind' body') } lvlExpr env (_, AnnCase scrut case_bndr ty alts) = do { scrut' <- lvlNonTailMFE env True scrut ; lvlCase env (freeVarsOf scrut) scrut' case_bndr ty alts } lvlNonTailExpr :: LevelEnv -- Context -> CoreExprWithFVs -- Input expression -> LvlM LevelledExpr -- Result expression lvlNonTailExpr env expr = lvlExpr (placeJoinCeiling env) expr ------------------------------------------- lvlApp :: LevelEnv -> CoreExprWithFVs -> (CoreExprWithFVs, [CoreExprWithFVs]) -- Input application -> LvlM LevelledExpr -- Result expression lvlApp env orig_expr ((_,AnnVar fn), args) | floatOverSat env -- See Note [Floating over-saturated applications] , arity > 0 , arity < n_val_args , Nothing <- isClassOpId_maybe fn = do { rargs' <- mapM (lvlNonTailMFE env False) rargs ; lapp' <- lvlNonTailMFE env False lapp ; return (foldl App lapp' rargs') } | otherwise = do { (_, args') <- mapAccumLM lvl_arg stricts args -- Take account of argument strictness; see -- Note [Floating to the top] ; return (foldl App (lookupVar env fn) args') } where n_val_args = count (isValArg . 
deAnnotate) args arity = idArity fn stricts :: [Demand] -- True for strict /value/ arguments stricts = case splitStrictSig (idStrictness fn) of (arg_ds, _) | arg_ds `lengthExceeds` n_val_args -> [] | otherwise -> arg_ds -- Separate out the PAP that we are floating from the extra -- arguments, by traversing the spine until we have collected -- (n_val_args - arity) value arguments. (lapp, rargs) = left (n_val_args - arity) orig_expr [] left 0 e rargs = (e, rargs) left n (_, AnnApp f a) rargs | isValArg (deAnnotate a) = left (n-1) f (a:rargs) | otherwise = left n f (a:rargs) left _ _ _ = panic "SetLevels.lvlExpr.left" is_val_arg :: CoreExprWithFVs -> Bool is_val_arg (_, AnnType {}) = False is_val_arg _ = True lvl_arg :: [Demand] -> CoreExprWithFVs -> LvlM ([Demand], LevelledExpr) lvl_arg strs arg | (str1 : strs') <- strs , is_val_arg arg = do { arg' <- lvlMFE env (isStrictDmd str1) arg ; return (strs', arg') } | otherwise = do { arg' <- lvlMFE env False arg ; return (strs, arg') } lvlApp env _ (fun, args) = -- No PAPs that we can float: just carry on with the -- arguments and the function. do { args' <- mapM (lvlNonTailMFE env False) args ; fun' <- lvlNonTailExpr env fun ; return (foldl App fun' args') } ------------------------------------------- lvlCase :: LevelEnv -- Level of in-scope names/tyvars -> DVarSet -- Free vars of input scrutinee -> LevelledExpr -- Processed scrutinee -> Id -> Type -- Case binder and result type -> [CoreAltWithFVs] -- Input alternatives -> LvlM LevelledExpr -- Result expression lvlCase env scrut_fvs scrut' case_bndr ty alts | [(con@(DataAlt {}), bs, body)] <- alts , exprOkForSpeculation (deTagExpr scrut') -- See Note [Check the output scrutinee for okForSpec] , not (isTopLvl dest_lvl) -- Can't have top-level cases , not (floatTopLvlOnly env) -- Can float anywhere = -- See Note [Floating cases] -- Always float the case if possible -- Unlike lets we don't insist that it escapes a value lambda do { (env1, (case_bndr' : bs')) <- cloneCaseBndrs env dest_lvl (case_bndr : bs) ; let rhs_env = extendCaseBndrEnv env1 case_bndr scrut' ; body' <- lvlMFE rhs_env True body ; let alt' = (con, map (stayPut dest_lvl) bs', body') ; return (Case scrut' (TB case_bndr' (FloatMe dest_lvl)) ty' [alt']) } | otherwise -- Stays put = do { let (alts_env1, [case_bndr']) = substAndLvlBndrs NonRecursive env incd_lvl [case_bndr] alts_env = extendCaseBndrEnv alts_env1 case_bndr scrut' ; alts' <- mapM (lvl_alt alts_env) alts ; return (Case scrut' case_bndr' ty' alts') } where ty' = substTy (le_subst env) ty incd_lvl = incMinorLvl (le_ctxt_lvl env) dest_lvl = maxFvLevel (const True) env scrut_fvs -- Don't abstract over type variables, hence const True lvl_alt alts_env (con, bs, rhs) = do { rhs' <- lvlMFE new_env True rhs ; return (con, bs', rhs') } where (new_env, bs') = substAndLvlBndrs NonRecursive alts_env incd_lvl bs {- Note [Floating cases] ~~~~~~~~~~~~~~~~~~~~~ Consider this: data T a = MkT !a f :: T Int -> blah f x vs = case x of { MkT y -> let f vs = ...(case y of I# w -> e)...f.. in f vs Here we can float the (case y ...) out, because y is sure to be evaluated, to give f x vs = case x of { MkT y -> caes y of I# w -> let f vs = ...(e)...f.. in f vs That saves unboxing it every time round the loop. It's important in some DPH stuff where we really want to avoid that repeated unboxing in the inner loop. Things to note * We can't float a case to top level * It's worth doing this float even if we don't float the case outside a value lambda. 
Example case x of { MkT y -> (case y of I# w2 -> ..., case y of I# w2 -> ...) If we floated the cases out we could eliminate one of them. * We only do this with a single-alternative case Note [Check the output scrutinee for okForSpec] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider this: case x of y { A -> ....(case y of alts).... } Because of the binder-swap, the inner case will get substituted to (case x of ..). So when testing whether the scrutinee is okForSpeculation we must be careful to test the *result* scrutinee ('x' in this case), not the *input* one 'y'. The latter *is* ok for speculation here, but the former is not -- and indeed we can't float the inner case out, at least not unless x is also evaluated at its binding site. See Trac #5453. That's why we apply exprOkForSpeculation to scrut' and not to scrut. -} lvlNonTailMFE :: LevelEnv -- Level of in-scope names/tyvars -> Bool -- True <=> strict context [body of case -- or let] -> CoreExprWithFVs -- input expression -> LvlM LevelledExpr -- Result expression lvlNonTailMFE env strict_ctxt ann_expr = lvlMFE (placeJoinCeiling env) strict_ctxt ann_expr lvlMFE :: LevelEnv -- Level of in-scope names/tyvars -> Bool -- True <=> strict context [body of case or let] -> CoreExprWithFVs -- input expression -> LvlM LevelledExpr -- Result expression -- lvlMFE is just like lvlExpr, except that it might let-bind -- the expression, so that it can itself be floated. lvlMFE env _ (_, AnnType ty) = return (Type (CoreSubst.substTy (le_subst env) ty)) -- No point in floating out an expression wrapped in a coercion or note -- If we do we'll transform lvl = e |> co -- to lvl' = e; lvl = lvl' |> co -- and then inline lvl. Better just to float out the payload. lvlMFE env strict_ctxt (_, AnnTick t e) = do { e' <- lvlMFE env strict_ctxt e ; let t' = substTickish (le_subst env) t ; return (Tick t' e') } lvlMFE env strict_ctxt (_, AnnCast e (_, co)) = do { e' <- lvlMFE env strict_ctxt e ; return (Cast e' (substCo (le_subst env) co)) } lvlMFE env strict_ctxt e@(_, AnnCase {}) | strict_ctxt -- Don't share cases in a strict context = lvlExpr env e -- See Note [Case MFEs] lvlMFE env strict_ctxt ann_expr | floatTopLvlOnly env && not (isTopLvl dest_lvl) -- Only floating to the top level is allowed. 
|| anyDVarSet isJoinId fvs -- If there is a free join, don't float -- See Note [Free join points] || isExprLevPoly expr -- We can't let-bind levity polymorphic expressions -- See Note [Levity polymorphism invariants] in CoreSyn || notWorthFloating expr abs_vars || not float_me = -- Don't float it out lvlExpr env ann_expr | float_is_new_lam || exprIsTopLevelBindable expr expr_ty -- No wrapping needed if the type is lifted, or is a literal string -- or if we are wrapping it in one or more value lambdas = do { expr1 <- lvlFloatRhs abs_vars dest_lvl rhs_env NonRecursive (isJust mb_bot_str) join_arity_maybe ann_expr -- Treat the expr just like a right-hand side ; var <- newLvlVar expr1 join_arity_maybe is_mk_static ; let var2 = annotateBotStr var float_n_lams mb_bot_str ; return (Let (NonRec (TB var2 (FloatMe dest_lvl)) expr1) (mkVarApps (Var var2) abs_vars)) } -- OK, so the float has an unlifted type (not top-level bindable) -- and no new value lambdas (float_is_new_lam is False) -- Try for the boxing strategy -- See Note [Floating MFEs of unlifted type] | escapes_value_lam , not expr_ok_for_spec -- Boxing/unboxing isn't worth it for cheap expressions -- See Note [Test cheapness with exprOkForSpeculation] , Just (tc, _) <- splitTyConApp_maybe expr_ty , Just dc <- boxingDataCon_maybe tc , let dc_res_ty = dataConOrigResTy dc -- No free type variables [bx_bndr, ubx_bndr] = mkTemplateLocals [dc_res_ty, expr_ty] = do { expr1 <- lvlExpr rhs_env ann_expr ; let l1r = incMinorLvlFrom rhs_env float_rhs = mkLams abs_vars_w_lvls $ Case expr1 (stayPut l1r ubx_bndr) dc_res_ty [(DEFAULT, [], mkConApp dc [Var ubx_bndr])] ; var <- newLvlVar float_rhs Nothing is_mk_static ; let l1u = incMinorLvlFrom env use_expr = Case (mkVarApps (Var var) abs_vars) (stayPut l1u bx_bndr) expr_ty [(DataAlt dc, [stayPut l1u ubx_bndr], Var ubx_bndr)] ; return (Let (NonRec (TB var (FloatMe dest_lvl)) float_rhs) use_expr) } | otherwise -- e.g. do not float unboxed tuples = lvlExpr env ann_expr where expr = deAnnotate ann_expr expr_ty = exprType expr fvs = freeVarsOf ann_expr fvs_ty = tyCoVarsOfType expr_ty is_bot = isBottomThunk mb_bot_str is_function = isFunction ann_expr mb_bot_str = exprBotStrictness_maybe expr -- See Note [Bottoming floats] -- esp Bottoming floats (2) expr_ok_for_spec = exprOkForSpeculation expr dest_lvl = destLevel env fvs fvs_ty is_function is_bot False abs_vars = abstractVars dest_lvl env fvs -- float_is_new_lam: the floated thing will be a new value lambda -- replacing, say (g (x+4)) by (lvl x). No work is saved, nor is -- allocation saved. The benefit is to get it to the top level -- and hence out of the body of this function altogether, making -- it smaller and more inlinable float_is_new_lam = float_n_lams > 0 float_n_lams = count isId abs_vars (rhs_env, abs_vars_w_lvls) = lvlLamBndrs env dest_lvl abs_vars join_arity_maybe = Nothing is_mk_static = isJust (collectMakeStaticArgs expr) -- Yuk: See Note [Grand plan for static forms] in main/StaticPtrTable -- A decision to float entails let-binding this thing, and we only do -- that if we'll escape a value lambda, or will go to the top level. 
float_me = saves_work || saves_alloc || is_mk_static -- We can save work if we can move a redex outside a value lambda -- But if float_is_new_lam is True, then the redex is wrapped in a -- a new lambda, so no work is saved saves_work = escapes_value_lam && not float_is_new_lam escapes_value_lam = dest_lvl `ltMajLvl` (le_ctxt_lvl env) -- See Note [Escaping a value lambda] -- See Note [Floating to the top] saves_alloc = isTopLvl dest_lvl && floatConsts env && (not strict_ctxt || is_bot || exprIsHNF expr) isBottomThunk :: Maybe (Arity, s) -> Bool -- See Note [Bottoming floats] (2) isBottomThunk (Just (0, _)) = True -- Zero arity isBottomThunk _ = False {- Note [Floating to the top] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We are keen to float something to the top level, even if it does not escape a value lambda (and hence save work), for two reasons: * Doing so makes the function smaller, by floating out bottoming expressions, or integer or string literals. That in turn makes it easier to inline, with less duplication. * (Minor) Doing so may turn a dynamic allocation (done by machine instructions) into a static one. Minor because we are assuming we are not escaping a value lambda. But do not so if: - the context is a strict, and - the expression is not a HNF, and - the expression is not bottoming Exammples: * Bottoming f x = case x of 0 -> error <big thing> _ -> x+1 Here we want to float (error <big thing>) to top level, abstracting over 'x', so as to make f's RHS smaller. * HNF f = case y of True -> p:q False -> blah We may as well float the (p:q) so it becomes a static data structure. * Case scrutinee f = case g True of .... Don't float (g True) to top level; then we have the admin of a top-level thunk to worry about, with zero gain. * Case alternative h = case y of True -> g True False -> False Don't float (g True) to the top level * Arguments t = f (g True) If f is lazy, we /do/ float (g True) because then we can allocate the thunk statically rather than dynamically. But if f is strict we don't (see the use of idStrictness in lvlApp). It's not clear if this test is worth the bother: it's only about CAFs! It's controlled by a flag (floatConsts), because doing this too early loses opportunities for RULES which (needless to say) are important in some nofib programs (gcd is an example). [SPJ note: I think this is obselete; the flag seems always on.] Note [Floating join point bindings] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Mostly we only float a join point if it can /stay/ a join point. But there is one exception: if it can go to the top level (Trac #13286). Consider f x = joinrec j y n = <...j y' n'...> in jump j x 0 Here we may just as well produce j y n = <....j y' n'...> f x = j x 0 and now there is a chance that 'f' will be inlined at its call sites. It shouldn't make a lot of difference, but thes tests perf/should_run/MethSharing simplCore/should_compile/spec-inline and one nofib program, all improve if you do float to top, because of the resulting inlining of f. So ok, let's do it. Note [Free join points] ~~~~~~~~~~~~~~~~~~~~~~~ We never float a MFE that has a free join-point variable. You mght think this can never occur. After all, consider join j x = ... in ....(jump j x).... How might we ever want to float that (jump j x)? * If it would escape a value lambda, thus join j x = ... in (\y. ...(jump j x)... ) then 'j' isn't a valid join point in the first place. But consider join j x = .... in joinrec j2 y = ...(jump j x)...(a+b).... 
Since j2 is recursive, it /is/ worth floating (a+b) out of the joinrec. But it is emphatically /not/ good to float the (jump j x) out: (a) 'j' will stop being a join point (b) In any case, jumping to 'j' must be an exit of the j2 loop, so no work would be saved by floating it out of the \y. Even if we floated 'j' to top level, (b) would still hold. Bottom line: never float a MFE that has a free JoinId. Note [Floating MFEs of unlifted type] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Suppose we have case f x of (r::Int#) -> blah we'd like to float (f x). But it's not trivial because it has type Int#, and we don't want to evaluate it too early. But we can instead float a boxed version y = case f x of r -> I# r and replace the original (f x) with case (case y of I# r -> r) of r -> blah Being able to float unboxed expressions is sometimes important; see Trac #12603. I'm not sure how /often/ it is important, but it's not hard to achieve. We only do it for a fixed collection of types for which we have a convenient boxing constructor (see boxingDataCon_maybe). In particular we /don't/ do it for unboxed tuples; it's better to float the components of the tuple individually. I did experiment with a form of boxing that works for any type, namely wrapping in a function. In our example let y = case f x of r -> \v. f x in case y void of r -> blah It works fine, but it's 50% slower (based on some crude benchmarking). I suppose we could do it for types not covered by boxingDataCon_maybe, but it's more code and I'll wait to see if anyone wants it. Note [Test cheapness with exprOkForSpeculation] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We don't want to float very cheap expressions by boxing and unboxing. But we use exprOkForSpeculation for the test, not exprIsCheap. Why? Because it's important /not/ to transform f (a /# 3) to f (case bx of I# a -> a /# 3) and float bx = I# (a /# 3), because the application of f no longer obeys the let/app invariant. But (a /# 3) is ok-for-spec due to a special hack that says division operators can't fail when the denominator is definitely non-zero. And yet that same expression says False to exprIsCheap. Simplest way to guarantee the let/app invariant is to use the same function! If an expression is okay for speculation, we could also float it out *without* boxing and unboxing, since evaluating it early is okay. However, it turned out to usually be better not to float such expressions, since they tend to be extremely cheap things like (x +# 1#). Even the cost of spilling the let-bound variable to the stack across a call may exceed the cost of recomputing such an expression. (And we can't float unlifted bindings to top-level.) We could try to do something smarter here, and float out expensive yet okay-for-speculation things, such as division by non-zero constants. But I suspect it's a narrow target. Note [Bottoming floats] ~~~~~~~~~~~~~~~~~~~~~~~ If we see f = \x. g (error "urk") we'd like to float the call to error, to get lvl = error "urk" f = \x. g lvl But, as ever, we need to be careful: (1) We want to float a bottoming expression even if it has free variables: f = \x. g (let v = h x in error ("urk" ++ v)) Then we'd like to abstract over 'x' can float the whole arg of g: lvl = \x. let v = h x in error ("urk" ++ v) f = \x. g (lvl x) To achieve this we pass is_bot to destLevel (2) We do not do this for lambdas that return bottom. Instead we treat the /body/ of such a function specially, via point (1). For example: f = \x. ....(\y z. if x then error y else error z).... 
===> lvl = \x z y. if b then error y else error z f = \x. ...(\y z. lvl x z y)... (There is no guarantee that we'll choose the perfect argument order.) (3) If we have a /binding/ that returns bottom, we want to float it to top level, even if it has free vars (point (1)), and even it has lambdas. Example: ... let { v = \y. error (show x ++ show y) } in ... We want to abstract over x and float the whole thing to top: lvl = \xy. errror (show x ++ show y) ...let {v = lvl x} in ... Then of course we don't want to separately float the body (error ...) as /another/ MFE, so we tell lvlFloatRhs not to do that, via the is_bot argument. See Maessen's paper 1999 "Bottom extraction: factoring error handling out of functional programs" (unpublished I think). When we do this, we set the strictness and arity of the new bottoming Id, *immediately*, for three reasons: * To prevent the abstracted thing being immediately inlined back in again via preInlineUnconditionally. The latter has a test for bottoming Ids to stop inlining them, so we'd better make sure it *is* a bottoming Id! * So that it's properly exposed as such in the interface file, even if this is all happening after strictness analysis. * In case we do CSE with the same expression that *is* marked bottom lvl = error "urk" x{str=bot) = error "urk" Here we don't want to replace 'x' with 'lvl', else we may get Lint errors, e.g. via a case with empty alternatives: (case x of {}) Lint complains unless the scrutinee of such a case is clearly bottom. This was reported in Trac #11290. But since the whole bottoming-float thing is based on the cheap-and-cheerful exprIsBottom, I'm not sure that it'll nail all such cases. Note [Bottoming floats: eta expansion] c.f Note [Bottoming floats] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tiresomely, though, the simplifier has an invariant that the manifest arity of the RHS should be the same as the arity; but we can't call etaExpand during SetLevels because it works over a decorated form of CoreExpr. So we do the eta expansion later, in FloatOut. Note [Case MFEs] ~~~~~~~~~~~~~~~~ We don't float a case expression as an MFE from a strict context. Why not? Because in doing so we share a tiny bit of computation (the switch) but in exchange we build a thunk, which is bad. This case reduces allocation by 7% in spectral/puzzle (a rather strange benchmark) and 1.2% in real/fem. Doesn't change any other allocation at all. We will make a separate decision for the scrutinee and alternatives. However this can have a knock-on effect for fusion: consider \v -> foldr k z (case x of I# y -> build ..y..) Perhaps we can float the entire (case x of ...) out of the \v. Then fusion will not happen, but we will get more sharing. But if we don't float the case (as advocated here) we won't float the (build ...y..) either, so fusion will happen. It can be a big effect, esp in some artificial benchmarks (e.g. integer, queens), but there is no perfect answer. -} annotateBotStr :: Id -> Arity -> Maybe (Arity, StrictSig) -> Id -- See Note [Bottoming floats] for why we want to add -- bottoming information right now -- -- n_extra are the number of extra value arguments added during floating annotateBotStr id n_extra mb_str = case mb_str of Nothing -> id Just (arity, sig) -> id `setIdArity` (arity + n_extra) `setIdStrictness` (increaseStrictSigArity n_extra sig) notWorthFloating :: CoreExpr -> [Var] -> Bool -- Returns True if the expression would be replaced by -- something bigger than it is now. 
For example: -- abs_vars = tvars only: return True if e is trivial, -- but False for anything bigger -- abs_vars = [x] (an Id): return True for trivial, or an application (f x) -- but False for (f x x) -- -- One big goal is that floating should be idempotent. Eg if -- we replace e with (lvl79 x y) and then run FloatOut again, don't want -- to replace (lvl79 x y) with (lvl83 x y)! notWorthFloating e abs_vars = go e (count isId abs_vars) where go (Var {}) n = n >= 0 go (Lit lit) n = ASSERT( n==0 ) litIsTrivial lit -- Note [Floating literals] go (Tick t e) n = not (tickishIsCode t) && go e n go (Cast e _) n = go e n go (App e arg) n | Type {} <- arg = go e n | Coercion {} <- arg = go e n | n==0 = False | is_triv arg = go e (n-1) | otherwise = False go _ _ = False is_triv (Lit {}) = True -- Treat all literals as trivial is_triv (Var {}) = True -- (ie not worth floating) is_triv (Cast e _) = is_triv e is_triv (App e (Type {})) = is_triv e is_triv (App e (Coercion {})) = is_triv e is_triv (Tick t e) = not (tickishIsCode t) && is_triv e is_triv _ = False {- Note [Floating literals] ~~~~~~~~~~~~~~~~~~~~~~~~ It's important to float Integer literals, so that they get shared, rather than being allocated every time round the loop. Hence the litIsTrivial. Ditto literal strings (MachStr), which we'd like to float to top level, which is now possible. Note [Escaping a value lambda] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We want to float even cheap expressions out of value lambdas, because that saves allocation. Consider f = \x. .. (\y.e) ... Then we'd like to avoid allocating the (\y.e) every time we call f, (assuming e does not mention x). An example where this really makes a difference is simplrun009. Another reason it's good is because it makes SpecContr fire on functions. Consider f = \x. ....(f (\y.e)).... After floating we get lvl = \y.e f = \x. ....(f lvl)... and that is much easier for SpecConstr to generate a robust specialisation for. However, if we are wrapping the thing in extra value lambdas (in abs_vars), then nothing is saved. E.g. f = \xyz. ...(e1[y],e2).... If we float lvl = \y. (e1[y],e2) f = \xyz. ...(lvl y)... we have saved nothing: one pair will still be allocated for each call of 'f'. Hence the (not float_is_lam) in float_me. ************************************************************************ * * \subsection{Bindings} * * ************************************************************************ The binding stuff works for top level too. -} lvlBind :: LevelEnv -> CoreBindWithFVs -> LvlM (LevelledBind, LevelEnv) lvlBind env (AnnNonRec bndr rhs) | isTyVar bndr -- Don't do anything for TyVar binders -- (simplifier gets rid of them pronto) || isCoVar bndr -- Difficult to fix up CoVar occurrences (see extendPolyLvlEnv) -- so we will ignore this case for now || not (profitableFloat env dest_lvl) || (isTopLvl dest_lvl && not (exprIsTopLevelBindable deann_rhs bndr_ty)) -- We can't float an unlifted binding to top level (except -- literal strings), so we don't float it at all. 
It's a -- bit brutal, but unlifted bindings aren't expensive either = -- No float do { rhs' <- lvlRhs env NonRecursive is_bot mb_join_arity rhs ; let bind_lvl = incMinorLvl (le_ctxt_lvl env) (env', [bndr']) = substAndLvlBndrs NonRecursive env bind_lvl [bndr] ; return (NonRec bndr' rhs', env') } -- Otherwise we are going to float | null abs_vars = do { -- No type abstraction; clone existing binder rhs' <- lvlFloatRhs [] dest_lvl env NonRecursive is_bot mb_join_arity rhs ; (env', [bndr']) <- cloneLetVars NonRecursive env dest_lvl [bndr] ; let bndr2 = annotateBotStr bndr' 0 mb_bot_str ; return (NonRec (TB bndr2 (FloatMe dest_lvl)) rhs', env') } | otherwise = do { -- Yes, type abstraction; create a new binder, extend substitution, etc rhs' <- lvlFloatRhs abs_vars dest_lvl env NonRecursive is_bot mb_join_arity rhs ; (env', [bndr']) <- newPolyBndrs dest_lvl env abs_vars [bndr] ; let bndr2 = annotateBotStr bndr' n_extra mb_bot_str ; return (NonRec (TB bndr2 (FloatMe dest_lvl)) rhs', env') } where bndr_ty = idType bndr ty_fvs = tyCoVarsOfType bndr_ty rhs_fvs = freeVarsOf rhs bind_fvs = rhs_fvs `unionDVarSet` dIdFreeVars bndr abs_vars = abstractVars dest_lvl env bind_fvs dest_lvl = destLevel env bind_fvs ty_fvs (isFunction rhs) is_bot is_join deann_rhs = deAnnotate rhs mb_bot_str = exprBotStrictness_maybe deann_rhs is_bot = isJust mb_bot_str -- NB: not isBottomThunk! See Note [Bottoming floats] point (3) n_extra = count isId abs_vars mb_join_arity = isJoinId_maybe bndr is_join = isJust mb_join_arity lvlBind env (AnnRec pairs) | floatTopLvlOnly env && not (isTopLvl dest_lvl) -- Only floating to the top level is allowed. || not (profitableFloat env dest_lvl) = do { let bind_lvl = incMinorLvl (le_ctxt_lvl env) (env', bndrs') = substAndLvlBndrs Recursive env bind_lvl bndrs lvl_rhs (b,r) = lvlRhs env' Recursive is_bot (isJoinId_maybe b) r ; rhss' <- mapM lvl_rhs pairs ; return (Rec (bndrs' `zip` rhss'), env') } | null abs_vars = do { (new_env, new_bndrs) <- cloneLetVars Recursive env dest_lvl bndrs ; new_rhss <- mapM (do_rhs new_env) pairs ; return ( Rec ([TB b (FloatMe dest_lvl) | b <- new_bndrs] `zip` new_rhss) , new_env) } -- ToDo: when enabling the floatLambda stuff, -- I think we want to stop doing this | [(bndr,rhs)] <- pairs , count isId abs_vars > 1 = do -- Special case for self recursion where there are -- several variables carried around: build a local loop: -- poly_f = \abs_vars. \lam_vars . letrec f = \lam_vars. rhs in f lam_vars -- This just makes the closures a bit smaller. 
If we don't do -- this, allocation rises significantly on some programs -- -- We could elaborate it for the case where there are several -- mutually functions, but it's quite a bit more complicated -- -- This all seems a bit ad hoc -- sigh let (rhs_env, abs_vars_w_lvls) = lvlLamBndrs env dest_lvl abs_vars rhs_lvl = le_ctxt_lvl rhs_env (rhs_env', [new_bndr]) <- cloneLetVars Recursive rhs_env rhs_lvl [bndr] let (lam_bndrs, rhs_body) = collectAnnBndrs rhs (body_env1, lam_bndrs1) = substBndrsSL NonRecursive rhs_env' lam_bndrs (body_env2, lam_bndrs2) = lvlLamBndrs body_env1 rhs_lvl lam_bndrs1 new_rhs_body <- lvlRhs body_env2 Recursive is_bot (get_join bndr) rhs_body (poly_env, [poly_bndr]) <- newPolyBndrs dest_lvl env abs_vars [bndr] return (Rec [(TB poly_bndr (FloatMe dest_lvl) , mkLams abs_vars_w_lvls $ mkLams lam_bndrs2 $ Let (Rec [( TB new_bndr (StayPut rhs_lvl) , mkLams lam_bndrs2 new_rhs_body)]) (mkVarApps (Var new_bndr) lam_bndrs1))] , poly_env) | otherwise -- Non-null abs_vars = do { (new_env, new_bndrs) <- newPolyBndrs dest_lvl env abs_vars bndrs ; new_rhss <- mapM (do_rhs new_env) pairs ; return ( Rec ([TB b (FloatMe dest_lvl) | b <- new_bndrs] `zip` new_rhss) , new_env) } where (bndrs,rhss) = unzip pairs is_join = isJoinId (head bndrs) -- bndrs is always non-empty and if one is a join they all are -- Both are checked by Lint is_fun = all isFunction rhss is_bot = False -- It's odd to have an unconditionally divergent -- function in a Rec, and we don't much care what -- happens to it. False is simple! do_rhs env (bndr,rhs) = lvlFloatRhs abs_vars dest_lvl env Recursive is_bot (get_join bndr) rhs get_join bndr | need_zap = Nothing | otherwise = isJoinId_maybe bndr need_zap = dest_lvl `ltLvl` joinCeilingLevel env -- Finding the free vars of the binding group is annoying bind_fvs = ((unionDVarSets [ freeVarsOf rhs | (_, rhs) <- pairs]) `unionDVarSet` (fvDVarSet $ unionsFV [ idFVs bndr | (bndr, (_,_)) <- pairs])) `delDVarSetList` bndrs ty_fvs = foldr (unionVarSet . tyCoVarsOfType . 
idType) emptyVarSet bndrs dest_lvl = destLevel env bind_fvs ty_fvs is_fun is_bot is_join abs_vars = abstractVars dest_lvl env bind_fvs profitableFloat :: LevelEnv -> Level -> Bool profitableFloat env dest_lvl = (dest_lvl `ltMajLvl` le_ctxt_lvl env) -- Escapes a value lambda || isTopLvl dest_lvl -- Going all the way to top level ---------------------------------------------------- -- Three help functions for the type-abstraction case lvlRhs :: LevelEnv -> RecFlag -> Bool -- Is this a bottoming function -> Maybe JoinArity -> CoreExprWithFVs -> LvlM LevelledExpr lvlRhs env rec_flag is_bot mb_join_arity expr = lvlFloatRhs [] (le_ctxt_lvl env) env rec_flag is_bot mb_join_arity expr lvlFloatRhs :: [OutVar] -> Level -> LevelEnv -> RecFlag -> Bool -- Binding is for a bottoming function -> Maybe JoinArity -> CoreExprWithFVs -> LvlM (Expr LevelledBndr) -- Ignores the le_ctxt_lvl in env; treats dest_lvl as the baseline lvlFloatRhs abs_vars dest_lvl env rec is_bot mb_join_arity rhs = do { body' <- if not is_bot -- See Note [Floating from a RHS] && any isId bndrs then lvlMFE body_env True body else lvlExpr body_env body ; return (mkLams bndrs' body') } where (bndrs, body) | Just join_arity <- mb_join_arity = collectNAnnBndrs join_arity rhs | otherwise = collectAnnBndrs rhs (env1, bndrs1) = substBndrsSL NonRecursive env bndrs all_bndrs = abs_vars ++ bndrs1 (body_env, bndrs') | Just _ <- mb_join_arity = lvlJoinBndrs env1 dest_lvl rec all_bndrs | otherwise = case lvlLamBndrs env1 dest_lvl all_bndrs of (env2, bndrs') -> (placeJoinCeiling env2, bndrs') -- The important thing here is that we call lvlLamBndrs on -- all these binders at once (abs_vars and bndrs), so they -- all get the same major level. Otherwise we create stupid -- let-bindings inside, joyfully thinking they can float; but -- in the end they don't because we never float bindings in -- between lambdas {- Note [Floating from a RHS] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When float the RHS of a let-binding, we don't always want to apply lvlMFE to the body of a lambda, as we usually do, because the entire binding body is already going to the right place (dest_lvl). A particular example is the top level. Consider concat = /\ a -> foldr ..a.. (++) [] We don't want to float the body of the lambda to get lvl = /\ a -> foldr ..a.. (++) [] concat = /\ a -> lvl a That would be stupid. Previously this was avoided in a much nastier way, by testing strict_ctxt in float_me in lvlMFE. But that wasn't even right because it would fail to float out the error sub-expression in f = \x. case x of True -> error ("blah" ++ show x) False -> ... But we must be careful: * If we had f = \x -> factorial 20 we /would/ want to float that (factorial 20) out! Functions are treated differently: see the use of isFunction in the calls to destLevel. If there are only type lambdas, then destLevel will say "go to top, and abstract over the free tyvars" and we don't want that here. * But if we had f = \x -> error (...x....) we would NOT want to float the bottoming expression out to give lvl = \x -> error (...x...) f = \x -> lvl x Conclusion: use lvlMFE if there are * any value lambdas in the original function, and * this is not a bottoming function (the is_bot argument) Use lvlExpr otherwise. A little subtle, and I got it wrong at least twice (e.g. Trac #13369). 
-} {- ************************************************************************ * * \subsection{Deciding floatability} * * ************************************************************************ -} substAndLvlBndrs :: RecFlag -> LevelEnv -> Level -> [InVar] -> (LevelEnv, [LevelledBndr]) substAndLvlBndrs is_rec env lvl bndrs = lvlBndrs subst_env lvl subst_bndrs where (subst_env, subst_bndrs) = substBndrsSL is_rec env bndrs substBndrsSL :: RecFlag -> LevelEnv -> [InVar] -> (LevelEnv, [OutVar]) -- So named only to avoid the name clash with CoreSubst.substBndrs substBndrsSL is_rec env@(LE { le_subst = subst, le_env = id_env }) bndrs = ( env { le_subst = subst' , le_env = foldl add_id id_env (bndrs `zip` bndrs') } , bndrs') where (subst', bndrs') = case is_rec of NonRecursive -> substBndrs subst bndrs Recursive -> substRecBndrs subst bndrs lvlLamBndrs :: LevelEnv -> Level -> [OutVar] -> (LevelEnv, [LevelledBndr]) -- Compute the levels for the binders of a lambda group lvlLamBndrs env lvl bndrs = lvlBndrs env new_lvl bndrs where new_lvl | any is_major bndrs = incMajorLvl lvl | otherwise = incMinorLvl lvl is_major bndr = isId bndr && not (isProbablyOneShotLambda bndr) -- The "probably" part says "don't float things out of a -- probable one-shot lambda" -- See Note [Computing one-shot info] in Demand.hs lvlJoinBndrs :: LevelEnv -> Level -> RecFlag -> [OutVar] -> (LevelEnv, [LevelledBndr]) lvlJoinBndrs env lvl rec bndrs = lvlBndrs env new_lvl bndrs where new_lvl | isRec rec = incMajorLvl lvl | otherwise = incMinorLvl lvl -- Non-recursive join points are one-shot; recursive ones are not lvlBndrs :: LevelEnv -> Level -> [CoreBndr] -> (LevelEnv, [LevelledBndr]) -- The binders returned are exactly the same as the ones passed, -- apart from applying the substitution, but they are now paired -- with a (StayPut level) -- -- The returned envt has le_ctxt_lvl updated to the new_lvl -- -- All the new binders get the same level, because -- any floating binding is either going to float past -- all or none. We never separate binders. lvlBndrs env@(LE { le_lvl_env = lvl_env }) new_lvl bndrs = ( env { le_ctxt_lvl = new_lvl , le_join_ceil = new_lvl , le_lvl_env = addLvls new_lvl lvl_env bndrs } , map (stayPut new_lvl) bndrs) stayPut :: Level -> OutVar -> LevelledBndr stayPut new_lvl bndr = TB bndr (StayPut new_lvl) -- Destination level is the max Id level of the expression -- (We'll abstract the type variables, if any.) 
destLevel :: LevelEnv -> DVarSet -- Free vars of the term -> TyCoVarSet -- Free in the /type/ of the term -- (a subset of the previous argument) -> Bool -- True <=> is function -> Bool -- True <=> is bottom -> Bool -- True <=> is a join point -> Level -- INVARIANT: if is_join=True then result >= join_ceiling destLevel env fvs fvs_ty is_function is_bot is_join | isTopLvl max_fv_id_level -- Float even joins if they get to top level -- See Note [Floating join point bindings] = tOP_LEVEL | is_join -- Never float a join point past the join ceiling -- See Note [Join points] in FloatOut = if max_fv_id_level `ltLvl` join_ceiling then join_ceiling else max_fv_id_level | is_bot -- Send bottoming bindings to the top = as_far_as_poss -- regardless; see Note [Bottoming floats] -- Esp Bottoming floats (1) | Just n_args <- floatLams env , n_args > 0 -- n=0 case handled uniformly by the 'otherwise' case , is_function , countFreeIds fvs <= n_args = as_far_as_poss -- Send functions to top level; see -- the comments with isFunction | otherwise = max_fv_id_level where join_ceiling = joinCeilingLevel env max_fv_id_level = maxFvLevel isId env fvs -- Max over Ids only; the -- tyvars will be abstracted as_far_as_poss = maxFvLevel' isId env fvs_ty -- See Note [Floating and kind casts] {- Note [Floating and kind casts] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider this case x of K (co :: * ~# k) -> let v :: Int |> co v = e in blah Then, even if we are abstracting over Ids, or if e is bottom, we can't float v outside the 'co' binding. Reason: if we did we'd get v' :: forall k. (Int ~# Age) => Int |> co and now 'co' isn't in scope in that type. The underlying reason is that 'co' is a value-level thing and we can't abstract over that in a type (else we'd get a dependent type). So if v's /type/ mentions 'co' we can't float it out beyond the binding site of 'co'. That's why we have this as_far_as_poss stuff. Usually as_far_as_poss is just tOP_LEVEL; but occasionally a coercion variable (which is an Id) mentioned in type prevents this. Example Trac #14270 comment:15. -} isFunction :: CoreExprWithFVs -> Bool -- The idea here is that we want to float *functions* to -- the top level. This saves no work, but -- (a) it can make the host function body a lot smaller, -- and hence inlinable. -- (b) it can also save allocation when the function is recursive: -- h = \x -> letrec f = \y -> ...f...y...x... -- in f x -- becomes -- f = \x y -> ...(f x)...y...x... -- h = \x -> f x x -- No allocation for f now. -- We may only want to do this if there are sufficiently few free -- variables. We certainly only want to do it for values, and not for -- constructors. So the simple thing is just to look for lambdas isFunction (_, AnnLam b e) | isId b = True | otherwise = isFunction e -- isFunction (_, AnnTick _ e) = isFunction e -- dubious isFunction _ = False countFreeIds :: DVarSet -> Int countFreeIds = nonDetFoldUDFM add 0 -- It's OK to use nonDetFoldUDFM here because we're just counting things. 
where add :: Var -> Int -> Int add v n | isId v = n+1 | otherwise = n {- ************************************************************************ * * \subsection{Free-To-Level Monad} * * ************************************************************************ -} data LevelEnv = LE { le_switches :: FloatOutSwitches , le_ctxt_lvl :: Level -- The current level , le_lvl_env :: VarEnv Level -- Domain is *post-cloned* TyVars and Ids , le_join_ceil:: Level -- Highest level to which joins float -- Invariant: always >= le_ctxt_lvl -- See Note [le_subst and le_env] , le_subst :: Subst -- Domain is pre-cloned TyVars and Ids -- The Id -> CoreExpr in the Subst is ignored -- (since we want to substitute a LevelledExpr for -- an Id via le_env) but we do use the Co/TyVar substs , le_env :: IdEnv ([OutVar], LevelledExpr) -- Domain is pre-cloned Ids } {- Note [le_subst and le_env] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We clone let- and case-bound variables so that they are still distinct when floated out; hence the le_subst/le_env. (see point 3 of the module overview comment). We also use these envs when making a variable polymorphic because we want to float it out past a big lambda. The le_subst and le_env always implement the same mapping, in_x :-> out_x a b where out_x is an OutVar, and a,b are its arguments (when we perform abstraction at the same time as floating). le_subst maps to CoreExpr le_env maps to LevelledExpr Since the range is always a variable or application, there is never any difference between the two, but sadly the types differ. The le_subst is used when substituting in a variable's IdInfo; the le_env when we find a Var. In addition the le_env records a [OutVar] of variables free in the OutExpr/LevelledExpr, just so we don't have to call freeVars repeatedly. 
This list is always non-empty, and the first element is out_x The domain of the both envs is *pre-cloned* Ids, though The domain of the le_lvl_env is the *post-cloned* Ids -} initialEnv :: FloatOutSwitches -> LevelEnv initialEnv float_lams = LE { le_switches = float_lams , le_ctxt_lvl = tOP_LEVEL , le_join_ceil = panic "initialEnv" , le_lvl_env = emptyVarEnv , le_subst = emptySubst , le_env = emptyVarEnv } addLvl :: Level -> VarEnv Level -> OutVar -> VarEnv Level addLvl dest_lvl env v' = extendVarEnv env v' dest_lvl addLvls :: Level -> VarEnv Level -> [OutVar] -> VarEnv Level addLvls dest_lvl env vs = foldl (addLvl dest_lvl) env vs floatLams :: LevelEnv -> Maybe Int floatLams le = floatOutLambdas (le_switches le) floatConsts :: LevelEnv -> Bool floatConsts le = floatOutConstants (le_switches le) floatOverSat :: LevelEnv -> Bool floatOverSat le = floatOutOverSatApps (le_switches le) floatTopLvlOnly :: LevelEnv -> Bool floatTopLvlOnly le = floatToTopLevelOnly (le_switches le) incMinorLvlFrom :: LevelEnv -> Level incMinorLvlFrom env = incMinorLvl (le_ctxt_lvl env) -- extendCaseBndrEnv adds the mapping case-bndr->scrut-var if it can -- See Note [Binder-swap during float-out] extendCaseBndrEnv :: LevelEnv -> Id -- Pre-cloned case binder -> Expr LevelledBndr -- Post-cloned scrutinee -> LevelEnv extendCaseBndrEnv le@(LE { le_subst = subst, le_env = id_env }) case_bndr (Var scrut_var) = le { le_subst = extendSubstWithVar subst case_bndr scrut_var , le_env = add_id id_env (case_bndr, scrut_var) } extendCaseBndrEnv env _ _ = env -- See Note [Join ceiling] placeJoinCeiling :: LevelEnv -> LevelEnv placeJoinCeiling le@(LE { le_ctxt_lvl = lvl }) = le { le_ctxt_lvl = lvl', le_join_ceil = lvl' } where lvl' = asJoinCeilLvl (incMinorLvl lvl) maxFvLevel :: (Var -> Bool) -> LevelEnv -> DVarSet -> Level maxFvLevel max_me env var_set = foldDVarSet (maxIn max_me env) tOP_LEVEL var_set maxFvLevel' :: (Var -> Bool) -> LevelEnv -> TyCoVarSet -> Level -- Same but for TyCoVarSet maxFvLevel' max_me env var_set = nonDetFoldUniqSet (maxIn max_me env) tOP_LEVEL var_set maxIn :: (Var -> Bool) -> LevelEnv -> InVar -> Level -> Level maxIn max_me (LE { le_lvl_env = lvl_env, le_env = id_env }) in_var lvl = case lookupVarEnv id_env in_var of Just (abs_vars, _) -> foldr max_out lvl abs_vars Nothing -> max_out in_var lvl where max_out out_var lvl | max_me out_var = case lookupVarEnv lvl_env out_var of Just lvl' -> maxLvl lvl' lvl Nothing -> lvl | otherwise = lvl -- Ignore some vars depending on max_me lookupVar :: LevelEnv -> Id -> LevelledExpr lookupVar le v = case lookupVarEnv (le_env le) v of Just (_, expr) -> expr _ -> Var v -- Level to which join points are allowed to float (boundary of current tail -- context). See Note [Join ceiling] joinCeilingLevel :: LevelEnv -> Level joinCeilingLevel = le_join_ceil abstractVars :: Level -> LevelEnv -> DVarSet -> [OutVar] -- Find the variables in fvs, free vars of the target expression, -- whose level is greater than the destination level -- These are the ones we are going to abstract out -- -- Note that to get reproducible builds, the variables need to be -- abstracted in deterministic order, not dependent on the values of -- Uniques. This is achieved by using DVarSets, deterministic free -- variable computation and deterministic sort. -- See Note [Unique Determinism] in Unique for explanation of why -- Uniques are not deterministic. 
abstractVars dest_lvl (LE { le_subst = subst, le_lvl_env = lvl_env }) in_fvs = -- NB: sortQuantVars might not put duplicates next to each other map zap $ sortQuantVars $ uniq [out_var | out_fv <- dVarSetElems (substDVarSet subst in_fvs) , out_var <- dVarSetElems (close out_fv) , abstract_me out_var ] -- NB: it's important to call abstract_me only on the OutIds the -- come from substDVarSet (not on fv, which is an InId) where uniq :: [Var] -> [Var] -- Remove duplicates, preserving order uniq = dVarSetElems . mkDVarSet abstract_me v = case lookupVarEnv lvl_env v of Just lvl -> dest_lvl `ltLvl` lvl Nothing -> False -- We are going to lambda-abstract, so nuke any IdInfo, -- and add the tyvars of the Id (if necessary) zap v | isId v = WARN( isStableUnfolding (idUnfolding v) || not (isEmptyRuleInfo (idSpecialisation v)), text "absVarsOf: discarding info on" <+> ppr v ) setIdInfo v vanillaIdInfo | otherwise = v close :: Var -> DVarSet -- Close over variables free in the type -- Result includes the input variable itself close v = foldDVarSet (unionDVarSet . close) (unitDVarSet v) (fvDVarSet $ varTypeTyCoFVs v) type LvlM result = UniqSM result initLvl :: UniqSupply -> UniqSM a -> a initLvl = initUs_ newPolyBndrs :: Level -> LevelEnv -> [OutVar] -> [InId] -> LvlM (LevelEnv, [OutId]) -- The envt is extended to bind the new bndrs to dest_lvl, but -- the le_ctxt_lvl is unaffected newPolyBndrs dest_lvl env@(LE { le_lvl_env = lvl_env, le_subst = subst, le_env = id_env }) abs_vars bndrs = ASSERT( all (not . isCoVar) bndrs ) -- What would we add to the CoSubst in this case. No easy answer. do { uniqs <- getUniquesM ; let new_bndrs = zipWith mk_poly_bndr bndrs uniqs bndr_prs = bndrs `zip` new_bndrs env' = env { le_lvl_env = addLvls dest_lvl lvl_env new_bndrs , le_subst = foldl add_subst subst bndr_prs , le_env = foldl add_id id_env bndr_prs } ; return (env', new_bndrs) } where add_subst env (v, v') = extendIdSubst env v (mkVarApps (Var v') abs_vars) add_id env (v, v') = extendVarEnv env v ((v':abs_vars), mkVarApps (Var v') abs_vars) mk_poly_bndr bndr uniq = transferPolyIdInfo bndr abs_vars $ -- Note [transferPolyIdInfo] in Id.hs transfer_join_info bndr $ mkSysLocalOrCoVar (mkFastString str) uniq poly_ty where str = "poly_" ++ occNameString (getOccName bndr) poly_ty = mkLamTypes abs_vars (CoreSubst.substTy subst (idType bndr)) -- If we are floating a join point to top level, it stops being -- a join point. Otherwise it continues to be a join point, -- but we may need to adjust its arity dest_is_top = isTopLvl dest_lvl transfer_join_info bndr new_bndr | Just join_arity <- isJoinId_maybe bndr , not dest_is_top = new_bndr `asJoinId` join_arity + length abs_vars | otherwise = new_bndr newLvlVar :: LevelledExpr -- The RHS of the new binding -> Maybe JoinArity -- Its join arity, if it is a join point -> Bool -- True <=> the RHS looks like (makeStatic ...) -> LvlM Id newLvlVar lvld_rhs join_arity_maybe is_mk_static = do { uniq <- getUniqueM ; return (add_join_info (mk_id uniq rhs_ty)) } where add_join_info var = var `asJoinId_maybe` join_arity_maybe de_tagged_rhs = deTagExpr lvld_rhs rhs_ty = exprType de_tagged_rhs mk_id uniq rhs_ty -- See Note [Grand plan for static forms] in StaticPtrTable. 
| is_mk_static = mkExportedVanillaId (mkSystemVarName uniq (mkFastString "static_ptr")) rhs_ty | otherwise = mkLocalIdOrCoVar (mkSystemVarName uniq (mkFastString "lvl")) rhs_ty cloneCaseBndrs :: LevelEnv -> Level -> [Var] -> LvlM (LevelEnv, [Var]) cloneCaseBndrs env@(LE { le_subst = subst, le_lvl_env = lvl_env, le_env = id_env }) new_lvl vs = do { us <- getUniqueSupplyM ; let (subst', vs') = cloneBndrs subst us vs env' = env { le_ctxt_lvl = new_lvl , le_join_ceil = new_lvl , le_lvl_env = addLvls new_lvl lvl_env vs' , le_subst = subst' , le_env = foldl add_id id_env (vs `zip` vs') } ; return (env', vs') } cloneLetVars :: RecFlag -> LevelEnv -> Level -> [InVar] -> LvlM (LevelEnv, [OutVar]) -- See Note [Need for cloning during float-out] -- Works for Ids bound by let(rec) -- The dest_lvl is attributed to the binders in the new env, -- but cloneVars doesn't affect the le_ctxt_lvl of the incoming env cloneLetVars is_rec env@(LE { le_subst = subst, le_lvl_env = lvl_env, le_env = id_env }) dest_lvl vs = do { us <- getUniqueSupplyM ; let vs1 = map zap vs -- See Note [Zapping the demand info] (subst', vs2) = case is_rec of NonRecursive -> cloneBndrs subst us vs1 Recursive -> cloneRecIdBndrs subst us vs1 prs = vs `zip` vs2 env' = env { le_lvl_env = addLvls dest_lvl lvl_env vs2 , le_subst = subst' , le_env = foldl add_id id_env prs } ; return (env', vs2) } where zap :: Var -> Var zap v | isId v = zap_join (zapIdDemandInfo v) | otherwise = v zap_join | isTopLvl dest_lvl = zapJoinId | otherwise = \v -> v add_id :: IdEnv ([Var], LevelledExpr) -> (Var, Var) -> IdEnv ([Var], LevelledExpr) add_id id_env (v, v1) | isTyVar v = delVarEnv id_env v | otherwise = extendVarEnv id_env v ([v1], ASSERT(not (isCoVar v1)) Var v1) {- Note [Zapping the demand info] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ VERY IMPORTANT: we must zap the demand info if the thing is going to float out, because it may be less demanded than at its original binding site. Eg f :: Int -> Int f x = let v = 3*4 in v+x Here v is strict; but if we float v to top level, it isn't any more. Similarly, if we're floating a join point, it won't be one anymore, so we zap join point information as well. -}
shlevy/ghc
compiler/simplCore/SetLevels.hs
bsd-3-clause
70,015
0
21
19,965
10,062
5,311
4,751
-1
-1
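A minimal, hypothetical sketch of the full-laziness transformation that the SetLevels pass above prepares: a subexpression that does not mention the lambda-bound variable is given an outer level so the later float-out pass can hoist and share it. The names 'before', 'after', 'expensive' and 'lvl' are illustrative only, not from the GHC sources.

before :: Int -> [Int]
before x = map (+ expensive) [x]
  where
    -- 'expensive' does not mention x, so SetLevels marks it for floating outwards
    expensive = sum [1 .. 10000 :: Int]

-- roughly what the program looks like after FloatOut has acted on the levels
lvl :: Int
lvl = sum [1 .. 10000]

after :: Int -> [Int]
after x = map (+ lvl) [x]   -- 'lvl' is now computed once and shared between calls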
{-# LANGUAGE OverloadedStrings #-}
module NativeWhitelist (read) where

import Prelude hiding (read)
import Data.Aeson as Json
import qualified Data.ByteString.Lazy as LBS
import qualified System.Directory as Dir
import System.IO

import qualified Elm.Package.Name as Name

nativeWhitelist :: FilePath
nativeWhitelist = "native-whitelist.json"

read :: IO [Name.Name]
read = do
  exists <- Dir.doesFileExist nativeWhitelist
  case exists of
    False -> return []
    True ->
      withBinaryFile nativeWhitelist ReadMode $ \handle -> do
        json <- LBS.hGetContents handle
        case Json.decode json of
          Nothing -> return []
          Just names -> return names
Dedoig/package.elm-lang.org
backend/NativeWhitelist.hs
bsd-3-clause
750
0
18
212
182
100
82
22
3
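A hypothetical usage sketch for the whitelist reader above; it assumes Name.Name has a Show instance and that native-whitelist.json sits in the working directory.

import qualified NativeWhitelist

printWhitelist :: IO ()
printWhitelist = do
  names <- NativeWhitelist.read   -- yields [] if the file is missing or unparsable
  mapM_ print names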
-- Unlike the rest of xmonad, this file is copyright under the terms of the -- GPL. -- -- Generates man/xmonad.1 from man/xmonad.1.in by filling the list of -- keybindings with values scraped from Config.hs -- -- Uses cabal to grab the xmonad version from xmonad.cabal -- -- Uses pandoc to convert the "xmonad.1.markdown" to "xmonad.1" -- -- Format for the docstrings in Config.hs takes the following form: -- -- -- mod-x %! Frob the whatsit -- -- "Frob the whatsit" will be used as the description for keybinding "mod-x" -- -- If the keybinding name is omitted, it will try to guess from the rest of the -- line. For example: -- -- [ ((modMask .|. shiftMask, xK_Return), spawn "xterm") -- %! Launch an xterm -- -- Here, mod-shift-return will be used as the keybinding name. import Control.Monad import Control.Applicative import Text.Regex.Posix import Data.Char import Data.List import Distribution.PackageDescription.Parse import Distribution.Verbosity import Distribution.Package import Distribution.PackageDescription import Text.PrettyPrint.HughesPJ import Distribution.Text import Text.Pandoc -- works with 1.12.4 releaseDate = "31 December 2012" trim :: String -> String trim = reverse . dropWhile isSpace . reverse . dropWhile isSpace guessKeys line = concat $ intersperse "-" (modifiers ++ [map toLower key]) where modifiers = map (!!1) (line =~ "(mod|shift|control)Mask") (_, _, _, [key]) = line =~ "xK_([_[:alnum:]]+)" :: (String, String, String, [String]) binding :: [String] -> (String, String) binding [ _, bindingLine, "", desc ] = (guessKeys bindingLine, desc) binding [ _, _, keyCombo, desc ] = (keyCombo, desc) allBindings :: String -> [(String, String)] allBindings xs = map (binding . map trim) (xs =~ "(.*)--(.*)%!(.*)") -- FIXME: What escaping should we be doing on these strings? markdownDefn :: (String, String) -> String markdownDefn (key, desc) = key ++ "\n: " ++ desc replace :: Eq a => a -> a -> [a] -> [a] replace x y = map (\a -> if a == x then y else a) -- rawSystem "pandoc" ["--read=markdown","--write=man","man/xmonad.1.markdown"] main = do releaseName <- (show . disp . package . packageDescription) `liftM`readPackageDescription normal "xmonad.cabal" keybindings <- (intercalate "\n\n" . map markdownDefn . allBindings) `liftM` readFile "./src/XMonad/Config.hs" let manHeader = unwords [".TH xmonad 1","\""++releaseDate++"\"",releaseName,"\"xmonad manual\""] parsed <- readMarkdown def . unlines . replace "___KEYBINDINGS___" keybindings . lines <$> readFile "./man/xmonad.1.markdown" Right template <- getDefaultTemplate Nothing "man" writeFile "./man/xmonad.1" . (manHeader ++) . writeMan def{ writerStandalone = True, writerTemplate = template } $ parsed putStrLn "Documentation created: man/xmonad.1" Right template <- getDefaultTemplate Nothing "html" writeFile "./man/xmonad.1.html" . writeHtmlString def { writerVariables = [("include-before" ,"<h1>"++releaseName++"</h1>"++ "<p>Section: xmonad manual (1)<br/>"++ "Updated: "++releaseDate++"</p>"++ "<hr/>")] , writerStandalone = True , writerTemplate = template , writerTableOfContents = True } $ parsed putStrLn "Documentation created: man/xmonad.1.html"
atupal/xmonad-mirror
xmonad/util/GenerateManpage.hs
mit
3,525
0
20
814
743
413
330
58
2
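A small, hypothetical check of the docstring-scraping convention described above, reusing allBindings and guessKeys from the record; the expected pair follows from the regexes used there.

demoBinding :: [(String, String)]
demoBinding = allBindings
  "[ ((modMask .|. shiftMask, xK_Return), spawn \"xterm\") -- %! Launch an xterm"
-- Expected by the convention documented above:
--   [("mod-shift-return", "Launch an xterm")]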
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="ar-SA"> <title>Reveal | ZAP Extension</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>بحث</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
thc202/zap-extensions
addOns/reveal/src/main/javahelp/org/zaproxy/zap/extension/reveal/resources/help_ar_SA/helpset_ar_SA.hs
apache-2.0
967
80
65
158
409
207
202
-1
-1
module PatternIn3 where

sumSquares y
  = let x = 1
    in (case x of
          0 -> 0
          x -> x ^ pow)
       + (sq y)
  where
    sq 0 = 0
    sq x = x ^ pow

sumSquares_1 y
  = let x = 1
    in case x of
         0 -> return 0
         x -> return 1
  where
    sq 0 = 0
    sq x = x ^ pow

pow = 2
kmate/HaRe
old/testing/simplifyExpr/PatternIn3AST.hs
bsd-3-clause
367
0
12
207
146
73
73
17
3
{-# LANGUAGE TemplateHaskell #-}
module T16195 where

import T16195A

main2 :: IO ()
main2 = return ()

main :: IO ()
main = $$foo

main3 :: IO ()
main3 = putStrLn ($$showC $$unitC)
sdiehl/ghc
testsuite/tests/th/T16195.hs
bsd-3-clause
184
0
8
39
70
36
34
-1
-1
module A3 where

data BTree a = Empty | T a (BTree a) (BTree a)
  deriving Show

buildtree :: Ord a => [a] -> BTree a
buildtree [] = Empty
buildtree ((x : xs)) = insert x (buildtree xs)

insert :: Ord a => a -> (BTree a) -> BTree a
insert val (t@(T val Empty Empty)) = T val Empty result
  where result = t
insert val (T tval left right)
  | val > tval = T tval left (insert val right)
  | otherwise = T tval (insert val left) right

newPat_1 = Empty

main :: BTree Int
main = buildtree [3, 1, 2]
kmate/HaRe
old/testing/unfoldAsPatterns/A3AST.hs
bsd-3-clause
515
0
9
136
265
136
129
15
1
{- We expect to get a suggestion to add 'type' keyword
   and enable TypeOperators extension. -}

{-# LANGUAGE TypeOperators #-}

module T11432 ((-.->)(..)) where

newtype (f -.-> g) a = Fn { apFn :: f a -> g a }
ezyang/ghc
testsuite/tests/module/T11432.hs
bsd-3-clause
209
2
7
42
51
31
20
-1
-1
-- Basic Tic Tac Toe game against the AI, for testing import Core import Algo import AI runCycle :: Board -> Board runCycle board = if checkGameOver board then board else makeMove board PO runMove :: Board -> IO (Bool, Player, Board) runMove board = do if checkGameOver board then do return (True, case determineWin board of (Just p) -> p _ -> N, board) else do putStrLn "Board:" print board putStrLn "Enter move coordinates in format (x, y):" coordStr <- getLine let coords = read coordStr if placeIsFull $ getByPosition board coords then do putStrLn "Selected position is already occupied. Please try again.\n" runMove board else do let board' = setByPosition board coords X runMove $ runCycle board' main :: IO () main = do putStrLn "Tic Tac Toe game: You are player X.\n" let board = emptyBoard (_, player, board') <- runMove board putStrLn $ case player of PO -> "Sorry, you lose" PX -> "Congratulations, you win" _ -> "It's a tie" putStrLn "Final board:\n" print board'
quantum-dan/tictactoe-ai
Play.hs
mit
1,254
0
16
448
312
148
164
38
4
module Main ( main ) where import RunDataAssocWekaApriorySimple import DataAssociation import System.Environment import System.Exit import System.IO import Data.Maybe import Control.Monad main :: IO () main = getArgs >>= parse maybeRead = fmap fst . listToMaybe . reads maybeReadInUnit :: String -> Maybe Float maybeReadInUnit s = (maybeRead s :: Maybe Float) >>= f where f x | x <= 1 && x >= 0 = Just x | otherwise = Nothing parse ["-h"] = usage >> exitSuccess parse [fname, msup, mconf] = do let minsup = maybe minsupError MinSupport $ maybeReadInUnit msup let minconf = maybe minconfError MinConfidence $ maybeReadInUnit mconf run fname minsup minconf parse _ = unknownCmd >> usage >> exitFailure unknownCmd = putStrLn "Wrong arguments!" usage = do putStrLn "Usage: mine-rules [-h] file MinSupport MinConfidence" putStrLn " where file is an *.arff nominal data file" putStrLn " MinSupport and MinConfidence must be Float values in [0, 1]" minsupError = boundError "MinSupport" minconfError = boundError "MinConfidence" boundError nme = error $ nme ++ " must be a Float in [0, 1]"
fehu/min-dat--a-priori
core/src/Main.hs
mit
1,197
0
12
287
314
156
158
29
1
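For reference, the bounds parser above behaves as follows (values derived directly from its definition):

-- maybeReadInUnit "0.75" == Just 0.75   -- parses and lies within [0, 1]
-- maybeReadInUnit "1.5"  == Nothing     -- parses but is outside [0, 1]
-- maybeReadInUnit "abc"  == Nothing     -- does not parse as a Float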
module LogicProver.Prover (isValid) where import qualified Data.Map as M import LogicProver.Lang -- The type of the proof tree used for determining the validity of a proposition data ProofTree = Leaf Bool Prop -- used, prop | Branch1 Bool Prop ProofTree -- used, prop, left | Branch2 Bool Prop ProofTree ProofTree -- used, prop, left, right deriving (Show, Eq) getProp :: ProofTree -> Prop getProp (Leaf _ p) = p getProp (Branch1 _ p _) = p getProp (Branch2 _ p _ _) = p getUsed :: ProofTree -> Bool getUsed (Leaf u _) = u getUsed (Branch1 u _ _) = u getUsed (Branch2 u _ _ _) = u -- Return true if the proposition is valid: there is some combination of truth -- values for all of the atomic variables that allow the proposition to be true. isValid :: Prop -> Bool isValid = allClosed . collapseBranches . openBranches . solveProp -- Return true if all variables are consitent isConsistent :: [(String, Bool)] -> Bool isConsistent = all (\(k,v) -> v) -- Takes a list of True for an open branch and False for a closed branch allClosed :: [Bool]-> Bool allClosed = all (\x -> not x) -- Collapse a branch of the proof tree (a list of vars to consistency) into a -- single consistency value. collapseBranches :: [[(String, Bool)]] -> [Bool] collapseBranches l = map isConsistent l -- Create a list where each element in a branch of the proof tree. Each subelement -- is a variable appearing in the branch and its consistency within the branch. openBranches :: ProofTree -> [[(String, Bool)]] openBranches t = map isOpenBranch $ getAtoms t -- Create an association list of variable name to consitency isOpenBranch :: M.Map String [Prop] -> [(String, Bool)] isOpenBranch m = map gather $ M.toList $ m where gather :: (String, [Prop]) -> (String, Bool) gather (k, l) = (k, all (\x -> x == head l) l) -- Create a dictionary of variable names to atomic presence in the prooftree getAtoms :: ProofTree -> [ M.Map String [Prop] ] getAtoms t = getAtoms' t M.empty where getAtoms' t m = if isAtom t -- If the current node is an atom, then add it to the dictionary then let var = getVar $ getProp t in case M.lookup var m of -- If it does not exist in the dictionary, add it Nothing -> case t of Leaf _ p -> (M.insert var [p] m) : [] Branch1 _ p l -> getAtoms' l $ M.insert var [p] m Branch2 _ p l r -> (getAtoms' l $ M.insert var [p] m) ++ (getAtoms' r $ M.insert var [p] m) -- Otherwise, append it to the current entry for the variable Just past -> case t of Leaf _ p -> (M.insert var (p:past) m) : [] Branch1 _ p l -> getAtoms' l $ M.insert var (p:past) m Branch2 _ p l r -> (getAtoms' l $ M.insert var (p:past) m) ++ (getAtoms' r $ M.insert var (p:past) m) -- If the current node is not atomic, skip the entry and continue else case t of Leaf _ p -> [m] Branch1 _ p l -> getAtoms' l m Branch2 _ p l r -> (getAtoms' l m) ++ (getAtoms' r m) -- Apply a function to each leaf of a proof tree morphLeaves :: (Prop -> ProofTree -> ProofTree) -> Prop -> ProofTree -> ProofTree morphLeaves f p t = case t of Leaf _ _ -> f p t Branch1 u p' l -> Branch1 u p' (morphLeaves f p l) Branch2 u p' l r -> Branch2 u p' (morphLeaves f p l) (morphLeaves f p r) -- 1) Take a proof tree -- 2) Traverse the tree looking for the highest node that has not had a rule applied to it -- 3) Construct the proof tree resulting from applying the found rule to the given tree -- 4) Return the tree step :: ProofTree -> ProofTree step t = case getUsed t of -- This branch has been used, so step its children if applicable True -> case t of Leaf _ _ -> t Branch1 _ p l -> Branch1 True p (step l) 
Branch2 _ p l r -> Branch2 True p (step l) (step r) -- Otherwise -- If atomic, there are no rules to apply, so mark it as hit -- If not, apply the rule associated with its proposition False -> case isAtom t of True -> setUsed t False -> case t of Leaf u p -> setUsed $ morphLeaves propToTree p t Branch1 u p l -> setUsed $ morphLeaves propToTree p t Branch2 u p l r -> setUsed $ morphLeaves propToTree p t -- Turn a proposition into a prooftree and apply all rules to it solveProp :: Prop -> ProofTree solveProp = solveTree . initTree -- Given a proof tree, proceed to iteratively apply all rules to it until there -- are no more rules to apply solveTree :: ProofTree -> ProofTree solveTree t = case treeSolved t of True -> t False -> solveTree $ step t -- Returns true if the tree has been fully applied, false otherwise treeSolved :: ProofTree -> Bool treeSolved t = case t of Leaf u _ -> u Branch1 u _ l -> u && treeSolved l Branch2 u _ l r -> u && treeSolved l && treeSolved r -- Given a proposition and a prooftree, apply the rule of the proposition on a -- new proof tree that has as its root the given proposition propToTree :: Prop -> ProofTree -> ProofTree -- (not (not P)) propToTree p'@(PNegate (PNegate _)) (Leaf u p) = Branch1 u p $ Leaf False (collapseNegations p') -- P and Q propToTree (PAnd p1 p2) (Leaf u p) = Branch1 u p $ Branch1 False p1 $ Leaf False p2 -- ~(P and Q) propToTree (PNegate (PAnd p1 p2)) l = propToTree (POr (PNegate p1) (PNegate p2)) l -- (P or Q) propToTree (POr p1 p2) (Leaf u p) = Branch2 u p (Leaf False p1) (Leaf False p2) -- ~(P or Q) propToTree (PNegate (POr p1 p2)) l = propToTree (PAnd (PNegate p1) (PNegate p2)) l -- P implies Q propToTree (PCond p1 p2) l = propToTree (POr (PNegate p1) p2) l -- ~(P implies Q) propToTree (PNegate (PCond p1 p2)) l = propToTree (PAnd p1 (PNegate p2)) l -- Collapse all stacked negations collapseNegations :: Prop -> Prop collapseNegations (PNegate (PNegate p)) = collapseNegations p collapseNegations p = p -- Return true is there is no rule to apply on the given node isAtom :: ProofTree -> Bool isAtom t = case getProp t of PVar _ -> True PNegate (PVar _) -> True _ -> False -- Given a propsition, create a proof tree by negating the proposition. This is -- to facilitate a proof by contradication of the validity of the proposition. initTree :: Prop -> ProofTree initTree p = Leaf False (PNegate p) -- Set the `used` flag on a prooftree node setUsed :: ProofTree -> ProofTree setUsed (Leaf _ p) = Leaf True p setUsed (Branch1 _ p b) = Branch1 True p b setUsed (Branch2 _ p b1 b2) = Branch2 True p b1 b2
igorii/LogicProver
LogicProver/Prover.hs
mit
6,837
0
21
1,919
2,015
1,032
983
103
9
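A hypothetical usage sketch for the tableau prover above, using the Prop constructors that appear in the record (PVar, PNegate, POr):

exampleValid :: Bool
exampleValid = isValid (POr (PVar "p") (PNegate (PVar "p")))
-- Law of excluded middle: negating it closes every branch, so this should be True.

exampleInvalid :: Bool
exampleInvalid = isValid (PVar "p")
-- A bare atom leaves an open branch, so this should be False.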
-- Speed Control
-- http://www.codewars.com/kata/56484848ba95170a8000004d/
module Codewars.G964.Gps1 where

gps :: Int -> [Double] -> Int
gps _ []  = 0
gps _ [x] = 0
gps s xs  = floor . maximum $ zipWith (\a b -> (b-a)*60*60 / fromIntegral s) xs (tail xs)
gafiatulin/codewars
src/7 kyu/Gps1.hs
mit
260
0
13
51
113
61
52
5
1
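Worked example for gps above: with one reading every 15 seconds and successive distances 0.0 and 0.25 km, the speed over the single interval is 0.25 * 3600 / 15 = 60, so:

-- gps 15 [0.0, 0.25] == 60
-- gps 20 []          == 0      -- fewer than two readings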
{-# LANGUAGE CPP #-} module Stackage.Config where import Control.Monad (when) import Control.Monad.Trans.Writer (execWriter, tell) import qualified Data.Map as Map import Data.Set (fromList, singleton) import Distribution.Text (simpleParse) import Stackage.Types -- | Packages which are shipped with GHC but are not included in the -- Haskell Platform list of core packages. defaultExtraCore :: GhcMajorVersion -> Set PackageName defaultExtraCore _ = fromList $ map PackageName $ words "binary Win32" -- | Test suites which are expected to fail for some reason. The test suite -- will still be run and logs kept, but a failure will not indicate an -- error in our package combination. defaultExpectedFailures :: GhcMajorVersion -> Set PackageName defaultExpectedFailures ghcVer = execWriter $ do -- Requires an old version of WAI and Warp for tests add "HTTP" -- text and setenv have recursive dependencies in their tests, which -- cabal can't (yet) handle add "text" add "setenv" -- The version of GLUT included with the HP does not generate -- documentation correctly. add "GLUT" -- https://github.com/bos/statistics/issues/42 add "statistics" -- https://github.com/kazu-yamamoto/simple-sendfile/pull/10 add "simple-sendfile" -- http://hackage.haskell.org/trac/hackage/ticket/954 add "diagrams" -- https://github.com/fpco/stackage/issues/24 add "unix-time" -- With transformers 0.3, it doesn't provide any modules add "transformers-compat" -- Tests require shell script and are incompatible with sandboxed package -- databases add "HTF" -- https://github.com/simonmar/monad-par/issues/28 add "monad-par" -- Unfortunately network failures seem to happen haphazardly add "network" -- https://github.com/ekmett/hyphenation/issues/1 add "hyphenation" -- Test suite takes too long to run on some systems add "punycode" -- http://hub.darcs.net/stepcut/happstack/issue/1 add "happstack-server" -- Requires a Facebook app. add "fb" -- https://github.com/tibbe/hashable/issues/64 add "hashable" -- https://github.com/vincenthz/language-java/issues/10 add "language-java" add "threads" add "crypto-conduit" add "pandoc" add "language-ecmascript" add "hspec" add "alex" -- https://github.com/basvandijk/concurrent-extra/issues/ add "concurrent-extra" -- https://github.com/rrnewton/haskell-lockfree-queue/issues/7 add "abstract-deque" -- https://github.com/skogsbaer/xmlgen/issues/2 add "xmlgen" -- Something very strange going on with the test suite, I can't figure -- out how to fix it add "bson" -- Requires a locally running PostgreSQL server with appropriate users add "postgresql-simple" -- Missing files add "websockets" -- Some kind of Cabal bug when trying to run tests add "thyme" when (ghcVer < GhcMajorVersion 7 6) $ do -- https://github.com/haskell-suite/haskell-names/issues/39 add "haskell-names" add "shake" -- https://github.com/jgm/pandoc-citeproc/issues/5 add "pandoc-citeproc" -- Problems with doctest and sandboxing add "warp" add "wai-logger" -- https://github.com/fpco/stackage/issues/163 add "hTalos" add "seqloc" -- FIXME the test suite fails fairly regularly in builds, though I haven't -- discovered why yet add "crypto-numbers" where add = tell . singleton . PackageName -- | List of packages for our stable Hackage. All dependencies will be -- included as well. Please indicate who will be maintaining the package -- via comments. 
defaultStablePackages :: GhcMajorVersion -> Map PackageName (VersionRange, Maintainer) defaultStablePackages ghcVer = unPackageMap $ execWriter $ do mapM_ (add "[email protected]") $ words =<< [ "yesod yesod-newsfeed yesod-sitemap yesod-static yesod-test yesod-bin" , "markdown filesystem-conduit mime-mail-ses" , "persistent persistent-template persistent-sqlite" , "network-conduit-tls yackage warp-tls keter" , "shakespeare-text process-conduit stm-conduit" , "classy-prelude-yesod yesod-fay yesod-eventsource wai-websockets" , "random-shuffle safe-failure hackage-proxy hebrew-time" ] when (ghcVer >= GhcMajorVersion 7 6) $ add "[email protected]" "mega-sdist" mapM_ (add "FP Complete <[email protected]>") $ words =<< [ "web-fpco th-expand-syns configurator compdata smtLib unification-fd" , "fixed-list indents language-c pretty-class" , "aws yesod-auth-oauth csv-conduit cassava" , "async shelly thyme" , "hxt hxt-relaxng dimensional" , "cairo diagrams-cairo" , "persistent-mongoDB" , "threepenny-gui base16-bytestring convertible" ] when (ghcVer < GhcMajorVersion 7 6) $ do addRange "FP Complete <[email protected]>" "hxt" "<= 9.3.0.1" addRange "FP Complete <[email protected]>" "shelly" "<= 1.0" when (ghcVer >= GhcMajorVersion 7 6) $ do add "FP Complete <[email protected]>" "repa-devil" addRange "FP Complete <[email protected]>" "kure" "<= 2.4.10" mapM_ (add "Neil Mitchell") $ words "hlint hoogle shake derive" mapM_ (add "Alan Zimmerman") $ words "hjsmin language-javascript" mapM_ (add "Jasper Van der Jeugt") $ words "blaze-html blaze-markup stylish-haskell" mapM_ (add "Antoine Latter") $ words "uuid byteorder" mapM_ (add "Stefan Wehr <[email protected]>") $ words "HTF hscurses xmlgen stm-stats" mapM_ (add "Bart Massey <[email protected]>") $ words "parseargs" mapM_ (add "Vincent Hanquez") $ words =<< [ "asn1-data bytedump certificate cipher-aes cipher-rc4 connection" , "cprng-aes cpu crypto-pubkey-types crypto-random-api cryptocipher" , "cryptohash hit language-java libgit pem siphash socks tls" , "tls-debug tls-extra vhd" ] addRange "Vincent Hanquez" "language-java" "< 0.2.5" #if !defined(mingw32_HOST_OS) && !defined(__MINGW32__) -- Does not compile on Windows mapM_ (add "Vincent Hanquez") $ words "udbus xenstore" #endif mapM_ (add "Alberto G. 
Corona <[email protected]>") $ words "RefSerialize TCache Workflow MFlow" mapM_ (add "Edward Kmett <[email protected]>") $ words =<< [ "ad adjunctions bifunctors bound categories charset comonad comonad-transformers" , "comonads-fd comonad-extras compressed concurrent-supply constraints contravariant" , "distributive either eq free groupoids heaps hyphenation" , "integration intervals kan-extensions lca lens linear monadic-arrays machines" , "mtl profunctors profunctor-extras reducers reflection" , "semigroups semigroupoids semigroupoid-extras speculation tagged void" , "graphs monad-products monad-st wl-pprint-extras wl-pprint-terminfo" , "numeric-extras parsers pointed prelude-extras recursion-schemes reducers" , "streams syb-extras vector-instances" ] mapM_ (add "Simon Hengel <[email protected]>") $ words "hspec doctest base-compat" mapM_ (add "Mario Blazevic <[email protected]>") $ words "monad-parallel monad-coroutine" -- https://github.com/blamario/monoid-subclasses/issues/3 when (ghcVer >= GhcMajorVersion 7 6) $ do mapM_ (add "Mario Blazevic <[email protected]>") $ words "incremental-parser monoid-subclasses" mapM_ (add "Brent Yorgey <[email protected]>") $ words =<< [ "monoid-extras dual-tree vector-space-points active force-layout" , "diagrams diagrams-contrib diagrams-core diagrams-lib diagrams-svg" , "diagrams-postscript diagrams-builder diagrams-haddock haxr" , "BlogLiterately BlogLiterately-diagrams" , "MonadRandom" ] mapM_ (add "Patrick Brisbin") $ words "gravatar" mapM_ (add "Felipe Lessa <[email protected]>") $ words "esqueleto fb fb-persistent yesod-fb yesod-auth-fb" mapM_ (add "Alexander Altman <[email protected]>") $ words "base-unicode-symbols containers-unicode-symbols" mapM_ (add "Ryan Newton <[email protected]>") $ words "accelerate" when (ghcVer < GhcMajorVersion 7 6) $ do addRange "Ryan Newton <[email protected]>" "accelerate" "< 0.14" addRange "Ryan Newton <[email protected]>" "fclabels" "< 2.0" mapM_ (add "Dan Burton <[email protected]>") $ words =<< [ "basic-prelude composition io-memoize numbers rev-state runmemo" , "tardis" ] mapM_ (add "Daniel Díaz <[email protected]>") $ words "HaTeX" mapM_ (add "Adam Bergmark <[email protected]>") $ words "fay fay-base fay-dom fay-jquery fay-text fay-uri snaplet-fay" mapM_ (add "Boris Lykah <[email protected]>") $ words "groundhog groundhog-th groundhog-sqlite groundhog-postgresql groundhog-mysql" mapM_ (add "Janne Hellsten <[email protected]>") $ words "sqlite-simple" mapM_ (add "Michal J. Gajda") $ words "iterable Octree FenwickTree hPDB" when (ghcVer >= GhcMajorVersion 7 6) $ do mapM_ (add "Michal J. 
Gajda") $ words "hPDB-examples" mapM_ (add "Roman Cheplyaka <[email protected]>") $ words =<< [ "smallcheck tasty tasty-smallcheck tasty-quickcheck tasty-hunit tasty-golden" , "traverse-with-class regex-applicative time-lens" , "haskell-names haskell-packages hse-cpp" ] -- https://github.com/fpco/stackage/issues/160 when (ghcVer >= GhcMajorVersion 7 6) $ do mapM_ (add "Ketil Malde") $ words =<< [ "biocore biofasta biofastq biosff" , "blastxml bioace biophd" , "biopsl samtools" , "seqloc bioalign BlastHTTP" , "RNAFold" , "parsestar hTalos" -- The following have out-of-date dependencies currently -- biostockholm memexml RNAwolf -- , "Biobase BiobaseDotP BiobaseFR3D BiobaseInfernal BiobaseMAF" -- , "BiobaseTrainingData BiobaseTurner BiobaseXNA BiobaseVienna" -- , "BiobaseTypes BiobaseFasta" -- MC-Fold-DP ] -- https://github.com/fpco/stackage/issues/163 addRange "Michael Snoyman" "biophd" "< 0.0.6 || > 0.0.6" -- https://github.com/fpco/stackage/issues/46 addRange "Michael Snoyman" "QuickCheck" "< 2.6" -- https://github.com/fpco/stackage/issues/68 addRange "Michael Snoyman" "criterion" "< 0.8" -- https://github.com/fpco/stackage/issues/72 addRange "Michael Snoyman" "HaXml" "< 1.24" -- Due to binary package dep addRange "Michael Snoyman" "statistics" "< 0.10.4" -- Newest hxt requires network 2.4 or newest addRange "Michael Snoyman" "hxt" "< 9.3.1" addRange "Michael Snoyman" "network" "< 2.4" -- https://github.com/fpco/stackage/issues/153 addRange "Michael Snoyman" "text" "< 1.0" -- https://github.com/fpco/stackage/issues/156 addRange "Michael Snoyman" "hspec" "< 1.8" addRange "Michael Snoyman" "hspec-expectations" "< 0.4" -- https://github.com/fpco/stackage/issues/159 addRange "Michael Snoyman" "pretty-show" "< 1.6.2" -- https://github.com/fpco/stackage/issues/161 addRange "Michael Snoyman" "RSA" "< 1.3" -- https://github.com/fpco/stackage/issues/168 addRange "Michael Snoyman" "crypto-api" "< 0.13" -- https://github.com/fpco/stackage/issues/170 addRange "Michael Snoyman" "aeson" "< 0.7" -- https://github.com/fpco/stackage/issues/171 addRange "Michael Snoyman" "pandoc-citeproc" "< 0.3" -- https://github.com/fpco/stackage/issues/172 addRange "Michael Snoyman" "attoparsec" "< 0.11" addRange "Michael Snoyman" "fay" "< 0.19" addRange "Michael Snoyman" "fay-base" "< 0.19" addRange "Michael Snoyman" "fay-text" "< 0.3.0.1" -- binary package dep issue, figure out more fine-grained workaround addRange "Michael Snoyman" "SHA" "< 1.6.3" addRange "Michael Snoyman" "hashable" "< 1.2" -- Requires binary 0.7 addRange "FP Complete <[email protected]>" "bson" "< 0.2.3" -- Version 0.15.3 requires a newer template-haskell addRange "FP Complete <[email protected]>" "language-ecmascript" "< 0.15.3" -- unknown symbol `utf8_table4' addRange "Michael Snoyman" "regex-pcre-builtin" "< 0.94.4.6.8.31" where add maintainer package = addRange maintainer package "-any" addRange maintainer package range = case simpleParse range of Nothing -> error $ "Invalid range " ++ show range ++ " for " ++ package Just range' -> tell $ PackageMap $ Map.singleton (PackageName package) (range', Maintainer maintainer)
sinelaw/stackage
Stackage/Config.hs
mit
13,098
0
15
2,970
1,764
830
934
192
2
{-# LANGUAGE RecordWildCards #-} -- | Defines the XglImporter. {- Importer notes: - Ignores ambient and spherermap lights. -} module Codec.Soten.Importer.XglImporter ( XglImporter(..) -- Only for testing purpose. , transformLights , transformMaterials , transformToScene ) where import Data.List ( intercalate ) import Data.Maybe ( catMaybes ) import qualified Data.ByteString.Lazy as ByteString ( readFile ) import Data.ByteString.Lazy.Char8 ( unpack ) import qualified Data.Vector as V ( fromList , length , replicate ) import Codec.Compression.Zlib ( decompress ) import Control.Lens ((&), (^.), (.~)) import Linear ( V3(..) , cross , dot , normalize ) import Linear.Matrix ( M44 , (!!*) , identity , mkTransformationMat ) import Codec.Soten.BaseImporter ( BaseImporter(..) , searchFileHeaderForToken ) import Codec.Soten.Data.XglData as X ( Model(..) , LightingTag(..) , Material(..) , Mesh(..) , Face(..) , Transform(..) , Vertex(..) ) import qualified Codec.Soten.Parser.XglParser as Parser ( getModel ) import Codec.Soten.Scene.Light ( Light(..) , LightSource(LightDirectional) , newLight , lightType , lightDirection , lightColorDiffuse , lightColorSpecular ) import Codec.Soten.Scene.Material as S ( Material(..) , MaterialProperty(..) , newMaterial , addProperty ) import Codec.Soten.Scene.Mesh as S ( Mesh(..) , PrimitiveType(..) , Face(..) , newMesh , meshPrimitiveTypes , meshNormals , meshVertices , meshFaces , meshMaterialIndex ) import Codec.Soten.Scene ( Scene(..) , newScene , sceneLights , sceneMaterials ) import Codec.Soten.Util ( CheckType(..) , DeadlyImporterError(..) , throw , hasExtention , squareLength ) -- | Implementation of the XGL/ZGL importer. data XglImporter = XglImporter deriving Show instance BaseImporter XglImporter where canImport _ filePath CheckExtension = return $ hasExtention filePath [".xgl", ".zgl"] canImport _ filePath CheckHeader = searchFileHeaderForToken filePath ["<WORLD>"] readModel _ = internalReadFile -- | Reads file content and parsers it into the 'Scene'. Returns error messages -- as 'String's. internalReadFile :: FilePath -> IO (Either String Scene) internalReadFile filePath = Right <$> transformToScene <$> parseModelFile filePath -- | Parses model file into its internal representation. Decodess zlib files if -- needed. parseModelFile :: FilePath -> IO Model parseModelFile filePath = if hasExtention filePath [".zgl"] then do fileContent <- ByteString.readFile filePath Parser.getModel (unpack $ decompress fileContent) else readFile filePath >>= Parser.getModel -- | Transforms internal model representation into the 'Scene' object. transformToScene :: Model -> Scene transformToScene Model{..} = newScene & sceneMaterials .~ V.fromList (transformMaterials materials) -- & sceneMeshes .~ V.fromList (transformMeshes meshMaterials) -- & sceneRootNode .~ Just node & sceneLights .~ V.fromList (transformLights modelLightingTags) where materials = intercalate [] $ map meshMaterials modelMeshes -- | Transforms direction light into Light object. transformLights :: [LightingTag] -> [Light] transformLights = foldl tagToLight [] where tagToLight :: [Light] -> LightingTag -> [Light] tagToLight acc LightingTagDirectional{..} = light : acc where light = newLight & lightType .~ LightDirectional & lightDirection .~ lightingTagDirectionalDirection & lightColorDiffuse .~ lightingTagDirectionalDiffuse & lightColorSpecular .~ lightingTagDirectionalSpecular tagToLight acc _ = acc -- | Transforms internal material into scene's ones. 
transformMaterials :: [X.Material] -> [S.Material] transformMaterials = map sceneMat where -- TODO: Material id is missing! sceneMat X.Material{..} = foldl addProperty newMaterial (requiredProperties ++ optionalProperties) where requiredProperties = [ MaterialName "DefaultMaterial" , MaterialColorAmbient materialAmbient , MaterialColorDiffuse materialDiffuse ] optionalProperties = catMaybes [ fmap MaterialColorSpecular materialSpecular , fmap MaterialColorEmissive materialEmiss , fmap MaterialColorShininess materialShine , fmap MaterialColorOpacity materialAlpha ] -- | Calculates matrix of transformation. transformation :: Transform -> M44 Float transformation Transform{..} | squareLength transForward < 1e-4 = identity | squareLength transUp < 1e-4 = identity | up `dot` forward > 1e-4 = identity | otherwise = mkTransformationMat scaledRotMat transForward where forward = normalize transForward up = normalize transUp right = forward `cross` up rotateMatrix = V3 right up forward scaledRotMat = maybe rotateMatrix (rotateMatrix !!* ) transScale -- | Transforms internal mesh structure into global one. transformMeshes :: [X.Mesh] -> [S.Mesh] transformMeshes = map sceneMesh where sceneMesh X.Mesh{..} = newMesh & S.meshNormals .~ normals & meshVertices .~ vertices & S.meshFaces .~ V.fromList (map mkFace meshFaces) & meshMaterialIndex .~ Just 0 -- TODO: Retrive index from mat list. & meshPrimitiveTypes .~ V.replicate (V.length normals `div` 3) PrimitiveTriangle where vertices = V.fromList meshPositions normals = V.fromList meshNormals mkFace :: X.Face -> S.Face mkFace (X.Face _ v1 v2 v3) = S.Face (V.fromList [ vertexPosition v1 , vertexPosition v2 , vertexPosition v3 ])
triplepointfive/soten
src/Codec/Soten/Importer/XglImporter.hs
mit
7,173
0
17
2,658
1,320
755
565
154
2
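A hypothetical check of transformLights above: one directional tag yields one scene Light, and other tags are ignored. The field names come from the record; the V3 values are made up, the vector type is assumed to be V3 Float, and the sketch assumes these are the constructor's only fields and that the record's module and Linear are in scope.

demoLights :: [Light]
demoLights = transformLights
  [ LightingTagDirectional
      { lightingTagDirectionalDirection = V3 0 (-1) 0
      , lightingTagDirectionalDiffuse   = V3 1 1 1
      , lightingTagDirectionalSpecular  = V3 1 1 1
      } ]
-- length demoLights == 1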
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}

module Betfair.StreamingAPI.Requests.AuthenticationMessage
  ( AuthenticationMessage(..)
  , defaultAuthenticationMessage
  ) where

import Data.Aeson.TH (Options (omitNothingFields), defaultOptions, deriveJSON)
import Protolude
import Text.PrettyPrint.GenericPretty

import Betfair.StreamingAPI.API.AddId

data AuthenticationMessage = AuthenticationMessage
  { op :: Text
  , id :: Int -- Client generated unique id to link request with response (like json rpc)
  , appKey :: Text
  , session :: Text
  } deriving (Eq, Read, Show, Generic, Pretty)

$(deriveJSON defaultOptions {omitNothingFields = True} ''AuthenticationMessage)

defaultAuthenticationMessage :: AuthenticationMessage
defaultAuthenticationMessage =
  AuthenticationMessage "authentication" 0 undefined undefined

instance AddId AuthenticationMessage where
  addId o i = o {id = i}
joe9/streaming-betfair-api
src/Betfair/StreamingAPI/Requests/AuthenticationMessage.hs
mit
1,182
0
9
238
188
115
73
27
1
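A hypothetical construction of a concrete request from the default above; addId is the class method defined in the record, the key/session strings are placeholders, and OverloadedStrings (enabled in the record) is assumed for the Text literals.

myAuth :: AuthenticationMessage
myAuth = addId
  (defaultAuthenticationMessage { appKey = "my-app-key", session = "my-session-token" })
  1   -- client-generated id linking request and response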
module GHCJS.DOM.WebGLCompressedTextureATC ( ) where
manyoo/ghcjs-dom
ghcjs-dom-webkit/src/GHCJS/DOM/WebGLCompressedTextureATC.hs
mit
55
0
3
7
10
7
3
1
0
import XMonad -- LAYOUTS import XMonad.Layout.Spacing import XMonad.Layout.Fullscreen import XMonad.Layout.NoBorders import XMonad.Layout.PerWorkspace import XMonad.Layout.SimplestFloat import XMonad.Layout.ResizableTile import XMonad.Layout.Circle import XMonad.Layout.Grid import XMonad.Layout.IM -- WINDOW RULES import XMonad.ManageHook -- KEYBOARD & MOUSE CONFIG import XMonad.Util.EZConfig import XMonad.Actions.FloatKeys import Graphics.X11.ExtraTypes.XF86 -- STATUS BAR import XMonad.Hooks.DynamicLog hiding (xmobar, xmobarPP, xmobarColor, sjanssenPP, byorgeyPP) import XMonad.Hooks.ManageDocks import XMonad.Hooks.ManageHelpers import XMonad.Hooks.SetWMName import XMonad.Hooks.UrgencyHook import XMonad.Util.Dmenu --import XMonad.Hooks.FadeInactive import XMonad.Hooks.EwmhDesktops hiding (fullscreenEventHook) import System.IO (hPutStrLn) --import XMonad.Operations import XMonad.Util.Run (spawnPipe) import XMonad.Actions.CycleWS -- nextWS, prevWS import Data.List -- clickable workspaces import Data.Ratio ((%)) -- clickable workspaces -------------------------------------------------------------------------------------------------------------------- -- DECLARE WORKSPACES RULES -------------------------------------------------------------------------------------------------------------------- myLayout = onWorkspace (myWorkspaces !! 0) (avoidStruts (tiledSpace ||| tiled) ||| fullTile) $ onWorkspace (myWorkspaces !! 1) (avoidStruts (noBorders(tiledSpace ||| fullTile)) ||| fullScreen) $ onWorkspace (myWorkspaces !! 2) (avoidStruts simplestFloat) $ onWorkspace (myWorkspaces !! 6) (avoidStruts pidginLayout) -- --$ onWorkspace (myWorkspaces !! 6) (pidginLayout) $ avoidStruts ( tiledSpace ||| tiled ||| fullTile ) where tiled = spacing 5 $ ResizableTall nmaster delta ratio [] tiledSpace = spacing 60 $ ResizableTall nmaster delta ratio [] fullScreen = noBorders(fullscreenFull Full) fullTile = ResizableTall nmaster delta ratio [] fullTiled = ResizableTall nmaster delta ratio [] borderlessTile = noBorders(fullTile) -- Default number of windows in master pane nmaster = 1 -- Percent of the screen to increment when resizing delta = 5/100 -- Default proportion of the screen taken up by main pane ratio = toRational (2/(1 + sqrt 5 :: Double)) gridLayout = spacing 8 $ Grid pidginRoster = And (ClassName "Pidgin") (Role "buddy_list") pidginLayout = withIM (1%9) pidginRoster gridLayout -------------------------------------------------------------------------------------------------------------------- -- WORKSPACE DEFINITIONS -------------------------------------------------------------------------------------------------------------------- myWorkspaces = clickable $ ["term" ,"web" ,"float" ,"docs" ,"tunes" ,"mail" ,"pidgin"] where clickable l = [ "^ca(1,xdotool key alt+" ++ show (n) ++ ")" ++ ws ++ "^ca()" | (i,ws) <- zip [1..] l, let n = i ] -------------------------------------------------------------------------------------------------------------------- -- APPLICATION SPECIFIC RULES -------------------------------------------------------------------------------------------------------------------- myManageHook = composeAll [ resource =? "dmenu" --> doFloat , resource =? "skype" --> doFloat , resource =? "mplayer" --> doFloat , resource =? "feh" --> doFloat , resource =? "google-chrome"--> doShift (myWorkspaces !! 1) , className =? "Pidgin"--> doShift (myWorkspaces !! 6) , resource =? "lowriter"--> doShift (myWorkspaces !! 3) , resource =? "localc"--> doShift (myWorkspaces !! 3) , resource =? 
"loimpress"--> doShift (myWorkspaces !! 3) , resource =? "zathura"--> doShift (myWorkspaces !! 3) , resource =? "ario"--> doShift (myWorkspaces !! 4) , resource =? "ncmpcpp"--> doShift (myWorkspaces !! 4) , resource =? "alsamixer"--> doShift (myWorkspaces !! 4) , resource =? "mutt"--> doShift (myWorkspaces !! 5) , resource =? "irssi"--> doShift (myWorkspaces !! 5) , resource =? "centerim"--> doShift (myWorkspaces !! 5) , manageDocks] newManageHook = myManageHook <+> manageHook defaultConfig -------------------------------------------------------------------------------------------------------------------- -- DZEN LOG RULES for workspace names, layout image, current program title -------------------------------------------------------------------------------------------------------------------- myLogHook h = dynamicLogWithPP ( defaultPP { ppCurrent = dzenColor green0 background . pad , ppVisible = dzenColor red0 background . pad , ppHidden = dzenColor red0 background . pad , ppHiddenNoWindows = dzenColor yellow0 background. pad , ppWsSep = "" , ppSep = " " , ppLayout = wrap "^ca(1,xdotool key alt+space)" "^ca()" . dzenColor white1 background . (\x -> case x of "Full" -> "^i(~/.xmonad/dzen2/layout_full.xbm)" "Spacing 5 ResizableTall" -> "^i(~/.xmonad/dzen2/layout_tall.xbm)" "ResizableTall" -> "^i(~/.xmonad/dzen2/layout_tall.xbm)" "SimplestFloat" -> "^i(~/.xmonad/dzen2/mouse_01.xbm)" "Circle" -> "^i(~/.xmonad/dzen2/full.xbm)" _ -> "^i(~/.xmonad/dzen2/grid.xbm)" ) -- , ppTitle = wrap "^ca(1,xdotool key alt+shift+x)^fg(#222222)^i(~/.xmonad/dzen2/corner_left.xbm)^bg(#222222)^fg(#AADB0F)^fn(fkp)x^fn()" "^fg(#222222)^i(~/.xmonad/dzen2/corner_right.xbm)^ca()" . dzenColor white0 "#222222" . shorten 40 . pad , ppOrder = \(ws:l:t:_) -> [ws,l] , ppOutput = hPutStrLn h } ) -------------------------------------------------------------------------------------------------------------------- -- Spawn pipes and menus on boot, set default settings -------------------------------------------------------------------------------------------------------------------- myXmonadBar = "dzen2 -x '0' -y '0' -h '14' -w '1500' -ta 'l' -fg '"++foreground++"' -bg '"++background++"' -fn "++myFont myStatusBar = "conky -qc ~/.xmonad/.conky_dzen | dzen2 -x '1200' -w '666' -h '14' -ta 'r' -bg '"++background++"' -fg '"++foreground++"' -y '0' -fn "++myFont --myConky = "conky -c ~/conkyrc" --myStartMenu = "~/.xmonad/start ~/.xmonad/start_apps" main = do dzenLeftBar <- spawnPipe myXmonadBar dzenRightBar <- spawnPipe myStatusBar xmproc <- spawnPipe "GTK2_RC_FILES=~/.gtkdocky /usr/bin/docky" xmproc <- spawnPipe "tint2 -c ~/.config/tint2/xmonad.tint2rc" -- conky <- spawn myConky -- dzenStartMenu <- spawnPipe myStartMenu xmonad $ ewmh defaultConfig { terminal = myTerminal , borderWidth = 1 , normalBorderColor = yellow0 , focusedBorderColor = green0 , modMask = mod1Mask , layoutHook = myLayout , workspaces = myWorkspaces , manageHook = newManageHook , handleEventHook = fullscreenEventHook <+> docksEventHook , startupHook = setWMName "LG3D" , logHook = myLogHook dzenLeftBar -- >> fadeInactiveLogHook 0xdddddddd } -------------------------------------------------------------------------------------------------------------------- -- Keyboard options -------------------------------------------------------------------------------------------------------------------- `additionalKeys` [((mod1Mask .|. shiftMask , xK_b), spawn "chromium") ,((mod1Mask , xK_b), spawn "dwb") ,((mod1Mask .|. 
shiftMask , xK_n), spawn "xterm -fn '-*-gohufont-medium-r-normal-*-12-*-*-*-*-*-*-*' -fb '-*-gohufont-medium-r-normal-*-12-*-*-*-*-*-*-*' -fi '-*-gohufont-medium-r-normal-*-12-*-*-*-*-*-*-*'") ,((mod1Mask .|. shiftMask , xK_t), spawn "xterm -e tmux") ,((mod4Mask , xK_z), spawn "zathura") ,((mod4Mask , xK_w), spawn "lowriter") ,((mod4Mask , xK_c), spawn "localc") ,((mod4Mask , xK_m), spawn "xterm -title mutt -name mutt -e muttb") ,((mod4Mask , xK_i), spawn "xterm -title irssi -name irssi -e irssi") ,((mod4Mask , xK_n), spawn "xterm -title ncmpcpp -name ncmpcpp -e ncmpcpp") ,((mod4Mask , xK_a), spawn "xterm -title alsamixer -name alsamixer -e alsamixer") ,((mod4Mask , xK_M), spawn "xterm -title centerim -name centerim -e centerim") ,((mod1Mask , xK_r), spawn "~/scripts/lens") ,((mod1Mask .|. shiftMask , xK_r), spawn "~/scripts/dmenu/spotlight") ,((mod1Mask , xK_q), spawn "killall dzen2; killall conky; cd ~/.xmonad; ghc -threaded xmonad.hs; mv xmonad xmonad-x86_64-linux; xmonad --restart" ) ,((mod1Mask .|. shiftMask , xK_i), spawn "xcalib -invert -alter") ,((mod1Mask .|. shiftMask , xK_x), kill) ,((mod1Mask .|. shiftMask , xK_c), return()) ,((mod1Mask , xK_p), moveTo Prev NonEmptyWS) ,((mod1Mask , xK_n), moveTo Next NonEmptyWS) ,((mod1Mask , xK_c), moveTo Next EmptyWS) ,((mod1Mask .|. shiftMask , xK_l), sendMessage MirrorShrink) ,((mod1Mask .|. shiftMask , xK_h), sendMessage MirrorExpand) ,((mod1Mask , xK_a), withFocused (keysMoveWindow (-20,0))) ,((mod1Mask , xK_d), withFocused (keysMoveWindow (0,-20))) ,((mod1Mask , xK_s), withFocused (keysMoveWindow (0,20))) ,((mod1Mask , xK_f), withFocused (keysMoveWindow (20,0))) ,((mod1Mask .|. shiftMask , xK_a), withFocused (keysResizeWindow (-20,0) (0,0))) ,((mod1Mask .|. shiftMask , xK_d), withFocused (keysResizeWindow (0,-20) (0,0))) ,((mod1Mask .|. shiftMask , xK_s), withFocused (keysResizeWindow (0,20) (0,0))) ,((mod1Mask .|. 
shiftMask , xK_f), withFocused (keysResizeWindow (20,0) (0,0))) ,((0 , xK_Super_L), spawn "menu ~/.xmonad/apps") ,((mod1Mask , xK_Super_L), spawn "menu ~/.xmonad/configs") ,((mod1Mask , xK_F1), spawn "~/.xmonad/sc ~/.xmonad/scripts/dzen_music.sh") ,((mod1Mask , xK_F2), spawn "~/.xmonad/sc ~/.xmonad/scripts/dzen_vol.sh") ,((mod1Mask , xK_F3), spawn "~/.xmonad/sc ~/.xmonad/scripts/dzen_network.sh") ,((mod1Mask , xK_F4), spawn "~/.xmonad/sc ~/.xmonad/scripts/dzen_battery.sh") ,((mod1Mask , xK_F5), spawn "~/.xmonad/sc ~/.xmonad/scripts/dzen_hardware.sh") ,((mod1Mask , xK_F6), spawn "~/.xmonad/sc ~/.xmonad/scripts/dzen_pacman.sh") ,((mod1Mask , xK_F7), spawn "~/.xmonad/sc ~/.xmonad/scripts/dzen_date.sh") ,((mod1Mask , xK_F8), spawn "~/.xmonad/sc ~/.xmonad/scripts/dzen_log.sh") ,((0 , xK_Print), spawn "scrot & mplayer /usr/share/sounds/freedesktop/stereo/screen-capture.oga") ,((mod1Mask , xK_Print), spawn "scrot -s & mplayer /usr/share/sounds/freedesktop/stereo/screen-capture.oga") ,((0 , xF86XK_AudioLowerVolume), spawn "amixer set Master 2- & mplayer /usr/share/sounds/freedesktop/stereo/audio-volume-change.oga") ,((0 , xF86XK_AudioRaiseVolume), spawn "amixer set Master 2+ & mplayer /usr/share/sounds/freedesktop/stereo/audio-volume-change.oga") ,((0 , xF86XK_AudioMute), spawn "amixer set Master toggle") -- ,((0 , xF86XK_Display), spawn "xrandr --newmode `cvt 1366 768 | tail -n1 | cut' ' -f2`; xrandr --addmode VGA1 1368x768_60.00; xrandr --output VGA1 --mode 1368x768_60.00") ,((0 , xF86XK_Sleep), spawn "pm-suspend") ,((0 , xF86XK_AudioPlay), spawn "ncmpcpp toggle") ,((0 , xF86XK_AudioNext), spawn "ncmpcpp next") ,((0 , xF86XK_AudioPrev), spawn "ncmpcpp prev") ,((0, 0x1008FF05), spawn "asus-kbd-backlight up" ) -- XF86XK_MonBrightnessUp ,((0, 0x1008FF06), spawn "asus-kbd-backlight down" ) -- XF86XK_MonBrightnessDown , ((0, 0x1008FF12), spawn "~/common/bin/pa-vol.sh mute" ) -- XF86XK_AudioMute , ((0, 0x1008FF11), spawn "~/common/bin/pa-vol.sh minus" ) -- XF86XK_AudioLowerVolume , ((0, 0x1008FF13), spawn "~/common/bin/pa-vol.sh plus" ) -- XF86XK_AudioRaiseVolume , ((0, 0x1008FF2C), spawn "eject" ) -- XF86XK_Eject , ((0, 0x1008ff2a), spawn "sudo pm-suspend" ) -- XF86XK_PowerOff ] `additionalMouseBindings` [((mod1Mask , 6), (\_ -> moveTo Next NonEmptyWS)) ,((mod1Mask , 7), (\_ -> moveTo Prev NonEmptyWS)) ,((mod1Mask , 5), (\_ -> moveTo Prev NonEmptyWS)) ,((mod1Mask , 4), (\_ -> moveTo Next NonEmptyWS)) ] -- Define constants myTerminal = "xterm" myBitmapsDir = "~/.xmonad/dzen2/" --myFont = "-*-tamsyn-medium-*-normal-*-10-*-*-*-*-*-*-*" --myFont = "-*-terminus-medium-*-normal-*-9-*-*-*-*-*-*-*" --myFont = "-*-lime-*-*-*-*-*-*-*-*-*-*-*-*" myFont = "-*-nu-*-*-*-*-*-*-*-*-*-*-*-*" --myFont = "'sans:italic:bold:underline'" --myFont = "xft:droid sans mono:size=9" --myFont = "xft:Droid Sans:size=12" --myFont = "-*-cure-*-*-*-*-*-*-*-*-*-*-*-*" foreground= "#D3D3D3" background= "#111111" black0= "#181818" black1= "#181818" red0= "#D7D7D7" red1= "#D7D7D7" green0= "#AADB0F" green1= "#AADB0F" --green0= "#A80036" --green1= "#A80036" --green0= "#E2791B" --green1= "#E2791B" yellow0= "#666666" yellow1= "#666666" blue0= "#FFFFFF" blue1= "#FFFFFF" --magenta0= "#91BA0D" --magenta1= "#91BA0D" --magenta0= "#740629" --magenta1= "#740629" magenta0= "#BF3C0A" magenta1= "#BF3C0A" cyan0= "#D4D4D4" cyan1= "#D4D4D4" white0= "#D3D3D3" white1= "#D3D3D3"
Bryan792/dotfiles
.xmonad/xmonad.hs
mit
13,686
0
16
2,689
2,836
1,662
1,174
191
6
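The configuration above combines per-workspace layouts, manage hooks, dzen logging and a large key map in one file. As a minimal sketch (not the author's setup), the same additionalKeys and manage-hook combinators can be exercised in a much smaller xmonad.hs; all names come from the xmonad and xmonad-contrib packages:

-- Minimal sketch only: a tiny config using the same combinators as above.
import XMonad
import XMonad.Util.EZConfig (additionalKeys)

main :: IO ()
main = xmonad $ defaultConfig
  { terminal   = "xterm"
  , modMask    = mod1Mask
  , manageHook = composeAll [ resource =? "feh" --> doFloat ]
                 <+> manageHook defaultConfig
  }
  `additionalKeys`
  [ ((mod1Mask .|. shiftMask, xK_b), spawn "chromium") ]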
module Rebase.System.IO.Unsafe
(
  module System.IO.Unsafe
)
where

import System.IO.Unsafe

nikita-volkov/rebase
library/Rebase/System/IO/Unsafe.hs
mit
92
0
5
12
23
16
7
4
0
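The module above is only a re-export shim; a downstream user would import it in place of the base module. A hypothetical use, just to show the import path:

-- Hypothetical downstream use of the re-export shim above.
import Rebase.System.IO.Unsafe (unsafePerformIO)

cached :: Int
cached = unsafePerformIO (return 42)  -- same unsafePerformIO as System.IO.Unsafe

main :: IO ()
main = print cached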
{-# LANGUAGE TupleSections, OverloadedStrings, NoImplicitPrelude #-} module ParseEmail ( parseEmail , flatten , getAttachments , getPart , subject , Email(..) ) where import ClassyPrelude hiding (try, (<|>)) import Prelude (tail) import Text.ParserCombinators.Parsec (parse, manyTill, anyChar, try, string, eof, (<?>), (<|>)) import Text.Parsec.Prim (ParsecT) import Text.Parsec.Error (ParseError) import Data.Functor.Identity (Identity) data Email = Email String Content deriving (Eq, Show) data Content = Multipart String [Content] | Singlepart String String deriving (Eq, Show) data Attachment = Attachment { extension :: String, headers :: [String], fileData :: String } deriving (Eq, Ord, Show) subject :: Email -> Either ParseError String subject (Email header content) = parse subjectFormat "(unknown)" header subjectFormat :: ParsecT [Char] u Identity String subjectFormat = do manyTill line $ try (string "Subject: ") subject <- manyTill anyChar eol return subject getContentPart :: String -> Content -> String getContentPart part (Multipart x contents) = concatMap (getContentPart part) contents getContentPart part (Singlepart contentType lines) = if (part == contentType) then show lines else "" getPart :: String -> Email -> String getPart partName (Email x contents) = getContentPart partName contents flatten :: Email -> [Content] flatten (Email header content) = flattenContent content flattenContent :: Content -> [Content] flattenContent content = case content of Multipart _ contents -> concatMap flattenContent contents Singlepart _ _ -> [content] getAttachments :: [Content] -> [Attachment] getAttachments = mapMaybe convertToAttachment convertToAttachment :: Content -> Maybe Attachment convertToAttachment content = case content of Multipart contentType contents -> Nothing Singlepart contentType headersAndData -> case headMay (lines headersAndData) of Nothing -> Nothing Just firstLine -> if not ("name" `isInfixOf` firstLine) then Nothing else let fileData = tail $ dropWhile (/= "") (lines headersAndData) headers = takeWhile (/= "") (lines headersAndData) in Just $ Attachment contentType headers (concat fileData) parseEmail :: String -> Either ParseError Email parseEmail = parse emailFormat "(unknown)" emailFormat :: ParsecT [Char] u Identity Email emailFormat = do (header, contentType) <- getHeaders body <- emailContent contentType Nothing return $ Email header body contentFormat :: Maybe [String] -> ParsecT [Char] u Identity Content contentFormat boundary = do (header, contentType) <- getHeaders body <- emailContent contentType boundary return body getHeaders :: ParsecT [Char] u Identity ([Char], [Char]) getHeaders = do header <- manyTill anyChar $ try (string "Content-Type: ") contentType <- manyTill anyChar $ string "; " return (header, contentType) emailContent :: String -> Maybe [String] -> ParsecT [Char] u Identity Content emailContent contentType boundary = if "multipart" `isInfixOf` contentType then do manyTill anyChar $ try (string "boundary=") thisBoundary <- manyTill anyChar eol newBoundary <- return $ maybe [thisBoundary] (thisBoundary :) boundary eol body <- multipart $ Just newBoundary return $ Multipart contentType body else do content <- notBoundaryLines boundary return $ Singlepart contentType content multipart :: Maybe [String] -> ParsecT [Char] u Identity [Content] multipart boundary = do contents <- manyTill (contentFormat boundary) eof return contents line :: ParsecT [Char] u Identity [Char] line = manyTill anyChar eol --Eats newlines notBoundaryLines :: Maybe [String] -> 
ParsecT [Char] u Identity [Char]
notBoundaryLines boundary = do
  curLine <- line
  if maybeInfix curLine boundary
    then return ""
    else notBoundaryLines boundary >>= (\lines -> return $ curLine ++ lines)

maybeInfix :: String -> Maybe [String] -> Bool
maybeInfix string = maybe False ((any . flip isInfixOf) string)

boundaries :: [String] -> ParsecT [Char] u Identity [Char]
boundaries [] = try (string "hopefully this never matches #HACK aewjfkccnas")
-- the singleton clause must come before the cons clause, otherwise it is unreachable
boundaries [x] = try (string x) <?> "boundary"
boundaries (x:xs) = try (string x) <|> boundaries xs

eol :: ParsecT [Char] u Identity [Char]
eol = try (string "\n\r")
  <|> try (string "\r\n")
  <|> string "\n"
  <|> string "\r"
  <?> "end of line"
MattWis/smallEmail
smallEmail/ParseEmail.hs
mit
4,544
0
19
902
1,489
769
720
108
4
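A rough usage sketch for the parser above, assuming the ParseEmail module compiles as shown; parseEmail yields Either ParseError Email, so the subject and the flattened parts can be inspected directly:

-- Sketch of driving ParseEmail; the input file name is made up.
import ParseEmail (parseEmail, subject, flatten)

main :: IO ()
main = do
  raw <- readFile "sample.eml"          -- hypothetical input file
  case parseEmail raw of
    Left err    -> print err
    Right email -> do
      print (subject email)             -- Either ParseError String
      print (length (flatten email))    -- number of single parts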
{-# LANGUAGE OverloadedStrings #-}

module Console.GitHubStats.StatsSpec where

import Test.Hspec

import Console.GitHubStats.Stats
import Console.GitHubStats.Types

spec :: Spec
spec =
  describe "mkHistogram" $ do
    it "sorts languages in ascending order" $ do
      let repos =
            [ Repository { repoLanguage = Just "PureScript" }
            , Repository { repoLanguage = Just "Ruby" }
            , Repository { repoLanguage = Just "Haskell" }
            , Repository { repoLanguage = Just "Haskell" }
            , Repository { repoLanguage = Just "Ruby" }
            , Repository { repoLanguage = Just "Haskell" }
            ]
      shouldBe (mkHistogram repos)
        [ "### Haskell 3"
        , "## Ruby 2"
        , "# PureScript 1"
        ]
    it "discards repositories without a language" $ do
      let repos =
            [ Repository { repoLanguage = Nothing }
            , Repository { repoLanguage = Just "Haskell" }
            , Repository { repoLanguage = Just "Haskell" }
            ]
      mkHistogram repos `shouldBe` [ "## Haskell 2" ]
acamino/ghs
test/Console/GitHubStats/StatsSpec.hs
mit
1,118
0
17
382
241
133
108
27
1
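The spec above only defines `spec`; to run it, hspec still needs a driver (normally generated by hspec-discover). A minimal hand-written one:

-- Minimal driver for the spec above.
import Test.Hspec (hspec)
import qualified Console.GitHubStats.StatsSpec as StatsSpec

main :: IO ()
main = hspec StatsSpec.spec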
module Core.LambdaLift.MFE ( identifyMFE ) where import Common import Core.AST import Core.AnnotAST import Core.Prelude identifyMFE :: AnnotProgram Int (Annot Int Name) -> Program (Annot Int Name) identifyMFE = Program . map identifySC . getProgramF where identifySC (SupercombF name [] body) = Supercomb name [] body' where body' = identifyExpr 0 body transformMFE :: Int -> Expr (Annot Int Name) -> Expr (Annot Int Name) transformMFE k e = ELet False [(Annot (k, anonym), e)] (EVar anonym) -- check whether a redex notCandidate :: Expr (Annot Int Name) -> Bool notCandidate e = case e of EVar _ -> True ENum _ -> True EConstr _ _ -> True EAp (EVar v) _ -> elem v operators _ -> False identifyExpr :: Int -> AnnotExpr Int (Annot Int Name) -> Expr (Annot Int Name) identifyExpr cxt a@(Annot (k, e)) | cxt == k || notCandidate e' = e' | otherwise = transformMFE k e' where e' = identifyExpr1 a identifyExpr1 :: AnnotExpr Int (Annot Int Name) -> Expr (Annot Int Name) identifyExpr1 (Annot (k, e)) = case e of EVarF v -> EVar v ENumF n -> ENum n EConstrF tag arity -> EConstr tag arity EApF e1 e2 -> EAp (identifyExpr k e1) (identifyExpr k e2) ELetF rec defs body -> ELet rec defs' body' where defs' = [(Annot (k, x), identifyExpr k e) | (Annot (k, x), e) <- defs] body' = identifyExpr k body ECaseF e alts -> ECase (identifyExpr k e) (map (identifyAlter k) alts) EAbsF args body -> EAbs args (identifyExpr k' body) where k' = getAnnot (head args) identifyAlter :: Int -> AnnotAlter Int (Annot Int Name) -> Alter (Annot Int Name) identifyAlter k (AlterF tag xs body) = Alter tag xs (identifyExpr k body)
meimisaki/Rin
src/Core/LambdaLift/MFE.hs
mit
1,669
0
15
358
753
373
380
38
7
module Ternary.Performance ( performanceTest, evalPerformance) where import System.TimeIt import Ternary.Core.Digit (T2(..)) import Ternary.Util.Misc (forceElements, forceElementsIO) import Ternary.List.Exact import Ternary.List.ExactNum () import Ternary.Compiler.ArrayLookup (warmup) import Ternary.Sampling.Expression import Ternary.Sampling.Evaluation import Ternary.QuickCheckUtil (randomsR) randomT2s :: Int -> [T2] randomT2s seed = map toEnum (randomsR seed (0,4)) randomExact :: Int -> Exact randomExact seed = Exact (randomT2s seed) 0 assertWarm :: IO () assertWarm = putStr " Warmup: " >> timeIt warmup -- The time needed to construct random test samples must be excluded -- from measurements. On the flip side, the time to construct the -- final result of a computation must be included. The following -- ensures the first n digits of an exact number are fully evaluated: force :: Int -> Exact -> IO () force n = (return $!) . forceElements . take n . streamDigits timeMultiplication :: Int -> Exact -> Exact -> IO () timeMultiplication n x y = do force (n+2) x force (n+2) y putStr " Array Lookup " time multiplyAltAL putStr " Array State " time multiplyAltAS where time (**) = timeIt $ force n (x ** y) performanceTest = do putStrLn "\nPerformance:" assertWarm timeMultiplication 6000 (randomExact 0) (randomExact 1) timeExpressionEval :: Expr -> [T2] -> IO () timeExpressionEval expr as = do forceElementsIO as len <- time (evalFinite1 expr as) time (take len (streamDigits $ smartEval expr binding)) putStrLn ("Number of output digits = " ++ show len) where binding = bind (Exact as 0) time list = timeIt (forceElementsIO list >> return (length list)) evalPerformance = do timeExpressionEval (extreme Mins 20000) (take 5 $ randomT2s 0) timeExpressionEval (extreme Plus 60) (take 8000 $ randomT2s 0)
jeroennoels/exact-real
test/Ternary/Performance.hs
mit
1,887
0
12
345
597
303
294
43
1
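Both entry points above are plain IO actions, so a benchmark driver reduces to running them in sequence; the timings themselves are printed by timeIt inside Ternary.Performance:

-- Assumed driver for the benchmarks above.
import Ternary.Performance (performanceTest, evalPerformance)

main :: IO ()
main = do
  performanceTest
  evalPerformance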
{-# LANGUAGE TemplateHaskell #-} {-| TemplateHaskell helper for Ganeti Haskell code. As TemplateHaskell require that splices be defined in a separate module, we combine all the TemplateHaskell functionality that HTools needs in this module (except the one for unittests). -} {- Copyright (C) 2011, 2012 Google Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -} module Ganeti.THH ( declareSADT , declareIADT , makeJSONInstance , genOpID , genAllConstr , genAllOpIDs , genOpCode , genStrOfOp , genStrOfKey , genLuxiOp , Field , simpleField , defaultField , optionalField , optionalNullSerField , renameField , customField , timeStampFields , uuidFields , serialFields , tagsFields , TagSet , buildObject , buildObjectSerialisation , buildParam , DictObject(..) , genException , excErrMsg ) where import Control.Monad (liftM) import Data.Char import Data.List import Data.Maybe (fromMaybe) import qualified Data.Set as Set import Language.Haskell.TH import qualified Text.JSON as JSON import Text.JSON.Pretty (pp_value) import Ganeti.JSON -- * Exported types -- | Class of objects that can be converted to 'JSObject' -- lists-format. class DictObject a where toDict :: a -> [(String, JSON.JSValue)] -- | Optional field information. data OptionalType = NotOptional -- ^ Field is not optional | OptionalOmitNull -- ^ Field is optional, null is not serialised | OptionalSerializeNull -- ^ Field is optional, null is serialised deriving (Show, Eq) -- | Serialised field data type. data Field = Field { fieldName :: String , fieldType :: Q Type , fieldRead :: Maybe (Q Exp) , fieldShow :: Maybe (Q Exp) , fieldExtraKeys :: [String] , fieldDefault :: Maybe (Q Exp) , fieldConstr :: Maybe String , fieldIsOptional :: OptionalType } -- | Generates a simple field. simpleField :: String -> Q Type -> Field simpleField fname ftype = Field { fieldName = fname , fieldType = ftype , fieldRead = Nothing , fieldShow = Nothing , fieldExtraKeys = [] , fieldDefault = Nothing , fieldConstr = Nothing , fieldIsOptional = NotOptional } -- | Sets the renamed constructor field. renameField :: String -> Field -> Field renameField constrName field = field { fieldConstr = Just constrName } -- | Sets the default value on a field (makes it optional with a -- default value). defaultField :: Q Exp -> Field -> Field defaultField defval field = field { fieldDefault = Just defval } -- | Marks a field optional (turning its base type into a Maybe). optionalField :: Field -> Field optionalField field = field { fieldIsOptional = OptionalOmitNull } -- | Marks a field optional (turning its base type into a Maybe), but -- with 'Nothing' serialised explicitly as /null/. optionalNullSerField :: Field -> Field optionalNullSerField field = field { fieldIsOptional = OptionalSerializeNull } -- | Sets custom functions on a field. 
customField :: Name -- ^ The name of the read function -> Name -- ^ The name of the show function -> [String] -- ^ The name of extra field keys -> Field -- ^ The original field -> Field -- ^ Updated field customField readfn showfn extra field = field { fieldRead = Just (varE readfn), fieldShow = Just (varE showfn) , fieldExtraKeys = extra } -- | Computes the record name for a given field, based on either the -- string value in the JSON serialisation or the custom named if any -- exists. fieldRecordName :: Field -> String fieldRecordName (Field { fieldName = name, fieldConstr = alias }) = fromMaybe (camelCase name) alias -- | Computes the preferred variable name to use for the value of this -- field. If the field has a specific constructor name, then we use a -- first-letter-lowercased version of that; otherwise, we simply use -- the field name. See also 'fieldRecordName'. fieldVariable :: Field -> String fieldVariable f = case (fieldConstr f) of Just name -> ensureLower name _ -> map (\c -> if c == '-' then '_' else c) $ fieldName f -- | Compute the actual field type (taking into account possible -- optional status). actualFieldType :: Field -> Q Type actualFieldType f | fieldIsOptional f /= NotOptional = [t| Maybe $t |] | otherwise = t where t = fieldType f -- | Checks that a given field is not optional (for object types or -- fields which should not allow this case). checkNonOptDef :: (Monad m) => Field -> m () checkNonOptDef (Field { fieldIsOptional = OptionalOmitNull , fieldName = name }) = fail $ "Optional field " ++ name ++ " used in parameter declaration" checkNonOptDef (Field { fieldIsOptional = OptionalSerializeNull , fieldName = name }) = fail $ "Optional field " ++ name ++ " used in parameter declaration" checkNonOptDef (Field { fieldDefault = (Just _), fieldName = name }) = fail $ "Default field " ++ name ++ " used in parameter declaration" checkNonOptDef _ = return () -- | Produces the expression that will de-serialise a given -- field. Since some custom parsing functions might need to use the -- entire object, we do take and pass the object to any custom read -- functions. loadFn :: Field -- ^ The field definition -> Q Exp -- ^ The value of the field as existing in the JSON message -> Q Exp -- ^ The entire object in JSON object format -> Q Exp -- ^ Resulting expression loadFn (Field { fieldRead = Just readfn }) expr o = [| $expr >>= $readfn $o |] loadFn _ expr _ = expr -- * Common field declarations -- | Timestamp fields description. timeStampFields :: [Field] timeStampFields = [ defaultField [| 0::Double |] $ simpleField "ctime" [t| Double |] , defaultField [| 0::Double |] $ simpleField "mtime" [t| Double |] ] -- | Serial number fields description. serialFields :: [Field] serialFields = [ renameField "Serial" $ simpleField "serial_no" [t| Int |] ] -- | UUID fields description. uuidFields :: [Field] uuidFields = [ simpleField "uuid" [t| String |] ] -- | Tag set type alias. type TagSet = Set.Set String -- | Tag field description. tagsFields :: [Field] tagsFields = [ defaultField [| Set.empty |] $ simpleField "tags" [t| TagSet |] ] -- * Internal types -- | A simple field, in constrast to the customisable 'Field' type. type SimpleField = (String, Q Type) -- | A definition for a single constructor for a simple object. type SimpleConstructor = (String, [SimpleField]) -- | A definition for ADTs with simple fields. type SimpleObject = [SimpleConstructor] -- | A type alias for a constructor of a regular object. 
type Constructor = (String, [Field]) -- * Helper functions -- | Ensure first letter is lowercase. -- -- Used to convert type name to function prefix, e.g. in @data Aa -> -- aaToRaw@. ensureLower :: String -> String ensureLower [] = [] ensureLower (x:xs) = toLower x:xs -- | Ensure first letter is uppercase. -- -- Used to convert constructor name to component ensureUpper :: String -> String ensureUpper [] = [] ensureUpper (x:xs) = toUpper x:xs -- | Helper for quoted expressions. varNameE :: String -> Q Exp varNameE = varE . mkName -- | showJSON as an expression, for reuse. showJSONE :: Q Exp showJSONE = varE 'JSON.showJSON -- | makeObj as an expression, for reuse. makeObjE :: Q Exp makeObjE = varE 'JSON.makeObj -- | fromObj (Ganeti specific) as an expression, for reuse. fromObjE :: Q Exp fromObjE = varE 'fromObj -- | ToRaw function name. toRawName :: String -> Name toRawName = mkName . (++ "ToRaw") . ensureLower -- | FromRaw function name. fromRawName :: String -> Name fromRawName = mkName . (++ "FromRaw") . ensureLower -- | Converts a name to it's varE\/litE representations. reprE :: Either String Name -> Q Exp reprE = either stringE varE -- | Smarter function application. -- -- This does simply f x, except that if is 'id', it will skip it, in -- order to generate more readable code when using -ddump-splices. appFn :: Exp -> Exp -> Exp appFn f x | f == VarE 'id = x | otherwise = AppE f x -- | Builds a field for a normal constructor. buildConsField :: Q Type -> StrictTypeQ buildConsField ftype = do ftype' <- ftype return (NotStrict, ftype') -- | Builds a constructor based on a simple definition (not field-based). buildSimpleCons :: Name -> SimpleObject -> Q Dec buildSimpleCons tname cons = do decl_d <- mapM (\(cname, fields) -> do fields' <- mapM (buildConsField . snd) fields return $ NormalC (mkName cname) fields') cons return $ DataD [] tname [] decl_d [''Show, ''Eq] -- | Generate the save function for a given type. genSaveSimpleObj :: Name -- ^ Object type -> String -- ^ Function name -> SimpleObject -- ^ Object definition -> (SimpleConstructor -> Q Clause) -- ^ Constructor save fn -> Q (Dec, Dec) genSaveSimpleObj tname sname opdefs fn = do let sigt = AppT (AppT ArrowT (ConT tname)) (ConT ''JSON.JSValue) fname = mkName sname cclauses <- mapM fn opdefs return $ (SigD fname sigt, FunD fname cclauses) -- * Template code for simple raw type-equivalent ADTs -- | Generates a data type declaration. -- -- The type will have a fixed list of instances. strADTDecl :: Name -> [String] -> Dec strADTDecl name constructors = DataD [] name [] (map (flip NormalC [] . mkName) constructors) [''Show, ''Eq, ''Enum, ''Bounded, ''Ord] -- | Generates a toRaw function. -- -- This generates a simple function of the form: -- -- @ -- nameToRaw :: Name -> /traw/ -- nameToRaw Cons1 = var1 -- nameToRaw Cons2 = \"value2\" -- @ genToRaw :: Name -> Name -> Name -> [(String, Either String Name)] -> Q [Dec] genToRaw traw fname tname constructors = do let sigt = AppT (AppT ArrowT (ConT tname)) (ConT traw) -- the body clauses, matching on the constructor and returning the -- raw value clauses <- mapM (\(c, v) -> clause [recP (mkName c) []] (normalB (reprE v)) []) constructors return [SigD fname sigt, FunD fname clauses] -- | Generates a fromRaw function. -- -- The function generated is monadic and can fail parsing the -- raw value. 
It is of the form: -- -- @ -- nameFromRaw :: (Monad m) => /traw/ -> m Name -- nameFromRaw s | s == var1 = Cons1 -- | s == \"value2\" = Cons2 -- | otherwise = fail /.../ -- @ genFromRaw :: Name -> Name -> Name -> [(String, Name)] -> Q [Dec] genFromRaw traw fname tname constructors = do -- signature of form (Monad m) => String -> m $name sigt <- [t| (Monad m) => $(conT traw) -> m $(conT tname) |] -- clauses for a guarded pattern let varp = mkName "s" varpe = varE varp clauses <- mapM (\(c, v) -> do -- the clause match condition g <- normalG [| $varpe == $(varE v) |] -- the clause result r <- [| return $(conE (mkName c)) |] return (g, r)) constructors -- the otherwise clause (fallback) oth_clause <- do g <- normalG [| otherwise |] r <- [|fail ("Invalid string value for type " ++ $(litE (stringL (nameBase tname))) ++ ": " ++ show $varpe) |] return (g, r) let fun = FunD fname [Clause [VarP varp] (GuardedB (clauses++[oth_clause])) []] return [SigD fname sigt, fun] -- | Generates a data type from a given raw format. -- -- The format is expected to multiline. The first line contains the -- type name, and the rest of the lines must contain two words: the -- constructor name and then the string representation of the -- respective constructor. -- -- The function will generate the data type declaration, and then two -- functions: -- -- * /name/ToRaw, which converts the type to a raw type -- -- * /name/FromRaw, which (monadically) converts from a raw type to the type -- -- Note that this is basically just a custom show\/read instance, -- nothing else. declareADT :: Name -> String -> [(String, Name)] -> Q [Dec] declareADT traw sname cons = do let name = mkName sname ddecl = strADTDecl name (map fst cons) -- process cons in the format expected by genToRaw cons' = map (\(a, b) -> (a, Right b)) cons toraw <- genToRaw traw (toRawName sname) name cons' fromraw <- genFromRaw traw (fromRawName sname) name cons return $ ddecl:toraw ++ fromraw declareIADT :: String -> [(String, Name)] -> Q [Dec] declareIADT = declareADT ''Int declareSADT :: String -> [(String, Name)] -> Q [Dec] declareSADT = declareADT ''String -- | Creates the showJSON member of a JSON instance declaration. -- -- This will create what is the equivalent of: -- -- @ -- showJSON = showJSON . /name/ToRaw -- @ -- -- in an instance JSON /name/ declaration genShowJSON :: String -> Q Dec genShowJSON name = do body <- [| JSON.showJSON . $(varE (toRawName name)) |] return $ FunD 'JSON.showJSON [Clause [] (NormalB body) []] -- | Creates the readJSON member of a JSON instance declaration. -- -- This will create what is the equivalent of: -- -- @ -- readJSON s = case readJSON s of -- Ok s' -> /name/FromRaw s' -- Error e -> Error /description/ -- @ -- -- in an instance JSON /name/ declaration genReadJSON :: String -> Q Dec genReadJSON name = do let s = mkName "s" body <- [| case JSON.readJSON $(varE s) of JSON.Ok s' -> $(varE (fromRawName name)) s' JSON.Error e -> JSON.Error $ "Can't parse raw value for type " ++ $(stringE name) ++ ": " ++ e ++ " from " ++ show $(varE s) |] return $ FunD 'JSON.readJSON [Clause [VarP s] (NormalB body) []] -- | Generates a JSON instance for a given type. -- -- This assumes that the /name/ToRaw and /name/FromRaw functions -- have been defined as by the 'declareSADT' function. 
makeJSONInstance :: Name -> Q [Dec] makeJSONInstance name = do let base = nameBase name showJ <- genShowJSON base readJ <- genReadJSON base return [InstanceD [] (AppT (ConT ''JSON.JSON) (ConT name)) [readJ,showJ]] -- * Template code for opcodes -- | Transforms a CamelCase string into an_underscore_based_one. deCamelCase :: String -> String deCamelCase = intercalate "_" . map (map toUpper) . groupBy (\_ b -> not $ isUpper b) -- | Transform an underscore_name into a CamelCase one. camelCase :: String -> String camelCase = concatMap (ensureUpper . drop 1) . groupBy (\_ b -> b /= '_' && b /= '-') . ('_':) -- | Computes the name of a given constructor. constructorName :: Con -> Q Name constructorName (NormalC name _) = return name constructorName (RecC name _) = return name constructorName x = fail $ "Unhandled constructor " ++ show x -- | Extract all constructor names from a given type. reifyConsNames :: Name -> Q [String] reifyConsNames name = do reify_result <- reify name case reify_result of TyConI (DataD _ _ _ cons _) -> mapM (liftM nameBase . constructorName) cons o -> fail $ "Unhandled name passed to reifyConsNames, expected\ \ type constructor but got '" ++ show o ++ "'" -- | Builds the generic constructor-to-string function. -- -- This generates a simple function of the following form: -- -- @ -- fname (ConStructorOne {}) = trans_fun("ConStructorOne") -- fname (ConStructorTwo {}) = trans_fun("ConStructorTwo") -- @ -- -- This builds a custom list of name\/string pairs and then uses -- 'genToRaw' to actually generate the function. genConstrToStr :: (String -> String) -> Name -> String -> Q [Dec] genConstrToStr trans_fun name fname = do cnames <- reifyConsNames name let svalues = map (Left . trans_fun) cnames genToRaw ''String (mkName fname) name $ zip cnames svalues -- | Constructor-to-string for OpCode. genOpID :: Name -> String -> Q [Dec] genOpID = genConstrToStr deCamelCase -- | Builds a list with all defined constructor names for a type. -- -- @ -- vstr :: String -- vstr = [...] -- @ -- -- Where the actual values of the string are the constructor names -- mapped via @trans_fun@. genAllConstr :: (String -> String) -> Name -> String -> Q [Dec] genAllConstr trans_fun name vstr = do cnames <- reifyConsNames name let svalues = sort $ map trans_fun cnames vname = mkName vstr sig = SigD vname (AppT ListT (ConT ''String)) body = NormalB (ListE (map (LitE . StringL) svalues)) return $ [sig, ValD (VarP vname) body []] -- | Generates a list of all defined opcode IDs. genAllOpIDs :: Name -> String -> Q [Dec] genAllOpIDs = genAllConstr deCamelCase -- | OpCode parameter (field) type. type OpParam = (String, Q Type, Q Exp) -- | Generates the OpCode data type. -- -- This takes an opcode logical definition, and builds both the -- datatype and the JSON serialisation out of it. We can't use a -- generic serialisation since we need to be compatible with Ganeti's -- own, so we have a few quirks to work around. 
genOpCode :: String -- ^ Type name to use -> [Constructor] -- ^ Constructor name and parameters -> Q [Dec] genOpCode name cons = do let tname = mkName name decl_d <- mapM (\(cname, fields) -> do -- we only need the type of the field, without Q fields' <- mapM (fieldTypeInfo "op") fields return $ RecC (mkName cname) fields') cons let declD = DataD [] tname [] decl_d [''Show, ''Eq] let (allfsig, allffn) = genAllOpFields "allOpFields" cons save_decs <- genSaveOpCode tname "saveOpCode" "toDictOpCode" cons (uncurry saveConstructor) True (loadsig, loadfn) <- genLoadOpCode cons return $ [declD, allfsig, allffn, loadsig, loadfn] ++ save_decs -- | Generates the function pattern returning the list of fields for a -- given constructor. genOpConsFields :: Constructor -> Clause genOpConsFields (cname, fields) = let op_id = deCamelCase cname fvals = map (LitE . StringL) . sort . nub $ concatMap (\f -> fieldName f:fieldExtraKeys f) fields in Clause [LitP (StringL op_id)] (NormalB $ ListE fvals) [] -- | Generates a list of all fields of an opcode constructor. genAllOpFields :: String -- ^ Function name -> [Constructor] -- ^ Object definition -> (Dec, Dec) genAllOpFields sname opdefs = let cclauses = map genOpConsFields opdefs other = Clause [WildP] (NormalB (ListE [])) [] fname = mkName sname sigt = AppT (AppT ArrowT (ConT ''String)) (AppT ListT (ConT ''String)) in (SigD fname sigt, FunD fname (cclauses++[other])) -- | Generates the \"save\" clause for an entire opcode constructor. -- -- This matches the opcode with variables named the same as the -- constructor fields (just so that the spliced in code looks nicer), -- and passes those name plus the parameter definition to 'saveObjectField'. saveConstructor :: String -- ^ The constructor name -> [Field] -- ^ The parameter definitions for this -- constructor -> Q Clause -- ^ Resulting clause saveConstructor sname fields = do let cname = mkName sname fnames <- mapM (newName . fieldVariable) fields let pat = conP cname (map varP fnames) let felems = map (uncurry saveObjectField) (zip fnames fields) -- now build the OP_ID serialisation opid = [| [( $(stringE "OP_ID"), JSON.showJSON $(stringE . deCamelCase $ sname) )] |] flist = listE (opid:felems) -- and finally convert all this to a json object flist' = [| concat $flist |] clause [pat] (normalB flist') [] -- | Generates the main save opcode function. -- -- This builds a per-constructor match clause that contains the -- respective constructor-serialisation code. genSaveOpCode :: Name -- ^ Object ype -> String -- ^ To 'JSValue' function name -> String -- ^ To 'JSObject' function name -> [Constructor] -- ^ Object definition -> (Constructor -> Q Clause) -- ^ Constructor save fn -> Bool -- ^ Whether to generate -- obj or just a -- list\/tuple of values -> Q [Dec] genSaveOpCode tname jvalstr tdstr opdefs fn gen_object = do tdclauses <- mapM fn opdefs let typecon = ConT tname jvalname = mkName jvalstr jvalsig = AppT (AppT ArrowT typecon) (ConT ''JSON.JSValue) tdname = mkName tdstr tdsig <- [t| $(return typecon) -> [(String, JSON.JSValue)] |] jvalclause <- if gen_object then [| $makeObjE . $(varE tdname) |] else [| JSON.showJSON . map snd . $(varE tdname) |] return [ SigD tdname tdsig , FunD tdname tdclauses , SigD jvalname jvalsig , ValD (VarP jvalname) (NormalB jvalclause) []] -- | Generates load code for a single constructor of the opcode data type. 
loadConstructor :: String -> [Field] -> Q Exp loadConstructor sname fields = do let name = mkName sname fbinds <- mapM loadObjectField fields let (fnames, fstmts) = unzip fbinds let cval = foldl (\accu fn -> AppE accu (VarE fn)) (ConE name) fnames fstmts' = fstmts ++ [NoBindS (AppE (VarE 'return) cval)] return $ DoE fstmts' -- | Generates the loadOpCode function. genLoadOpCode :: [Constructor] -> Q (Dec, Dec) genLoadOpCode opdefs = do let fname = mkName "loadOpCode" arg1 = mkName "v" objname = mkName "o" opid = mkName "op_id" st1 <- bindS (varP objname) [| liftM JSON.fromJSObject (JSON.readJSON $(varE arg1)) |] st2 <- bindS (varP opid) [| $fromObjE $(varE objname) $(stringE "OP_ID") |] -- the match results (per-constructor blocks) mexps <- mapM (uncurry loadConstructor) opdefs fails <- [| fail $ "Unknown opcode " ++ $(varE opid) |] let mpats = map (\(me, c) -> let mp = LitP . StringL . deCamelCase . fst $ c in Match mp (NormalB me) [] ) $ zip mexps opdefs defmatch = Match WildP (NormalB fails) [] cst = NoBindS $ CaseE (VarE opid) $ mpats++[defmatch] body = DoE [st1, st2, cst] sigt <- [t| JSON.JSValue -> JSON.Result $(conT (mkName "OpCode")) |] return $ (SigD fname sigt, FunD fname [Clause [VarP arg1] (NormalB body) []]) -- * Template code for luxi -- | Constructor-to-string for LuxiOp. genStrOfOp :: Name -> String -> Q [Dec] genStrOfOp = genConstrToStr id -- | Constructor-to-string for MsgKeys. genStrOfKey :: Name -> String -> Q [Dec] genStrOfKey = genConstrToStr ensureLower -- | Generates the LuxiOp data type. -- -- This takes a Luxi operation definition and builds both the -- datatype and the function transforming the arguments to JSON. -- We can't use anything less generic, because the way different -- operations are serialized differs on both parameter- and top-level. -- -- There are two things to be defined for each parameter: -- -- * name -- -- * type -- genLuxiOp :: String -> [Constructor] -> Q [Dec] genLuxiOp name cons = do let tname = mkName name decl_d <- mapM (\(cname, fields) -> do -- we only need the type of the field, without Q fields' <- mapM actualFieldType fields let fields'' = zip (repeat NotStrict) fields' return $ NormalC (mkName cname) fields'') cons let declD = DataD [] (mkName name) [] decl_d [''Show, ''Eq] save_decs <- genSaveOpCode tname "opToArgs" "opToDict" cons saveLuxiConstructor False req_defs <- declareSADT "LuxiReq" . map (\(str, _) -> ("Req" ++ str, mkName ("luxiReq" ++ str))) $ cons return $ declD:save_decs ++ req_defs -- | Generates the \"save\" clause for entire LuxiOp constructor. saveLuxiConstructor :: Constructor -> Q Clause saveLuxiConstructor (sname, fields) = do let cname = mkName sname fnames <- mapM (newName . fieldVariable) fields let pat = conP cname (map varP fnames) let felems = map (uncurry saveObjectField) (zip fnames fields) flist = [| concat $(listE felems) |] clause [pat] (normalB flist) [] -- * "Objects" functionality -- | Extract the field's declaration from a Field structure. fieldTypeInfo :: String -> Field -> Q (Name, Strict, Type) fieldTypeInfo field_pfx fd = do t <- actualFieldType fd let n = mkName . (field_pfx ++) . fieldRecordName $ fd return (n, NotStrict, t) -- | Build an object declaration. 
buildObject :: String -> String -> [Field] -> Q [Dec] buildObject sname field_pfx fields = do let name = mkName sname fields_d <- mapM (fieldTypeInfo field_pfx) fields let decl_d = RecC name fields_d let declD = DataD [] name [] [decl_d] [''Show, ''Eq] ser_decls <- buildObjectSerialisation sname fields return $ declD:ser_decls -- | Generates an object definition: data type and its JSON instance. buildObjectSerialisation :: String -> [Field] -> Q [Dec] buildObjectSerialisation sname fields = do let name = mkName sname savedecls <- genSaveObject saveObjectField sname fields (loadsig, loadfn) <- genLoadObject loadObjectField sname fields shjson <- objectShowJSON sname rdjson <- objectReadJSON sname let instdecl = InstanceD [] (AppT (ConT ''JSON.JSON) (ConT name)) [rdjson, shjson] return $ savedecls ++ [loadsig, loadfn, instdecl] -- | The toDict function name for a given type. toDictName :: String -> Name toDictName sname = mkName ("toDict" ++ sname) -- | Generates the save object functionality. genSaveObject :: (Name -> Field -> Q Exp) -> String -> [Field] -> Q [Dec] genSaveObject save_fn sname fields = do let name = mkName sname fnames <- mapM (newName . fieldVariable) fields let pat = conP name (map varP fnames) let tdname = toDictName sname tdsigt <- [t| $(conT name) -> [(String, JSON.JSValue)] |] let felems = map (uncurry save_fn) (zip fnames fields) flist = listE felems -- and finally convert all this to a json object tdlist = [| concat $flist |] iname = mkName "i" tclause <- clause [pat] (normalB tdlist) [] cclause <- [| $makeObjE . $(varE tdname) |] let fname = mkName ("save" ++ sname) sigt <- [t| $(conT name) -> JSON.JSValue |] return [SigD tdname tdsigt, FunD tdname [tclause], SigD fname sigt, ValD (VarP fname) (NormalB cclause) []] -- | Generates the code for saving an object's field, handling the -- various types of fields that we have. saveObjectField :: Name -> Field -> Q Exp saveObjectField fvar field = case fieldIsOptional field of OptionalOmitNull -> [| case $(varE fvar) of Nothing -> [] Just v -> [( $nameE, JSON.showJSON v )] |] OptionalSerializeNull -> [| case $(varE fvar) of Nothing -> [( $nameE, JSON.JSNull )] Just v -> [( $nameE, JSON.showJSON v )] |] NotOptional -> case fieldShow field of -- Note: the order of actual:extra is important, since for -- some serialisation types (e.g. Luxi), we use tuples -- (positional info) rather than object (name info) Nothing -> [| [( $nameE, JSON.showJSON $fvarE)] |] Just fn -> [| let (actual, extra) = $fn $fvarE in ($nameE, JSON.showJSON actual):extra |] where nameE = stringE (fieldName field) fvarE = varE fvar -- | Generates the showJSON clause for a given object name. objectShowJSON :: String -> Q Dec objectShowJSON name = do body <- [| JSON.showJSON . $(varE . mkName $ "save" ++ name) |] return $ FunD 'JSON.showJSON [Clause [] (NormalB body) []] -- | Generates the load object functionality. genLoadObject :: (Field -> Q (Name, Stmt)) -> String -> [Field] -> Q (Dec, Dec) genLoadObject load_fn sname fields = do let name = mkName sname funname = mkName $ "load" ++ sname arg1 = mkName $ if null fields then "_" else "v" objname = mkName "o" opid = mkName "op_id" st1 <- bindS (varP objname) [| liftM JSON.fromJSObject (JSON.readJSON $(varE arg1)) |] fbinds <- mapM load_fn fields let (fnames, fstmts) = unzip fbinds let cval = foldl (\accu fn -> AppE accu (VarE fn)) (ConE name) fnames retstmt = [NoBindS (AppE (VarE 'return) cval)] -- FIXME: should we require an empty dict for an empty type? 
-- this allows any JSValue right now fstmts' = if null fields then retstmt else st1:fstmts ++ retstmt sigt <- [t| JSON.JSValue -> JSON.Result $(conT name) |] return $ (SigD funname sigt, FunD funname [Clause [VarP arg1] (NormalB (DoE fstmts')) []]) -- | Generates code for loading an object's field. loadObjectField :: Field -> Q (Name, Stmt) loadObjectField field = do let name = fieldVariable field fvar <- newName name -- these are used in all patterns below let objvar = varNameE "o" objfield = stringE (fieldName field) loadexp = if fieldIsOptional field /= NotOptional -- we treat both optional types the same, since -- 'maybeFromObj' can deal with both missing and null values -- appropriately (the same) then [| $(varE 'maybeFromObj) $objvar $objfield |] else case fieldDefault field of Just defv -> [| $(varE 'fromObjWithDefault) $objvar $objfield $defv |] Nothing -> [| $fromObjE $objvar $objfield |] bexp <- loadFn field loadexp objvar return (fvar, BindS (VarP fvar) bexp) -- | Builds the readJSON instance for a given object name. objectReadJSON :: String -> Q Dec objectReadJSON name = do let s = mkName "s" body <- [| case JSON.readJSON $(varE s) of JSON.Ok s' -> $(varE .mkName $ "load" ++ name) s' JSON.Error e -> JSON.Error $ "Can't parse value for type " ++ $(stringE name) ++ ": " ++ e |] return $ FunD 'JSON.readJSON [Clause [VarP s] (NormalB body) []] -- * Inheritable parameter tables implementation -- | Compute parameter type names. paramTypeNames :: String -> (String, String) paramTypeNames root = ("Filled" ++ root ++ "Params", "Partial" ++ root ++ "Params") -- | Compute information about the type of a parameter field. paramFieldTypeInfo :: String -> Field -> Q (Name, Strict, Type) paramFieldTypeInfo field_pfx fd = do t <- actualFieldType fd let n = mkName . (++ "P") . (field_pfx ++) . fieldRecordName $ fd return (n, NotStrict, AppT (ConT ''Maybe) t) -- | Build a parameter declaration. -- -- This function builds two different data structures: a /filled/ one, -- in which all fields are required, and a /partial/ one, in which all -- fields are optional. Due to the current record syntax issues, the -- fields need to be named differrently for the two structures, so the -- partial ones get a /P/ suffix. buildParam :: String -> String -> [Field] -> Q [Dec] buildParam sname field_pfx fields = do let (sname_f, sname_p) = paramTypeNames sname name_f = mkName sname_f name_p = mkName sname_p fields_f <- mapM (fieldTypeInfo field_pfx) fields fields_p <- mapM (paramFieldTypeInfo field_pfx) fields let decl_f = RecC name_f fields_f decl_p = RecC name_p fields_p let declF = DataD [] name_f [] [decl_f] [''Show, ''Eq] declP = DataD [] name_p [] [decl_p] [''Show, ''Eq] ser_decls_f <- buildObjectSerialisation sname_f fields ser_decls_p <- buildPParamSerialisation sname_p fields fill_decls <- fillParam sname field_pfx fields return $ [declF, declP] ++ ser_decls_f ++ ser_decls_p ++ fill_decls ++ buildParamAllFields sname fields ++ buildDictObjectInst name_f sname_f -- | Builds a list of all fields of a parameter. buildParamAllFields :: String -> [Field] -> [Dec] buildParamAllFields sname fields = let vname = mkName ("all" ++ sname ++ "ParamFields") sig = SigD vname (AppT ListT (ConT ''String)) val = ListE $ map (LitE . StringL . fieldName) fields in [sig, ValD (VarP vname) (NormalB val) []] -- | Builds the 'DictObject' instance for a filled parameter. 
buildDictObjectInst :: Name -> String -> [Dec] buildDictObjectInst name sname = [InstanceD [] (AppT (ConT ''DictObject) (ConT name)) [ValD (VarP 'toDict) (NormalB (VarE (toDictName sname))) []]] -- | Generates the serialisation for a partial parameter. buildPParamSerialisation :: String -> [Field] -> Q [Dec] buildPParamSerialisation sname fields = do let name = mkName sname savedecls <- genSaveObject savePParamField sname fields (loadsig, loadfn) <- genLoadObject loadPParamField sname fields shjson <- objectShowJSON sname rdjson <- objectReadJSON sname let instdecl = InstanceD [] (AppT (ConT ''JSON.JSON) (ConT name)) [rdjson, shjson] return $ savedecls ++ [loadsig, loadfn, instdecl] -- | Generates code to save an optional parameter field. savePParamField :: Name -> Field -> Q Exp savePParamField fvar field = do checkNonOptDef field let actualVal = mkName "v" normalexpr <- saveObjectField actualVal field -- we have to construct the block here manually, because we can't -- splice-in-splice return $ CaseE (VarE fvar) [ Match (ConP 'Nothing []) (NormalB (ConE '[])) [] , Match (ConP 'Just [VarP actualVal]) (NormalB normalexpr) [] ] -- | Generates code to load an optional parameter field. loadPParamField :: Field -> Q (Name, Stmt) loadPParamField field = do checkNonOptDef field let name = fieldName field fvar <- newName name -- these are used in all patterns below let objvar = varNameE "o" objfield = stringE name loadexp = [| $(varE 'maybeFromObj) $objvar $objfield |] bexp <- loadFn field loadexp objvar return (fvar, BindS (VarP fvar) bexp) -- | Builds a simple declaration of type @n_x = fromMaybe f_x p_x@. buildFromMaybe :: String -> Q Dec buildFromMaybe fname = valD (varP (mkName $ "n_" ++ fname)) (normalB [| $(varE 'fromMaybe) $(varNameE $ "f_" ++ fname) $(varNameE $ "p_" ++ fname) |]) [] -- | Builds a function that executes the filling of partial parameter -- from a full copy (similar to Python's fillDict). fillParam :: String -> String -> [Field] -> Q [Dec] fillParam sname field_pfx fields = do let fnames = map (\fd -> field_pfx ++ fieldRecordName fd) fields (sname_f, sname_p) = paramTypeNames sname oname_f = "fobj" oname_p = "pobj" name_f = mkName sname_f name_p = mkName sname_p fun_name = mkName $ "fill" ++ sname ++ "Params" le_full = ValD (ConP name_f (map (VarP . mkName . ("f_" ++)) fnames)) (NormalB . VarE . mkName $ oname_f) [] le_part = ValD (ConP name_p (map (VarP . mkName . ("p_" ++)) fnames)) (NormalB . VarE . mkName $ oname_p) [] obj_new = foldl (\accu vname -> AppE accu (VarE vname)) (ConE name_f) $ map (mkName . ("n_" ++)) fnames le_new <- mapM buildFromMaybe fnames funt <- [t| $(conT name_f) -> $(conT name_p) -> $(conT name_f) |] let sig = SigD fun_name funt fclause = Clause [VarP (mkName oname_f), VarP (mkName oname_p)] (NormalB $ LetE (le_full:le_part:le_new) obj_new) [] fun = FunD fun_name [fclause] return [sig, fun] -- * Template code for exceptions -- | Exception simple error message field. excErrMsg :: (String, Q Type) excErrMsg = ("errMsg", [t| String |]) -- | Builds an exception type definition. genException :: String -- ^ Name of new type -> SimpleObject -- ^ Constructor name and parameters -> Q [Dec] genException name cons = do let tname = mkName name declD <- buildSimpleCons tname cons (savesig, savefn) <- genSaveSimpleObj tname ("save" ++ name) cons $ uncurry saveExcCons (loadsig, loadfn) <- genLoadExc tname ("load" ++ name) cons return [declD, loadsig, loadfn, savesig, savefn] -- | Generates the \"save\" clause for an entire exception constructor. 
-- -- This matches the exception with variables named the same as the -- constructor fields (just so that the spliced in code looks nicer), -- and calls showJSON on it. saveExcCons :: String -- ^ The constructor name -> [SimpleField] -- ^ The parameter definitions for this -- constructor -> Q Clause -- ^ Resulting clause saveExcCons sname fields = do let cname = mkName sname fnames <- mapM (newName . fst) fields let pat = conP cname (map varP fnames) felems = if null fnames then conE '() -- otherwise, empty list has no type else listE $ map (\f -> [| JSON.showJSON $(varE f) |]) fnames let tup = tupE [ litE (stringL sname), felems ] clause [pat] (normalB [| JSON.showJSON $tup |]) [] -- | Generates load code for a single constructor of an exception. -- -- Generates the code (if there's only one argument, we will use a -- list, not a tuple: -- -- @ -- do -- (x1, x2, ...) <- readJSON args -- return $ Cons x1 x2 ... -- @ loadExcConstructor :: Name -> String -> [SimpleField] -> Q Exp loadExcConstructor inname sname fields = do let name = mkName sname f_names <- mapM (newName . fst) fields let read_args = AppE (VarE 'JSON.readJSON) (VarE inname) let binds = case f_names of [x] -> BindS (ListP [VarP x]) _ -> BindS (TupP (map VarP f_names)) cval = foldl (\accu fn -> AppE accu (VarE fn)) (ConE name) f_names return $ DoE [binds read_args, NoBindS (AppE (VarE 'return) cval)] {-| Generates the loadException function. This generates a quite complicated function, along the lines of: @ loadFn (JSArray [JSString name, args]) = case name of "A1" -> do (x1, x2, ...) <- readJSON args return $ A1 x1 x2 ... "a2" -> ... s -> fail $ "Unknown exception" ++ s loadFn v = fail $ "Expected array but got " ++ show v @ -} genLoadExc :: Name -> String -> SimpleObject -> Q (Dec, Dec) genLoadExc tname sname opdefs = do let fname = mkName sname exc_name <- newName "name" exc_args <- newName "args" exc_else <- newName "s" arg_else <- newName "v" fails <- [| fail $ "Unknown exception '" ++ $(varE exc_else) ++ "'" |] -- default match for unknown exception name let defmatch = Match (VarP exc_else) (NormalB fails) [] -- the match results (per-constructor blocks) str_matches <- mapM (\(s, params) -> do body_exp <- loadExcConstructor exc_args s params return $ Match (LitP (StringL s)) (NormalB body_exp) []) opdefs -- the first function clause; we can't use [| |] due to TH -- limitations, so we have to build the AST by hand let clause1 = Clause [ConP 'JSON.JSArray [ListP [ConP 'JSON.JSString [VarP exc_name], VarP exc_args]]] (NormalB (CaseE (AppE (VarE 'JSON.fromJSString) (VarE exc_name)) (str_matches ++ [defmatch]))) [] -- the fail expression for the second function clause fail_type <- [| fail $ "Invalid exception: expected '(string, [args])' " ++ " but got " ++ show (pp_value $(varE arg_else)) ++ "'" |] -- the second function clause let clause2 = Clause [VarP arg_else] (NormalB fail_type) [] sigt <- [t| JSON.JSValue -> JSON.Result $(conT tname) |] return $ (SigD fname sigt, FunD fname [clause1, clause2])
narurien/ganeti-ceph
src/Ganeti/THH.hs
gpl-2.0
41,430
1
21
11,079
9,539
5,049
4,490
-1
-1
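As a hedged illustration of the field and object combinators defined above (not an actual Ganeti definition), a top-level splice like the following would generate a record type together with its JSON serialisation; the type name "Disk", the prefix "disk" and the field names are invented for the example:

{-# LANGUAGE TemplateHaskell #-}
-- Hypothetical object, only to show buildObject/simpleField in use.
module Example where

import Ganeti.THH

$(buildObject "Disk" "disk"
  [ simpleField "size" [t| Int |]
  , defaultField [| False |] $ simpleField "shared" [t| Bool |]
  , optionalField $ simpleField "name" [t| String |]
  ])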
module AlexandriaCli (alexandria, defaultConfig) where

import qualified Config.Dyre as Dyre
import Control.Monad (liftM, void)
import System.Environment (getArgs)
import System.Process (callProcess, readProcess)
import System.FilePath ((</>),(<.>))
import System.Posix (getEnv)

import Alexandria.Render
import Alexandria.Config
import AlexandriaCli.Stats

-- | Parse command line arguments and execute the corresponding functions.
alexandriaMain :: Configuration a => a -> IO ()
alexandriaMain conf = getArgs >>= handleArgs
  where
    handleArgs []          = return ()
    handleArgs ["-I"]      = handleArgs ["--info"]
    handleArgs ["--info"]  = printStats conf
    handleArgs ["-i"]      = handleArgs ["--index"]
    handleArgs ["--index"] = putStrLn =<< generateIndex conf
    handleArgs args = do
      docs <- findDocs conf args
      mapM_ (\doc -> printUrl doc >> renderResults conf [doc]) docs
    printUrl = putStrLn . ("file://" ++) . (cacheDirectory conf </>) . (<.> "png")

configDir :: Maybe (IO FilePath)
configDir = Just $ do
  home <- getEnv "HOME"
  maybe (fail "$HOME not defined") return home

-- | This is the function that is called from the user's configuration file to
-- provide the main function.
alexandria :: Configuration a => a -> IO ()
alexandria = Dyre.wrapMain $ Dyre.defaultParams
  { Dyre.projectName = "alexandria"
  , Dyre.realMain    = alexandriaMain
  , Dyre.showError   = error "foo"
  , Dyre.configDir   = configDir
  }
yzhs/alexandria-cli
AlexandriaCli.hs
gpl-2.0
1,472
0
14
287
420
229
191
31
6
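Because the library is wrapped with Dyre, the user-facing program is only a tiny configuration file (located according to the Dyre configDir above) that hands a Configuration value to alexandria; defaultConfig is the value re-exported by the module:

-- Sketch of a user's alexandria configuration file.
import AlexandriaCli (alexandria, defaultConfig)

main :: IO ()
main = alexandria defaultConfig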
{-# LANGUAGE FlexibleContexts, OverloadedStrings #-}
{-# LANGUAGE TypeFamilies, EmptyDataDecls, GADTs #-}
{-# LANGUAGE TemplateHaskell, QuasiQuotes #-}
module Model where

import Data.Monoid
import Data.Text (Text)
import Database.Persist.Sqlite
import Database.Persist.TH

share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persist|
Person
    name Text
    age Int
    sex Sex
    deriving Show
NumPerAge
    ageArea AgeArea
    sex Sex
    number Int
    deriving Show
|]

data Sex = Male | Female
  deriving (Show, Read, Eq, Ord, Enum, Bounded)

data AgeArea = Over0 | Over20 | Over40 | Over60
  deriving (Show, Read, Eq, Ord, Enum, Bounded)

intToAgeArea :: Int -> AgeArea
intToAgeArea n
  | n < 20 = Over0
  | n < 40 = Over20
  | n < 60 = Over40
  | otherwise = Over60

data AllNumPerAge = AllNumPerAge
  { over0Male :: Int
  , over0Female :: Int
  , over20Male :: Int
  , over20Female :: Int
  , over40Male :: Int
  , over40Female :: Int
  , over60Male :: Int
  , over60Female :: Int
  } deriving (Show, Eq, Ord)

instance Monoid AllNumPerAge where
  mempty = AllNumPerAge 0 0 0 0 0 0 0 0
  mappend x y = AllNumPerAge
    (over0Male x + over0Male y)
    (over0Female x + over0Female y)
    (over20Male x + over20Male y)
    (over20Female x + over20Female y)
    (over40Male x + over40Male y)
    (over40Female x + over40Female y)
    (over60Male x + over60Male y)
    (over60Female x + over60Female y)

fromAllNumPerAge :: AllNumPerAge -> [NumPerAge]
fromAllNumPerAge all =
  NumPerAge Over0 Male (over0Male all) :
  NumPerAge Over0 Female (over0Female all) :
  NumPerAge Over20 Male (over20Male all) :
  NumPerAge Over20 Female (over20Female all) :
  NumPerAge Over40 Male (over40Male all) :
  NumPerAge Over40 Female (over40Female all) :
  NumPerAge Over60 Male (over60Male all) :
  NumPerAge Over60 Female (over60Female all) :
  []

derivePersistField "Sex"
derivePersistField "AgeArea"
seizans/batch
Model.hs
gpl-2.0
2,011
0
15
509
603
310
293
53
1
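The Monoid instance above makes per-age tallies composable; a small sketch (assuming the Model module is in scope) of folding two singleton counts together and flattening them back into NumPerAge rows:

-- Sketch only; relies on the record fields and Show instances defined above.
import Data.Monoid (mconcat)
import Model

a, b :: AllNumPerAge
a = mempty { over0Male = 1 }     -- one male under 20
b = mempty { over60Female = 1 }  -- one female over 60

main :: IO ()
main = mapM_ print (fromAllNumPerAge (mconcat [a, b]))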
{- -----------------------------------------------------------------------------
ZDCPU16 is a DCPU-16 emulator.
Copyright (C) 2012 Luis Cabellos

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
----------------------------------------------------------------------------- -}
module ZDCpu16.EmuState( EmuState(..), mkEmuState ) where

-- -----------------------------------------------------------------------------
import ZDCpu16.Hardware( DCPU_16(..), initialDCPU )

-- -----------------------------------------------------------------------------
data EmuState = EmuState
  { emuCpu :: ! DCPU_16
  , totalCycles :: ! Integer
  , lastCycles :: ! Int
  , runMode :: ! Bool
  , speed :: ! Int
  , writeVRAM :: Int -> Int -> IO ()
  }

-- -----------------------------------------------------------------------------
instance Show EmuState where
  show st = "EmuState { cpu = " ++ (show . emuCpu) st
            ++ ", totalCycles = " ++ (show . totalCycles) st
            ++ ", lastCycles = " ++ (show . lastCycles) st
            ++ ", runMode = " ++ (show . runMode) st
            ++ " } "

-- -----------------------------------------------------------------------------
mkEmuState :: (Int -> Int -> IO ()) -> IO EmuState
mkEmuState fVRAM = do
  idcpu <- initialDCPU
  return $! EmuState idcpu 0 0 False 100 fVRAM

-- -----------------------------------------------------------------------------
zhensydow/zdcpu16
src/ZDCpu16/EmuState.hs
gpl-3.0
2,046
10
16
412
260
143
117
29
1
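mkEmuState only needs a VRAM write callback, so a throwaway state for experimentation can be built with a no-op; an assumed quick check of the constructor above:

-- The callback ignores all VRAM writes; print uses the Show instance above.
import ZDCpu16.EmuState (mkEmuState)

main :: IO ()
main = do
  st <- mkEmuState (\_addr _val -> return ())
  print st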
{-# OPTIONS_GHC -F -pgmF htfpp #-}
{-
This module tests how the version parser (System.Directory.Archivemount.VersionParser)
handles valid and invalid input.

GPL3. License info is at the bottom of the file.
-}
module System.Directory.Archivemount.VersionParserTest where

import System.Directory.Archivemount.VersionParser
import System.Directory.Archivemount.Types

import Test.Framework

test_validVersion =
  assertEqual
    (InstalledVersion
      {archivemount=[0,8,2]
      ,fuse=[2,9,0]
      ,fusermount=[2,9,0]
      ,fuseKernelInterface=[7,18]})
    (parseVersionInfo
      (unlines
        ["archivemount version 0.8.2"
        ,"FUSE library version: 2.9.0"
        ,"fusermount version: 2.9.0"
        ,"using FUSE kernel interface version 7.18"]))

test_invalidVersion = do
  assertEqual
    (InstalledButVersionInfoCouldNotBeParsed)
    (parseVersionInfo
      (unlines
        ["FUSE library version: 2.9.0"
        ,"fusermount version: 2.9.0"
        ,"using FUSE kernel interface version 7.18"]))

{-
-- Copyright (C) 2013 Timothy Hobbs <[email protected]>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
timthelion/archivemount-hs
test/System/Directory/Archivemount/VersionParserTest.hs
gpl-3.0
1,667
0
12
274
163
102
61
26
1
-----------------------------------------------------------------------------
--
-- Module      :  Main
-- Copyright   :  Giacomo Tesio
-- License     :  GNU General Public License Version 3
--
-- Maintainer  :  Giacomo Tesio
-- Stability   :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Main ( main ) where

import System.Environment( getArgs )
import Data.Maybe (isNothing)
import Computation

main = do
    args <- getArgs
    putStrLn $ format $ evaluate (floatingRPN) args

format :: (Show a) => Either String a -> String
format (Left e) = e
format (Right result) = show result

floatingRPN :: (Floating a, Read a) => String -> Computation a
floatingRPN "+" = binary (+)
floatingRPN "-" = binary (-)
floatingRPN "*" = binary (*)
floatingRPN "/" = binary (/)
floatingRPN "^" = binary (**)
floatingRPN "negate" = unary negate
floatingRPN "sqrt" = unary sqrt
floatingRPN token = readValue token

readValue :: (Read a, Show a) => String -> Computation a
readValue token = extract parsed
    where extract [(x, "")] = value x
          extract _ = parseError $ "Can not parse: " ++ token ++ "."
          parsed = reads token
Shamar/RPNCalculator
src/Main.hs
gpl-3.0
1,191
0
9
229
343
184
159
25
2
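An illustrative GHCi session for the RPN calculator above. The Computation module is not included in this excerpt, so evaluate's behaviour is assumed from the way main applies it (a token handler plus the [String] of RPN tokens, producing Either String a); the results shown follow from that reading and are not verified output.

-- ghci> :load src/Main.hs
-- ghci> format (evaluate floatingRPN ["3", "4", "+", "2", "*"] :: Either String Double)
-- "14.0"
-- ghci> format (evaluate floatingRPN ["3", "oops", "+"] :: Either String Double)
-- "Can not parse: oops."   -- assuming parseError surfaces as a Left value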
{-# LANGUAGE TemplateHaskell, FlexibleInstances, FlexibleContexts, ViewPatterns, RecordWildCards, NamedFieldPuns, ScopedTypeVariables, TypeSynonymInstances, NoMonomorphismRestriction, TupleSections, StandaloneDeriving, GeneralizedNewtypeDeriving #-} module Triangulation.GlueMap( -- * Reexports module EitherC, ErrorCall(..), GlueMap,gm_numberOfTetrahedra,gm_map, -- * Unsafe lenses (not checking invariant) gm_numberOfTetrahedraUnsafeL, gm_mapUnsafeL, -- * Construction unsafeMkGlueMap,mkGlueMap,mkGlueMap', -- * Misc gm_addTet ) where import Control.Applicative import Control.DeepSeq.TH import Control.Exception import Control.Monad import Data.Lens.Common import Data.Lens.Template import Data.Map(Map) import Data.Map(Map) import Data.Maybe import Data.Word import EitherC import OrderableFace import THUtil import Tetrahedron import Triangulation.FacetGluing import Util import qualified Data.Map as M import Data.List(foldl') data GlueMap = -- | \"Unsafe\" due to invariants. UnsafeGM { gm_numberOfTetrahedra :: Word, -- | INVARIANT: -- -- @lookup (i ./ t) gm_map = Just (j ./ packOrderedFace s g)@ -- -- implies -- -- @lookup (j ./ s) gm_map = Just (i ./ 'packOrderedFace' t ('inv' g'))@ -- -- (for all @f0@, @f1@, @g@). -- -- INVARIANT: The 'getTIndex' of all triangles is less than 'gm_numberOfTetrahedra'. gm_map :: Map ITriangle OITriangle -- performance-TODO: could use IntMap or maybe a vector } deriving (Show,Eq,Ord) deriveNFData ''GlueMap nameMakeLens ''GlueMap (Just . (++"UnsafeL")) -- | Doesn't check the invariant. unsafeMkGlueMap = UnsafeGM -- | Throwy variant of 'mkGlueMap'' mkGlueMap :: Word -> Map ITriangle OITriangle -> GlueMap mkGlueMap = (.) ($(unEitherC) "mkGlueMap") . mkGlueMap' mkGlueMap' :: Word -> Map ITriangle OITriangle -> EitherC (Located ErrorCall) GlueMap mkGlueMap' n mp = do forM_ (M.keys mp) (\t -> unless (getTIndex t < fi n) ($failureStr ("TIndex of triangle too large: "++show t))) forM_ (M.keys mp) (\tri -> case M.lookup tri mp of Nothing -> return () Just otri -> let mkErrMsg msg = show tri ++ " maps to " ++ show otri ++ " but " ++ show (forgetVertexOrder otri) ++ " " ++ msg in case M.lookup (forgetVertexOrder otri) mp of Nothing -> $(failureStr) (mkErrMsg "is not in the map.") Just tri2_actual -> let tri2_expected = getTIndex tri ./ packOrderedFace (forgetTIndex tri) (inv $ getVertexOrder otri) in unless (tri2_actual == tri2_expected) ($(failureStr) (mkErrMsg ("maps to "++show tri2_actual ++ " (expected: "++ show tri2_expected ++")."))) ) return (UnsafeGM n mp) instance MapTIndices GlueMap where mapTIndices f = modL gm_mapUnsafeL ($(fromListNoCollision) . map (mapTIndices f) . M.toList) mapTIndicesStrictlyMonotonic f = modL gm_mapUnsafeL (M.fromDistinctAscList . map (mapTIndicesStrictlyMonotonic f) . M.toList) -- | The 'TIndex' argument to the first argument is the newly created tet gm_addTet :: (TIndex -> [Gluing]) -> GlueMap -> EitherC (Located ErrorCall) (GlueMap,TIndex) gm_addTet gls gm = (,newTet) <$> gm' where gm' = mkGlueMap' (gm_numberOfTetrahedra gm + 1) (foldl' (\m g -> $insertNoCollision (glDom g) (glCod g) m) (gm_map gm) (symmetrizeGluings (gls newTet))) newTet = tindex . gm_numberOfTetrahedra $ gm
DanielSchuessler/hstri
Triangulation/GlueMap.hs
gpl-3.0
4,368
0
30
1,613
877
466
411
78
3
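A small construction sketch for the GlueMap module above, using only the checked constructor: an empty gluing map on a single tetrahedron satisfies the documented invariant vacuously. It assumes Located and ErrorCall are in scope via the EitherC re-export shown in the module's export list.

import qualified Data.Map as M
import Triangulation.GlueMap

-- One tetrahedron with all four faces left unglued (all boundary).
boundaryTet :: EitherC (Located ErrorCall) GlueMap
boundaryTet = mkGlueMap' 1 M.empty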
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Genomics.VariantSets.Export -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Exports variant set data to an external destination. For the definitions -- of variant sets and other genomics resources, see [Fundamentals of -- Google -- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics) -- -- /See:/ <https://cloud.google.com/genomics Genomics API Reference> for @genomics.variantsets.export@. module Network.Google.Resource.Genomics.VariantSets.Export ( -- * REST Resource VariantSetsExportResource -- * Creating a Request , variantSetsExport , VariantSetsExport -- * Request Lenses , vseXgafv , vseUploadProtocol , vsePp , vseVariantSetId , vseAccessToken , vseUploadType , vsePayload , vseBearerToken , vseCallback ) where import Network.Google.Genomics.Types import Network.Google.Prelude -- | A resource alias for @genomics.variantsets.export@ method which the -- 'VariantSetsExport' request conforms to. type VariantSetsExportResource = "v1" :> "variantsets" :> CaptureMode "variantSetId" "export" Text :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "pp" Bool :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "bearer_token" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] ExportVariantSetRequest :> Post '[JSON] Operation -- | Exports variant set data to an external destination. For the definitions -- of variant sets and other genomics resources, see [Fundamentals of -- Google -- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics) -- -- /See:/ 'variantSetsExport' smart constructor. data VariantSetsExport = VariantSetsExport' { _vseXgafv :: !(Maybe Xgafv) , _vseUploadProtocol :: !(Maybe Text) , _vsePp :: !Bool , _vseVariantSetId :: !Text , _vseAccessToken :: !(Maybe Text) , _vseUploadType :: !(Maybe Text) , _vsePayload :: !ExportVariantSetRequest , _vseBearerToken :: !(Maybe Text) , _vseCallback :: !(Maybe Text) } deriving (Eq,Show,Data,Typeable,Generic) -- | Creates a value of 'VariantSetsExport' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'vseXgafv' -- -- * 'vseUploadProtocol' -- -- * 'vsePp' -- -- * 'vseVariantSetId' -- -- * 'vseAccessToken' -- -- * 'vseUploadType' -- -- * 'vsePayload' -- -- * 'vseBearerToken' -- -- * 'vseCallback' variantSetsExport :: Text -- ^ 'vseVariantSetId' -> ExportVariantSetRequest -- ^ 'vsePayload' -> VariantSetsExport variantSetsExport pVseVariantSetId_ pVsePayload_ = VariantSetsExport' { _vseXgafv = Nothing , _vseUploadProtocol = Nothing , _vsePp = True , _vseVariantSetId = pVseVariantSetId_ , _vseAccessToken = Nothing , _vseUploadType = Nothing , _vsePayload = pVsePayload_ , _vseBearerToken = Nothing , _vseCallback = Nothing } -- | V1 error format. 
vseXgafv :: Lens' VariantSetsExport (Maybe Xgafv) vseXgafv = lens _vseXgafv (\ s a -> s{_vseXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). vseUploadProtocol :: Lens' VariantSetsExport (Maybe Text) vseUploadProtocol = lens _vseUploadProtocol (\ s a -> s{_vseUploadProtocol = a}) -- | Pretty-print response. vsePp :: Lens' VariantSetsExport Bool vsePp = lens _vsePp (\ s a -> s{_vsePp = a}) -- | Required. The ID of the variant set that contains variant data which -- should be exported. The caller must have READ access to this variant -- set. vseVariantSetId :: Lens' VariantSetsExport Text vseVariantSetId = lens _vseVariantSetId (\ s a -> s{_vseVariantSetId = a}) -- | OAuth access token. vseAccessToken :: Lens' VariantSetsExport (Maybe Text) vseAccessToken = lens _vseAccessToken (\ s a -> s{_vseAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). vseUploadType :: Lens' VariantSetsExport (Maybe Text) vseUploadType = lens _vseUploadType (\ s a -> s{_vseUploadType = a}) -- | Multipart request metadata. vsePayload :: Lens' VariantSetsExport ExportVariantSetRequest vsePayload = lens _vsePayload (\ s a -> s{_vsePayload = a}) -- | OAuth bearer token. vseBearerToken :: Lens' VariantSetsExport (Maybe Text) vseBearerToken = lens _vseBearerToken (\ s a -> s{_vseBearerToken = a}) -- | JSONP vseCallback :: Lens' VariantSetsExport (Maybe Text) vseCallback = lens _vseCallback (\ s a -> s{_vseCallback = a}) instance GoogleRequest VariantSetsExport where type Rs VariantSetsExport = Operation type Scopes VariantSetsExport = '["https://www.googleapis.com/auth/bigquery", "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/genomics"] requestClient VariantSetsExport'{..} = go _vseVariantSetId _vseXgafv _vseUploadProtocol (Just _vsePp) _vseAccessToken _vseUploadType _vseBearerToken _vseCallback (Just AltJSON) _vsePayload genomicsService where go = buildClient (Proxy :: Proxy VariantSetsExportResource) mempty
rueshyna/gogol
gogol-genomics/gen/Network/Google/Resource/Genomics/VariantSets/Export.hs
mpl-2.0
6,336
0
19
1,533
948
553
395
136
1
-- | This module provides a large number of useful default __Built-In Functions__ (BIFs) which you -- can use to declare the 'Language.Interpreter.Dao.Kernel.builtins' field of the Dao Lisp execution -- 'Language.Interpreter.Dao.Kernel.Environment', when evaluating the the 'evalDaoIO' function. The -- function 'defaultBIFs' provided at the top of this module could be considered the Dao Lisp -- "standard library" of functions. -- -- Each one of these so-called "standard library" functions are provided as a stand-alone Haskell -- function of type -- @('Language.Interpreter.Dao.Kernel.Atom', 'Language.Interpreter.Dao.Kernel.Outliner' ('Language.Interpreter.Dao.Kernel.DaoEval' 'Language.Interpreter.Dao.Kernel.DaoExpr'))@ -- so that you may evaluate it directly with the 'evalDefaultBIF' function, or that you may use it in your -- own customized dictionary of 'Language.Interpreter.Dao.Kernel.builtins' with a name of your -- choosing. module Language.Interpreter.Dao.DefaultBIFs ( -- * Setting-up the Environment of a Dao Interpreter newEnvironment, extendBuiltins, setupTraceBIF, setupTraceAtom, setupTraceForm, -- * Defining BIFs DefaultBIF, defaultBIFs, rename, evalDefaultBIF, -- * Arithmetic daoSum, daoSubtract, daoProduct, daoDivide, daoModulus, -- * Comparison daoEqual, daoNotEqual, daoLessEq, daoLess, daoGreaterEq, daoGreater, -- * Strings daoInterpolate, -- * Dictionaries daoGet, daoPut, -- * IO daoPrint, -- * Checks on Matched Pattern daoPredicate, -- * Exceptions daoThrow, ) where import Language.Interpreter.Dao.Kernel import Control.Applicative import Control.Monad import Control.Monad.IO.Class import Control.Monad.Trans import Data.Foldable --import Data.List.NonEmpty (toList) import qualified Data.Map as Map import qualified Data.Text as Strict import qualified Data.Text.IO as Strict --import Debug.Trace ---------------------------------------------------------------------------------------------------- -- | Create a new default 'Environment' to be used to evaluate some Dao Lisp expressions. Pass an -- update function composed (that is composed with the 'Prelude..' operator) of several setup -- functions, like 'setupBuiltins', 'setupTraceBIF', 'setupTraceAtom', and 'setupTraceForm'. -- -- Note that if you simply want to use the default 'Language.Interpreter.Dao.Kernel.Environment', -- construct it by passing 'Prelude.id' to this function. That is to say -- @('newEnvironment' 'Prelude.id')@ is the default environment. newEnvironment :: (Environment -> Environment) -> Environment newEnvironment = flip ($) emptyEnvironment{ builtins = defaultBIFs } -- | Append builtin functions to a 'newEnvironment'. A 'newEnvironment' function provides an -- environment that installs the 'defaultBIFs', but if any function you provide to 'extendBuiltins' -- has the same name as any of the 'defaultBIFs', each of these identically named 'defaultBIFs' will -- be replaced with your own version of the BIF. -- -- If the list of BIFs passed to this function contains any duplicate named entries, a Haskell -- exception is thrown. This is to notify you of a potential name conflict as quickly as possible, -- hopefully as soon as your program begins execution and before anything expensive or destructive -- happens. -- -- Exceptions are not thrown from multiple calls to 'extendBuiltins' when name conflicts occur, -- BIFs defined from prior calls to 'extendBuiltins' are silently overwritten. 
extendBuiltins :: [(Atom, DaoBuiltinFunction)] -> (Environment -> Environment) extendBuiltins elems env = env{ builtins = Map.union (Map.fromListWithKey noDups elems) (builtins env) } where noDups name _ _ = error $ "Cannot init Dao Lisp interpreter, " ++ "extendBuiltins called with multiple built-in functions named \"" ++ show name ++ "\"" -- | Define a trace function in the 'newEnvironment' to be called when a Built-In Function (BIF) is -- evaluated, for debugging purposes. -- -- If this function is called multiple times, the last function given is used -- be careful, "last -- function" depends on whether you use the dot @('Prelude..')@ function or the forward arrow -- @('Control.Category.>>>')@ function. When using the forward arrow, the last line of Haskell code -- you write is the "last function," when using the dot operator, the first line of Haskell code you -- write is the "last function." setupTraceBIF :: (Atom -> [DaoExpr] -> DaoEval ()) -> (Environment -> Environment) setupTraceBIF f env = env{ traceBIF = Just f } -- | Similar to 'setupTraceBIF', except this trace function is called whenever a non-Built-In -- Function is called, with the 'Atom' that labels the function, and the list of 'DaoExpr' arguments -- passed to it, usually a function defined in a Dao Lisp database. setupTraceAtom :: (Atom -> [DaoExpr] -> DaoEval ()) -> (Environment -> Environment) setupTraceAtom f env = env{ traceAtom = Just f } -- | Very similar to 'setupTraceAtom': this trace function is called whenever a non-Built-In -- Function is called, usually a function defined in a Dao Lisp database, the difference between -- this and the function you define with 'setupTraceAtom' is that the entire 'Form' is given to this -- function, not just the 'Atom' and it's arguments. setupTraceForm :: (Form -> DaoEval ()) -> (Environment -> Environment) setupTraceForm f env = env{ traceForm = Just f } ---------------------------------------------------------------------------------------------------- type DefaultBIF = (Atom, DaoBuiltinFunction) -- | This is the statica global data structure which contains all of the BIFs defined in this -- module. This table of BIFs is automatically included into your -- 'Language.Interpreter.Dao.Kernel.Environment' when you construct one with the 'newEnvironment' -- function. defaultBIFs :: Map.Map Atom DaoBuiltinFunction defaultBIFs = bifList $ fmap (uncurry bif) [ daoSum, daoSubtract, daoProduct, daoDivide , daoModulus , daoEqual, daoNotEqual, daoLess, daoLessEq, daoGreater, daoGreaterEq , daoInterpolate , daoPrint , daoThrow , daoPredicate , daoGet, daoPut ] -- | A convenient function for renaming a 'DefaulBIF' before adding it to your own dictionary of -- BIFs. rename :: DefaultBIF -> Atom -> DefaultBIF rename (_ , f) newname = (newname, f) -- | A convenient function for evaluating a 'DefaultBIF' to a -- 'Language.Interpreter.Dao.Kernel.DaoEval' function. evalDefaultBIF :: DefaultBIF -> [DaoExpr] -> DaoEval DaoExpr evalDefaultBIF = uncurry evalBIF ---------------------------------------------------------------------------------------------------- daoSum :: DefaultBIF daoSum = bif "+" $ DaoStrict $ argsDecodeAll (pure . DaoInt . sum) <|> argsDecodeAll (pure . DaoFloat . sum) <|> argsDecodeAll (pure . DaoString . mconcat) <|> argsDecodeAll (pure . dao . or) <|> argsDecodeAll (pure . DaoDict . unionDicts (\ _ _ a -> a)) daoProduct :: DefaultBIF daoProduct = bif "*" $ DaoStrict $ argsDecodeAll (pure . DaoInt . product) <|> argsDecodeAll (pure . DaoFloat . 
product) <|> argsDecodeAll (pure . dao . and) <|> argsDecodeAll (pure . DaoDict . intersectDicts (\ _ _ a -> a)) _nonassoc :: Num a => a -> (a -> a) -> (a -> a -> a) -> ([a] -> a) -> [a] -> a _nonassoc id neg bin join = \ case { [] -> id; [a] -> neg a; a:ax -> a `bin` join ax; } daoSubtract :: DefaultBIF daoSubtract = bif "-" $ DaoStrict $ argsDecodeAll (pure . DaoInt . _nonassoc 0 negate (-) sum) <|> argsDecodeAll (pure . DaoFloat . _nonassoc 0 negate (-) sum) daoDivide :: DefaultBIF daoDivide = bif "/" $ DaoStrict $ argsDecodeAll (pure . DaoInt . _nonassoc 1 (\ x -> if x == 1 then 1 else 0) div product) <|> argsDecodeAll (pure . DaoFloat . _nonassoc 1 recip (/) product) daoModulus :: DefaultBIF daoModulus = bif "mod" $ DaoStrict $ matchAll $ \ case [] -> return $ DaoInt 1 a:ax -> do let err param = plainError "matching" [ ("reason", DaoString "modulus require integer parameters") , ("offender", param) ] a <- case a of { DaoInt a -> return a; param -> matchQuit $ err param; } ax <- forM ax $ \ case DaoInt 0 -> matchQuit $ plainError "matching" [ ("reason", DaoString "divide by zero") , ("function", DaoAtom "mod") ] DaoInt a -> return a param -> matchQuit $ err param return $ case ax of [] -> DaoInt a a:ax -> DaoInt $ foldl mod a ax -- | Join many strings and characters into a single string. Non-strings and non-characters passed to -- this function are converted to a string value. Example: -- -- > (interpolate "Hello" ',' " world! " One Two Three.) -- -- The above will create a string @"Hello, world! OneTwoThree."@ daoInterpolate :: DefaultBIF daoInterpolate = bif "interpolate" $ DaoStrict $ matchAll $ pure . DaoString . strInterpolate -- | Call 'daoInterpolate' and push the result to the system console's standard output stream. daoPrint :: DefaultBIF daoPrint = bif "print" $ DaoStrict $ matchAll $ lift . daoVoid . liftIO . Strict.putStrLn . strInterpolate -- | This function throws an uncatchable exception which immediately halts evaluation of a -- 'Language.Interpreter.Dao.Kernel.DaoEval' function. Exceptions are, however, caught during -- pattern matching when a pattern match evaluates a predicate function that evaluates this very -- @throw@ function. If an exception is @throw@n during pattern matching, the pattern simply fails -- and another pattern is tried. daoThrow :: DefaultBIF daoThrow = bif "throw" $ DaoNonStrict $ (plainError <$> argDecode info <*> (dictAssocs <$> argDecode info) >>= lift . daoFail) <|> (matchAll $ lift . daoFail . plainError "user-error" . pure . (,) "args" . daoList) where info = [("function", DaoAtom "throw")] -- | Intended to only be used in pattern matching. This function takes a partially applied form as a -- parameter, applies the remaining arguments to evaluate the form as a function, and returns all -- arguments if the form evaluation results in a 'Language.Interpreter.Dao.Kernel.DaoTrue' value. -- Anything other value throws an exception which indicates that the pattern does not match. -- -- The result is, a partial function call preceeded by a @?@ will test if arguments passed to the -- function result in true, and if so the arguments may be assigned to a pattern match variable, -- otherwise the pattern simply does not match. -- -- Example -- > (defn bigEnough (x) (<= 5 x)) -- > rule (type== Int * 1 (:numbers (? bigEnough))) (for x in numbers (print x " is big enough")) -- -- Note the predicate creates a pattern that matches one or more integer values and assignes all -- matching integers to a list called @numbers@. 
The list of numbers is also typed with the -- @(? bigEnough)@ predicate, which is a function call to this very @?@ function that calls -- @bigEnough@ with all values that match the pattern in that position. daoPredicate :: DefaultBIF daoPredicate = bif "?" $ DaoNonStrict $ do fn <- argDecode [("function", DaoAtom "?")] matchAll $ \ args -> do result <- lift $ evalNamedMacro fn args case result of DaoTrue -> return $ daoList args _ -> lift $ daoFail $ plainError "predicate" [ ("reason", DaoString "predicate did not evaluate to true") , ("function", DaoAtom fn) ] _daoget :: (DaoDecode dict, DaoDecode key, Show key) -- TODO remove Show => (key -> dict -> Maybe DaoExpr) -> DaoMacro DaoExpr _daoget lookup = do let getatom = DaoAtom "get" let badkey = DaoString "incorrect key type for lookup target" let info = [("reason", badkey), ("function", getatom)] (listLen, keys) <- mplus (((,) Nothing) . pure <$> argDecode info) (do keysList <- argDecode info liftM ((,) $ Just $ length keysList) $ forM (toList $ unwrapList keysList) $ \ key -> case daoDecode key of Right key -> return key Left err -> matchQuit $ let info = errorAppendInfo in info "reason" badkey . info "function" getatom . info "bad-key" key $ err ) dict <- argEvalDeepDecode info deflt <- argEvalDeepDecode [] <|> return DaoVoid flip returnIfEnd [("function", getatom)] $ case listLen of Just len -> maybe DaoNull DaoList $ sizedMaybeList len (maybe deflt id . flip lookup dict <$> keys) deflt Nothing -> case keys of [] -> deflt key : _ -> maybe deflt id $ lookup key dict -- | Perform a lookup on a dictionary or list, returning 'DaoVoid' or an optional @default-value@ if -- the key does not exist. This is a 'DaoNonStrict' function which does not evaluate the key if it -- is an 'Language.Interpreter.Dao.Kernel.Atom'. -- -- > (get key dictionary default-value) -- > (get index list default-value) -- -- The dictionary and default values are evaluated before performing the lookup, but the key is not -- evaluated unless it is a form. This allows you to specify an atom without it being looked up in -- the local variable stack. -- -- First parameter format: dictionary and a single key, with an optional default value to be -- returned if the key is undefined. Returns the value associated with the key in the dictionary or -- list. -- -- Examples: -- -- 1. Get's the value associated with the key @aaa@. -- -- > (get aaa {:aaa 1}) ---> returns 1 -- -- 2. The key @bbb@ does not exist, so it returns the given default value zero @0@: -- -- > (get bbb {:aaa 1} 0) ---> returns 0 -- -- 3. Get the first element in the list, returns the 'Language.Interpreter.Dao.Kernel.Atom' @a@: -- -- > (get 0 [a b c d]) -- -- 4. Get an undefined index: -- -- > (get (+ 3 1) [a b c d] (atom nothing)) ---> returns ('Language.Interpreter.Dao.Kernel.DaoAtom' "nothing") -- -- 5. 
Get an integer index, where the integer is assigned to a variable name, returns -- @'Language.Intgerpreter.Dao.Kernel.DaoString' "one"@: -- -- > (do let key = 1; get (@ key) ["zero" "one" "two" "three"];) -- daoGet :: DefaultBIF daoGet = bif "get" $ DaoNonStrict $ _daoget lookupDict <|> _daoget indexList _daoput :: (DaoDecode dict, DaoDecode key, DaoEncode dict, Show dict, Show key) => (dict -> [(key, DaoExpr)] -> dict) -> DaoMacro DaoExpr _daoput update = do let info = [("function", DaoAtom "put")] pairs <- (argOutlineListWith info $ outlinePairsWith outlineDaoDecoder $ matchStep [] return) <|> (liftM pure $ (,) <$> argDecode info <*> argEvalDeepDecode info) <|> ( matchQuit $ plainError "arg-matching" $ info ++ [("reason", DaoString "argument index+value pair must be single pair, or list of pairs")] ) pairs <- forM pairs $ \ (atom, expr) -> ((,) atom) <$> lift (evalDeep expr) dict <- argEvalDeepDecode info returnIfEnd (dao $ update dict pairs) info -- | Perform an insertion on a dictionary or list. If the target is a list, the keys must be integer -- values. -- -- > (put key value dict) -- > (put [:key1 value1 :key2 value2 :key3 value3 ...] dict) -- -- Notice that when inserting multiple elements, you must specify a __square-bracketed__ list of -- pairs, not a curly-bracketed dictionary. daoPut :: DefaultBIF daoPut = bif "put" $ DaoNonStrict $ _daoput insertDict <|> _daoput insertList ---------------------------------------------------------------------------------------------------- type CreasingFunction a = [a] -> DaoEval DaoExpr comparing :: Atom -> (a -> a -> Bool) -> CreasingFunction a comparing atom ord = \ case { [] -> err; a:ax -> return $ loop a ax; } where err = daoFail $ plainError "matching" [ ("function", DaoAtom atom) , ("reason", DaoString "no arguments to function") ] loop a ax = case ax of [] -> DaoTrue b:ax -> if a `ord` b then loop b ax else DaoNull daoCompare :: Atom -> (forall a . (Ord a, DaoDecode a) => a -> a -> Bool) -> DefaultBIF daoCompare atom f = (,) atom $ DaoStrict $ argsDecodeAll (comparing atom f :: CreasingFunction Int) <|> argsDecodeAll (comparing atom f :: CreasingFunction Double) <|> argsDecodeAll (comparing atom f :: CreasingFunction Strict.Text) daoEqual :: DefaultBIF daoEqual = daoCompare "==" (==) daoNotEqual :: DefaultBIF daoNotEqual = daoCompare "/=" (/=) daoLess :: DefaultBIF daoLess = daoCompare "<" (<) daoLessEq :: DefaultBIF daoLessEq = daoCompare "<=" (<=) daoGreater :: DefaultBIF daoGreater = daoCompare ">" (>) daoGreaterEq :: DefaultBIF daoGreaterEq = daoCompare ">=" (>=)
RaminHAL9001/Dao
src/Language/Interpreter/Dao/DefaultBIFs.hs
agpl-3.0
16,552
0
23
3,187
3,001
1,645
1,356
-1
-1
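A setup sketch following the documentation above: it builds an Environment with the default BIFs, gives the stock print builtin a second name, and installs a no-op BIF trace hook. It assumes Environment and an IsString instance for Atom are available from Language.Interpreter.Dao.Kernel, and the resulting environment would then be handed to evalDaoIO, which is not shown in this excerpt.

{-# LANGUAGE OverloadedStrings #-}
module Main where

import Language.Interpreter.Dao.Kernel (Environment)
import Language.Interpreter.Dao.DefaultBIFs

-- Default "standard library" plus "say" as an alias for the print builtin.
myEnv :: Environment
myEnv = newEnvironment
  ( extendBuiltins [ rename daoPrint "say" ]
  . setupTraceBIF (\ _atom _args -> return ())
  )

main :: IO ()
main = putStrLn "Dao environment configured."  -- myEnv would be passed to evalDaoIO here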
-- | <https://tools.ietf.org/html/rfc4511#section-4.11 Abandon> operation.
--
-- This operation comes in two flavours:
--
--   * asynchronous, 'IO' based ('abandonAsync')
--
--   * asynchronous, 'STM' based ('abandonAsyncSTM')
--
-- Of those, the first one ('abandonAsync') is probably the most useful for
-- the typical usecase.
--
-- Synchronous variants are unavailable because the Directory does not
-- respond to @AbandonRequest@s.
module Ldap.Client.Abandon
  ( abandonAsync
  , abandonAsyncSTM
  ) where

import           Control.Monad (void)
import           Control.Monad.STM (STM, atomically)

import qualified Ldap.Asn1.Type as Type
import           Ldap.Client.Internal


-- | Perform the Abandon operation asynchronously.
abandonAsync :: Ldap -> Async a -> IO ()
abandonAsync l =
  atomically . abandonAsyncSTM l

-- | Perform the Abandon operation asynchronously.
abandonAsyncSTM :: Ldap -> Async a -> STM ()
abandonAsyncSTM l =
  void . sendRequest l die . abandonRequest
 where
  die = error "Ldap.Client.Abandon: do not wait for the response to AbandonRequest"

abandonRequest :: Async a -> Request
abandonRequest (Async i _) =
  Type.AbandonRequest i
VictorDenisov/ldap-client
src/Ldap/Client/Abandon.hs
bsd-2-clause
1,166
0
8
212
189
108
81
17
1
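A composition sketch showing why the STM flavour exists: two outstanding operations can be abandoned in a single transaction. Ldap and Async are assumed to be importable from the library's public Ldap.Client module; how the Async handles were obtained (e.g. from asynchronous searches) is left abstract here.

import Control.Monad.STM (atomically)
import Ldap.Client (Ldap, Async)
import Ldap.Client.Abandon (abandonAsyncSTM)

-- Abandon both operations atomically, or neither.
abandonBoth :: Ldap -> Async a -> Async b -> IO ()
abandonBoth l a b = atomically (abandonAsyncSTM l a >> abandonAsyncSTM l b)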
{-# LANGUAGE TemplateHaskell, KindSignatures, TypeFamilies, FlexibleContexts, GADTs #-}
module Model where

import Prelude
import Yesod
import Data.Text (Text)
import Database.Persist.Quasi
import Database.Persist.MongoDB
import Language.Haskell.TH.Syntax

-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
share [mkPersist MkPersistSettings { mpsBackend = ConT ''Action }, mkMigrate "migrateAll"]
    $(persistFileWith lowerCaseSettings "config/models")
cutsea110/blog
Model.hs
bsd-2-clause
601
0
11
74
86
51
35
10
0
module Stats (nintyFifth, Estimate(..)) where

import qualified Data.Vector.Unboxed as U
import Statistics.Sample (mean)
import Statistics.Resampling (resample, fromResample)
import Statistics.Resampling.Bootstrap (bootstrapBCA, Estimate(..) )
import System.Random.MWC (create)

nintyFifth :: [Double] -> IO Estimate
nintyFifth sample = do
  g <- create
  resamples <- resample g [mean] 10000 sampleU -- (length sample^2) sampleU
  -- print $ U.length $ fromResample $ head $ resamples
  -- print resamples
  return $ head $ bootstrapBCA 0.95 sampleU [mean] resamples
  where sampleU = U.fromList sample
ku-fpg/ldpc
src/Stats.hs
bsd-2-clause
647
0
9
133
164
95
69
12
1
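A driver sketch for the Stats module above; the sample values are made up, and the Estimate is printed via its Show instance, which the statistics package's bootstrap module is assumed to provide.

module Main where

import Stats (nintyFifth)

main :: IO ()
main = do
  -- Any modest [Double] sample works; these numbers are placeholders.
  let sample = [12.1, 13.4, 11.8, 14.0, 12.9, 13.1, 12.4, 13.7]
  est <- nintyFifth sample
  print est  -- point estimate of the mean plus its 95% bootstrap interval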
module ImplicitRefs.Evaluator ( valueOf , run , eval , evalProgram ) where import Control.Applicative ((<|>)) import Control.Arrow (second) import Control.Monad.Except import ImplicitRefs.Data import ImplicitRefs.Parser type EvaluateResult = IOTry ExpressedValue liftMaybe :: LangError -> Maybe a -> IOTry a liftMaybe _ (Just x) = return x liftMaybe y Nothing = throwError y run :: String -> IO (Try ExpressedValue) run input = runExceptT $ do prog <- liftTry (parseProgram input) store <- liftIO initStore evalProgram store prog evalProgram :: Store -> Program -> EvaluateResult evalProgram store (Prog expr) = eval store expr eval :: Store -> Expression -> EvaluateResult eval store expr = valueOf expr empty store valueOf :: Expression -> Environment -> Store -> EvaluateResult valueOf (ConstExpr x) _ _ = evalConstExpr x valueOf (VarExpr var) env s = evalVarExpr var env s valueOf (LetRecExpr procs recBody) env s = evalLetRecExpr procs recBody env s valueOf (BinOpExpr op expr1 expr2) env s = evalBinOpExpr op expr1 expr2 env s valueOf (UnaryOpExpr op expr) env s = evalUnaryOpExpr op expr env s valueOf (CondExpr pairs) env s = evalCondExpr pairs env s valueOf (LetExpr bindings body) env s = evalLetExpr bindings body env s valueOf (ProcExpr params body) env _ = evalProcExpr params body env valueOf (CallExpr rator rands) env s = evalCallExpr rator rands env s valueOf (BeginExpr exprs) env s = evalBeginExpr exprs env s valueOf (AssignExpr name expr) env s = evalAssignExpr name expr env s valueOf (SetDynamicExpr n e b) env s = evalSetDynamicExpr n e b env s valueOf (RefExpr name) env s = evalRefExpr name env valueOf (DeRefExpr name) env s = evalDeRefExpr name env s valueOf (SetRefExpr name expr) env s = evalSetRefExpr name expr env s evalRefExpr :: String -> Environment -> EvaluateResult evalRefExpr name env = do ref <- getRef env name return $ ExprRef ref unpackExprRef :: ExpressedValue -> IOTry Ref unpackExprRef (ExprRef ref) = return ref unpackExprRef notRef = throwError $ TypeMismatch "reference" notRef unpackProc :: ExpressedValue -> IOTry Procedure unpackProc (ExprProc proc) = return proc unpackProc notProc = throwError $ TypeMismatch "procedure" notProc getExprRef :: String -> Environment -> Store -> IOTry Ref getExprRef name env store = do refRef <- getRef env name refVal <- deRef store refRef unpackExprRef refVal evalDeRefExpr :: String -> Environment -> Store -> EvaluateResult evalDeRefExpr name env store = do ref <- getExprRef name env store deRef store ref evalSetRefExpr :: String -> Expression -> Environment -> Store -> EvaluateResult evalSetRefExpr name expr env store = do ref <- getExprRef name env store val <- valueOf expr env store setRef store ref val return $ ExprBool False evalSetDynamicExpr :: String -> Expression -> Expression -> Environment -> Store -> EvaluateResult evalSetDynamicExpr name expr body env store = do ref <- getRef env name oldVal <- deRef store ref newVal <- valueOf expr env store setRef store ref newVal result <- valueOf body env store setRef store ref oldVal return result getRef :: Environment -> String -> IOTry Ref getRef env name = case apply env name of Just (DenoRef ref) -> return ref Nothing -> throwError $ UnboundVar name evalAssignExpr :: String -> Expression -> Environment -> Store -> EvaluateResult evalAssignExpr name expr env store = do val <- valueOf expr env store ref <- getRef env name setRef store ref val return $ ExprBool False evalBeginExpr :: [Expression] -> Environment -> Store -> EvaluateResult evalBeginExpr exprs env store = foldl func (return $ 
ExprBool False) exprs where func acc ele = do acc valueOf ele env store evalExpressionList :: [Expression] -> Environment -> Store -> IOTry [ExpressedValue] evalExpressionList lst env store = reverse <$> evaledList where func acc expr = do lst <- acc ele <- valueOf expr env store return $ ele:lst evaledList = foldl func (return []) lst evalConstExpr :: ExpressedValue -> EvaluateResult evalConstExpr = return evalVarExpr :: String -> Environment -> Store -> EvaluateResult evalVarExpr name env store = do denoRef <- liftMaybe (UnboundVar name) (apply env name) let (DenoRef ref) = denoRef deRef store ref evalLetRecExpr :: [(String, [String], Expression)] -> Expression -> Environment -> Store -> EvaluateResult evalLetRecExpr procsSubUnits recBody env store = do newEnv <- extendRecMany store procsSubUnits env valueOf recBody newEnv store binBoolOpMap :: [(BinOp, Bool -> Bool -> Bool)] binBoolOpMap = [] binNumToNumOpMap :: [(BinOp, Integer -> Integer -> Integer)] binNumToNumOpMap = [(Add, (+)), (Sub, (-)), (Mul, (*)), (Div, div)] binNumToBoolOpMap :: [(BinOp, Integer -> Integer -> Bool)] binNumToBoolOpMap = [(Gt, (>)), (Le, (<)), (Eq, (==))] unaryBoolOpMap :: [(UnaryOp, Bool -> Bool)] unaryBoolOpMap = [] unaryNumToNumOpMap :: [(UnaryOp, Integer -> Integer)] unaryNumToNumOpMap = [(Minus, negate)] unaryNumToBoolOpMap :: [(UnaryOp, Integer -> Bool)] unaryNumToBoolOpMap = [(IsZero, (0 ==))] unpackNum :: ExpressedValue -> IOTry Integer unpackNum (ExprNum n) = return n unpackNum notNum = throwError $ TypeMismatch "number" notNum unpackBool :: ExpressedValue -> IOTry Bool unpackBool (ExprBool b) = return b unpackBool notBool = throwError $ TypeMismatch "boolean" notBool tryFind :: Eq a => LangError -> a -> [(a, b)] -> IOTry b tryFind err x pairs = liftMaybe err (lookup x pairs) tryFindOp :: (Eq a, Show a) => a -> [(a, b)] -> IOTry b tryFindOp op = tryFind (UnknownOperator $ show op) op binOpConverter :: (ExpressedValue -> IOTry a) -> (ExpressedValue -> IOTry b) -> (c -> ExpressedValue) -> (a -> b -> c) -> (ExpressedValue -> ExpressedValue -> EvaluateResult) binOpConverter unpack1 unpack2 trans func val1 val2 = do va <- unpack1 val1 vb <- unpack2 val2 return . trans $ func va vb binOps :: [(BinOp, ExpressedValue -> ExpressedValue -> EvaluateResult)] binOps = concat [binNum2Num, binNum2Bool, binBool2Bool] where n2nTrans = binOpConverter unpackNum unpackNum ExprNum binNum2Num = fmap (second n2nTrans) binNumToNumOpMap n2bTrans = binOpConverter unpackNum unpackNum ExprBool binNum2Bool = fmap (second n2bTrans) binNumToBoolOpMap b2bTrans = binOpConverter unpackBool unpackBool ExprBool binBool2Bool = fmap (second b2bTrans) binBoolOpMap unaryOpConverter :: (ExpressedValue -> IOTry a) -> (b -> ExpressedValue) -> (a -> b) -> (ExpressedValue -> EvaluateResult) unaryOpConverter unpack trans func val = do va <- unpack val return . 
trans $ func va unaryOps :: [(UnaryOp, ExpressedValue -> EvaluateResult)] unaryOps = concat [unaryNum2Num, unaryNum2Bool, unaryBool2Bool] where n2nTrans = unaryOpConverter unpackNum ExprNum unaryNum2Num = fmap (second n2nTrans) unaryNumToNumOpMap n2bTrans = unaryOpConverter unpackNum ExprBool unaryNum2Bool = fmap (second n2bTrans) unaryNumToBoolOpMap b2bTrans = unaryOpConverter unpackBool ExprBool unaryBool2Bool = fmap (second b2bTrans) unaryBoolOpMap evalBinOpExpr :: BinOp -> Expression -> Expression -> Environment -> Store -> EvaluateResult evalBinOpExpr op expr1 expr2 env store = do func <- tryFindOp op binOps v1 <- valueOf expr1 env store v2 <- valueOf expr2 env store func v1 v2 evalUnaryOpExpr :: UnaryOp -> Expression -> Environment -> Store -> EvaluateResult evalUnaryOpExpr op expr env store = do func <- tryFindOp op unaryOps v <- valueOf expr env store func v evalCondExpr :: [(Expression, Expression)] -> Environment -> Store -> EvaluateResult evalCondExpr [] _ _ = throwError $ RuntimeError "No predicate is true." evalCondExpr ((e1, e2):pairs) env store = do val <- valueOf e1 env store bool <- unpackBool val if bool then valueOf e2 env store else evalCondExpr pairs env store evalLetExpr :: [(String, Expression)] -> Expression -> Environment -> Store -> EvaluateResult evalLetExpr bindings body env store = evalLetExpr' bindings body env where evalLetExpr' [] body newEnv = valueOf body newEnv store evalLetExpr' ((name, expr):xs) body newEnv = do val <- valueOf expr env store ref <- newRef store val evalLetExpr' xs body (extend name (DenoRef ref) newEnv) evalProcExpr :: [String] -> Expression -> Environment -> EvaluateResult evalProcExpr params body env = return . ExprProc $ Procedure params body env evalCallExpr :: Expression -> [Expression] -> Environment -> Store -> EvaluateResult evalCallExpr ratorExpr randExprs env store = do rator <- valueOf ratorExpr env store content <- unpackProc rator rands <- evalExpressionList randExprs env store applyProcedure content rands where safeZip :: [String] -> [ExpressedValue] -> IOTry [(String, ExpressedValue)] safeZip as bs = let na = length as nb = length bs in if na /= nb then throwError $ ArgNumMismatch (toInteger na) bs else return $ zip as bs allocateAll :: [(String, ExpressedValue)] -> Environment -> IOTry Environment allocateAll [] env = return env allocateAll ((name, val):pairs) env = do ref <- newRef store val allocateAll pairs (extend name (DenoRef ref) env) applyProcedure :: Procedure -> [ExpressedValue] -> EvaluateResult applyProcedure (Procedure params body savedEnv) rands = do pairs <- safeZip params rands newEnv <- allocateAll pairs savedEnv valueOf body newEnv store
li-zhirui/EoplLangs
src/ImplicitRefs/Evaluator.hs
bsd-3-clause
9,909
0
13
2,213
3,366
1,687
1,679
219
3
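A direct-AST sketch for the evaluator above, bypassing the parser (whose concrete syntax is not shown in this excerpt). It assumes the Expression and ExpressedValue constructors and initStore are exported from ImplicitRefs.Data, as the evaluator's own imports suggest.

import Control.Monad.Except (runExceptT)
import ImplicitRefs.Data
import ImplicitRefs.Evaluator (eval)

-- Evaluate (1 + 2) against a fresh store, without going through the parser.
demo :: IO ()
demo = do
  store <- initStore
  result <- runExceptT (eval store (BinOpExpr Add (ConstExpr (ExprNum 1)) (ConstExpr (ExprNum 2))))
  case result of
    Right _ -> putStrLn "evaluated to a value (expected: ExprNum 3)"
    Left _  -> putStrLn "evaluation failed"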
module Main where

import Text.Regex.Posix ((=~))
import System.Environment (getArgs)

myGrep :: String -> String -> IO ()
myGrep regex filename = do
  fileSlurp <- readFile filename
  mapM_ putStrLn $ filter (=~ regex) (lines fileSlurp)

main :: IO ()
main = do
  (myRegex:filenames) <- getArgs
  mapM_ (myGrep myRegex) filenames
mrordinaire/data-analysis
app/hgrep.hs
bsd-3-clause
335
0
10
63
133
69
64
12
1
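An illustrative GHCi transcript for the grep clone above; the output shown is simply the one line of the file itself that matches the pattern.

-- ghci> :load app/hgrep.hs
-- ghci> myGrep "module" "app/hgrep.hs"
-- module Main where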
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where

import Control.THEff
import Control.THEff.Fresh

mkEff "UnicalChar" ''Fresh ''Char ''NoEff

main:: IO ()
main = putStrLn $ runUnicalChar 'A' $ do
    a <- fresh
    b <- fresh
    c <- fresh
    return $ a:b:[c]
KolodeznyDiver/THEff
samples/SampleFresh.hs
bsd-3-clause
412
0
10
112
102
53
49
14
1
{-|
Module      : Types.BooleanLogic
Description : Some type families on the kind Bool.
Copyright   : (c) Alexander Vieth, 2015
Licence     : BSD3
Maintainer  : [email protected]
Stability   : experimental
Portability : non-portable (GHC only)
-}

{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}

module Types.BooleanLogic (
      And
    , Or
    , Not
    , Any
    , All
    ) where

type family And (a :: Bool) (b :: Bool) :: Bool where
    And 'True 'True = 'True
    And a b = 'False

type family Or (a :: Bool) (b :: Bool) :: Bool where
    Or 'False 'False = 'False
    Or a b = 'True

type family Not (a :: Bool) :: Bool where
    Not 'True = 'False
    Not 'False = 'True

type family Any (bs :: [Bool]) :: Bool where
    Any '[] = False
    Any (b ': bs) = Or b (Any bs)

type family All (bs :: [Bool]) :: Bool where
    All '[] = True
    All (b ': bs) = And b (All bs)
avieth/Relational
Types/BooleanLogic.hs
bsd-3-clause
998
0
8
262
289
170
119
26
0
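A compile-time check sketch for the families above. It needs only Data.Type.Equality from base; each binding type-checks exactly when the corresponding family reduces as the equations in the module say. The module name BooleanLogicCheck is made up for this sketch.

{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
module BooleanLogicCheck where

import Data.Type.Equality ((:~:)(Refl))
import Types.BooleanLogic

-- And is only 'True when both arguments are 'True.
andCheck :: And 'True 'False :~: 'False
andCheck = Refl

-- All folds And over the list, starting from True.
allCheck :: All '[ 'True, 'True, 'True ] :~: 'True
allCheck = Refl

-- Any folds Or over the list, starting from False.
anyCheck :: Any '[ 'False, 'False ] :~: 'False
anyCheck = Refl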
{-# Language DataKinds, OverloadedStrings #-} {-# Language RankNTypes, TypeOperators #-} {-# Language PatternSynonyms #-} {-# LANGUAGE ImplicitParams #-} {-# LANGUAGE GADTs #-} module SAWScript.X86 ( Options(..) , proof , proofWithOptions , linuxInfo , bsdInfo , Fun(..) , Goal(..) , gGoal , getGoals , X86Error(..) , X86Unsupported(..) , SharedContext , CallHandler , Sym , RelevantElf(..) , getElf , getRelevant , findSymbols , posFn , loadGlobal ) where import Control.Lens ((^.)) import Control.Exception(Exception(..),throwIO) import Control.Monad.IO.Class(liftIO) import qualified Data.BitVector.Sized as BV import Data.ByteString (ByteString) import qualified Data.ByteString as BS import qualified Data.ByteString.Char8 as BSC import qualified Data.Map as Map import qualified Data.Text as Text import Data.Text.Encoding(decodeUtf8) import System.IO(hFlush,stdout) import Data.Maybe(mapMaybe) -- import Text.PrettyPrint.ANSI.Leijen(pretty) import qualified Data.ElfEdit as Elf import Data.Parameterized.Some(Some(..)) import Data.Parameterized.Context(EmptyCtx,(::>),singleton) -- What4 import What4.Interface(asNat,asBV) import qualified What4.Interface as W4 import qualified What4.Config as W4 import What4.FunctionName(functionNameFromText) import What4.ProgramLoc(ProgramLoc,Position(OtherPos)) -- Crucible import Lang.Crucible.Analysis.Postdom (postdomInfo) import Lang.Crucible.CFG.Core(SomeCFG(..), TypeRepr(..), cfgHandle) import Lang.Crucible.CFG.Common(GlobalVar) import Lang.Crucible.Simulator.RegMap(regValue, RegMap(..), RegEntry(..)) import Lang.Crucible.Simulator.RegValue(RegValue'(..)) import Lang.Crucible.Simulator.GlobalState(insertGlobal,emptyGlobals) import Lang.Crucible.Simulator.Operations(defaultAbortHandler) import Lang.Crucible.Simulator.OverrideSim(runOverrideSim, callCFG, readGlobal) import Lang.Crucible.Simulator.EvalStmt(executeCrucible) import Lang.Crucible.Simulator.ExecutionTree (ExecResult(..), SimContext(..), FnState(..) , ExecState(InitialState) , FunctionBindings(..) ) import Lang.Crucible.Simulator.SimError(SimError(..), SimErrorReason) import Lang.Crucible.Backend (getProofObligations,ProofGoal(..),labeledPredMsg,labeledPred,goalsToList ,assumptionsPred,IsSymBackend(..),SomeBackend(..),HasSymInterface(..)) import Lang.Crucible.FunctionHandle(HandleAllocator,newHandleAllocator,insertHandleMap,emptyHandleMap) -- Crucible LLVM import SAWScript.Crucible.LLVM.CrucibleLLVM (Mem, ppPtr, pattern LLVMPointer, bytesToInteger) import Lang.Crucible.LLVM.Intrinsics(llvmIntrinsicTypes) import Lang.Crucible.LLVM.MemModel (mkMemVar) import qualified Lang.Crucible.LLVM.MemModel as Crucible -- Macaw import Data.Macaw.Architecture.Info(ArchitectureInfo) import Data.Macaw.Discovery(analyzeFunction) import Data.Macaw.Discovery.State(FunctionExploreReason(UserRequest) , emptyDiscoveryState, AddrSymMap) import Data.Macaw.Memory( Memory, MemSegment(..), MemSegmentOff(..) , segmentBase, segmentOffset , addrOffset, memWordToUnsigned , segoffAddr, incAddr , readWord8, readWord16le, readWord32le, readWord64le) import Data.Macaw.Memory.ElfLoader( LoadOptions(..) , memoryForElfAllSymbols , memoryForElf , MemSymbol(..) ) import Data.Macaw.Symbolic( ArchRegStruct , mkFunCFG , GlobalMap , MacawSimulatorState(..) , macawExtensions , unsupportedSyscalls , defaultMacawArchStmtExtensionOverride ) import qualified Data.Macaw.Symbolic as Macaw ( LookupFunctionHandle(..) 
) import Data.Macaw.Symbolic( MacawExt , MacawFunctionArgs ) import Data.Macaw.Symbolic.Backend(MacawSymbolicArchFunctions(..), crucArchRegTypes) import Data.Macaw.X86(X86Reg(..), x86_64_linux_info,x86_64_freeBSD_info) import Data.Macaw.X86.ArchTypes(X86_64) import Data.Macaw.X86.Symbolic ( x86_64MacawSymbolicFns, x86_64MacawEvalFn, newSymFuns , lookupX86Reg ) import Data.Macaw.X86.Crucible(SymFuns(..)) -- Saw Core import Verifier.SAW.SharedTerm(Term, mkSharedContext, SharedContext, scImplies) import Verifier.SAW.Term.Pretty(showTerm) import Verifier.SAW.Recognizer(asBool) import Verifier.SAW.Simulator.What4.ReturnTrip (sawRegisterSymFunInterp, toSC, saw_ctx) -- Cryptol Verifier import Verifier.SAW.CryptolEnv(CryptolEnv,initCryptolEnv,loadCryptolModule,defaultPrimitiveOptions) import Verifier.SAW.Cryptol.Prelude(scLoadPreludeModule,scLoadCryptolModule) -- SAWScript import SAWScript.X86Spec hiding (Prop) import SAWScript.Proof(boolToProp, Prop) import SAWScript.Crucible.Common ( newSAWCoreBackend, newSAWCoreExprBuilder , sawCoreState, SomeOnlineBackend(..) ) -------------------------------------------------------------------------------- -- Input Options -- | What we'd like done, plus additional information from the "outside world". data Options = Options { fileName :: FilePath -- ^ Name of the elf file to process. , function :: Fun -- ^ Function that we'd like to extract. , archInfo :: ArchitectureInfo X86_64 -- ^ Architectural flavor. See "linuxInfo" and "bsdInfo". , backend :: SomeBackend Sym -- ^ The Crucible backend to use. , allocator :: HandleAllocator -- ^ The handle allocator used to allocate @memvar@ , memvar :: GlobalVar Mem -- ^ The global variable storing the heap , cryEnv :: CryptolEnv , extraGlobals :: [(ByteString,Integer,Unit)] -- ^ Additional globals to auto-load from the ELF file } linuxInfo :: ArchitectureInfo X86_64 linuxInfo = x86_64_linux_info bsdInfo :: ArchitectureInfo X86_64 bsdInfo = x86_64_freeBSD_info -------------------------------------------------------------------------------- -- Spec data Fun = Fun { funName :: ByteString, funSpec :: FunSpec } -------------------------------------------------------------------------------- type CallHandler = Sym -> Macaw.LookupFunctionHandle (MacawSimulatorState Sym) Sym X86_64 -- | Run a top-level proof. -- Should be used when making a standalone proof script. proof :: (FilePath -> IO ByteString) -> ArchitectureInfo X86_64 -> FilePath {- ^ ELF binary -} -> Maybe FilePath {- ^ Cryptol spec, if any -} -> [(ByteString,Integer,Unit)] -> Fun -> IO (SharedContext,Integer,[Goal]) proof fileReader archi file mbCry globs fun = do sc <- mkSharedContext halloc <- newHandleAllocator scLoadPreludeModule sc scLoadCryptolModule sc sym <- newSAWCoreExprBuilder sc SomeOnlineBackend bak <- newSAWCoreBackend sym let ?fileReader = fileReader cenv <- loadCry sym mbCry mvar <- mkMemVar "saw_x86:llvm_memory" halloc proofWithOptions Options { fileName = file , function = fun , archInfo = archi , backend = SomeBackend bak , allocator = halloc , memvar = mvar , cryEnv = cenv , extraGlobals = globs } -- | Run a proof using the given backend. -- Useful for integrating with other tool. proofWithOptions :: Options -> IO (SharedContext,Integer,[Goal]) proofWithOptions opts = do elf <- getRelevant =<< getElf (fileName opts) translate opts elf (function opts) -- | Add interpretations for the symbolic functions, by looking -- them up in the Cryptol environment. There should be definitions -- for "aesenc", "aesenclast", and "clmul". 
registerSymFuns :: Opts -> IO (SymFuns Sym) registerSymFuns opts = do let sym = optsSym opts st <- sawCoreState sym sfs <- newSymFuns sym sawRegisterSymFunInterp st (fnAesEnc sfs) (mk2 "aesenc") sawRegisterSymFunInterp st (fnAesEncLast sfs) (mk2 "aesenclast") sawRegisterSymFunInterp st (fnClMul sfs) (mk2 "clmul") return sfs where err nm xs = unlines [ "Type error in call to " ++ show (nm::String) ++ ":" , "*** Expected: 2 arguments" , "*** Given: " ++ show (length xs) ++ " arguments" ] mk2 nm _sc xs = case xs of [_,_] -> cryTerm opts nm xs _ -> fail (err nm xs) -------------------------------------------------------------------------------- -- ELF -- | These are the parts of the ELF file that we care about. data RelevantElf = RelevantElf { memory :: Memory 64 , funSymMap :: AddrSymMap 64 , symMap :: AddrSymMap 64 } -- | Parse an elf file. getElf :: FilePath -> IO (Elf.ElfHeaderInfo 64) getElf path = do bs <- BS.readFile path case Elf.decodeElfHeaderInfo bs of Right (Elf.SomeElf hdr) | Elf.ELFCLASS64 <- Elf.headerClass (Elf.header hdr) -> pure hdr | otherwise -> unsupported "32-bit ELF format" Left (off, msg) -> malformed $ mconcat [ "Invalid ELF header at offset " , show off , ": " , msg ] -- | Extract a Macaw "memory" from an ELF file and resolve symbols. getRelevant :: Elf.ElfHeaderInfo 64 -> IO RelevantElf getRelevant elf = case (memoryForElf opts elf, memoryForElfAllSymbols opts elf) of (Left err, _) -> malformed err (_, Left err) -> malformed err (Right (mem, faddrs, _warnings, _errs), Right (_, addrs, _, _)) -> do let toEntry msym = (memSymbolStart msym, memSymbolName msym) return RelevantElf { memory = mem , funSymMap = Map.fromList (map toEntry faddrs) , symMap = Map.fromList (map toEntry addrs) } where -- XXX: What options do we want? opts = LoadOptions { loadOffset = Just 0 } -- | Find the address(es) of a symbol by name. findSymbols :: AddrSymMap 64 -> ByteString -> [ MemSegmentOff 64 ] findSymbols addrs nm = Map.findWithDefault [] nm invertedMap where invertedMap = Map.fromListWith (++) [ (y,[x]) | (x,y) <- Map.toList addrs ] -- | Find the single address of a symbol, or fail. findSymbol :: AddrSymMap 64 -> ByteString -> IO (MemSegmentOff 64) findSymbol addrs nm = case findSymbols addrs nm of [addr] -> return $! addr [] -> malformed ("Could not find function " ++ show nm) _ -> malformed ("Multiple definitions for " ++ show nm) loadGlobal :: RelevantElf -> (ByteString, Integer, Unit) -> IO [(String, Integer, Unit, [Integer])] loadGlobal elf (nm,n,u) = case findSymbols (symMap elf) nm of [] -> do print $ symMap elf err "Global not found" _ -> mapM loadLoc (findSymbols (symMap elf) nm) where mem = memory elf sname = BSC.unpack nm readOne a = case u of Bytes -> check (readWord8 mem a) Words -> check (readWord16le mem a) DWords -> check (readWord32le mem a) QWords -> check (readWord64le mem a) _ -> err ("unsuported global size: " ++ show u) nextAddr = incAddr (bytesToInteger (1 *. u)) addrsFor o = take (fromIntegral n) (iterate nextAddr o) check :: (Show b, Integral a) => Either b a -> IO Integer check res = case res of Left e -> err (show e) Right a -> return (fromIntegral a) loadLoc off = do let start = segoffAddr off a = memWordToUnsigned (addrOffset start) is <- mapM readOne (addrsFor start) return (sname, a, u, is) err :: [Char] -> IO a err xs = fail $ unlines [ "Failed to load global." , "*** Global: " ++ show nm , "*** Error: " ++ xs ] -- | The position associated with a specific location. posFn :: MemSegmentOff 64 -> Position posFn = OtherPos . Text.pack . 
show -- | Load a file with Cryptol decls. loadCry :: (?fileReader :: FilePath -> IO ByteString) => Sym -> Maybe FilePath -> IO CryptolEnv loadCry sym mb = do sc <- saw_ctx <$> sawCoreState sym env <- initCryptolEnv sc case mb of Nothing -> return env Just file -> snd <$> loadCryptolModule sc defaultPrimitiveOptions env file -------------------------------------------------------------------------------- -- Translation callHandler :: Overrides -> CallHandler callHandler callMap sym = Macaw.LookupFunctionHandle $ \st mem regs -> do case lookupX86Reg X86_IP regs of Just (RV ptr) | LLVMPointer base off <- ptr -> case (asNat base, BV.asUnsigned <$> asBV off) of (Just b, Just o) -> case Map.lookup (b,o) callMap of Just h -> case h sym of Macaw.LookupFunctionHandle f -> f st mem regs Nothing -> fail ("No over-ride for function: " ++ show (ppPtr ptr)) _ -> fail ("Non-static call: " ++ show (ppPtr ptr)) _ -> fail "[Bug?] Failed to obtain the value of the IP register." -- | Verify the given function. The function matches it sepcification, -- as long as the returned goals can be discharged. -- Returns the shared context and the goals (from the Sym) -- and the integer is the (aboslute) address of the function. translate :: Options -> RelevantElf -> Fun -> IO (SharedContext, Integer, [Goal]) translate opts elf fun = do let name = funName fun sayLn ("Translating function: " ++ BSC.unpack name) let ?memOpts = Crucible.defaultMemOptions let ?recordLLVMAnnotation = \_ _ _ -> return () let bak = backend opts sym = case bak of SomeBackend b -> backendGetSym b sopts = Opts { optsBackend = bak, optsCry = cryEnv opts, optsMvar = memvar opts } sfs <- registerSymFuns sopts (globs,st,checkPost) <- case funSpec fun of NewStyle mkSpec debug -> do gss <- mapM (loadGlobal elf) (extraGlobals opts) spec0 <- mkSpec (cryEnv opts) let spec = spec0 {specGlobsRO = concat (specGlobsRO spec0:gss)} (gs,st,po) <- verifyMode spec sopts debug st return (gs,st,\st1 -> debug st1 >> po st1) addr <- doSim opts elf sfs name globs st checkPost gs <- getGoals bak sc <- saw_ctx <$> sawCoreState sym return (sc, addr, gs) setSimulatorVerbosity :: (W4.IsSymExprBuilder sym) => Int -> sym -> IO () setSimulatorVerbosity verbosity sym = do verbSetting <- W4.getOptionSetting W4.verbosity (W4.getConfiguration sym) _ <- W4.setOpt verbSetting (toInteger verbosity) return () doSim :: (?memOpts::Crucible.MemOptions, Crucible.HasLLVMAnn Sym) => Options -> RelevantElf -> SymFuns Sym -> ByteString -> (GlobalMap Sym Crucible.Mem 64, Overrides) -> State -> (State -> IO ()) -> IO Integer doSim opts elf sfs name (globs,overs) st checkPost = do say " Looking for address... " addr <- findSymbol (symMap elf) name -- addr :: MemSegmentOff 64 let addrInt = let seg :: MemSegment 64 seg = segoffSegment addr in if segmentBase seg == 0 then toInteger (segmentOffset seg + segoffOffset addr) else error " Not an absolute address" sayLn (show addr) SomeCFG cfg <- statusBlock " Constructing CFG... " $ makeCFG opts elf name addr -- writeFile "XXX.hs" (show cfg) let sym = case backend opts of SomeBackend bak -> backendGetSym bak mvar = memvar opts setSimulatorVerbosity 0 sym execResult <- statusBlock " Simulating... " $ do let crucRegTypes = crucArchRegTypes x86 let macawStructRepr = StructRepr crucRegTypes -- The global pointer validity predicate is required if your memory -- representation has gaps that are not supposed to be mapped and you -- want to verify that no memory accesses touch unmapped regions. 
-- -- The memory setup for this verifier does not have that problem, and -- thus does not need any additional validity predicates. let noExtraValidityPred _ _ _ _ = return Nothing let archEvalFns = x86_64MacawEvalFn sfs defaultMacawArchStmtExtensionOverride let lookupSyscall = unsupportedSyscalls "saw-script" let ctx :: SimContext (MacawSimulatorState Sym) Sym (MacawExt X86_64) ctx = SimContext { _ctxBackend = backend opts , ctxSolverProof = \a -> a , ctxIntrinsicTypes = llvmIntrinsicTypes , simHandleAllocator = allocator opts , printHandle = stdout , extensionImpl = macawExtensions archEvalFns mvar globs (callHandler overs sym) lookupSyscall noExtraValidityPred , _functionBindings = FnBindings $ insertHandleMap (cfgHandle cfg) (UseCFG cfg (postdomInfo cfg)) emptyHandleMap , _cruciblePersonality = MacawSimulatorState , _profilingMetrics = Map.empty } let initGlobals = insertGlobal mvar (stateMem st) emptyGlobals executeCrucible [] $ InitialState ctx initGlobals defaultAbortHandler macawStructRepr $ runOverrideSim macawStructRepr $ do let args :: RegMap Sym (MacawFunctionArgs X86_64) args = RegMap (singleton (RegEntry macawStructRepr (stateRegs st))) crucGenArchConstraints x86 $ do r <- callCFG cfg args mem <- readGlobal mvar let regs = regValue r let sta = State { stateMem = mem, stateRegs = regs } liftIO (checkPost sta) pure regs case execResult of FinishedResult {} -> pure () AbortedResult {} -> sayLn "[Warning] Function never returns" TimeoutResult {} -> malformed $ unlines [ "Execution timed out" ] return addrInt type TheCFG = SomeCFG (MacawExt X86_64) (EmptyCtx ::> ArchRegStruct X86_64) (ArchRegStruct X86_64) -- | Generate a CFG for the function at the given address. makeCFG :: Options -> RelevantElf -> ByteString -> MemSegmentOff 64 -> IO TheCFG makeCFG opts elf name addr = do (_,Some funInfo) <- return $ analyzeFunction addr UserRequest empty -- writeFile "MACAW.cfg" (show (pretty funInfo)) mkFunCFG x86 (allocator opts) cruxName posFn funInfo where txtName = decodeUtf8 name cruxName = functionNameFromText txtName empty = emptyDiscoveryState (memory elf) (funSymMap elf) (archInfo opts) -------------------------------------------------------------------------------- -- Goals data Goal = Goal { gAssumes :: [ Term ] -- ^ Assuming these , gShows :: Term -- ^ We need to show this , gLoc :: ProgramLoc -- ^ The goal came from here , gMessage :: SimErrorReason -- ^ We should say this if the proof fails } -- | The proposition that needs proving (i.e., assumptions imply conclusion) gGoal :: SharedContext -> Goal -> IO Prop gGoal sc g0 = boolToProp sc [] =<< go (gAssumes g) where g = g0 { gAssumes = mapMaybe skip (gAssumes g0) } _shG = do putStrLn "Assuming:" mapM_ _shT (gAssumes g) putStrLn "Shows:" _shT (gShows g) _shT t = putStrLn (" " ++ showTerm t) skip a = case asBool a of Just True -> Nothing _ -> Just a go xs = case xs of [] -> return (gShows g) a : as -> scImplies sc a =<< go as getGoals :: SomeBackend Sym -> IO [Goal] getGoals (SomeBackend bak) = do obls <- maybe [] goalsToList <$> getProofObligations bak st <- sawCoreState sym mapM (toGoal st) obls where sym = backendGetSym bak toGoal st (ProofGoal asmps g) = do a1 <- toSC sym st =<< assumptionsPred sym asmps p <- toSC sym st (g ^. labeledPred) let SimError loc msg = g^.labeledPredMsg return Goal { gAssumes = [a1] , gShows = p , gLoc = loc , gMessage = msg } instance Show Goal where showsPrec _ g = showString "Goal { gAssumes = " . showList (map (show . showTerm) (gAssumes g)) . showString ", gShows = " . 
shows (showTerm (gShows g)) . showString ", gLoc = " . shows (gLoc g) . showString ", gMessage = " . shows (show (gMessage g)) . showString " }" -------------------------------------------------------------------------------- -- Specialize the generic functions to the X86. -- | All functions related to X86. x86 :: MacawSymbolicArchFunctions X86_64 x86 = x86_64MacawSymbolicFns -------------------------------------------------------------------------------- -- Calling Convention -- see: http://refspecs.linuxfoundation.org/elf/x86_64-abi-0.99.pdf -- Need to preserve: %rbp, %rbx, %r12--%r15 -- Preserve control bits in MXCSR -- Preserve x87 control word. -- On entry: -- CPU is in x87 mode -- DF in $rFLAGS is clear one entry and return. -- "Red zone" 128 bytes past the end of the stack %rsp. -- * not modified by interrupts -------------------------------------------------------------------------------- -- Errors data X86Unsupported = X86Unsupported String deriving Show data X86Error = X86Error String deriving Show instance Exception X86Unsupported instance Exception X86Error unsupported :: String -> IO a unsupported x = throwIO (X86Unsupported x) malformed :: String -> IO a malformed x = throwIO (X86Error x) -------------------------------------------------------------------------------- -- Status output say :: String -> IO () say x = putStr x >> hFlush stdout sayLn :: String -> IO () sayLn = putStrLn sayOK :: IO () sayOK = sayLn "[OK]" statusBlock :: String -> IO a -> IO a statusBlock msg m = do say msg a <- m sayOK return a
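-- Editor's sketch (not part of SAWScript.X86 above): 'gGoal' right-folds a
-- goal's assumptions into nested implications around the conclusion, i.e.
-- a1 -> a2 -> ... -> gShows, skipping assumptions that are literally True.
-- The Expr/implChain names below are hypothetical and stand in for SAWCore
-- terms and scImplies; this is only a toy model of that fold.
module GoalSketch where

data Expr = Var String | Implies Expr Expr
  deriving Show

-- Mirrors the 'go' loop of 'gGoal': assumptions are folded from the right,
-- so the conclusion ends up innermost.
implChain :: [Expr] -> Expr -> Expr
implChain assumes conclusion = foldr Implies conclusion assumes

-- ghci> implChain [Var "a1", Var "a2"] (Var "p")
-- Implies (Var "a1") (Implies (Var "a2") (Var "p"))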
GaloisInc/saw-script
src/SAWScript/X86.hs
bsd-3-clause
22,551
0
24
6,155
5,548
2,969
2,579
-1
-1
{-# LANGUAGE OverloadedStrings #-}

-- Advent of Code, day 11: find the next valid password by repeatedly
-- incrementing the current one and re-checking the password rules.
module Advent.Day11 where

import qualified Data.Char as C
import qualified Data.List as L

-- Treat the password as a base-26 number: bump the last character, carrying
-- ('z' wraps back to 'a') into the preceding position.
increment :: String -> String
increment str = reverse (increment' (reverse str))
  where increment' ('z':rest) = 'a' : increment' rest
        increment' (c:rest) = nextChar c : rest
        increment' "" = ""
        nextChar c = C.chr ((C.ord c) + 1)

-- Rule 1: the password contains a straight of three consecutive letters.
hasIncreasing :: String -> Bool
hasIncreasing (a:b:c:rest) = (b' == a' + 1 && c' == b' + 1) || hasIncreasing (b:c:rest)
  where a' = C.ord a
        b' = C.ord b
        c' = C.ord c
hasIncreasing _ = False

-- Rule 2 (negated): the password must not contain 'i', 'o' or 'l'.
hasBadChar :: String -> Bool
hasBadChar str = "i" `L.isInfixOf` str || "o" `L.isInfixOf` str || "l" `L.isInfixOf` str

-- Rule 3: at least two separate runs of repeated letters (e.g. "aa" and "bb").
hasPairs :: String -> Bool
hasPairs str = (length (filter (\g -> (length g) >= 2) (L.group str))) >= 2

goodPassword :: String -> Bool
goodPassword pass = hasIncreasing pass && not (hasBadChar pass) && hasPairs pass

-- The first valid password reachable from 'pass' (including 'pass' itself).
nextPassword :: String -> String
nextPassword pass = head (filter goodPassword (iterate increment pass))
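-- Editor's usage sketch (not part of Advent.Day11 above): a hypothetical Main
-- wrapper that runs the solver on a sample starting password.
module Main where

import Advent.Day11 (increment, nextPassword)

main :: IO ()
main = do
  let part1 = nextPassword "abcdefgh"   -- sample input; substitute the real puzzle input
  putStrLn part1
  -- nextPassword also accepts its argument as a candidate, so to get the
  -- password strictly *after* an already-valid one, increment it first:
  putStrLn (nextPassword (increment part1))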
micxjo/hs-advent
src/Advent/Day11.hs
bsd-3-clause
1,061
0
14
247
436
229
207
29
3
{-# language CPP #-} -- No documentation found for Chapter "CommandBufferResetFlagBits" module Vulkan.Core10.Enums.CommandBufferResetFlagBits ( CommandBufferResetFlags , CommandBufferResetFlagBits( COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT , .. ) ) where import Vulkan.Internal.Utils (enumReadPrec) import Vulkan.Internal.Utils (enumShowsPrec) import GHC.Show (showString) import Numeric (showHex) import Vulkan.Zero (Zero) import Data.Bits (Bits) import Data.Bits (FiniteBits) import Foreign.Storable (Storable) import GHC.Read (Read(readPrec)) import GHC.Show (Show(showsPrec)) import Vulkan.Core10.FundamentalTypes (Flags) type CommandBufferResetFlags = CommandBufferResetFlagBits -- | VkCommandBufferResetFlagBits - Bitmask controlling behavior of a command -- buffer reset -- -- = See Also -- -- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_VERSION_1_0 VK_VERSION_1_0>, -- 'CommandBufferResetFlags' newtype CommandBufferResetFlagBits = CommandBufferResetFlagBits Flags deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits) -- | 'COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT' specifies that most or all -- memory resources currently owned by the command buffer /should/ be -- returned to the parent command pool. If this flag is not set, then the -- command buffer /may/ hold onto memory resources and reuse them when -- recording commands. @commandBuffer@ is moved to the -- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#commandbuffers-lifecycle initial state>. pattern COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = CommandBufferResetFlagBits 0x00000001 conNameCommandBufferResetFlagBits :: String conNameCommandBufferResetFlagBits = "CommandBufferResetFlagBits" enumPrefixCommandBufferResetFlagBits :: String enumPrefixCommandBufferResetFlagBits = "COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT" showTableCommandBufferResetFlagBits :: [(CommandBufferResetFlagBits, String)] showTableCommandBufferResetFlagBits = [(COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT, "")] instance Show CommandBufferResetFlagBits where showsPrec = enumShowsPrec enumPrefixCommandBufferResetFlagBits showTableCommandBufferResetFlagBits conNameCommandBufferResetFlagBits (\(CommandBufferResetFlagBits x) -> x) (\x -> showString "0x" . showHex x) instance Read CommandBufferResetFlagBits where readPrec = enumReadPrec enumPrefixCommandBufferResetFlagBits showTableCommandBufferResetFlagBits conNameCommandBufferResetFlagBits CommandBufferResetFlagBits
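-- Editor's sketch (not part of the generated module above): the flag type is
-- the usual Vulkan bitmask newtype, so values are combined and queried with
-- Data.Bits.  The helper name 'releasesResources' is hypothetical.
module ResetFlagsSketch where

import Data.Bits ((.&.), zeroBits)
import Vulkan.Core10.Enums.CommandBufferResetFlagBits

-- True when the "release resources" bit is set in a flag value.
releasesResources :: CommandBufferResetFlags -> Bool
releasesResources flags =
  flags .&. COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT /= zeroBits

-- e.g. releasesResources COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT == True
--      releasesResources zeroBits                                   == False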
expipiplus1/vulkan
src/Vulkan/Core10/Enums/CommandBufferResetFlagBits.hs
bsd-3-clause
2,983
1
10
694
337
204
133
-1
-1
{-# LANGUAGE CPP #-} module TcFlatten( FlattenEnv(..), FlattenMode(..), mkFlattenEnv, flatten, flattenMany, flatten_many, flattenFamApp, flattenTyVarOuter, unflatten, eqCanRewrite, eqCanRewriteFR, canRewriteOrSame, CtFlavourRole, ctEvFlavourRole, ctFlavourRole ) where #include "HsVersions.h" import TcRnTypes import TcType import Type import TcEvidence import TyCon import TypeRep import Kind( isSubKind ) import Coercion ( tyConRolesX ) import Var import VarEnv import NameEnv import Outputable import VarSet import TcSMonad as TcS import DynFlags( DynFlags ) import Util import Bag import FastString import Control.Monad( when, liftM ) import MonadUtils ( zipWithAndUnzipM ) import GHC.Exts ( inline ) {- Note [The flattening story] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * A CFunEqCan is either of form [G] <F xis> : F xis ~ fsk -- fsk is a FlatSkol [W] x : F xis ~ fmv -- fmv is a unification variable, -- but untouchable, -- with MetaInfo = FlatMetaTv where x is the witness variable fsk/fmv is a flatten skolem xis are function-free CFunEqCans are always [Wanted], or [Given], never [Derived] fmv untouchable just means that in a CTyVarEq, say, fmv ~ Int we do NOT unify fmv. * KEY INSIGHTS: - A given flatten-skolem, fsk, is known a-priori to be equal to F xis (the LHS), with <F xis> evidence - A unification flatten-skolem, fmv, stands for the as-yet-unknown type to which (F xis) will eventually reduce * Inert set invariant: if F xis1 ~ fsk1, F xis2 ~ fsk2 then xis1 /= xis2 i.e. at most one CFunEqCan with a particular LHS * Each canonical CFunEqCan x : F xis ~ fsk/fmv has its own distinct evidence variable x and flatten-skolem fsk/fmv. Why? We make a fresh fsk/fmv when the constraint is born; and we never rewrite the RHS of a CFunEqCan. * Function applications can occur in the RHS of a CTyEqCan. No reason not allow this, and it reduces the amount of flattening that must occur. * Flattening a type (F xis): - If we are flattening in a Wanted/Derived constraint then create new [W] x : F xis ~ fmv else create new [G] x : F xis ~ fsk with fresh evidence variable x and flatten-skolem fsk/fmv - Add it to the work list - Replace (F xis) with fsk/fmv in the type you are flattening - You can also add the CFunEqCan to the "flat cache", which simply keeps track of all the function applications you have flattened. - If (F xis) is in the cache already, just use its fsk/fmv and evidence x, and emit nothing. - No need to substitute in the flat-cache. It's not the end of the world if we start with, say (F alpha ~ fmv1) and (F Int ~ fmv2) and then find alpha := Int. Athat will simply give rise to fmv1 := fmv2 via [Interacting rule] below * Canonicalising a CFunEqCan [G/W] x : F xis ~ fsk/fmv - Flatten xis (to substitute any tyvars; there are already no functions) cos :: xis ~ flat_xis - New wanted x2 :: F flat_xis ~ fsk/fmv - Add new wanted to flat cache - Discharge x = F cos ; x2 * Unification flatten-skolems, fmv, ONLY get unified when either a) The CFunEqCan takes a step, using an axiom b) During un-flattening They are never unified in any other form of equality. For example [W] ffmv ~ Int is stuck; it does not unify with fmv. * We *never* substitute in the RHS (i.e. the fsk/fmv) of a CFunEqCan. That would destroy the invariant about the shape of a CFunEqCan, and it would risk wanted/wanted interactions. The only way we learn information about fsk is when the CFunEqCan takes a step. However we *do* substitute in the LHS of a CFunEqCan (else it would never get to fire!) 
* [Interacting rule] (inert) [W] x1 : F tys ~ fmv1 (work item) [W] x2 : F tys ~ fmv2 Just solve one from the other: x2 := x1 fmv2 := fmv1 This just unites the two fsks into one. Always solve given from wanted if poss. * [Firing rule: wanteds] (work item) [W] x : F tys ~ fmv instantiate axiom: ax_co : F tys ~ rhs Dischard fmv: fmv := alpha x := ax_co ; sym x2 [W] x2 : alpha ~ rhs (Non-canonical) discharging the work item. This is the way that fmv's get unified; even though they are "untouchable". NB: this deals with the case where fmv appears in xi, which can happen; it just happens through the non-canonical stuff Possible short cut (shortCutReduction) if rhs = G rhs_tys, where G is a type function. Then - Flatten rhs_tys (cos : rhs_tys ~ rhs_xis) - Add G rhs_xis ~ fmv to flat cache - New wanted [W] x2 : G rhs_xis ~ fmv - Discharge x := co ; G cos ; x2 * [Firing rule: givens] (work item) [G] g : F tys ~ fsk instantiate axiom: co : F tys ~ rhs Now add non-canonical (since rhs is not flat) [G] (sym g ; co) : fsk ~ rhs Short cut (shortCutReduction) for when rhs = G rhs_tys and G is a type function [G] (co ; g) : G tys ~ fsk But need to flatten tys: flat_cos : tys ~ flat_tys [G] (sym (G flat_cos) ; co ; g) : G flat_tys ~ fsk Why given-fsks, alone, doesn't work ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Could we get away with only flatten meta-tyvars, with no flatten-skolems? No. [W] w : alpha ~ [F alpha Int] ---> flatten w = ...w'... [W] w' : alpha ~ [fsk] [G] <F alpha Int> : F alpha Int ~ fsk --> unify (no occurs check) alpha := [fsk] But since fsk = F alpha Int, this is really an occurs check error. If that is all we know about alpha, we will succeed in constraint solving, producing a program with an infinite type. Even if we did finally get (g : fsk ~ Boo)l by solving (F alpha Int ~ fsk) using axiom, zonking would not see it, so (x::alpha) sitting in the tree will get zonked to an infinite type. (Zonking always only does refl stuff.) Why flatten-meta-vars, alone doesn't work ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Look at Simple13, with unification-fmvs only [G] g : a ~ [F a] ---> Flatten given g' = g;[x] [G] g' : a ~ [fmv] [W] x : F a ~ fmv --> subst a in x x = F g' ; x2 [W] x2 : F [fmv] ~ fmv And now we have an evidence cycle between g' and x! If we used a given instead (ie current story) [G] g : a ~ [F a] ---> Flatten given g' = g;[x] [G] g' : a ~ [fsk] [G] <F a> : F a ~ fsk ---> Substitute for a [G] g' : a ~ [fsk] [G] F (sym g'); <F a> : F [fsk] ~ fsk Why is it right to treat fmv's differently to ordinary unification vars? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ f :: forall a. a -> a -> Bool g :: F Int -> F Int -> Bool Consider f (x:Int) (y:Bool) This gives alpha~Int, alpha~Bool. There is an inconsistency, but really only one error. SherLoc may tell you which location is most likely, based on other occurrences of alpha. Consider g (x:Int) (y:Bool) Here we get (F Int ~ Int, F Int ~ Bool), which flattens to (fmv ~ Int, fmv ~ Bool) But there are really TWO separate errors. We must not complain about Int~Bool. Moreover these two errors could arise in entirely unrelated parts of the code. (In the alpha case, there must be *some* connection (eg v:alpha in common envt).) 
Note [Orient equalities with flatten-meta-vars on the left] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This example comes from IndTypesPerfMerge From the ambiguity check for f :: (F a ~ a) => a we get: [G] F a ~ a [W] F alpha ~ alpha, alpha ~ a From Givens we get [G] F a ~ fsk, fsk ~ a Now if we flatten we get [W] alpha ~ fmv, F alpha ~ fmv, alpha ~ a Now, processing the first one first, choosing alpha := fmv [W] F fmv ~ fmv, fmv ~ a And now we are stuck. We must either *unify* fmv := a, or use the fmv ~ a to rewrite F fmv ~ fmv, so we can make it meet up with the given F a ~ blah. Solution: always put fmvs on the left, so we get [W] fmv ~ alpha, F alpha ~ fmv, alpha ~ a The point is that fmvs are very uninformative, so doing alpha := fmv is a bad idea. We want to use other constraints on alpha first. Note [Derived constraints from wanted CTyEqCans] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Is this type ambiguous: (Foo e ~ Maybe e) => Foo e (indexed-types/should_fail/T4093a) [G] Foo e ~ Maybe e [W] Foo e ~ Foo ee -- ee is a unification variable [W] Foo ee ~ Maybe ee) --- [G] Foo e ~ fsk [G] fsk ~ Maybe e [W] Foo e ~ fmv1 [W] Foo ee ~ fmv2 [W] fmv1 ~ fmv2 [W] fmv2 ~ Maybe ee ---> fmv1 := fsk by matching LHSs [W] Foo ee ~ fmv2 [W] fsk ~ fmv2 [W] fmv2 ~ Maybe ee ---> [W] Foo ee ~ fmv2 [W] fmv2 ~ Maybe e [W] fmv2 ~ Maybe ee Now maybe we shuld get [D] e ~ ee, and then we'd solve it entirely. But if in a smilar situation we got [D] Int ~ Bool we'd be back to complaining about wanted/wanted interactions. Maybe this arises also for fundeps? Here's another example: f :: [a] -> [b] -> blah f (e1 :: F Int) (e2 :: F Int) we get F Int ~ fmv fmv ~ [alpha] fmv ~ [beta] We want: alpha := beta (which might unlock something else). If we generated [D] [alpha] ~ [beta] we'd be good here. Current story: we don't generate these derived constraints. We could, but we'd want to make them very weak, so we didn't get the Int~Bool complaint. ************************************************************************ * * * Other notes (Oct 14) I have not revisted these, but I didn't want to discard them * * ************************************************************************ Try: rewrite wanted with wanted only for fmvs (not all meta-tyvars) But: fmv ~ alpha[0] alpha[0] ~ fmv’ Now we don’t see that fmv ~ fmv’, which is a problem for injectivity detection. Conclusion: rewrite wanteds with wanted for all untouchables. skol ~ untch, must re-orieint to untch ~ skol, so that we can use it to rewrite. ************************************************************************ * * * Examples Here is a long series of examples I had to work through * * ************************************************************************ Simple20 ~~~~~~~~ axiom F [a] = [F a] [G] F [a] ~ a --> [G] fsk ~ a [G] [F a] ~ fsk (nc) --> [G] F a ~ fsk2 [G] fsk ~ [fsk2] [G] fsk ~ a --> [G] F a ~ fsk2 [G] a ~ [fsk2] [G] fsk ~ a ----------------------------------- ---------------------------------------- indexed-types/should_compile/T44984 [W] H (F Bool) ~ H alpha [W] alpha ~ F Bool --> F Bool ~ fmv0 H fmv0 ~ fmv1 H alpha ~ fmv2 fmv1 ~ fmv2 fmv0 ~ alpha flatten ~~~~~~~ fmv0 := F Bool fmv1 := H (F Bool) fmv2 := H alpha alpha := F Bool plus fmv1 ~ fmv2 But these two are equal under the above assumptions. Solve by Refl. 
--- under plan B, namely solve fmv1:=fmv2 eagerly --- [W] H (F Bool) ~ H alpha [W] alpha ~ F Bool --> F Bool ~ fmv0 H fmv0 ~ fmv1 H alpha ~ fmv2 fmv1 ~ fmv2 fmv0 ~ alpha --> F Bool ~ fmv0 H fmv0 ~ fmv1 H alpha ~ fmv2 fmv2 := fmv1 fmv0 ~ alpha flatten fmv0 := F Bool fmv1 := H fmv0 = H (F Bool) retain H alpha ~ fmv2 because fmv2 has been filled alpha := F Bool ---------------------------- indexed-types/should_failt/T4179 after solving [W] fmv_1 ~ fmv_2 [W] A3 (FCon x) ~ fmv_1 (CFunEqCan) [W] A3 (x (aoa -> fmv_2)) ~ fmv_2 (CFunEqCan) ---------------------------------------- indexed-types/should_fail/T7729a a) [W] BasePrimMonad (Rand m) ~ m1 b) [W] tt m1 ~ BasePrimMonad (Rand m) ---> process (b) first BasePrimMonad (Ramd m) ~ fmv_atH fmv_atH ~ tt m1 ---> now process (a) m1 ~ s_atH ~ tt m1 -- An obscure occurs check ---------------------------------------- typecheck/TcTypeNatSimple Original constraint [W] x + y ~ x + alpha (non-canonical) ==> [W] x + y ~ fmv1 (CFunEqCan) [W] x + alpha ~ fmv2 (CFuneqCan) [W] fmv1 ~ fmv2 (CTyEqCan) (sigh) ---------------------------------------- indexed-types/should_fail/GADTwrong1 [G] Const a ~ () ==> flatten [G] fsk ~ () work item: Const a ~ fsk ==> fire top rule [G] fsk ~ () work item fsk ~ () Surely the work item should rewrite to () ~ ()? Well, maybe not; it'a very special case. More generally, our givens look like F a ~ Int, where (F a) is not reducible. ---------------------------------------- indexed_types/should_fail/T8227: Why using a different can-rewrite rule in CFunEqCan heads does not work. Assuming NOT rewriting wanteds with wanteds Inert: [W] fsk_aBh ~ fmv_aBk -> fmv_aBk [W] fmv_aBk ~ fsk_aBh [G] Scalar fsk_aBg ~ fsk_aBh [G] V a ~ f_aBg Worklist includes [W] Scalar fmv_aBi ~ fmv_aBk fmv_aBi, fmv_aBk are flatten unificaiton variables Work item: [W] V fsk_aBh ~ fmv_aBi Note that the inert wanteds are cyclic, because we do not rewrite wanteds with wanteds. Then we go into a loop when normalise the work-item, because we use rewriteOrSame on the argument of V. Conclusion: Don't make canRewrite context specific; instead use [W] a ~ ty to rewrite a wanted iff 'a' is a unification variable. ---------------------------------------- Here is a somewhat similar case: type family G a :: * blah :: (G a ~ Bool, Eq (G a)) => a -> a blah = error "urk" foo x = blah x For foo we get [W] Eq (G a), G a ~ Bool Flattening [W] G a ~ fmv, Eq fmv, fmv ~ Bool We can't simplify away the Eq Bool unless we substitute for fmv. Maybe that doesn't matter: we would still be left with unsolved G a ~ Bool. -------------------------- Trac #9318 has a very simple program leading to [W] F Int ~ Int [W] F Int ~ Bool We don't want to get "Error Int~Bool". But if fmv's can rewrite wanteds, we will [W] fmv ~ Int [W] fmv ~ Bool ---> [W] Int ~ Bool ************************************************************************ * * * The main flattening functions * * ************************************************************************ Note [Flattening] ~~~~~~~~~~~~~~~~~~~~ flatten ty ==> (xi, cc) where xi has no type functions, unless they appear under ForAlls cc = Auxiliary given (equality) constraints constraining the fresh type variables in xi. Evidence for these is always the identity coercion, because internally the fresh flattening skolem variables are actually identified with the types they have been generated to stand in for. Note that it is flatten's job to flatten *every type function it sees*. flatten is only called on *arguments* to type functions, by canEqGiven. 
Recall that in comments we use alpha[flat = ty] to represent a flattening skolem variable alpha which has been generated to stand in for ty. ----- Example of flattening a constraint: ------ flatten (List (F (G Int))) ==> (xi, cc) where xi = List alpha cc = { G Int ~ beta[flat = G Int], F beta ~ alpha[flat = F beta] } Here * alpha and beta are 'flattening skolem variables'. * All the constraints in cc are 'given', and all their coercion terms are the identity. NB: Flattening Skolems only occur in canonical constraints, which are never zonked, so we don't need to worry about zonking doing accidental unflattening. Note that we prefer to leave type synonyms unexpanded when possible, so when the flattener encounters one, it first asks whether its transitive expansion contains any type function applications. If so, it expands the synonym and proceeds; if not, it simply returns the unexpanded synonym. Note [Flattener EqRels] ~~~~~~~~~~~~~~~~~~~~~~~ When flattening, we need to know which equality relation -- nominal or representation -- we should be respecting. The only difference is that we rewrite variables by representational equalities when fe_eq_rel is ReprEq. -} data FlattenEnv = FE { fe_mode :: FlattenMode , fe_loc :: CtLoc , fe_flavour :: CtFlavour , fe_eq_rel :: EqRel } -- See Note [Flattener EqRels] data FlattenMode -- Postcondition for all three: inert wrt the type substitution = FM_FlattenAll -- Postcondition: function-free | FM_Avoid TcTyVar Bool -- See Note [Lazy flattening] -- Postcondition: -- * tyvar is only mentioned in result under a rigid path -- e.g. [a] is ok, but F a won't happen -- * If flat_top is True, top level is not a function application -- (but under type constructors is ok e.g. [F a]) | FM_SubstOnly -- See Note [Flattening under a forall] mkFlattenEnv :: FlattenMode -> CtEvidence -> FlattenEnv mkFlattenEnv fm ctev = FE { fe_mode = fm , fe_loc = ctEvLoc ctev , fe_flavour = ctEvFlavour ctev , fe_eq_rel = ctEvEqRel ctev } feRole :: FlattenEnv -> Role feRole = eqRelRole . fe_eq_rel {- Note [Lazy flattening] ~~~~~~~~~~~~~~~~~~~~~~ The idea of FM_Avoid mode is to flatten less aggressively. If we have a ~ [F Int] there seems to be no great merit in lifting out (F Int). But if it was a ~ [G a Int] then we *do* want to lift it out, in case (G a Int) reduces to Bool, say, which gets rid of the occurs-check problem. (For the flat_top Bool, see comments above and at call sites.) HOWEVER, the lazy flattening actually seems to make type inference go *slower*, not faster. perf/compiler/T3064 is a case in point; it gets *dramatically* worse with FM_Avoid. I think it may be because floating the types out means we normalise them, and that often makes them smaller and perhaps allows more re-use of previously solved goals. But to be honest I'm not absolutely certain, so I am leaving FM_Avoid in the code base. What I'm removing is the unique place where it is *used*, namely in TcCanonical.canEqTyVar. See also Note [Conservative unification check] in TcUnify, which gives other examples where lazy flattening caused problems. Bottom line: FM_Avoid is unused for now (Nov 14). Note: T5321Fun got faster when I disabled FM_Avoid T5837 did too, but it's pathalogical anyway Note [Phantoms in the flattener] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Suppose we have data Proxy p = Proxy and we're flattening (Proxy ty) w.r.t. ReprEq. Then, we know that `ty` is really irrelevant -- it will be ignored when solving for representational equality later on. So, we omit flattening `ty` entirely. 
This may violate the expectation of "xi"s for a bit, but the canonicaliser will soon throw out the phantoms when decomposing a TyConApp. (Or, the canonicaliser will emit an insoluble, in which case the unflattened version yields a better error message anyway.) Note [flatten_many performance] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In programs with lots of type-level evaluation, flatten_many becomes part of a tight loop. For example, see test perf/compiler/T9872a, which calls flatten_many a whopping 7,106,808 times. It is thus important that flatten_many be efficient. Performance testing showed that the current implementation is indeed efficient. It's critically important that zipWithAndUnzipM be specialized to TcS, and it's also quite helpful to actually `inline` it. On test T9872a, here are the allocation stats (Dec 16, 2014): * Unspecialized, uninlined: 8,472,613,440 bytes allocated in the heap * Specialized, uninlined: 6,639,253,488 bytes allocated in the heap * Specialized, inlined: 6,281,539,792 bytes allocated in the heap To improve performance even further, flatten_many_nom is split off from flatten_many, as nominal equality is the common case. This would be natural to write using mapAndUnzipM, but even inlined, that function is not as performant as a hand-written loop. * mapAndUnzipM, inlined: 7,463,047,432 bytes allocated in the heap * hand-written recursion: 5,848,602,848 bytes allocated in the heap If you make any change here, pay close attention to the T9872{a,b,c} tests and T5321Fun. If we need to make this yet more performant, a possible way forward is to duplicate the flattener code for the nominal case, and make that case faster. This doesn't seem quite worth it, yet. -} ------------------ flatten :: FlattenMode -> CtEvidence -> TcType -> TcS (Xi, TcCoercion) flatten mode ev ty = runFlatten (flatten_one fmode ty) where fmode = mkFlattenEnv mode ev flattenMany :: FlattenMode -> CtEvidence -> [Role] -> [TcType] -> TcS ([Xi], [TcCoercion]) -- Flatten a bunch of types all at once. Roles on the coercions returned -- always match the corresponding roles passed in. flattenMany mode ev roles tys = runFlatten (flatten_many fmode roles tys) where fmode = mkFlattenEnv mode ev flattenFamApp :: FlattenMode -> CtEvidence -> TyCon -> [TcType] -> TcS (Xi, TcCoercion) flattenFamApp mode ev tc tys = runFlatten (flatten_fam_app fmode tc tys) where fmode = mkFlattenEnv mode ev ------------------ flatten_many :: FlattenEnv -> [Role] -> [Type] -> TcS ([Xi], [TcCoercion]) -- Coercions :: Xi ~ Type, at roles given -- Returns True iff (no flattening happened) -- NB: The EvVar inside the 'fe_ev :: CtEvidence' is unused, -- we merely want (a) Given/Solved/Derived/Wanted info -- (b) the GivenLoc/WantedLoc for when we create new evidence flatten_many fmode roles tys -- See Note [flatten_many performance] = inline zipWithAndUnzipM go roles tys where go Nominal ty = flatten_one (setFEEqRel fmode NomEq) ty go Representational ty = flatten_one (setFEEqRel fmode ReprEq) ty go Phantom ty = -- See Note [Phantoms in the flattener] return (ty, mkTcPhantomCo ty ty) -- | Like 'flatten_many', but assumes that every role is nominal. 
flatten_many_nom :: FlattenEnv -> [Type] -> TcS ([Xi], [TcCoercion]) flatten_many_nom _ [] = return ([], []) -- See Note [flatten_many performance] flatten_many_nom fmode (ty:tys) = ASSERT( fe_eq_rel fmode == NomEq ) do { (xi, co) <- flatten_one fmode ty ; (xis, cos) <- flatten_many_nom fmode tys ; return (xi:xis, co:cos) } ------------------ flatten_one :: FlattenEnv -> TcType -> TcS (Xi, TcCoercion) -- Flatten a type to get rid of type function applications, returning -- the new type-function-free type, and a collection of new equality -- constraints. See Note [Flattening] for more detail. -- -- Postcondition: Coercion :: Xi ~ TcType -- The role on the result coercion matches the EqRel in the FlattenEnv flatten_one fmode xi@(LitTy {}) = return (xi, mkTcReflCo (feRole fmode) xi) flatten_one fmode (TyVarTy tv) = flattenTyVar fmode tv flatten_one fmode (AppTy ty1 ty2) = do { (xi1,co1) <- flatten_one fmode ty1 ; case (fe_eq_rel fmode, nextRole xi1) of (NomEq, _) -> flatten_rhs xi1 co1 NomEq (ReprEq, Nominal) -> flatten_rhs xi1 co1 NomEq (ReprEq, Representational) -> flatten_rhs xi1 co1 ReprEq (ReprEq, Phantom) -> return (mkAppTy xi1 ty2, co1 `mkTcAppCo` mkTcNomReflCo ty2) } where flatten_rhs xi1 co1 eq_rel2 = do { (xi2,co2) <- flatten_one (setFEEqRel fmode eq_rel2) ty2 ; traceTcS "flatten/appty" (ppr ty1 $$ ppr ty2 $$ ppr xi1 $$ ppr co1 $$ ppr xi2 $$ ppr co2) ; let role1 = feRole fmode role2 = eqRelRole eq_rel2 ; return ( mkAppTy xi1 xi2 , mkTcTransAppCo role1 co1 xi1 ty1 role2 co2 xi2 ty2 role1 ) } -- output should match fmode flatten_one fmode (FunTy ty1 ty2) = do { (xi1,co1) <- flatten_one fmode ty1 ; (xi2,co2) <- flatten_one fmode ty2 ; return (mkFunTy xi1 xi2, mkTcFunCo (feRole fmode) co1 co2) } flatten_one fmode (TyConApp tc tys) -- Expand type synonyms that mention type families -- on the RHS; see Note [Flattening synonyms] | Just (tenv, rhs, tys') <- tcExpandTyCon_maybe tc tys , let expanded_ty = mkAppTys (substTy (mkTopTvSubst tenv) rhs) tys' = case fe_mode fmode of FM_FlattenAll | anyNameEnv isTypeFamilyTyCon (tyConsOfType rhs) -> flatten_one fmode expanded_ty | otherwise -> flattenTyConApp fmode tc tys _ -> flattenTyConApp fmode tc tys -- Otherwise, it's a type function application, and we have to -- flatten it away as well, and generate a new given equality constraint -- between the application and a newly generated flattening skolem variable. | isTypeFamilyTyCon tc = flatten_fam_app fmode tc tys -- For * a normal data type application -- * data family application -- we just recursively flatten the arguments. | otherwise -- FM_Avoid stuff commented out; see Note [Lazy flattening] -- , let fmode' = case fmode of -- Switch off the flat_top bit in FM_Avoid -- FE { fe_mode = FM_Avoid tv _ } -- -> fmode { fe_mode = FM_Avoid tv False } -- _ -> fmode = flattenTyConApp fmode tc tys flatten_one fmode ty@(ForAllTy {}) -- We allow for-alls when, but only when, no type function -- applications inside the forall involve the bound type variables. 
= do { let (tvs, rho) = splitForAllTys ty ; (rho', co) <- flatten_one (setFEMode fmode FM_SubstOnly) rho -- Substitute only under a forall -- See Note [Flattening under a forall] ; return (mkForAllTys tvs rho', foldr mkTcForAllCo co tvs) } flattenTyConApp :: FlattenEnv -> TyCon -> [TcType] -> TcS (Xi, TcCoercion) flattenTyConApp fmode tc tys = do { (xis, cos) <- case fe_eq_rel fmode of NomEq -> flatten_many_nom fmode tys ReprEq -> flatten_many fmode (tyConRolesX role tc) tys ; return (mkTyConApp tc xis, mkTcTyConAppCo role tc cos) } where role = feRole fmode {- Note [Flattening synonyms] ~~~~~~~~~~~~~~~~~~~~~~~~~~ Not expanding synonyms aggressively improves error messages, and keeps types smaller. But we need to take care. Suppose type T a = a -> a and we want to flatten the type (T (F a)). Then we can safely flatten the (F a) to a skolem, and return (T fsk). We don't need to expand the synonym. This works because TcTyConAppCo can deal with synonyms (unlike TyConAppCo), see Note [TcCoercions] in TcEvidence. But (Trac #8979) for type T a = (F a, a) where F is a type function we must expand the synonym in (say) T Int, to expose the type function to the flattener. Note [Flattening under a forall] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Under a forall, we (a) MUST apply the inert substitution (b) MUST NOT flatten type family applications Hence FMSubstOnly. For (a) consider c ~ a, a ~ T (forall b. (b, [c])) If we don't apply the c~a substitution to the second constraint we won't see the occurs-check error. For (b) consider (a ~ forall b. F a b), we don't want to flatten to (a ~ forall b.fsk, F a b ~ fsk) because now the 'b' has escaped its scope. We'd have to flatten to (a ~ forall b. fsk b, forall b. F a b ~ fsk b) and we have not begun to think about how to make that work! 
************************************************************************ * * Flattening a type-family application * * ************************************************************************ -} flatten_fam_app, flatten_exact_fam_app, flatten_exact_fam_app_fully :: FlattenEnv -> TyCon -> [TcType] -> TcS (Xi, TcCoercion) -- flatten_fam_app can be over-saturated -- flatten_exact_fam_app is exactly saturated -- flatten_exact_fam_app_fully lifts out the application to top level -- Postcondition: Coercion :: Xi ~ F tys flatten_fam_app fmode tc tys -- Can be over-saturated = ASSERT( tyConArity tc <= length tys ) -- Type functions are saturated -- The type function might be *over* saturated -- in which case the remaining arguments should -- be dealt with by AppTys do { let (tys1, tys_rest) = splitAt (tyConArity tc) tys ; (xi1, co1) <- flatten_exact_fam_app fmode tc tys1 -- co1 :: xi1 ~ F tys1 -- all Nominal roles b/c the tycon is oversaturated ; (xis_rest, cos_rest) <- flatten_many fmode (repeat Nominal) tys_rest -- cos_res :: xis_rest ~ tys_rest ; return ( mkAppTys xi1 xis_rest -- NB mkAppTys: rhs_xi might not be a type variable -- cf Trac #5655 , mkTcAppCos co1 cos_rest -- (rhs_xi :: F xis) ; (F cos :: F xis ~ F tys) ) } flatten_exact_fam_app fmode tc tys = case fe_mode fmode of FM_FlattenAll -> flatten_exact_fam_app_fully fmode tc tys FM_SubstOnly -> do { (xis, cos) <- flatten_many fmode roles tys ; return ( mkTyConApp tc xis , mkTcTyConAppCo (feRole fmode) tc cos ) } FM_Avoid tv flat_top -> do { (xis, cos) <- flatten_many fmode roles tys ; if flat_top || tv `elemVarSet` tyVarsOfTypes xis then flatten_exact_fam_app_fully fmode tc tys else return ( mkTyConApp tc xis , mkTcTyConAppCo (feRole fmode) tc cos ) } where -- These are always going to be Nominal for now, -- but not if #8177 is implemented roles = tyConRolesX (feRole fmode) tc flatten_exact_fam_app_fully fmode tc tys = do { (xis, cos) <- flatten_many_nom (setFEEqRel (setFEMode fmode FM_FlattenAll) NomEq) tys ; let ret_co = mkTcTyConAppCo (feRole fmode) tc cos -- ret_co :: F xis ~ F tys ; mb_ct <- lookupFlatCache tc xis ; case mb_ct of Just (co, rhs_ty, flav) -- co :: F xis ~ fsk | (flav, NomEq) `canRewriteOrSameFR` (feFlavourRole fmode) -> -- Usable hit in the flat-cache -- We certainly *can* use a Wanted for a Wanted do { traceTcS "flatten/flat-cache hit" $ (ppr tc <+> ppr xis $$ ppr rhs_ty $$ ppr co) ; (fsk_xi, fsk_co) <- flatten_one fmode rhs_ty -- The fsk may already have been unified, so flatten it -- fsk_co :: fsk_xi ~ fsk ; return (fsk_xi, fsk_co `mkTcTransCo` maybeTcSubCo (fe_eq_rel fmode) (mkTcSymCo co) `mkTcTransCo` ret_co) } -- :: fsk_xi ~ F xis -- Try to reduce the family application right now -- See Note [Reduce type family applications eagerly] _ -> do { mb_match <- matchFam tc xis ; case mb_match of { Just (norm_co, norm_ty) -> do { (xi, final_co) <- flatten_one fmode norm_ty ; let co = norm_co `mkTcTransCo` mkTcSymCo final_co ; extendFlatCache tc xis ( co, xi , fe_flavour fmode ) ; return (xi, mkTcSymCo co `mkTcTransCo` ret_co) } ; Nothing -> do { let fam_ty = mkTyConApp tc xis ; (ev, fsk) <- newFlattenSkolem (fe_flavour fmode) (fe_loc fmode) fam_ty ; let fsk_ty = mkTyVarTy fsk co = ctEvCoercion ev ; extendFlatCache tc xis (co, fsk_ty, ctEvFlavour ev) -- The new constraint (F xis ~ fsk) is not necessarily inert -- (e.g. 
the LHS may be a redex) so we must put it in the work list ; let ct = CFunEqCan { cc_ev = ev , cc_fun = tc , cc_tyargs = xis , cc_fsk = fsk } ; emitFlatWork ct ; traceTcS "flatten/flat-cache miss" $ (ppr fam_ty $$ ppr fsk $$ ppr ev) ; return (fsk_ty, maybeTcSubCo (fe_eq_rel fmode) (mkTcSymCo co) `mkTcTransCo` ret_co) } } } } {- Note [Reduce type family applications eagerly] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If we come across a type-family application like (Append (Cons x Nil) t), then, rather than flattening to a skolem etc, we may as well just reduce it on the spot to (Cons x t). This saves a lot of intermediate steps. Examples that are helped are tests T9872, and T5321Fun. So just before we create the new skolem, we attempt to reduce it by one step (using matchFam). If that works, then recursively flatten the rhs, which may in turn do lots more reductions. Once we've got a flat rhs, we extend the flatten-cache to record the result. Doing so can save lots of work when the same redex shows up more than once. Note that we record the link from the redex all the way to its *final* value, not just the single step reduction. ************************************************************************ * * Flattening a type variable * * ************************************************************************ Note [The inert equalities] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Definition [Can-rewrite relation] A "can-rewrite" relation between flavours, written f1 >= f2, is a binary relation with the following properties R1. >= is transitive R2. If f1 >= f, and f2 >= f, then either f1 >= f2 or f2 >= f1 Lemma. If f1 >= f then f1 >= f1 Proof. By property (R2), with f1=f2 Definition [Generalised substitution] A "generalised substitution" S is a set of triples (a -f-> t), where a is a type variable t is a type f is a flavour such that (WF1) if (a -f1-> t1) in S (a -f2-> t2) in S then neither (f1 >= f2) nor (f2 >= f1) hold (WF2) if (a -f-> t) is in S, then t /= a Definition [Applying a generalised substitution] If S is a generalised substitution S(f,a) = t, if (a -fs-> t) in S, and fs >= f = a, otherwise Application extends naturally to types S(f,t), modulo roles. See Note [Flavours with roles]. Theorem: S(f,a) is well defined as a function. Proof: Suppose (a -f1-> t1) and (a -f2-> t2) are both in S, and f1 >= f and f2 >= f Then by (R2) f1 >= f2 or f2 >= f1, which contradicts (WF) Notation: repeated application. S^0(f,t) = t S^(n+1)(f,t) = S(f, S^n(t)) Definition: inert generalised substitution A generalised substitution S is "inert" iff (IG1) there is an n such that for every f,t, S^n(f,t) = S^(n+1)(f,t) (IG2) if (b -f-> t) in S, and f >= f, then S(f,t) = t that is, each individual binding is "self-stable" ---------------------------------------------------------------- Our main invariant: the inert CTyEqCans should be an inert generalised substitution ---------------------------------------------------------------- Note that inertness is not the same as idempotence. To apply S to a type, you may have to apply it recursive. But inertness does guarantee that this recursive use will terminate. 
---------- The main theorem -------------- Suppose we have a "work item" a -fw-> t and an inert generalised substitution S, such that (T1) S(fw,a) = a -- LHS of work-item is a fixpoint of S(fw,_) (T2) S(fw,t) = t -- RHS of work-item is a fixpoint of S(fw,_) (T3) a not in t -- No occurs check in the work item (K1) if (a -fs-> s) is in S then not (fw >= fs) (K2) if (b -fs-> s) is in S, where b /= a, then (K2a) not (fs >= fs) or (K2b) not (fw >= fs) or (K2c) a not in s (K3) If (b -fs-> s) is in S with (fw >= fs), then (K3a) If the role of fs is nominal: s /= a (K3b) If the role of fs is representational: EITHER a not in s, OR the path from the top of s to a includes at least one non-newtype then the extended substition T = S+(a -fw-> t) is an inert generalised substitution. The idea is that * (T1-2) are guaranteed by exhaustively rewriting the work-item with S(fw,_). * T3 is guaranteed by a simple occurs-check on the work item. * (K1-3) are the "kick-out" criteria. (As stated, they are really the "keep" criteria.) If the current inert S contains a triple that does not satisfy (K1-3), then we remove it from S by "kicking it out", and re-processing it. * Note that kicking out is a Bad Thing, because it means we have to re-process a constraint. The less we kick out, the better. TODO: Make sure that kicking out really *is* a Bad Thing. We've assumed this but haven't done the empirical study to check. * Assume we have G>=G, G>=W, D>=D, and that's all. Then, when performing a unification we add a new given a -G-> ty. But doing so does NOT require us to kick out an inert wanted that mentions a, because of (K2a). This is a common case, hence good not to kick out. * Lemma (L1): The conditions of the Main Theorem imply that there is no (a fs-> t) in S, s.t. (fs >= fw). Proof. Suppose the contrary (fs >= fw). Then because of (T1), S(fw,a)=a. But since fs>=fw, S(fw,a) = s, hence s=a. But now we have (a -fs-> a) in S, which contradicts (WF2). * The extended substitution satisfies (WF1) and (WF2) - (K1) plus (L1) guarantee that the extended substiution satisfies (WF1). - (T3) guarantees (WF2). * (K2) is about inertness. Intuitively, any infinite chain T^0(f,t), T^1(f,t), T^2(f,T).... must pass through the new work item infnitely often, since the substution without the work item is inert; and must pass through at least one of the triples in S infnitely often. - (K2a): if not(fs>=fs) then there is no f that fs can rewrite (fs>=f), and hence this triple never plays a role in application S(f,a). It is always safe to extend S with such a triple. (NB: we could strengten K1) in this way too, but see K3. - (K2b): If this holds, we can't pass through this triple infinitely often, because if we did then fs>=f, fw>=f, hence fs>=fw, contradicting (L1), or fw>=fs contradicting K2b. - (K2c): if a not in s, we hae no further opportunity to apply the work item. NB: this reasoning isn't water tight. Key lemma to make it watertight. Under the conditions of the Main Theorem, forall f st fw >= f, a is not in S^k(f,t), for any k Also, consider roles more carefully. See Note [Flavours with roles]. Completeness ~~~~~~~~~~~~~ K3: completeness. (K3) is not necessary for the extended substitution to be inert. In fact K1 could be made stronger by saying ... then (not (fw >= fs) or not (fs >= fs)) But it's not enough for S to be inert; we also want completeness. That is, we want to be able to solve all soluble wanted equalities. 
Suppose we have work-item b -G-> a inert-item a -W-> b Assuming (G >= W) but not (W >= W), this fulfills all the conditions, so we could extend the inerts, thus: inert-items b -G-> a a -W-> b But if we kicked-out the inert item, we'd get work-item a -W-> b inert-item b -G-> a Then rewrite the work-item gives us (a -W-> a), which is soluble via Refl. So we add one more clause to the kick-out criteria Another way to understand (K3) is that we treat an inert item a -f-> b in the same way as b -f-> a So if we kick out one, we should kick out the other. The orientation is somewhat accidental. When considering roles, we also need the second clause (K3b). Consider inert-item a -W/R-> b c work-item c -G/N-> a The work-item doesn't get rewritten by the inert, because (>=) doesn't hold. We've satisfied conditions (T1)-(T3) and (K1) and (K2). If all we had were condition (K3a), then we would keep the inert around and add the work item. But then, consider if we hit the following: work-item2 b -G/N-> Id where newtype Id x = Id x For similar reasons, if we only had (K3a), we wouldn't kick the representational inert out. And then, we'd miss solving the inert, which now reduced to reflexivity. The solution here is to kick out representational inerts whenever the tyvar of a work item is "exposed", where exposed means not under some proper data-type constructor, like [] or Maybe. See isTyVarExposed in TcType. This is encoded in (K3b). Note [Flavours with roles] ~~~~~~~~~~~~~~~~~~~~~~~~~~ The system described in Note [The inert equalities] discusses an abstract set of flavours. In GHC, flavours have two components: the flavour proper, taken from {Wanted, Derived, Given}; and the equality relation (often called role), taken from {NomEq, ReprEq}. When substituting w.r.t. the inert set, as described in Note [The inert equalities], we must be careful to respect roles. For example, if we have inert set: a -G/R-> Int b -G/R-> Bool type role T nominal representational and we wish to compute S(W/R, T a b), the correct answer is T a Bool, NOT T Int Bool. The reason is that T's first parameter has a nominal role, and thus rewriting a to Int in T a b is wrong. Indeed, this non-congruence of subsitution means that the proof in Note [The inert equalities] may need to be revisited, but we don't think that the end conclusion is wrong. -} flattenTyVar :: FlattenEnv -> TcTyVar -> TcS (Xi, TcCoercion) -- "Flattening" a type variable means to apply the substitution to it -- The substitution is actually the union of -- * the unifications that have taken place (either before the -- solver started, or in TcInteract.solveByUnification) -- * the CTyEqCans held in the inert set -- -- Postcondition: co : xi ~ tv flattenTyVar fmode tv = do { mb_yes <- flattenTyVarOuter fmode tv ; case mb_yes of Left tv' -> -- Done do { traceTcS "flattenTyVar1" (ppr tv $$ ppr (tyVarKind tv')) ; return (ty', mkTcReflCo (feRole fmode) ty') } where ty' = mkTyVarTy tv' Right (ty1, co1) -- Recurse -> do { (ty2, co2) <- flatten_one fmode ty1 ; traceTcS "flattenTyVar3" (ppr tv $$ ppr ty2) ; return (ty2, co2 `mkTcTransCo` co1) } } flattenTyVarOuter :: FlattenEnv -> TcTyVar -> TcS (Either TyVar (TcType, TcCoercion)) -- Look up the tyvar in -- a) the internal MetaTyVar box -- b) the tyvar binds -- c) the inerts -- Return (Left tv') if it is not found, tv' has a properly zonked kind -- (Right (ty, co) if found, with co :: ty ~ tv; flattenTyVarOuter fmode tv | not (isTcTyVar tv) -- Happens when flatten under a (forall a. 
ty) = Left `liftM` flattenTyVarFinal fmode tv -- So ty contains refernces to the non-TcTyVar a | otherwise = do { mb_ty <- isFilledMetaTyVar_maybe tv ; case mb_ty of { Just ty -> do { traceTcS "Following filled tyvar" (ppr tv <+> equals <+> ppr ty) ; return (Right (ty, mkTcReflCo (feRole fmode) ty)) } ; Nothing -> -- Try in the inert equalities -- See Definition [Applying a generalised substitution] do { ieqs <- getInertEqs ; case lookupVarEnv ieqs tv of Just (ct:_) -- If the first doesn't work, -- the subsequent ones won't either | CTyEqCan { cc_ev = ctev, cc_tyvar = tv, cc_rhs = rhs_ty } <- ct , ctEvFlavourRole ctev `eqCanRewriteFR` feFlavourRole fmode -> do { traceTcS "Following inert tyvar" (ppr tv <+> equals <+> ppr rhs_ty $$ ppr ctev) ; let rewrite_co1 = mkTcSymCo (ctEvCoercion ctev) rewrite_co = case (ctEvEqRel ctev, fe_eq_rel fmode) of (ReprEq, _rel) -> ASSERT( _rel == ReprEq ) -- if this ASSERT fails, then -- eqCanRewriteFR answered incorrectly rewrite_co1 (NomEq, NomEq) -> rewrite_co1 (NomEq, ReprEq) -> mkTcSubCo rewrite_co1 ; return (Right (rhs_ty, rewrite_co)) } -- NB: ct is Derived then fmode must be also, hence -- we are not going to touch the returned coercion -- so ctEvCoercion is fine. _other -> Left `liftM` flattenTyVarFinal fmode tv } } } flattenTyVarFinal :: FlattenEnv -> TcTyVar -> TcS TyVar flattenTyVarFinal fmode tv = -- Done, but make sure the kind is zonked do { let kind = tyVarKind tv kind_fmode = setFEMode fmode FM_SubstOnly ; (new_knd, _kind_co) <- flatten_one kind_fmode kind ; return (setVarType tv new_knd) } {- Note [An alternative story for the inert substitution] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ (This entire note is just background, left here in case we ever want to return the the previousl state of affairs) We used (GHC 7.8) to have this story for the inert substitution inert_eqs * 'a' is not in fvs(ty) * They are *inert* in the weaker sense that there is no infinite chain of (i1 `eqCanRewrite` i2), (i2 `eqCanRewrite` i3), etc This means that flattening must be recursive, but it does allow [G] a ~ [b] [G] b ~ Maybe c This avoids "saturating" the Givens, which can save a modest amount of work. It is easy to implement, in TcInteract.kick_out, by only kicking out an inert only if (a) the work item can rewrite the inert AND (b) the inert cannot rewrite the work item This is signifcantly harder to think about. It can save a LOT of work in occurs-check cases, but we don't care about them much. Trac #5837 is an example; all the constraints here are Givens [G] a ~ TF (a,Int) --> work TF (a,Int) ~ fsk inert fsk ~ a ---> work fsk ~ (TF a, TF Int) inert fsk ~ a ---> work a ~ (TF a, TF Int) inert fsk ~ a ---> (attempting to flatten (TF a) so that it does not mention a work TF a ~ fsk2 inert a ~ (fsk2, TF Int) inert fsk ~ (fsk2, TF Int) ---> (substitute for a) work TF (fsk2, TF Int) ~ fsk2 inert a ~ (fsk2, TF Int) inert fsk ~ (fsk2, TF Int) ---> (top-level reduction, re-orient) work fsk2 ~ (TF fsk2, TF Int) inert a ~ (fsk2, TF Int) inert fsk ~ (fsk2, TF Int) ---> (attempt to flatten (TF fsk2) to get rid of fsk2 work TF fsk2 ~ fsk3 work fsk2 ~ (fsk3, TF Int) inert a ~ (fsk2, TF Int) inert fsk ~ (fsk2, TF Int) ---> work TF fsk2 ~ fsk3 inert fsk2 ~ (fsk3, TF Int) inert a ~ ((fsk3, TF Int), TF Int) inert fsk ~ ((fsk3, TF Int), TF Int) Because the incoming given rewrites all the inert givens, we get more and more duplication in the inert set. But this really only happens in pathalogical casee, so we don't care. 
-} eqCanRewrite :: CtEvidence -> CtEvidence -> Bool eqCanRewrite ev1 ev2 = ctEvFlavourRole ev1 `eqCanRewriteFR` ctEvFlavourRole ev2 -- | Whether or not one 'Ct' can rewrite another is determined by its -- flavour and its equality relation type CtFlavourRole = (CtFlavour, EqRel) -- | Extract the flavour and role from a 'CtEvidence' ctEvFlavourRole :: CtEvidence -> CtFlavourRole ctEvFlavourRole ev = (ctEvFlavour ev, ctEvEqRel ev) -- | Extract the flavour and role from a 'Ct' ctFlavourRole :: Ct -> CtFlavourRole ctFlavourRole = ctEvFlavourRole . cc_ev -- | Extract the flavour and role from a 'FlattenEnv' feFlavourRole :: FlattenEnv -> CtFlavourRole feFlavourRole (FE { fe_flavour = flav, fe_eq_rel = eq_rel }) = (flav, eq_rel) eqCanRewriteFR :: CtFlavourRole -> CtFlavourRole -> Bool -- Very important function! -- See Note [eqCanRewrite] eqCanRewriteFR (Given, NomEq) (_, _) = True eqCanRewriteFR (Given, ReprEq) (_, ReprEq) = True eqCanRewriteFR _ _ = False canRewriteOrSame :: CtEvidence -> CtEvidence -> Bool -- See Note [canRewriteOrSame] canRewriteOrSame ev1 ev2 = ev1 `eqCanRewrite` ev2 || ctEvFlavourRole ev1 == ctEvFlavourRole ev2 canRewriteOrSameFR :: CtFlavourRole -> CtFlavourRole -> Bool canRewriteOrSameFR fr1 fr2 = fr1 `eqCanRewriteFR` fr2 || fr1 == fr2 {- Note [eqCanRewrite] ~~~~~~~~~~~~~~~~~~~ (eqCanRewrite ct1 ct2) holds if the constraint ct1 (a CTyEqCan of form tv ~ ty) can be used to rewrite ct2. It must satisfy the properties of a can-rewrite relation, see Definition [Can-rewrite relation] At the moment we don't allow Wanteds to rewrite Wanteds, because that can give rise to very confusing type error messages. A good example is Trac #8450. Here's another f :: a -> Bool f x = ( [x,'c'], [x,True] ) `seq` True Here we get [W] a ~ Char [W] a ~ Bool but we do not want to complain about Bool ~ Char! Accordingly, we also don't let Deriveds rewrite Deriveds. With the solver handling Coercible constraints like equality constraints, the rewrite conditions must take role into account, never allowing a representational equality to rewrite a nominal one. Note [canRewriteOrSame] ~~~~~~~~~~~~~~~~~~~~~~~ canRewriteOrSame is similar but * returns True for Wanted/Wanted. * works for all kinds of constraints, not just CTyEqCans See the call sites for explanations. ************************************************************************ * * Unflattening * * ************************************************************************ An unflattening example: [W] F a ~ alpha flattens to [W] F a ~ fmv (CFunEqCan) [W] fmv ~ alpha (CTyEqCan) We must solve both! 
-} unflatten :: Cts -> Cts -> TcS Cts unflatten tv_eqs funeqs = do { dflags <- getDynFlags ; tclvl <- getTcLevel ; traceTcS "Unflattening" $ braces $ vcat [ ptext (sLit "Funeqs =") <+> pprCts funeqs , ptext (sLit "Tv eqs =") <+> pprCts tv_eqs ] -- Step 1: unflatten the CFunEqCans, except if that causes an occurs check -- See Note [Unflatten using funeqs first] ; funeqs <- foldrBagM (unflatten_funeq dflags) emptyCts funeqs ; traceTcS "Unflattening 1" $ braces (pprCts funeqs) -- Step 2: unify the irreds, if possible ; tv_eqs <- foldrBagM (unflatten_eq dflags tclvl) emptyCts tv_eqs ; traceTcS "Unflattening 2" $ braces (pprCts tv_eqs) -- Step 3: fill any remaining fmvs with fresh unification variables ; funeqs <- mapBagM finalise_funeq funeqs ; traceTcS "Unflattening 3" $ braces (pprCts funeqs) -- Step 4: remove any irreds that look like ty ~ ty ; tv_eqs <- foldrBagM finalise_eq emptyCts tv_eqs ; let all_flat = tv_eqs `andCts` funeqs ; traceTcS "Unflattening done" $ braces (pprCts all_flat) ; return all_flat } where ---------------- unflatten_funeq :: DynFlags -> Ct -> Cts -> TcS Cts unflatten_funeq dflags ct@(CFunEqCan { cc_fun = tc, cc_tyargs = xis , cc_fsk = fmv, cc_ev = ev }) rest = do { -- fmv should be a flatten meta-tv; we now fix its final -- value, and then zonking will eliminate it filled <- tryFill dflags fmv (mkTyConApp tc xis) ev ; return (if filled then rest else ct `consCts` rest) } unflatten_funeq _ other_ct _ = pprPanic "unflatten_funeq" (ppr other_ct) ---------------- finalise_funeq :: Ct -> TcS Ct finalise_funeq (CFunEqCan { cc_fsk = fmv, cc_ev = ev }) = do { demoteUnfilledFmv fmv ; return (mkNonCanonical ev) } finalise_funeq ct = pprPanic "finalise_funeq" (ppr ct) ---------------- unflatten_eq :: DynFlags -> TcLevel -> Ct -> Cts -> TcS Cts unflatten_eq dflags tclvl ct@(CTyEqCan { cc_ev = ev, cc_tyvar = tv, cc_rhs = rhs }) rest | isFmvTyVar tv = do { lhs_elim <- tryFill dflags tv rhs ev ; if lhs_elim then return rest else do { rhs_elim <- try_fill dflags tclvl ev rhs (mkTyVarTy tv) ; if rhs_elim then return rest else return (ct `consCts` rest) } } | otherwise = return (ct `consCts` rest) unflatten_eq _ _ ct _ = pprPanic "unflatten_irred" (ppr ct) ---------------- finalise_eq :: Ct -> Cts -> TcS Cts finalise_eq (CTyEqCan { cc_ev = ev, cc_tyvar = tv , cc_rhs = rhs, cc_eq_rel = eq_rel }) rest | isFmvTyVar tv = do { ty1 <- zonkTcTyVar tv ; ty2 <- zonkTcType rhs ; let is_refl = ty1 `tcEqType` ty2 ; if is_refl then do { when (isWanted ev) $ setEvBind (ctEvId ev) (EvCoercion $ mkTcReflCo (eqRelRole eq_rel) rhs) ; return rest } else return (mkNonCanonical ev `consCts` rest) } | otherwise = return (mkNonCanonical ev `consCts` rest) finalise_eq ct _ = pprPanic "finalise_irred" (ppr ct) ---------------- try_fill dflags tclvl ev ty1 ty2 | Just tv1 <- tcGetTyVar_maybe ty1 , isTouchableOrFmv tclvl tv1 , typeKind ty1 `isSubKind` tyVarKind tv1 = tryFill dflags tv1 ty2 ev | otherwise = return False tryFill :: DynFlags -> TcTyVar -> TcType -> CtEvidence -> TcS Bool -- (tryFill tv rhs ev) sees if 'tv' is an un-filled MetaTv -- If so, and if tv does not appear in 'rhs', set tv := rhs -- bind the evidence (which should be a CtWanted) to Refl<rhs> -- and return True. 
Otherwise return False tryFill dflags tv rhs ev = ASSERT2( not (isGiven ev), ppr ev ) do { is_filled <- isFilledMetaTyVar tv ; if is_filled then return False else do { rhs' <- zonkTcType rhs ; case occurCheckExpand dflags tv rhs' of OC_OK rhs'' -- Normal case: fill the tyvar -> do { when (isWanted ev) $ setEvBind (ctEvId ev) (EvCoercion (mkTcReflCo (ctEvRole ev) rhs'')) ; setWantedTyBind tv rhs'' ; return True } _ -> -- Occurs check return False } } {- Note [Unflatten using funeqs first] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ [W] G a ~ Int [W] F (G a) ~ G a do not want to end up with [W} F Int ~ Int because that might actually hold! Better to end up with the two above unsolved constraints. The flat form will be G a ~ fmv1 (CFunEqCan) F fmv1 ~ fmv2 (CFunEqCan) fmv1 ~ Int (CTyEqCan) fmv1 ~ fmv2 (CTyEqCan) Flatten using the fun-eqs first. -} -- | Change the 'EqRel' in a 'FlattenEnv'. Avoids allocating a -- new 'FlattenEnv' where possible. setFEEqRel :: FlattenEnv -> EqRel -> FlattenEnv setFEEqRel fmode@(FE { fe_eq_rel = old_eq_rel }) new_eq_rel | old_eq_rel == new_eq_rel = fmode | otherwise = fmode { fe_eq_rel = new_eq_rel } -- | Change the 'FlattenMode' in a 'FlattenEnv'. Avoids allocating -- a new 'FlattenEnv' where possible. setFEMode :: FlattenEnv -> FlattenMode -> FlattenEnv setFEMode fmode@(FE { fe_mode = old_mode }) new_mode | old_mode `eq` new_mode = fmode | otherwise = fmode { fe_mode = new_mode } where FM_FlattenAll `eq` FM_FlattenAll = True FM_SubstOnly `eq` FM_SubstOnly = True FM_Avoid tv1 b1 `eq` FM_Avoid tv2 b2 = tv1 == tv2 && b1 == b2 _ `eq` _ = False
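-- Editor's sketch (not part of TcFlatten above): a toy model of the scheme in
-- Note [Flattening].  GHC's flavours, coercions and flat-cache are elided; the
-- hypothetical Ty/flattenTy below only show how nested family applications are
-- replaced by fresh skolems plus emitted equalities, e.g.
--   List (F (G Int))  ~~>  List a2   with   { G Int ~ a1, F a1 ~ a2 }.
module FlattenSketch where

import Control.Monad.State

data Ty = TyCon String [Ty]   -- ordinary constructor application
        | Fam String [Ty]     -- type-family application
        | Var String
  deriving Show

type Eqn = (Ty, String)       -- (family application, skolem standing in for it)

flattenTy :: Ty -> State (Int, [Eqn]) Ty
flattenTy (Var v)      = pure (Var v)
flattenTy (TyCon c ts) = TyCon c <$> mapM flattenTy ts
flattenTy (Fam f ts)   = do
  args <- mapM flattenTy ts               -- flatten the arguments first
  (n, eqns) <- get
  let sk = "a" ++ show (n + 1)            -- fresh flatten-skolem
  put (n + 1, eqns ++ [(Fam f args, sk)])
  pure (Var sk)

-- ghci> runState (flattenTy (TyCon "List" [Fam "F" [Fam "G" [TyCon "Int" []]]])) (0, [])
-- (TyCon "List" [Var "a2"],(2,[(Fam "G" [TyCon "Int" []],"a1"),(Fam "F" [Var "a1"],"a2")]))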
bitemyapp/ghc
compiler/typecheck/TcFlatten.hs
bsd-3-clause
57,104
47
27
16,268
5,029
2,711
2,318
-1
-1
{-# LANGUAGE DoAndIfThenElse #-} {-# LANGUAGE EmptyDataDecls #-} {-# LANGUAGE TypeFamilies #-} module Wavecore.ECDIS.SeaMap where import Control.Applicative import Data.Geo.TransverseMercator import Data.Geo.UTM import Data.Maybe import FRP.Sodium import Numeric.Units.Dimensional.TF.Prelude import qualified Prelude as P () import Wavecore.ECDIS.Controller newtype Coordinate = MkCoordinate (PlaneAngle Double, PlaneAngle Double) deriving (Eq) newtype UTMCoordinate = MkUTMCoordinate (Length Double, Length Double) deriving (Eq) newtype UTMZone = UTMZone Int deriving (Eq) newtype UTMZonedCoordinate = MkZonedCoordinate ( ((UTMZone, Bool), (PlaneAngle Double, Dimensionless Double)), UTMCoordinate) deriving (Eq) _utmForward :: Coordinate -> UTMZonedCoordinate _utmForward (MkCoordinate (lat, lon)) = let ((z',n), (TM x y conv scale)) = maybe (error "utmForward: no result") id $ utmForward' lat lon utm = MkUTMCoordinate (x, y) cs = (conv, scale) zn = (UTMZone z', n) in MkZonedCoordinate ((zn, cs), utm) _utmZonedForward :: UTMZone -> Coordinate -> UTMCoordinate _utmZonedForward (UTMZone z) (MkCoordinate (lat,lon)) = let (_, (TM x y _ _)) = maybe (error "utmZonedForward: no result") id $ utmZonedForward z lat lon in MkUTMCoordinate (x, y) _utmReverse :: Bool -> UTMZone -> UTMCoordinate -> Coordinate _utmReverse n (UTMZone z) (MkUTMCoordinate (x,y)) = MkCoordinate $ maybe (error "utmReverse: no result") id $ utmReverse z n x y -- -- SEA MAP -- data SeaMap data SeaMapNorthing = TrueNorth | MapNorth | HeadingNorth deriving (Eq) newtype SeaMapZoom = SeaMapZoom (Dimensionless Int) deriving (Eq) class SeaMapProjection proj where instance Controller SeaMap where data ControllerCommand SeaMap = SeaMapToggleInput | SeaMapSetNorthing SeaMapNorthing | SeaMapSetIntPos Coordinate deriving (Eq) data ControllerInput SeaMap = SeaMapInput { _smInPosition :: Behavior (Maybe Coordinate), _smInZoomFactor :: Behavior (SeaMapZoom), _smInMapSize :: Behavior (Dimensionless Double, Dimensionless Double), _smInPixelFactor :: Behavior (Length Double), _smInExtHeading :: Behavior (Maybe (PlaneAngle Double)) } data ControllerOutput SeaMap = SeaMapOutput { _smUTMForward :: Behavior (Coordinate -> UTMCoordinate), _smUTMReverse :: Behavior (UTMCoordinate -> Coordinate), _smExtPosAvail :: Behavior Bool, _smExtHeadingAvail :: Behavior Bool, _smCurrentPosition :: Behavior (Coordinate), _smCurrentPositionIsExternal :: Behavior Bool, _smZone :: Behavior (UTMZone), _smIsNorth :: Behavior Bool, _smMeridianConvergence :: Behavior (PlaneAngle Double), _smProjectionScale :: Behavior (Dimensionless Double), _smRotation :: Behavior (PlaneAngle Double), _smNorthing :: Behavior SeaMapNorthing, _smMapOrigin :: Behavior (Length Double, Length Double), _smMapDim :: Behavior (Length Double, Length Double) } newController i e = let (toggleInputE, setNorthingE, setIntPosE) = seaMapSplitInputEvents e extPosAvailE = fmap isJust $ updates (_smInPosition i) extHeadingAvailE = fmap isJust $ updates (_smInExtHeading i) in do -- external inputs extPosAvail <- hold False extPosAvailE extHeadingAvail <- hold False extHeadingAvailE -- the switching of position input intPos <- hold smDefaultCoord $ fmap (\(SeaMapSetIntPos p) -> p) setIntPosE (isExternal, pushIsExternal) <- newBehavior False let checkInputs :: () -> Reactive (Behavior Coordinate) checkInputs _ = do extAvail <- sample extPosAvail if (not extAvail) then (pushIsExternal False >> return intPos) else do isExt <- sample isExternal if (isExt) then (pushIsExternal False >> return intPos) else do 
pushIsExternal True let frJust = maybe (error "SeaMap Controller: no external pos") id return $ fmap frJust $ _smInPosition i switchPosSrcE :: Event (Behavior Coordinate) switchPosSrcE = execute $ fmap checkInputs $ let toggle = fmap (\_ -> ()) toggleInputE extLost = fmap (\_ -> ()) $ filterE (not) (updates extPosAvail) in toggle `merge` extLost posSrcSwitch <- hold intPos $ switchPosSrcE curPos <- switch posSrcSwitch -- position and map transformation let curPosUTMZoned' = fmap _utmForward curPos curPosUTMZoned = fmap (\(MkZonedCoordinate a) -> a) curPosUTMZoned' northp = fmap (snd.fst.fst) curPosUTMZoned zone = fmap (fst.fst.fst) curPosUTMZoned meridianConvergence = fmap (fst.snd.fst) curPosUTMZoned projectionScale = fmap (snd.snd.fst) curPosUTMZoned curPosUTM = fmap snd curPosUTMZoned utmRv = _utmReverse <$> northp <*> zone utmFw = _utmZonedForward <$> zone -- map viewport let pxZoom = liftA2 (\px (SeaMapZoom z) -> px * fmap fromIntegral z) (_smInPixelFactor i) (_smInZoomFactor i) mapWidth = liftA2 (*) pxZoom . fmap fst $ (_smInMapSize i) mapWidth2 = liftA2 (/) mapWidth (pure _2) mapHeight = liftA2 (*) pxZoom . fmap snd $ (_smInMapSize i) mapHeight2 = liftA2 (/) mapHeight (pure _2) mapDim = liftA2 (\a b -> (a,b)) mapWidth mapHeight mapOrigin = liftA3 (\(MkUTMCoordinate (x,y)) w2 h2 -> (x - w2, y - h2)) curPosUTM mapWidth2 mapHeight2 -- rotation / heading let mapNorth = pure $ 0 *~ degree trueNorth = liftA2 (-) mapNorth meridianConvergence headingLostE = fmap (\_ -> SeaMapSetNorthing MapNorth) $ filterE not . updates $ extHeadingAvail northingChange = headingLostE `merge` setNorthingE (northingSwitch, pushNorthingSwitch) <- newBehavior MapNorth let frJust = maybe (error "SeaMap Controller: no extern heading") id onNorthingChange (SeaMapSetNorthing n) = case n of TrueNorth -> do pushNorthingSwitch TrueNorth >> return trueNorth MapNorth -> do pushNorthingSwitch MapNorth >> return mapNorth HeadingNorth -> do extAvail <- sample extHeadingAvail if (extAvail) then do pushNorthingSwitch HeadingNorth return $ fmap frJust (_smInExtHeading i) else do pushNorthingSwitch MapNorth >> return mapNorth onNorthingChange _ = error "SeaMap Controller: unexpxted Northing event" northingSwitchToE = execute $ fmap onNorthingChange northingChange rotationSwitch <- hold mapNorth northingSwitchToE rotation <- switch rotationSwitch return $ SeaMapOutput { _smExtPosAvail = extPosAvail, _smExtHeadingAvail = extHeadingAvail, _smCurrentPosition = curPos, _smCurrentPositionIsExternal = isExternal, _smZone = zone, _smIsNorth = northp, _smMeridianConvergence = meridianConvergence, _smProjectionScale = projectionScale, _smRotation = rotation, _smNorthing = northingSwitch, _smUTMReverse = utmRv, _smUTMForward = utmFw, _smMapOrigin = mapOrigin, _smMapDim = mapDim } smDefaultCoord :: Coordinate smDefaultCoord = MkCoordinate (52.3 *~ degree, 7.1 *~ degree) seaMapSplitInputEvents :: Event (ControllerCommand SeaMap) -> ( Event (ControllerCommand SeaMap) , Event (ControllerCommand SeaMap) , Event (ControllerCommand SeaMap) ) seaMapSplitInputEvents e = let toggleInputE = filterE ((==) SeaMapToggleInput) e setNorthingE = filterE (\e' -> case e' of SeaMapSetNorthing _ -> True _ -> False) e setIntPosE = filterE (\e' -> case e' of SeaMapSetIntPos _ -> True _ -> False) e in (toggleInputE, setNorthingE, setIntPosE) instance Controller SeaMapZoom where data ControllerCommand SeaMapZoom = ZoomIn | ZoomOut data ControllerInput SeaMapZoom = SeaMapZoomInput { _initZoom :: SeaMapZoom, _zoomFactor :: (Dimensionless Int) } data 
ControllerOutput SeaMapZoom = SeaMapZoomOutput { _seaMapZoom :: Behavior SeaMapZoom } newController i e = let zoomF ZoomIn (SeaMapZoom z) = SeaMapZoom $ z - (_zoomFactor i) zoomF ZoomOut (SeaMapZoom z) = SeaMapZoom $ z + (_zoomFactor i) in do a <- accum (_initZoom i) (fmap zoomF e) return SeaMapZoomOutput { _seaMapZoom = a }
wavecorenautic/ecdis-client
src/Wavecore/ECDIS/SeaMap.hs
bsd-3-clause
9,216
0
26
2,789
2,439
1,278
1,161
-1
-1
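A minimal sketch of how the coordinate helpers in the SeaMap module above fit together. It assumes the module really exports the underscore-prefixed helpers (it has no explicit export list) and that the geodetic backend behind Data.Geo.UTM is usable; the latitude/longitude literal is just smDefaultCoord restated, and the Main wrapper is hypothetical.

module Main where

import Numeric.Units.Dimensional.TF.Prelude
import Wavecore.ECDIS.SeaMap

-- Project a geographic position to UTM and back, pattern matching on the
-- zoned result to recover the zone, hemisphere flag and easting/northing.
main :: IO ()
main = do
  let coord = MkCoordinate (52.3 *~ degree, 7.1 *~ degree)
      MkZonedCoordinate (((zone, northern), _convAndScale), utm) = _utmForward coord
      roundTripped = _utmReverse northern zone utm
  -- Only Eq is available on Coordinate here; floating-point noise is expected,
  -- so just report whether the round trip happened to be bit-exact.
  putStrLn (if roundTripped == coord
              then "round trip exact"
              else "round trip inexact (expected with floating point)")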
{- (c) Galois, 2006 (c) University of Glasgow, 2007 -} {-# LANGUAGE CPP, NondecreasingIndentation, RecordWildCards #-} module Coverage (addTicksToBinds, hpcInitCode) where #ifdef GHCI import qualified GHCi import GHCi.RemoteTypes import Data.Array import ByteCodeTypes import GHC.Stack.CCS #endif import Type import HsSyn import Module import Outputable import DynFlags import Control.Monad import SrcLoc import ErrUtils import NameSet hiding (FreeVars) import Name import Bag import CostCentre import CoreSyn import Id import VarSet import Data.List import FastString import HscTypes import TyCon import UniqSupply import BasicTypes import MonadUtils import Maybes import CLabel import Util import Data.Time import System.Directory import Trace.Hpc.Mix import Trace.Hpc.Util import Data.Map (Map) import qualified Data.Map as Map {- ************************************************************************ * * * The main function: addTicksToBinds * * ************************************************************************ -} addTicksToBinds :: HscEnv -> Module -> ModLocation -- ... off the current module -> NameSet -- Exported Ids. When we call addTicksToBinds, -- isExportedId doesn't work yet (the desugarer -- hasn't set it), so we have to work from this set. -> [TyCon] -- Type constructor in this module -> LHsBinds Id -> IO (LHsBinds Id, HpcInfo, Maybe ModBreaks) addTicksToBinds hsc_env mod mod_loc exports tyCons binds | let dflags = hsc_dflags hsc_env passes = coveragePasses dflags, not (null passes), Just orig_file <- ml_hs_file mod_loc = do if "boot" `isSuffixOf` orig_file then return (binds, emptyHpcInfo False, Nothing) else do us <- mkSplitUniqSupply 'C' -- for cost centres let orig_file2 = guessSourceFile binds orig_file tickPass tickish (binds,st) = let env = TTE { fileName = mkFastString orig_file2 , declPath = [] , tte_dflags = dflags , exports = exports , inlines = emptyVarSet , inScope = emptyVarSet , blackList = Map.fromList [ (getSrcSpan (tyConName tyCon),()) | tyCon <- tyCons ] , density = mkDensity tickish dflags , this_mod = mod , tickishType = tickish } (binds',_,st') = unTM (addTickLHsBinds binds) env st in (binds', st') initState = TT { tickBoxCount = 0 , mixEntries = [] , uniqSupply = us } (binds1,st) = foldr tickPass (binds, initState) passes let tickCount = tickBoxCount st entries = reverse $ mixEntries st hashNo <- writeMixEntries dflags mod tickCount entries orig_file2 modBreaks <- mkModBreaks hsc_env mod tickCount entries when (dopt Opt_D_dump_ticked dflags) $ log_action dflags dflags SevDump noSrcSpan defaultDumpStyle (pprLHsBinds binds1) return (binds1, HpcInfo tickCount hashNo, Just modBreaks) | otherwise = return (binds, emptyHpcInfo False, Nothing) guessSourceFile :: LHsBinds Id -> FilePath -> FilePath guessSourceFile binds orig_file = -- Try look for a file generated from a .hsc file to a -- .hs file, by peeking ahead. 
let top_pos = catMaybes $ foldrBag (\ (L pos _) rest -> srcSpanFileName_maybe pos : rest) [] binds in case top_pos of (file_name:_) | ".hsc" `isSuffixOf` unpackFS file_name -> unpackFS file_name _ -> orig_file mkModBreaks :: HscEnv -> Module -> Int -> [MixEntry_] -> IO ModBreaks #ifndef GHCI mkModBreaks _hsc_env _mod _count _entries = return emptyModBreaks #else mkModBreaks hsc_env mod count entries | HscInterpreted <- hscTarget (hsc_dflags hsc_env) = do breakArray <- GHCi.newBreakArray hsc_env (length entries) ccs <- mkCCSArray hsc_env mod count entries let locsTicks = listArray (0,count-1) [ span | (span,_,_,_) <- entries ] varsTicks = listArray (0,count-1) [ vars | (_,_,vars,_) <- entries ] declsTicks = listArray (0,count-1) [ decls | (_,decls,_,_) <- entries ] return emptyModBreaks { modBreaks_flags = breakArray , modBreaks_locs = locsTicks , modBreaks_vars = varsTicks , modBreaks_decls = declsTicks , modBreaks_ccs = ccs } | otherwise = return emptyModBreaks mkCCSArray :: HscEnv -> Module -> Int -> [MixEntry_] -> IO (Array BreakIndex (RemotePtr GHC.Stack.CCS.CostCentre)) mkCCSArray hsc_env modul count entries = do if interpreterProfiled dflags then do let module_str = moduleNameString (moduleName modul) costcentres <- GHCi.mkCostCentres hsc_env module_str (map mk_one entries) return (listArray (0,count-1) costcentres) else do return (listArray (0,-1) []) where dflags = hsc_dflags hsc_env mk_one (srcspan, decl_path, _, _) = (name, src) where name = concat (intersperse "." decl_path) src = showSDoc dflags (ppr srcspan) #endif writeMixEntries :: DynFlags -> Module -> Int -> [MixEntry_] -> FilePath -> IO Int writeMixEntries dflags mod count entries filename | not (gopt Opt_Hpc dflags) = return 0 | otherwise = do let hpc_dir = hpcDir dflags mod_name = moduleNameString (moduleName mod) hpc_mod_dir | moduleUnitId mod == mainUnitId = hpc_dir | otherwise = hpc_dir ++ "/" ++ unitIdString (moduleUnitId mod) tabStop = 8 -- <tab> counts as a normal char in GHC's -- location ranges. createDirectoryIfMissing True hpc_mod_dir modTime <- getModificationUTCTime filename let entries' = [ (hpcPos, box) | (span,_,_,box) <- entries, hpcPos <- [mkHpcPos span] ] when (length entries' /= count) $ do panic "the number of .mix entries are inconsistent" let hashNo = mixHash filename modTime tabStop entries' mixCreate hpc_mod_dir mod_name $ Mix filename modTime (toHash hashNo) tabStop entries' return hashNo -- ----------------------------------------------------------------------------- -- TickDensity: where to insert ticks data TickDensity = TickForCoverage -- for Hpc | TickForBreakPoints -- for GHCi | TickAllFunctions -- for -prof-auto-all | TickTopFunctions -- for -prof-auto-top | TickExportedFunctions -- for -prof-auto-exported | TickCallSites -- for stack tracing deriving Eq mkDensity :: TickishType -> DynFlags -> TickDensity mkDensity tickish dflags = case tickish of HpcTicks -> TickForCoverage SourceNotes -> TickForCoverage Breakpoints -> TickForBreakPoints ProfNotes -> case profAuto dflags of ProfAutoAll -> TickAllFunctions ProfAutoTop -> TickTopFunctions ProfAutoExports -> TickExportedFunctions ProfAutoCalls -> TickCallSites _other -> panic "mkDensity" -- | Decide whether to add a tick to a binding or not. shouldTickBind :: TickDensity -> Bool -- top level? -> Bool -- exported? -> Bool -- simple pat bind? -> Bool -- INLINE pragma? 
-> Bool shouldTickBind density top_lev exported _simple_pat inline = case density of TickForBreakPoints -> False -- we never add breakpoints to simple pattern bindings -- (there's always a tick on the rhs anyway). TickAllFunctions -> not inline TickTopFunctions -> top_lev && not inline TickExportedFunctions -> exported && not inline TickForCoverage -> True TickCallSites -> False shouldTickPatBind :: TickDensity -> Bool -> Bool shouldTickPatBind density top_lev = case density of TickForBreakPoints -> False TickAllFunctions -> True TickTopFunctions -> top_lev TickExportedFunctions -> False TickForCoverage -> False TickCallSites -> False -- ----------------------------------------------------------------------------- -- Adding ticks to bindings addTickLHsBinds :: LHsBinds Id -> TM (LHsBinds Id) addTickLHsBinds = mapBagM addTickLHsBind addTickLHsBind :: LHsBind Id -> TM (LHsBind Id) addTickLHsBind (L pos bind@(AbsBinds { abs_binds = binds, abs_exports = abs_exports })) = do withEnv add_exports $ do withEnv add_inlines $ do binds' <- addTickLHsBinds binds return $ L pos $ bind { abs_binds = binds' } where -- in AbsBinds, the Id on each binding is not the actual top-level -- Id that we are defining, they are related by the abs_exports -- field of AbsBinds. So if we're doing TickExportedFunctions we need -- to add the local Ids to the set of exported Names so that we know to -- tick the right bindings. add_exports env = env{ exports = exports env `extendNameSetList` [ idName mid | ABE{ abe_poly = pid, abe_mono = mid } <- abs_exports , idName pid `elemNameSet` (exports env) ] } add_inlines env = env{ inlines = inlines env `extendVarSetList` [ mid | ABE{ abe_poly = pid, abe_mono = mid } <- abs_exports , isAnyInlinePragma (idInlinePragma pid) ] } addTickLHsBind (L pos bind@(AbsBindsSig { abs_sig_bind = val_bind , abs_sig_export = poly_id })) | L _ FunBind { fun_id = L _ mono_id } <- val_bind = do withEnv (add_export mono_id) $ do withEnv (add_inlines mono_id) $ do val_bind' <- addTickLHsBind val_bind return $ L pos $ bind { abs_sig_bind = val_bind' } | otherwise = pprPanic "addTickLHsBind" (ppr bind) where -- see AbsBinds comments add_export mono_id env | idName poly_id `elemNameSet` exports env = env { exports = exports env `extendNameSet` idName mono_id } | otherwise = env add_inlines mono_id env | isAnyInlinePragma (idInlinePragma poly_id) = env { inlines = inlines env `extendVarSet` mono_id } | otherwise = env addTickLHsBind (L pos (funBind@(FunBind { fun_id = (L _ id) }))) = do let name = getOccString id decl_path <- getPathEntry density <- getDensity inline_ids <- liftM inlines getEnv let inline = isAnyInlinePragma (idInlinePragma id) || id `elemVarSet` inline_ids -- See Note [inline sccs] tickish <- tickishType `liftM` getEnv if inline && tickish == ProfNotes then return (L pos funBind) else do (fvs, mg@(MG { mg_alts = matches' })) <- getFreeVars $ addPathEntry name $ addTickMatchGroup False (fun_matches funBind) blackListed <- isBlackListed pos exported_names <- liftM exports getEnv -- We don't want to generate code for blacklisted positions -- We don't want redundant ticks on simple pattern bindings -- We don't want to tick non-exported bindings in TickExportedFunctions let simple = isSimplePatBind funBind toplev = null decl_path exported = idName id `elemNameSet` exported_names tick <- if not blackListed && shouldTickBind density toplev exported simple inline then bindTick density name pos fvs else return Nothing let mbCons = maybe Prelude.id (:) return $ L pos $ funBind { fun_matches = mg 
{ mg_alts = matches' } , fun_tick = tick `mbCons` fun_tick funBind } where -- a binding is a simple pattern binding if it is a funbind with -- zero patterns isSimplePatBind :: HsBind a -> Bool isSimplePatBind funBind = matchGroupArity (fun_matches funBind) == 0 -- TODO: Revisit this addTickLHsBind (L pos (pat@(PatBind { pat_lhs = lhs, pat_rhs = rhs }))) = do let name = "(...)" (fvs, rhs') <- getFreeVars $ addPathEntry name $ addTickGRHSs False False rhs let pat' = pat { pat_rhs = rhs'} -- Should create ticks here? density <- getDensity decl_path <- getPathEntry let top_lev = null decl_path if not (shouldTickPatBind density top_lev) then return (L pos pat') else do -- Allocate the ticks rhs_tick <- bindTick density name pos fvs let patvars = map getOccString (collectPatBinders lhs) patvar_ticks <- mapM (\v -> bindTick density v pos fvs) patvars -- Add to pattern let mbCons = maybe id (:) rhs_ticks = rhs_tick `mbCons` fst (pat_ticks pat') patvar_tickss = zipWith mbCons patvar_ticks (snd (pat_ticks pat') ++ repeat []) return $ L pos $ pat' { pat_ticks = (rhs_ticks, patvar_tickss) } -- Only internal stuff, not from source, uses VarBind, so we ignore it. addTickLHsBind var_bind@(L _ (VarBind {})) = return var_bind addTickLHsBind patsyn_bind@(L _ (PatSynBind {})) = return patsyn_bind bindTick :: TickDensity -> String -> SrcSpan -> FreeVars -> TM (Maybe (Tickish Id)) bindTick density name pos fvs = do decl_path <- getPathEntry let toplev = null decl_path count_entries = toplev || density == TickAllFunctions top_only = density /= TickAllFunctions box_label = if toplev then TopLevelBox [name] else LocalBox (decl_path ++ [name]) -- allocATickBox box_label count_entries top_only pos fvs -- Note [inline sccs] -- -- It should be reasonable to add ticks to INLINE functions; however -- currently this tickles a bug later on because the SCCfinal pass -- does not look inside unfoldings to find CostCentres. It would be -- difficult to fix that, because SCCfinal currently works on STG and -- not Core (and since it also generates CostCentres for CAFs, -- changing this would be difficult too). -- -- Another reason not to add ticks to INLINE functions is that this -- sometimes handy for avoiding adding a tick to a particular function -- (see #6131) -- -- So for now we do not add any ticks to INLINE functions at all. -- ----------------------------------------------------------------------------- -- Decorate an LHsExpr with ticks -- selectively add ticks to interesting expressions addTickLHsExpr :: LHsExpr Id -> TM (LHsExpr Id) addTickLHsExpr e@(L pos e0) = do d <- getDensity case d of TickForBreakPoints | isGoodBreakExpr e0 -> tick_it TickForCoverage -> tick_it TickCallSites | isCallSite e0 -> tick_it _other -> dont_tick_it where tick_it = allocTickBox (ExpBox False) False False pos $ addTickHsExpr e0 dont_tick_it = addTickLHsExprNever e -- Add a tick to an expression which is the RHS of an equation or a binding. -- We always consider these to be breakpoints, unless the expression is a 'let' -- (because the body will definitely have a tick somewhere). ToDo: perhaps -- we should treat 'case' and 'if' the same way? 
addTickLHsExprRHS :: LHsExpr Id -> TM (LHsExpr Id) addTickLHsExprRHS e@(L pos e0) = do d <- getDensity case d of TickForBreakPoints | HsLet{} <- e0 -> dont_tick_it | otherwise -> tick_it TickForCoverage -> tick_it TickCallSites | isCallSite e0 -> tick_it _other -> dont_tick_it where tick_it = allocTickBox (ExpBox False) False False pos $ addTickHsExpr e0 dont_tick_it = addTickLHsExprNever e -- The inner expression of an evaluation context: -- let binds in [], ( [] ) -- we never tick these if we're doing HPC, but otherwise -- we treat it like an ordinary expression. addTickLHsExprEvalInner :: LHsExpr Id -> TM (LHsExpr Id) addTickLHsExprEvalInner e = do d <- getDensity case d of TickForCoverage -> addTickLHsExprNever e _otherwise -> addTickLHsExpr e -- | A let body is treated differently from addTickLHsExprEvalInner -- above with TickForBreakPoints, because for breakpoints we always -- want to tick the body, even if it is not a redex. See test -- break012. This gives the user the opportunity to inspect the -- values of the let-bound variables. addTickLHsExprLetBody :: LHsExpr Id -> TM (LHsExpr Id) addTickLHsExprLetBody e@(L pos e0) = do d <- getDensity case d of TickForBreakPoints | HsLet{} <- e0 -> dont_tick_it | otherwise -> tick_it _other -> addTickLHsExprEvalInner e where tick_it = allocTickBox (ExpBox False) False False pos $ addTickHsExpr e0 dont_tick_it = addTickLHsExprNever e -- version of addTick that does not actually add a tick, -- because the scope of this tick is completely subsumed by -- another. addTickLHsExprNever :: LHsExpr Id -> TM (LHsExpr Id) addTickLHsExprNever (L pos e0) = do e1 <- addTickHsExpr e0 return $ L pos e1 -- general heuristic: expressions which do not denote values are good -- break points isGoodBreakExpr :: HsExpr Id -> Bool isGoodBreakExpr (HsApp {}) = True isGoodBreakExpr (OpApp {}) = True isGoodBreakExpr _other = False isCallSite :: HsExpr Id -> Bool isCallSite HsApp{} = True isCallSite OpApp{} = True isCallSite _ = False addTickLHsExprOptAlt :: Bool -> LHsExpr Id -> TM (LHsExpr Id) addTickLHsExprOptAlt oneOfMany (L pos e0) = ifDensity TickForCoverage (allocTickBox (ExpBox oneOfMany) False False pos $ addTickHsExpr e0) (addTickLHsExpr (L pos e0)) addBinTickLHsExpr :: (Bool -> BoxLabel) -> LHsExpr Id -> TM (LHsExpr Id) addBinTickLHsExpr boxLabel (L pos e0) = ifDensity TickForCoverage (allocBinTickBox boxLabel pos $ addTickHsExpr e0) (addTickLHsExpr (L pos e0)) -- ----------------------------------------------------------------------------- -- Decorate the body of an HsExpr with ticks. -- (Whether to put a tick around the whole expression was already decided, -- in the addTickLHsExpr family of functions.) addTickHsExpr :: HsExpr Id -> TM (HsExpr Id) addTickHsExpr e@(HsVar (L _ id)) = do freeVar id; return e addTickHsExpr (HsUnboundVar {}) = panic "addTickHsExpr.HsUnboundVar" addTickHsExpr e@(HsIPVar _) = return e addTickHsExpr e@(HsOverLit _) = return e addTickHsExpr e@(HsOverLabel _) = return e addTickHsExpr e@(HsLit _) = return e addTickHsExpr (HsLam matchgroup) = liftM HsLam (addTickMatchGroup True matchgroup) addTickHsExpr (HsLamCase ty mgs) = liftM (HsLamCase ty) (addTickMatchGroup True mgs) addTickHsExpr (HsApp e1 e2) = liftM2 HsApp (addTickLHsExprNever e1) e2' -- This might be a type application. Then don't put a tick around e2, -- or dsExpr won't recognize it as a type application any more (#11329). -- It doesn't make sense to put a tick on a type anyways. 
where e2' | isLHsTypeExpr e2 = return e2 | otherwise = addTickLHsExpr e2 addTickHsExpr (OpApp e1 e2 fix e3) = liftM4 OpApp (addTickLHsExpr e1) (addTickLHsExprNever e2) (return fix) (addTickLHsExpr e3) addTickHsExpr (NegApp e neg) = liftM2 NegApp (addTickLHsExpr e) (addTickSyntaxExpr hpcSrcSpan neg) addTickHsExpr (HsPar e) = liftM HsPar (addTickLHsExprEvalInner e) addTickHsExpr (SectionL e1 e2) = liftM2 SectionL (addTickLHsExpr e1) (addTickLHsExprNever e2) addTickHsExpr (SectionR e1 e2) = liftM2 SectionR (addTickLHsExprNever e1) (addTickLHsExpr e2) addTickHsExpr (ExplicitTuple es boxity) = liftM2 ExplicitTuple (mapM addTickTupArg es) (return boxity) addTickHsExpr (HsCase e mgs) = liftM2 HsCase (addTickLHsExpr e) -- not an EvalInner; e might not necessarily -- be evaluated. (addTickMatchGroup False mgs) addTickHsExpr (HsIf cnd e1 e2 e3) = liftM3 (HsIf cnd) (addBinTickLHsExpr (BinBox CondBinBox) e1) (addTickLHsExprOptAlt True e2) (addTickLHsExprOptAlt True e3) addTickHsExpr (HsMultiIf ty alts) = do { let isOneOfMany = case alts of [_] -> False; _ -> True ; alts' <- mapM (liftL $ addTickGRHS isOneOfMany False) alts ; return $ HsMultiIf ty alts' } addTickHsExpr (HsLet (L l binds) e) = bindLocals (collectLocalBinders binds) $ liftM2 (HsLet . L l) (addTickHsLocalBinds binds) -- to think about: !patterns. (addTickLHsExprLetBody e) addTickHsExpr (HsDo cxt (L l stmts) srcloc) = do { (stmts', _) <- addTickLStmts' forQual stmts (return ()) ; return (HsDo cxt (L l stmts') srcloc) } where forQual = case cxt of ListComp -> Just $ BinBox QualBinBox _ -> Nothing addTickHsExpr (ExplicitList ty wit es) = liftM3 ExplicitList (return ty) (addTickWit wit) (mapM (addTickLHsExpr) es) where addTickWit Nothing = return Nothing addTickWit (Just fln) = do fln' <- addTickSyntaxExpr hpcSrcSpan fln return (Just fln') addTickHsExpr (ExplicitPArr ty es) = liftM2 ExplicitPArr (return ty) (mapM (addTickLHsExpr) es) addTickHsExpr (HsStatic e) = HsStatic <$> addTickLHsExpr e addTickHsExpr expr@(RecordCon { rcon_flds = rec_binds }) = do { rec_binds' <- addTickHsRecordBinds rec_binds ; return (expr { rcon_flds = rec_binds' }) } addTickHsExpr expr@(RecordUpd { rupd_expr = e, rupd_flds = flds }) = do { e' <- addTickLHsExpr e ; flds' <- mapM addTickHsRecField flds ; return (expr { rupd_expr = e', rupd_flds = flds' }) } addTickHsExpr (ExprWithTySig e ty) = liftM2 ExprWithTySig (addTickLHsExprNever e) -- No need to tick the inner expression -- for expressions with signatures (return ty) addTickHsExpr (ArithSeq ty wit arith_seq) = liftM3 ArithSeq (return ty) (addTickWit wit) (addTickArithSeqInfo arith_seq) where addTickWit Nothing = return Nothing addTickWit (Just fl) = do fl' <- addTickSyntaxExpr hpcSrcSpan fl return (Just fl') -- We might encounter existing ticks (multiple Coverage passes) addTickHsExpr (HsTick t e) = liftM (HsTick t) (addTickLHsExprNever e) addTickHsExpr (HsBinTick t0 t1 e) = liftM (HsBinTick t0 t1) (addTickLHsExprNever e) addTickHsExpr (HsTickPragma _ _ _ (L pos e0)) = do e2 <- allocTickBox (ExpBox False) False False pos $ addTickHsExpr e0 return $ unLoc e2 addTickHsExpr (PArrSeq ty arith_seq) = liftM2 PArrSeq (return ty) (addTickArithSeqInfo arith_seq) addTickHsExpr (HsSCC src nm e) = liftM3 HsSCC (return src) (return nm) (addTickLHsExpr e) addTickHsExpr (HsCoreAnn src nm e) = liftM3 HsCoreAnn (return src) (return nm) (addTickLHsExpr e) addTickHsExpr e@(HsBracket {}) = return e addTickHsExpr e@(HsTcBracketOut {}) = return e addTickHsExpr e@(HsRnBracketOut {}) = return e addTickHsExpr e@(HsSpliceE {}) = return e 
addTickHsExpr (HsProc pat cmdtop) = liftM2 HsProc (addTickLPat pat) (liftL (addTickHsCmdTop) cmdtop) addTickHsExpr (HsWrap w e) = liftM2 HsWrap (return w) (addTickHsExpr e) -- Explicitly no tick on inside addTickHsExpr (ExprWithTySigOut e ty) = liftM2 ExprWithTySigOut (addTickLHsExprNever e) -- No need to tick the inner expression (return ty) -- for expressions with signatures -- Others should never happen in expression content. addTickHsExpr e = pprPanic "addTickHsExpr" (ppr e) addTickTupArg :: LHsTupArg Id -> TM (LHsTupArg Id) addTickTupArg (L l (Present e)) = do { e' <- addTickLHsExpr e ; return (L l (Present e')) } addTickTupArg (L l (Missing ty)) = return (L l (Missing ty)) addTickMatchGroup :: Bool{-is lambda-} -> MatchGroup Id (LHsExpr Id) -> TM (MatchGroup Id (LHsExpr Id)) addTickMatchGroup is_lam mg@(MG { mg_alts = L l matches }) = do let isOneOfMany = matchesOneOfMany matches matches' <- mapM (liftL (addTickMatch isOneOfMany is_lam)) matches return $ mg { mg_alts = L l matches' } addTickMatch :: Bool -> Bool -> Match Id (LHsExpr Id) -> TM (Match Id (LHsExpr Id)) addTickMatch isOneOfMany isLambda (Match mf pats opSig gRHSs) = bindLocals (collectPatsBinders pats) $ do gRHSs' <- addTickGRHSs isOneOfMany isLambda gRHSs return $ Match mf pats opSig gRHSs' addTickGRHSs :: Bool -> Bool -> GRHSs Id (LHsExpr Id) -> TM (GRHSs Id (LHsExpr Id)) addTickGRHSs isOneOfMany isLambda (GRHSs guarded (L l local_binds)) = do bindLocals binders $ do local_binds' <- addTickHsLocalBinds local_binds guarded' <- mapM (liftL (addTickGRHS isOneOfMany isLambda)) guarded return $ GRHSs guarded' (L l local_binds') where binders = collectLocalBinders local_binds addTickGRHS :: Bool -> Bool -> GRHS Id (LHsExpr Id) -> TM (GRHS Id (LHsExpr Id)) addTickGRHS isOneOfMany isLambda (GRHS stmts expr) = do (stmts',expr') <- addTickLStmts' (Just $ BinBox $ GuardBinBox) stmts (addTickGRHSBody isOneOfMany isLambda expr) return $ GRHS stmts' expr' addTickGRHSBody :: Bool -> Bool -> LHsExpr Id -> TM (LHsExpr Id) addTickGRHSBody isOneOfMany isLambda expr@(L pos e0) = do d <- getDensity case d of TickForCoverage -> addTickLHsExprOptAlt isOneOfMany expr TickAllFunctions | isLambda -> addPathEntry "\\" $ allocTickBox (ExpBox False) True{-count-} False{-not top-} pos $ addTickHsExpr e0 _otherwise -> addTickLHsExprRHS expr addTickLStmts :: (Maybe (Bool -> BoxLabel)) -> [ExprLStmt Id] -> TM [ExprLStmt Id] addTickLStmts isGuard stmts = do (stmts, _) <- addTickLStmts' isGuard stmts (return ()) return stmts addTickLStmts' :: (Maybe (Bool -> BoxLabel)) -> [ExprLStmt Id] -> TM a -> TM ([ExprLStmt Id], a) addTickLStmts' isGuard lstmts res = bindLocals (collectLStmtsBinders lstmts) $ do { lstmts' <- mapM (liftL (addTickStmt isGuard)) lstmts ; a <- res ; return (lstmts', a) } addTickStmt :: (Maybe (Bool -> BoxLabel)) -> Stmt Id (LHsExpr Id) -> TM (Stmt Id (LHsExpr Id)) addTickStmt _isGuard (LastStmt e noret ret) = do liftM3 LastStmt (addTickLHsExpr e) (pure noret) (addTickSyntaxExpr hpcSrcSpan ret) addTickStmt _isGuard (BindStmt pat e bind fail ty) = do liftM5 BindStmt (addTickLPat pat) (addTickLHsExprRHS e) (addTickSyntaxExpr hpcSrcSpan bind) (addTickSyntaxExpr hpcSrcSpan fail) (return ty) addTickStmt isGuard (BodyStmt e bind' guard' ty) = do liftM4 BodyStmt (addTick isGuard e) (addTickSyntaxExpr hpcSrcSpan bind') (addTickSyntaxExpr hpcSrcSpan guard') (return ty) addTickStmt _isGuard (LetStmt (L l binds)) = do liftM (LetStmt . 
L l) (addTickHsLocalBinds binds) addTickStmt isGuard (ParStmt pairs mzipExpr bindExpr ty) = do liftM4 ParStmt (mapM (addTickStmtAndBinders isGuard) pairs) (unLoc <$> addTickLHsExpr (L hpcSrcSpan mzipExpr)) (addTickSyntaxExpr hpcSrcSpan bindExpr) (return ty) addTickStmt isGuard (ApplicativeStmt args mb_join body_ty) = do args' <- mapM (addTickApplicativeArg isGuard) args return (ApplicativeStmt args' mb_join body_ty) addTickStmt isGuard stmt@(TransStmt { trS_stmts = stmts , trS_by = by, trS_using = using , trS_ret = returnExpr, trS_bind = bindExpr , trS_fmap = liftMExpr }) = do t_s <- addTickLStmts isGuard stmts t_y <- fmapMaybeM addTickLHsExprRHS by t_u <- addTickLHsExprRHS using t_f <- addTickSyntaxExpr hpcSrcSpan returnExpr t_b <- addTickSyntaxExpr hpcSrcSpan bindExpr L _ t_m <- addTickLHsExpr (L hpcSrcSpan liftMExpr) return $ stmt { trS_stmts = t_s, trS_by = t_y, trS_using = t_u , trS_ret = t_f, trS_bind = t_b, trS_fmap = t_m } addTickStmt isGuard stmt@(RecStmt {}) = do { stmts' <- addTickLStmts isGuard (recS_stmts stmt) ; ret' <- addTickSyntaxExpr hpcSrcSpan (recS_ret_fn stmt) ; mfix' <- addTickSyntaxExpr hpcSrcSpan (recS_mfix_fn stmt) ; bind' <- addTickSyntaxExpr hpcSrcSpan (recS_bind_fn stmt) ; return (stmt { recS_stmts = stmts', recS_ret_fn = ret' , recS_mfix_fn = mfix', recS_bind_fn = bind' }) } addTick :: Maybe (Bool -> BoxLabel) -> LHsExpr Id -> TM (LHsExpr Id) addTick isGuard e | Just fn <- isGuard = addBinTickLHsExpr fn e | otherwise = addTickLHsExprRHS e addTickApplicativeArg :: Maybe (Bool -> BoxLabel) -> (SyntaxExpr Id, ApplicativeArg Id Id) -> TM (SyntaxExpr Id, ApplicativeArg Id Id) addTickApplicativeArg isGuard (op, arg) = liftM2 (,) (addTickSyntaxExpr hpcSrcSpan op) (addTickArg arg) where addTickArg (ApplicativeArgOne pat expr) = ApplicativeArgOne <$> addTickLPat pat <*> addTickLHsExpr expr addTickArg (ApplicativeArgMany stmts ret pat) = ApplicativeArgMany <$> addTickLStmts isGuard stmts <*> (unLoc <$> addTickLHsExpr (L hpcSrcSpan ret)) <*> addTickLPat pat addTickStmtAndBinders :: Maybe (Bool -> BoxLabel) -> ParStmtBlock Id Id -> TM (ParStmtBlock Id Id) addTickStmtAndBinders isGuard (ParStmtBlock stmts ids returnExpr) = liftM3 ParStmtBlock (addTickLStmts isGuard stmts) (return ids) (addTickSyntaxExpr hpcSrcSpan returnExpr) addTickHsLocalBinds :: HsLocalBinds Id -> TM (HsLocalBinds Id) addTickHsLocalBinds (HsValBinds binds) = liftM HsValBinds (addTickHsValBinds binds) addTickHsLocalBinds (HsIPBinds binds) = liftM HsIPBinds (addTickHsIPBinds binds) addTickHsLocalBinds (EmptyLocalBinds) = return EmptyLocalBinds addTickHsValBinds :: HsValBindsLR Id a -> TM (HsValBindsLR Id b) addTickHsValBinds (ValBindsOut binds sigs) = liftM2 ValBindsOut (mapM (\ (rec,binds') -> liftM2 (,) (return rec) (addTickLHsBinds binds')) binds) (return sigs) addTickHsValBinds _ = panic "addTickHsValBinds" addTickHsIPBinds :: HsIPBinds Id -> TM (HsIPBinds Id) addTickHsIPBinds (IPBinds ipbinds dictbinds) = liftM2 IPBinds (mapM (liftL (addTickIPBind)) ipbinds) (return dictbinds) addTickIPBind :: IPBind Id -> TM (IPBind Id) addTickIPBind (IPBind nm e) = liftM2 IPBind (return nm) (addTickLHsExpr e) -- There is no location here, so we might need to use a context location?? addTickSyntaxExpr :: SrcSpan -> SyntaxExpr Id -> TM (SyntaxExpr Id) addTickSyntaxExpr pos syn@(SyntaxExpr { syn_expr = x }) = do L _ x' <- addTickLHsExpr (L pos x) return $ syn { syn_expr = x' } -- we do not walk into patterns. 
addTickLPat :: LPat Id -> TM (LPat Id) addTickLPat pat = return pat addTickHsCmdTop :: HsCmdTop Id -> TM (HsCmdTop Id) addTickHsCmdTop (HsCmdTop cmd tys ty syntaxtable) = liftM4 HsCmdTop (addTickLHsCmd cmd) (return tys) (return ty) (return syntaxtable) addTickLHsCmd :: LHsCmd Id -> TM (LHsCmd Id) addTickLHsCmd (L pos c0) = do c1 <- addTickHsCmd c0 return $ L pos c1 addTickHsCmd :: HsCmd Id -> TM (HsCmd Id) addTickHsCmd (HsCmdLam matchgroup) = liftM HsCmdLam (addTickCmdMatchGroup matchgroup) addTickHsCmd (HsCmdApp c e) = liftM2 HsCmdApp (addTickLHsCmd c) (addTickLHsExpr e) {- addTickHsCmd (OpApp e1 c2 fix c3) = liftM4 OpApp (addTickLHsExpr e1) (addTickLHsCmd c2) (return fix) (addTickLHsCmd c3) -} addTickHsCmd (HsCmdPar e) = liftM HsCmdPar (addTickLHsCmd e) addTickHsCmd (HsCmdCase e mgs) = liftM2 HsCmdCase (addTickLHsExpr e) (addTickCmdMatchGroup mgs) addTickHsCmd (HsCmdIf cnd e1 c2 c3) = liftM3 (HsCmdIf cnd) (addBinTickLHsExpr (BinBox CondBinBox) e1) (addTickLHsCmd c2) (addTickLHsCmd c3) addTickHsCmd (HsCmdLet (L l binds) c) = bindLocals (collectLocalBinders binds) $ liftM2 (HsCmdLet . L l) (addTickHsLocalBinds binds) -- to think about: !patterns. (addTickLHsCmd c) addTickHsCmd (HsCmdDo (L l stmts) srcloc) = do { (stmts', _) <- addTickLCmdStmts' stmts (return ()) ; return (HsCmdDo (L l stmts') srcloc) } addTickHsCmd (HsCmdArrApp e1 e2 ty1 arr_ty lr) = liftM5 HsCmdArrApp (addTickLHsExpr e1) (addTickLHsExpr e2) (return ty1) (return arr_ty) (return lr) addTickHsCmd (HsCmdArrForm e fix cmdtop) = liftM3 HsCmdArrForm (addTickLHsExpr e) (return fix) (mapM (liftL (addTickHsCmdTop)) cmdtop) addTickHsCmd (HsCmdWrap w cmd) = liftM2 HsCmdWrap (return w) (addTickHsCmd cmd) -- Others should never happen in a command context. --addTickHsCmd e = pprPanic "addTickHsCmd" (ppr e) addTickCmdMatchGroup :: MatchGroup Id (LHsCmd Id) -> TM (MatchGroup Id (LHsCmd Id)) addTickCmdMatchGroup mg@(MG { mg_alts = L l matches }) = do matches' <- mapM (liftL addTickCmdMatch) matches return $ mg { mg_alts = L l matches' } addTickCmdMatch :: Match Id (LHsCmd Id) -> TM (Match Id (LHsCmd Id)) addTickCmdMatch (Match mf pats opSig gRHSs) = bindLocals (collectPatsBinders pats) $ do gRHSs' <- addTickCmdGRHSs gRHSs return $ Match mf pats opSig gRHSs' addTickCmdGRHSs :: GRHSs Id (LHsCmd Id) -> TM (GRHSs Id (LHsCmd Id)) addTickCmdGRHSs (GRHSs guarded (L l local_binds)) = do bindLocals binders $ do local_binds' <- addTickHsLocalBinds local_binds guarded' <- mapM (liftL addTickCmdGRHS) guarded return $ GRHSs guarded' (L l local_binds') where binders = collectLocalBinders local_binds addTickCmdGRHS :: GRHS Id (LHsCmd Id) -> TM (GRHS Id (LHsCmd Id)) -- The *guards* are *not* Cmds, although the body is -- C.f. 
addTickGRHS for the BinBox stuff addTickCmdGRHS (GRHS stmts cmd) = do { (stmts',expr') <- addTickLStmts' (Just $ BinBox $ GuardBinBox) stmts (addTickLHsCmd cmd) ; return $ GRHS stmts' expr' } addTickLCmdStmts :: [LStmt Id (LHsCmd Id)] -> TM [LStmt Id (LHsCmd Id)] addTickLCmdStmts stmts = do (stmts, _) <- addTickLCmdStmts' stmts (return ()) return stmts addTickLCmdStmts' :: [LStmt Id (LHsCmd Id)] -> TM a -> TM ([LStmt Id (LHsCmd Id)], a) addTickLCmdStmts' lstmts res = bindLocals binders $ do lstmts' <- mapM (liftL addTickCmdStmt) lstmts a <- res return (lstmts', a) where binders = collectLStmtsBinders lstmts addTickCmdStmt :: Stmt Id (LHsCmd Id) -> TM (Stmt Id (LHsCmd Id)) addTickCmdStmt (BindStmt pat c bind fail ty) = do liftM5 BindStmt (addTickLPat pat) (addTickLHsCmd c) (return bind) (return fail) (return ty) addTickCmdStmt (LastStmt c noret ret) = do liftM3 LastStmt (addTickLHsCmd c) (pure noret) (addTickSyntaxExpr hpcSrcSpan ret) addTickCmdStmt (BodyStmt c bind' guard' ty) = do liftM4 BodyStmt (addTickLHsCmd c) (addTickSyntaxExpr hpcSrcSpan bind') (addTickSyntaxExpr hpcSrcSpan guard') (return ty) addTickCmdStmt (LetStmt (L l binds)) = do liftM (LetStmt . L l) (addTickHsLocalBinds binds) addTickCmdStmt stmt@(RecStmt {}) = do { stmts' <- addTickLCmdStmts (recS_stmts stmt) ; ret' <- addTickSyntaxExpr hpcSrcSpan (recS_ret_fn stmt) ; mfix' <- addTickSyntaxExpr hpcSrcSpan (recS_mfix_fn stmt) ; bind' <- addTickSyntaxExpr hpcSrcSpan (recS_bind_fn stmt) ; return (stmt { recS_stmts = stmts', recS_ret_fn = ret' , recS_mfix_fn = mfix', recS_bind_fn = bind' }) } addTickCmdStmt ApplicativeStmt{} = panic "ToDo: addTickCmdStmt ApplicativeLastStmt" -- Others should never happen in a command context. addTickCmdStmt stmt = pprPanic "addTickHsCmd" (ppr stmt) addTickHsRecordBinds :: HsRecordBinds Id -> TM (HsRecordBinds Id) addTickHsRecordBinds (HsRecFields fields dd) = do { fields' <- mapM addTickHsRecField fields ; return (HsRecFields fields' dd) } addTickHsRecField :: LHsRecField' id (LHsExpr Id) -> TM (LHsRecField' id (LHsExpr Id)) addTickHsRecField (L l (HsRecField id expr pun)) = do { expr' <- addTickLHsExpr expr ; return (L l (HsRecField id expr' pun)) } addTickArithSeqInfo :: ArithSeqInfo Id -> TM (ArithSeqInfo Id) addTickArithSeqInfo (From e1) = liftM From (addTickLHsExpr e1) addTickArithSeqInfo (FromThen e1 e2) = liftM2 FromThen (addTickLHsExpr e1) (addTickLHsExpr e2) addTickArithSeqInfo (FromTo e1 e2) = liftM2 FromTo (addTickLHsExpr e1) (addTickLHsExpr e2) addTickArithSeqInfo (FromThenTo e1 e2 e3) = liftM3 FromThenTo (addTickLHsExpr e1) (addTickLHsExpr e2) (addTickLHsExpr e3) liftL :: (Monad m) => (a -> m a) -> Located a -> m (Located a) liftL f (L loc a) = do a' <- f a return $ L loc a' data TickTransState = TT { tickBoxCount:: Int , mixEntries :: [MixEntry_] , uniqSupply :: UniqSupply } data TickTransEnv = TTE { fileName :: FastString , density :: TickDensity , tte_dflags :: DynFlags , exports :: NameSet , inlines :: VarSet , declPath :: [String] , inScope :: VarSet , blackList :: Map SrcSpan () , this_mod :: Module , tickishType :: TickishType } -- deriving Show data TickishType = ProfNotes | HpcTicks | Breakpoints | SourceNotes deriving (Eq) coveragePasses :: DynFlags -> [TickishType] coveragePasses dflags = ifa (hscTarget dflags == HscInterpreted) Breakpoints $ ifa (gopt Opt_Hpc dflags) HpcTicks $ ifa (gopt Opt_SccProfilingOn dflags && profAuto dflags /= NoProfAuto) ProfNotes $ ifa (debugLevel dflags > 0) SourceNotes [] where ifa f x xs | f = x:xs | otherwise = xs -- | Tickishs that only 
make sense when their source code location -- refers to the current file. This might not always be true due to -- LINE pragmas in the code - which would confuse at least HPC. tickSameFileOnly :: TickishType -> Bool tickSameFileOnly HpcTicks = True tickSameFileOnly _other = False type FreeVars = OccEnv Id noFVs :: FreeVars noFVs = emptyOccEnv -- Note [freevars] -- For breakpoints we want to collect the free variables of an -- expression for pinning on the HsTick. We don't want to collect -- *all* free variables though: in particular there's no point pinning -- on free variables that are will otherwise be in scope at the GHCi -- prompt, which means all top-level bindings. Unfortunately detecting -- top-level bindings isn't easy (collectHsBindsBinders on the top-level -- bindings doesn't do it), so we keep track of a set of "in-scope" -- variables in addition to the free variables, and the former is used -- to filter additions to the latter. This gives us complete control -- over what free variables we track. data TM a = TM { unTM :: TickTransEnv -> TickTransState -> (a,FreeVars,TickTransState) } -- a combination of a state monad (TickTransState) and a writer -- monad (FreeVars). instance Functor TM where fmap = liftM instance Applicative TM where pure a = TM $ \ _env st -> (a,noFVs,st) (<*>) = ap instance Monad TM where (TM m) >>= k = TM $ \ env st -> case m env st of (r1,fv1,st1) -> case unTM (k r1) env st1 of (r2,fv2,st2) -> (r2, fv1 `plusOccEnv` fv2, st2) instance HasDynFlags TM where getDynFlags = TM $ \ env st -> (tte_dflags env, noFVs, st) instance MonadUnique TM where getUniqueSupplyM = TM $ \_ st -> (uniqSupply st, noFVs, st) getUniqueM = TM $ \_ st -> let (u, us') = takeUniqFromSupply (uniqSupply st) in (u, noFVs, st { uniqSupply = us' }) getState :: TM TickTransState getState = TM $ \ _ st -> (st, noFVs, st) setState :: (TickTransState -> TickTransState) -> TM () setState f = TM $ \ _ st -> ((), noFVs, f st) getEnv :: TM TickTransEnv getEnv = TM $ \ env st -> (env, noFVs, st) withEnv :: (TickTransEnv -> TickTransEnv) -> TM a -> TM a withEnv f (TM m) = TM $ \ env st -> case m (f env) st of (a, fvs, st') -> (a, fvs, st') getDensity :: TM TickDensity getDensity = TM $ \env st -> (density env, noFVs, st) ifDensity :: TickDensity -> TM a -> TM a -> TM a ifDensity d th el = do d0 <- getDensity; if d == d0 then th else el getFreeVars :: TM a -> TM (FreeVars, a) getFreeVars (TM m) = TM $ \ env st -> case m env st of (a, fv, st') -> ((fv,a), fv, st') freeVar :: Id -> TM () freeVar id = TM $ \ env st -> if id `elemVarSet` inScope env then ((), unitOccEnv (nameOccName (idName id)) id, st) else ((), noFVs, st) addPathEntry :: String -> TM a -> TM a addPathEntry nm = withEnv (\ env -> env { declPath = declPath env ++ [nm] }) getPathEntry :: TM [String] getPathEntry = declPath `liftM` getEnv getFileName :: TM FastString getFileName = fileName `liftM` getEnv isGoodSrcSpan' :: SrcSpan -> Bool isGoodSrcSpan' pos@(RealSrcSpan _) = srcSpanStart pos /= srcSpanEnd pos isGoodSrcSpan' (UnhelpfulSpan _) = False isGoodTickSrcSpan :: SrcSpan -> TM Bool isGoodTickSrcSpan pos = do file_name <- getFileName tickish <- tickishType `liftM` getEnv let need_same_file = tickSameFileOnly tickish same_file = Just file_name == srcSpanFileName_maybe pos return (isGoodSrcSpan' pos && (not need_same_file || same_file)) ifGoodTickSrcSpan :: SrcSpan -> TM a -> TM a -> TM a ifGoodTickSrcSpan pos then_code else_code = do good <- isGoodTickSrcSpan pos if good then then_code else else_code bindLocals :: [Id] -> TM a -> TM a 
bindLocals new_ids (TM m) = TM $ \ env st -> case m env{ inScope = inScope env `extendVarSetList` new_ids } st of (r, fv, st') -> (r, fv `delListFromOccEnv` occs, st') where occs = [ nameOccName (idName id) | id <- new_ids ] isBlackListed :: SrcSpan -> TM Bool isBlackListed pos = TM $ \ env st -> case Map.lookup pos (blackList env) of Nothing -> (False,noFVs,st) Just () -> (True,noFVs,st) -- the tick application inherits the source position of its -- expression argument to support nested box allocations allocTickBox :: BoxLabel -> Bool -> Bool -> SrcSpan -> TM (HsExpr Id) -> TM (LHsExpr Id) allocTickBox boxLabel countEntries topOnly pos m = ifGoodTickSrcSpan pos (do (fvs, e) <- getFreeVars m env <- getEnv tickish <- mkTickish boxLabel countEntries topOnly pos fvs (declPath env) return (L pos (HsTick tickish (L pos e))) ) (do e <- m return (L pos e) ) -- the tick application inherits the source position of its -- expression argument to support nested box allocations allocATickBox :: BoxLabel -> Bool -> Bool -> SrcSpan -> FreeVars -> TM (Maybe (Tickish Id)) allocATickBox boxLabel countEntries topOnly pos fvs = ifGoodTickSrcSpan pos (do let mydecl_path = case boxLabel of TopLevelBox x -> x LocalBox xs -> xs _ -> panic "allocATickBox" tickish <- mkTickish boxLabel countEntries topOnly pos fvs mydecl_path return (Just tickish) ) (return Nothing) mkTickish :: BoxLabel -> Bool -> Bool -> SrcSpan -> OccEnv Id -> [String] -> TM (Tickish Id) mkTickish boxLabel countEntries topOnly pos fvs decl_path = do let ids = filter (not . isUnliftedType . idType) $ occEnvElts fvs -- unlifted types cause two problems here: -- * we can't bind them at the GHCi prompt -- (bindLocalsAtBreakpoint already fliters them out), -- * the simplifier might try to substitute a literal for -- the Id, and we can't handle that. me = (pos, decl_path, map (nameOccName.idName) ids, boxLabel) cc_name | topOnly = head decl_path | otherwise = concat (intersperse "." decl_path) dflags <- getDynFlags env <- getEnv case tickishType env of HpcTicks -> do c <- liftM tickBoxCount getState setState $ \st -> st { tickBoxCount = c + 1 , mixEntries = me : mixEntries st } return $ HpcTick (this_mod env) c ProfNotes -> do ccUnique <- getUniqueM let cc = mkUserCC (mkFastString cc_name) (this_mod env) pos ccUnique count = countEntries && gopt Opt_ProfCountEntries dflags return $ ProfNote cc count True{-scopes-} Breakpoints -> do c <- liftM tickBoxCount getState setState $ \st -> st { tickBoxCount = c + 1 , mixEntries = me:mixEntries st } return $ Breakpoint c ids SourceNotes | RealSrcSpan pos' <- pos -> return $ SourceNote pos' cc_name _otherwise -> panic "mkTickish: bad source span!" allocBinTickBox :: (Bool -> BoxLabel) -> SrcSpan -> TM (HsExpr Id) -> TM (LHsExpr Id) allocBinTickBox boxLabel pos m = do env <- getEnv case tickishType env of HpcTicks -> do e <- liftM (L pos) m ifGoodTickSrcSpan pos (mkBinTickBoxHpc boxLabel pos e) (return e) _other -> allocTickBox (ExpBox False) False False pos m mkBinTickBoxHpc :: (Bool -> BoxLabel) -> SrcSpan -> LHsExpr Id -> TM (LHsExpr Id) mkBinTickBoxHpc boxLabel pos e = TM $ \ env st -> let meT = (pos,declPath env, [],boxLabel True) meF = (pos,declPath env, [],boxLabel False) meE = (pos,declPath env, [],ExpBox False) c = tickBoxCount st mes = mixEntries st in ( L pos $ HsTick (HpcTick (this_mod env) c) $ L pos $ HsBinTick (c+1) (c+2) e -- notice that F and T are reversed, -- because we are building the list in -- reverse... 
, noFVs , st {tickBoxCount=c+3 , mixEntries=meF:meT:meE:mes} ) mkHpcPos :: SrcSpan -> HpcPos mkHpcPos pos@(RealSrcSpan s) | isGoodSrcSpan' pos = toHpcPos (srcSpanStartLine s, srcSpanStartCol s, srcSpanEndLine s, srcSpanEndCol s - 1) -- the end column of a SrcSpan is one -- greater than the last column of the -- span (see SrcLoc), whereas HPC -- expects to the column range to be -- inclusive, hence we subtract one above. mkHpcPos _ = panic "bad source span; expected such spans to be filtered out" hpcSrcSpan :: SrcSpan hpcSrcSpan = mkGeneralSrcSpan (fsLit "Haskell Program Coverage internals") matchesOneOfMany :: [LMatch Id body] -> Bool matchesOneOfMany lmatches = sum (map matchCount lmatches) > 1 where matchCount (L _ (Match _ _pats _ty (GRHSs grhss _binds))) = length grhss type MixEntry_ = (SrcSpan, [String], [OccName], BoxLabel) -- For the hash value, we hash everything: the file name, -- the timestamp of the original source file, the tab stop, -- and the mix entries. We cheat, and hash the show'd string. -- This hash only has to be hashed at Mix creation time, -- and is for sanity checking only. mixHash :: FilePath -> UTCTime -> Int -> [MixEntry] -> Int mixHash file tm tabstop entries = fromIntegral $ hashString (show $ Mix file tm 0 tabstop entries) {- ************************************************************************ * * * initialisation * * ************************************************************************ Each module compiled with -fhpc declares an initialisation function of the form `hpc_init_<module>()`, which is emitted into the _stub.c file and annotated with __attribute__((constructor)) so that it gets executed at startup time. The function's purpose is to call hs_hpc_module to register this module with the RTS, and it looks something like this: static void hpc_init_Main(void) __attribute__((constructor)); static void hpc_init_Main(void) {extern StgWord64 _hpc_tickboxes_Main_hpc[]; hs_hpc_module("Main",8,1150288664,_hpc_tickboxes_Main_hpc);} -} hpcInitCode :: Module -> HpcInfo -> SDoc hpcInitCode _ (NoHpcInfo {}) = Outputable.empty hpcInitCode this_mod (HpcInfo tickCount hashNo) = vcat [ text "static void hpc_init_" <> ppr this_mod <> text "(void) __attribute__((constructor));" , text "static void hpc_init_" <> ppr this_mod <> text "(void)" , braces (vcat [ text "extern StgWord64 " <> tickboxes <> text "[]" <> semi, text "hs_hpc_module" <> parens (hcat (punctuate comma [ doubleQuotes full_name_str, int tickCount, -- really StgWord32 int hashNo, -- really StgWord32 tickboxes ])) <> semi ]) ] where tickboxes = ppr (mkHpcTicksLabel $ this_mod) module_name = hcat (map (text.charToC) $ bytesFS (moduleNameFS (Module.moduleName this_mod))) package_name = hcat (map (text.charToC) $ bytesFS (unitIdFS (moduleUnitId this_mod))) full_name_str | moduleUnitId this_mod == mainUnitId = module_name | otherwise = package_name <> char '/' <> module_name
nushio3/ghc
compiler/deSugar/Coverage.hs
bsd-3-clause
51,902
0
25
15,411
13,547
6,849
6,698
952
8
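The TM type near the end of the Coverage module above is described as "a combination of a state monad (TickTransState) and a writer monad (FreeVars)", threaded under a reader-style environment. The sketch below restates that pattern with hypothetical, simplified type parameters (r/w/s rather than GHC's TickTransEnv/FreeVars/TickTransState) so the plumbing in the Monad instance is easier to follow; it is not GHC code.

-- A hand-rolled reader+writer+state monad in the same shape as TM:
-- a function from environment and state to (result, output, new state).
newtype RWS r w s a = RWS { runRWS :: r -> s -> (a, w, s) }

instance Functor (RWS r w s) where
  fmap f (RWS m) = RWS $ \r s ->
    let (a, w, s') = m r s in (f a, w, s')

instance Monoid w => Applicative (RWS r w s) where
  pure a = RWS $ \_ s -> (a, mempty, s)
  RWS mf <*> RWS ma = RWS $ \r s ->
    let (f, w1, s1) = mf r s
        (a, w2, s2) = ma r s1
    in (f a, w1 `mappend` w2, s2)

instance Monoid w => Monad (RWS r w s) where
  RWS m >>= k = RWS $ \r s ->
    let (a, w1, s1) = m r s
        (b, w2, s2) = runRWS (k a) r s1
    in (b, w1 `mappend` w2, s2)

-- Analogues of getEnv, setState and freeVar from the module above.
askEnv :: Monoid w => RWS r w s r
askEnv = RWS $ \r s -> (r, mempty, s)

modifyState :: Monoid w => (s -> s) -> RWS r w s ()
modifyState f = RWS $ \_ s -> ((), mempty, f s)

emit :: w -> RWS r w s ()
emit w = RWS $ \_ s -> ((), w, s)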
module Main where import Types import RegexPattern import System.Environment import Data.String import Text.Regex.Posix import qualified Data.Text as T (splitOn, unpack) import Data.Maybe import System.IO import qualified System.IO as S import System.Process {- *# ADD #* Add to Todo.txt *# REMOVE #* Remove from Todo.txt, but place entry in backup file todo.backup.txt *# VIEW #* Default. Print in readable format. TODO: (pun not intended) Priority set for listed items, Completely edit an item. Written by Frank Hucek -} todoFile :: String todoFile = "/home/frank/bin_storage/Todo.txt" main :: IO () main = do x <- getArgs mainThrow x mainThrow :: [String] -> IO () mainThrow [] = viewTodoList [] mainThrow (option:xs) = do case option of "add" -> addItem xs "remove" -> removeItem xs _ -> viewTodoList xs -- sort removeItem :: [String] -> IO () removeItem [] = putStrLn "Please specify number of item you wish to remove" removeItem (x:_) = do case readMaybe x :: Maybe Int of Nothing -> removeItem [] Just itemNum -> removeFromFile itemNum addItem :: [String] -> IO () addItem inputItem = do let input = argOp inputItem =~ regexPattern :: String case input of "" -> putStrLn "Failed to match input pattern" _ -> appendFile todoFile (input ++ "\n") argOp :: [String] -> String argOp xs = init $ foldl (++) "" $ map (++ " ") xs -- map a space to end of each string in list -- concatenate list of strings into 1 string -- take new string - last character b/c last char is a whitespace -- input can now be checked against regular expression readMaybe :: Read a => String -> Maybe a readMaybe s = case reads s of [(val, "")] -> Just val _ -> Nothing -- READ, WRITE, APPEND, REMOVE operations on file -- File and user IO uses regex pattern. convert to Item type in program removeFromFile :: Int -> IO () removeFromFile x = do file <- readFile todoFile let xs = lines file (a, b) = splitAt x xs itemList = (init a) ++ b items = unlines itemList newTodoFile = todoFile ++ ".new" writeFile newTodoFile items _ <- createProcess (proc "mv" [newTodoFile, todoFile]) -- SUPER jank, temporary fix to lazy eval here return () -- removes indices even when typing in the wrong number viewTodoList :: [String] -> IO () viewTodoList _ = do (_, Just hout, _, _) <- createProcess (proc "cal" []) {std_out = CreatePipe} cal <- hGetContents hout putStrLn cal hClose hout putStrLn $ "\tPRIOR.\tDESCRIPTION" file <- readFile todoFile let items = fmap (displayItem . patternToItem) $ lines file -- [String] printTodoList items --putStrLn cal printTodoList = printTodo 1 printTodo :: Int -> [String] -> IO () printTodo _ [] = return () printTodo i (x:xs) = do putStrLn $ show i ++ ")\t" ++ x printTodo (i + 1) xs
frankhucek/Todo
app/Main.hs
bsd-3-clause
2,999
0
13
780
828
420
408
69
3
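argOp in the Todo app above rebuilds the user's words into one string by appending a space to each word, concatenating, and dropping the final character with init; on an empty argument list that init would fail. The standalone check below copies argOp verbatim so it compiles on its own; the comparison against intercalate is only an assumption about the intended behaviour, not something stated in the app.

module Main where

import Data.List (intercalate)

-- Copied from the record above so this sketch is self-contained.
argOp :: [String] -> String
argOp xs = init $ foldl (++) "" $ map (++ " ") xs

main :: IO ()
main = do
  -- On non-empty input argOp behaves like intercalate " ":
  print (argOp ["buy", "milk"] == intercalate " " ["buy", "milk"])   -- True
  -- But argOp [] reduces to init "", which throws; intercalate has no such
  -- edge case, so the "add" branch would need to guard against empty input.
  print (intercalate " " ([] :: [String]))                           -- ""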
{-# LANGUAGE NoImplicitPrelude , ScopedTypeVariables , UnicodeSyntax #-} module System.FTDI.Utils.Properties where -- base import Control.Monad ( (>>) ) import Data.Bool ( otherwise ) import Data.Function ( ($) ) import Data.Ord ( Ord ) import Prelude ( Integral, RealFrac, Fractional, Double , Bounded, minBound, maxBound , fromInteger, toInteger, fromIntegral , (+), abs, mod, ceiling, div ) -- base-unicode import Data.Bool.Unicode ( (∧) ) import Data.Eq.Unicode ( (≡), (≢) ) import Data.Ord.Unicode ( (≤), (≥) ) import Prelude.Unicode ( (⋅), (÷) ) -- ftdi import System.FTDI.Utils ( clamp, divRndUp ) -- QuickCheck import Test.QuickCheck ( Property, (==>) ) ------------------------------------------------------------------------------- prop_divRndUp_min ∷ Integral α ⇒ α → α → Property prop_divRndUp_min x y = y ≢ 0 ==> let d = divRndUp x (abs y) d' = toInteger d y' = toInteger y x' = toInteger x in d' ⋅ abs y' ≥ x' prop_divRndUp_max ∷ Integral α ⇒ α → α → Property prop_divRndUp_max x y = y ≢ 0 ==> let d = divRndUp x y in x `div` y ≤ d prop_divRndUp_ceilFrac ∷ Integral α ⇒ α → α → Property prop_divRndUp_ceilFrac x y = y ≢ 0 ==> let x' = fromIntegral x ∷ Double y' = fromIntegral y ∷ Double in divRndUp x y ≡ ceilFrac x' y' prop_divRndUp2 ∷ Integral α ⇒ α → α → Property prop_divRndUp2 x y = y ≢ 0 ==> divRndUp x y ≡ divRndUp2 x y prop_clamp ∷ ∀ α. (Bounded α, Ord α) ⇒ α → Property prop_clamp x = (minBound ∷ α) ≤ (maxBound ∷ α) ==> minBound ≤ cx ∧ cx ≤ maxBound where cx = clamp x ------------------------------------------------------------------------------- ceilFrac ∷ (Fractional α, RealFrac α, Integral β) ⇒ α → α → β ceilFrac x y = ceiling $ x ÷ y divRndUp2 ∷ Integral α ⇒ α → α → α divRndUp2 x y = let r | mod x y ≡ 0 = 0 | otherwise = 1 in div x y + r
roelvandijk/ftdi
System/FTDI/Utils/Properties.hs
bsd-3-clause
2,166
0
13
587
702
388
314
46
1
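The properties above are polymorphic, so a test driver has to pin them to concrete types before handing them to QuickCheck. A minimal, hypothetical driver (not part of the ftdi package itself) might look like this:

module Main where

import Data.Word (Word8)
import Test.QuickCheck (Property, quickCheck)

import System.FTDI.Utils.Properties

main :: IO ()
main = do
  -- divRndUp never undershoots and agrees with ceiling on a Double division.
  quickCheck (prop_divRndUp_min      :: Int -> Int -> Property)
  quickCheck (prop_divRndUp_ceilFrac :: Int -> Int -> Property)
  -- clamp stays within the bounds of a small bounded type.
  quickCheck (prop_clamp             :: Word8 -> Property)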
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} module PrettyPrint where import Data.Function ((&)) import Data.Monoid ((<>)) import Data.String.Here import Data.Text (Text) import qualified Data.Text as T import Hexon import Hexon.Types format_id :: Either String RID -> Text format_id (Left _) = "meh." format_id (Right result) = [i| ${top} ${banner_top} ${banner_name} ${banner_bottom} • By ${username} • ${date} ${display_comments} |] where msg = result |> rid_message name = result |> rid_info |> info_name username = result |> rid_info |> info_username date = result |> rid_info |> info_date comments = result |> rid_info |> info_comments pre :: Text pre = case msg of "error" -> "Item not available :(" "success" -> "*Item available!*" _ -> "" nlength = T.length name + 2 top = "\ESC[1m" <> pre <> "\STX" banner_top = "┌" <> (T.replicate nlength "─") <> "┐" banner_name = "│ " <> name <> " │" banner_bottom = "└" <> (T.replicate nlength "─") <> "┘" display_comments = fmap ("• " <>) (T.splitOn "\n" comments) -- format_add :: Either String RItem -> Text -- format_add (Left _) = "meh." -- format_add (Right result) = undefined -- format_del :: Either String RID -> Text -- format_del (Left _) = "meh." -- format_del (Right result) = undefined -- format_auth :: Either String RAuth -> Text -- format_auth (Left _) = "meh." -- format_auth (Right result) = undefined -- format_comment :: Either String RComment -> Text -- format_comment (Left _) = "meh" -- format_comment (Right result) = undefined
tchoutri/Hexon
src/PrettyPrint.hs
bsd-3-clause
1,934
0
11
636
325
188
137
31
3
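The interpolated banner_top/banner_name/banner_bottom fields in format_id above draw a box around the item name sized to its length. Pulled out of the template, the same construction looks like the helper below; the banner function and the Main wrapper are a hypothetical extraction, not part of the Hexon code base.

{-# LANGUAGE OverloadedStrings #-}
module Main where

import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO

-- Draw the same three-line box that format_id interpolates around the name.
banner :: Text -> Text
banner name = T.unlines [top, middle, bottom]
  where
    width  = T.length name + 2
    top    = "┌" <> T.replicate width "─" <> "┐"
    middle = "│ " <> name <> " │"
    bottom = "└" <> T.replicate width "─" <> "┘"

main :: IO ()
main = TIO.putStr (banner "Item available!")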
module Wallet.Inductive ( -- * Wallet events WalletEvent(..) , walletEventIsRollback -- * Inductive wallets , Inductive(..) , uptoFirstRollback , inductiveInit ) where import Universum import qualified Data.List as List import qualified Data.Set as Set import Formatting (bprint, build, (%)) import qualified Formatting.Buildable import Pos.Core.Chrono import Serokell.Util (listJson) import UTxO.DSL import UTxO.Util {------------------------------------------------------------------------------- Wallet events -------------------------------------------------------------------------------} -- | Wallet event data WalletEvent h a = -- | Inform the wallet of a new block added to the blockchain ApplyBlock (Block h a) -- | Submit a new transaction to the wallet to be included in the blockchain | NewPending (Transaction h a) -- | Roll back the last block added to the blockchain | Rollback walletEventIsRollback :: WalletEvent h a -> Bool walletEventIsRollback Rollback = True walletEventIsRollback _ = False {------------------------------------------------------------------------------- Inductive wallets -------------------------------------------------------------------------------} -- | Inductive definition of a wallet data Inductive h a = Inductive { -- | Bootstrap transaction inductiveBoot :: Transaction h a -- | Addresses that belong to the wallet , inductiveOurs :: Set a -- | Wallet events , inductiveEvents :: OldestFirst [] (WalletEvent h a) } -- | The prefix of the 'Inductive' that doesn't include any rollbacks uptoFirstRollback :: Inductive h a -> Inductive h a uptoFirstRollback i@Inductive{..} = i { inductiveEvents = liftOldestFirst (takeWhile notRollback) inductiveEvents } where notRollback = not . walletEventIsRollback inductiveInit :: forall h a. Inductive h a -> Inductive h a inductiveInit i@Inductive{..} = i { inductiveEvents = liftOldestFirst List.init inductiveEvents } {------------------------------------------------------------------------------- Pretty-printing -------------------------------------------------------------------------------} instance (Hash h a, Buildable a) => Buildable (OldestFirst [] (WalletEvent h a)) where build = bprint listJson . getOldestFirst instance (Hash h a, Buildable a) => Buildable (WalletEvent h a) where build (ApplyBlock b) = bprint ("ApplyBlock " % build) b build (NewPending t) = bprint ("NewPending " % build) t build Rollback = bprint "Rollback" instance (Hash h a, Buildable a) => Buildable (Inductive h a) where build Inductive{..} = bprint ( "Inductive" % "{ boot: " % build % ", ours: " % listJson % ", events: " % build % "}" ) inductiveBoot (Set.toList inductiveOurs) inductiveEvents
input-output-hk/pos-haskell-prototype
wallet/test/unit/Wallet/Inductive.hs
mit
2,930
0
14
602
598
332
266
-1
-1
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

-- | Chronological sequences.
module Test.Pos.Core.Chrono
       (
       ) where

import Pos.Core.Chrono

import Test.QuickCheck (Arbitrary)

deriving instance Arbitrary (f a) => Arbitrary (NewestFirst f a)
deriving instance Arbitrary (f a) => Arbitrary (OldestFirst f a)
input-output-hk/pos-haskell-prototype
core/test/Test/Pos/Core/Chrono.hs
mit
405
0
8
92
89
50
39
-1
-1
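The module above exports nothing; it exists purely so that importing it brings the orphan Arbitrary instances into scope. A hypothetical snippet using them follows; it assumes OldestFirst exports its constructor and getOldestFirst field, as the Wallet.Inductive record earlier in this collection suggests.

module Main where

import Pos.Core.Chrono (OldestFirst (..))
import Test.Pos.Core.Chrono ()   -- imported only for the orphan instances
import Test.QuickCheck (Gen, arbitrary, generate)

main :: IO ()
main = do
  -- Draw one random chronological sequence of Ints and report its length.
  xs <- generate (arbitrary :: Gen (OldestFirst [] Int))
  print (length (getOldestFirst xs))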
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RankNTypes #-} -- | -- Module : Network.AWS.Waiter -- Copyright : (c) 2013-2015 Brendan Hay -- License : This Source Code Form is subject to the terms of -- Maintainer : Brendan Hay <[email protected]> -- Stability : provisional -- Portability : non-portable (GHC extensions) -- module Network.AWS.Waiter ( -- * Types Acceptor , Accept (..) , Wait (..) -- * Acceptors , accept -- * Matchers , matchAll , matchAny , matchError , matchStatus -- * Util , nonEmpty ) where import Control.Applicative import Control.Lens import Data.Maybe import Data.Text (Text) import qualified Data.Text as Text import Network.AWS.Data.ByteString import Network.AWS.Data.Log import Network.AWS.Error import Network.AWS.Types import Prelude type Acceptor a = Request a -> Either Error (Response a) -> Maybe Accept data Accept = AcceptSuccess | AcceptFailure | AcceptRetry deriving (Eq, Show) instance ToLog Accept where build = \case AcceptSuccess -> "Success" AcceptFailure -> "Failure" AcceptRetry -> "Retry" -- | Timing and acceptance criteria to check fulfillment of a remote operation. data Wait a = Wait { _waitName :: ByteString , _waitAttempts :: !Int , _waitDelay :: !Seconds , _waitAcceptors :: [Acceptor a] } accept :: Wait a -> Acceptor a accept w rq rs = listToMaybe . mapMaybe (\f -> f rq rs) $ _waitAcceptors w matchAll :: Eq b => b -> Accept -> Fold (Rs a) b -> Acceptor a matchAll x a l = match (allOf l (== x)) a matchAny :: Eq b => b -> Accept -> Fold (Rs a) b -> Acceptor a matchAny x a l = match (anyOf l (== x)) a matchStatus :: Int -> Accept -> Acceptor a matchStatus x a _ = \case Right (s, _) | x == fromEnum s -> Just a Left e | Just x == (fromEnum <$> e ^? httpStatus) -> Just a _ -> Nothing matchError :: ErrorCode -> Accept -> Acceptor a matchError c a _ = \case Left e | Just c == e ^? _ServiceError . serviceCode -> Just a _ -> Nothing match :: (Rs a -> Bool) -> Accept -> Acceptor a match f a _ = \case Right (_, rs) | f rs -> Just a _ -> Nothing nonEmpty :: Fold a Text -> Fold a Bool nonEmpty l = l . to Text.null
olorin/amazonka
core/src/Network/AWS/Waiter.hs
mpl-2.0
2,640
0
14
887
723
384
339
-1
-1
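The Wait/Acceptor machinery above is easiest to see in the shape of a concrete waiter definition, so a hypothetical one follows. MyRequest is only a placeholder (a real waiter would be indexed by a generated request type from a service binding), and the sketch assumes the ByteString name literal via OverloadedStrings and a numeric literal for the Seconds delay, in the style the generated service waiters use; none of that is guaranteed by the module above.

{-# LANGUAGE OverloadedStrings #-}
module WaiterDemo where

import Network.AWS.Waiter

-- Placeholder request type; a real waiter would use an actual operation
-- type such as a DescribeInstances request.
data MyRequest = MyRequest

-- Poll up to 40 times, 15 seconds apart, succeeding on HTTP 200 and
-- retrying on 404; a response matching neither acceptor makes 'accept'
-- return Nothing and is left to the caller's retry loop.
myThingReady :: Wait MyRequest
myThingReady = Wait
    { _waitName      = "MyThingReady"
    , _waitAttempts  = 40
    , _waitDelay     = 15
    , _waitAcceptors =
        [ matchStatus 200 AcceptSuccess
        , matchStatus 404 AcceptRetry
        ]
    }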
-- | -- Module: BDCS.Builds -- Copyright: (c) 2016-2017 Red Hat, Inc. -- License: LGPL -- -- Maintainer: https://github.com/weldr -- Stability: alpha -- Portability: portable -- -- Utilities for working with database-related exceptions. module BDCS.Exceptions(DBException(..), isBadNameException, isDBExceptionException, isMissingRPMTagException, throwIfNothing, throwIfNothingOtherwise) where import Control.Exception(Exception, throw) import Data.Data(Typeable) -- | A general purpose exception type for dealing with things that go wrong when working -- with the database. This type could grow into a more complex system in the future, if -- needed. This type is most helpful because runSqlite will roll back the entire -- transaction if an exception is raised. data DBException = DBException String -- ^ A general purpose exception type, -- including an error message. | MissingRPMTag String -- ^ A required tag was missing from the -- RPM being processed. The argument should -- be the name of the missing tag. | BadName String -- ^ The name of the package is not parseable. deriving(Eq, Typeable) instance Exception DBException instance Show DBException where show (BadName s) = "Package name is not parseable: " ++ s show (DBException s) = show s show (MissingRPMTag s) = "Missing required tag in RPM: " ++ s -- | If a 'Maybe' value is Nothing, throw the given exception. Otherwise, return the -- value inside. throwIfNothing :: Exception e => Maybe a -> e -> a throwIfNothing (Just v) _ = v throwIfNothing _ exn = throw exn -- | If a 'Maybe' value is Nothing, throw the given exception. Otherwise, run the -- provided function on the value inside and return the result. throwIfNothingOtherwise :: Exception e => Maybe a -> e -> (a -> b) -> b throwIfNothingOtherwise (Just v) _ fn = fn v throwIfNothingOtherwise _ exn _ = throw exn -- | Is a given 'DBException' type a 'BadName'? isBadNameException :: DBException -> Bool isBadNameException (BadName _) = True isBadNameException _ = False -- | Is a given 'DBException' type the general 'DBException'? isDBExceptionException :: DBException -> Bool isDBExceptionException (DBException _) = True isDBExceptionException _ = False -- | Is a given 'DBException' type a 'MissingRPMTag'? isMissingRPMTagException :: DBException -> Bool isMissingRPMTagException (MissingRPMTag _) = True isMissingRPMTagException _ = False
atodorov/bdcs
src/BDCS/Exceptions.hs
lgpl-2.1
2,790
0
10
784
393
218
175
32
1
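Since the point of these exceptions is to abort a runSqlite transaction when required data is missing, a short usage sketch follows. The ExceptionsDemo module name and the tag map are hypothetical; throwIfNothing and MissingRPMTag are taken from the module above.

module ExceptionsDemo where

import           BDCS.Exceptions
import           Data.Map (Map)
import qualified Data.Map as Map

-- Look up a required RPM header in a (hypothetical) tag map, throwing
-- 'MissingRPMTag' when it is absent so that the enclosing database
-- transaction is rolled back.
requiredTag :: String -> Map String String -> String
requiredTag tag headers =
    Map.lookup tag headers `throwIfNothing` MissingRPMTag tag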
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="hu-HU"> <title>AdvFuzzer Add-On</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Keresés</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
veggiespam/zap-extensions
addOns/fuzz/src/main/javahelp/org/zaproxy/zap/extension/fuzz/resources/help_hu_HU/helpset_hu_HU.hs
apache-2.0
964
79
67
157
416
210
206
-1
-1
module Test.Invariant where import Test.QuickCheck infix 1 &> infix 2 <~~, @~>, <?>, <=> -- | Defines extensional equality. This allows concise, point-free, -- definitions of laws. -- -- > f(x) == g(x) -- > f <=> g (<=>) :: Eq b => (a -> b) -> (a -> b) -> a -> Bool (f <=> g) x = f x == g x -- | Pointfree version of QuickChecks ==>. This notation reduces a -- lot of lambdas, for example: -- -- >>> quickCheck $ (/=0) &> not . idempotent (*(2::Int)) -- +++ OK, passed 100 tests. (&>) :: Testable b => (a -> Bool) -> (a -> b) -> a -> Property (a &> b) x = a x ==> b x -- | Checks whether a function is idempotent. -- -- > f(f(x)) == f(x) -- -- >>> quickCheck $ idempotent (abs :: Int -> Int) -- +++ OK, passed 100 tests. idempotent :: Eq a => (a -> a) -> a -> Bool idempotent f = f <=> f . f -- | Checks whether a function is pointSymmetric. -- -- > f(-x) == -f(x) -- -- >>> quickCheck $ pointSymmetric (^3) -- +++ OK, passed 100 tests. pointSymmetric :: (Num a, Num b, Eq b) => (a -> b) -> a -> Bool pointSymmetric f = f . negate <=> negate . f -- | Checks whether a function is reflectionSymmetric. -- -- > f(x) == f(-x) -- -- >>> quickCheck $ pointSymmetric (^2) -- +++ OK, passed 100 tests. reflectionSymmetric :: (Num a, Eq b) => (a -> b) -> a -> Bool reflectionSymmetric f = f . negate <=> f -- | Checks whether a function is monotonicIncreasing. -- -- > x >= y, f(x) >= f(y) -- -- >>> quickCheck $ monotonicIncreasing ceiling -- +++ OK, passed 100 tests. monotonicIncreasing :: (Ord a, Ord b) => (a -> b) -> a -> a -> Bool monotonicIncreasing f x y = compare (f x) (f y) `elem` [EQ, compare x y] -- | Checks whether a function is strictly monotonicIncreasing'. -- -- > x > y, f(x) > f(y) -- -- >>> quickCheck $ monotonicIncreasing' (+1) -- +++ OK, passed 100 tests. monotonicIncreasing' :: (Ord a, Ord b) => (a -> b) -> a -> a -> Bool monotonicIncreasing' f x y = compare (f x) (f y) == compare x y -- | Checks whether a function is monotonicDecreasing. -- -- > x >= y, f(x) <= f(y) -- -- >>> quickCheck $ monotonicDecreasing (\x -> floor $ negate x) -- +++ OK, passed 100 tests. monotonicDecreasing :: (Ord a, Ord b) => (a -> b) -> a -> a -> Bool monotonicDecreasing f x y = compare (f x) (f y) `elem` [EQ, compare y x] -- | Checks whether a function is strictly monotonicDecreasing'. -- -- > x > y, f(x) < f(y) -- -- >>> quickCheck $ monotonicDecreasing' (-1) -- +++ OK, passed 100 tests. monotonicDecreasing' :: (Ord a, Ord b) => (a -> b) -> a -> a -> Bool monotonicDecreasing' f x y = compare (f x) (f y) == compare y x -- TODO create sorted list and fold with predicate over it -- | Checks whether a function is involutory. -- -- > f(f(x)) = x -- -- >>> quickCheck $ involutory negate -- +++ OK, passed 100 tests. involutory :: Eq a => (a -> a) -> a -> Bool involutory f = f . f <=> id -- | Checks whether a function is the inverse of another function. -- -- > f(g(x)) = x -- -- >>> quickCheck $ (`div` 2) `inverts` (*2) -- +++ OK, passed 100 tests. inverts :: Eq a => (b -> a) -> (a -> b) -> a -> Bool f `inverts` g = f . g <=> id -- | Checks whether an binary operator is commutative. -- -- > a * b = b * a -- -- >>> quickCheck $ commutative (+) -- +++ OK, passed 100 tests. commutative :: Eq b => (a -> a -> b) -> a -> a -> Bool commutative f x y = x `f` y == y `f` x -- | Checks whether an binary operator is associative. -- -- > a + (b + c) = (a + b) + c -- -- >>> quickCheck $ associative (+) -- +++ OK, passed 100 tests. 
associative :: Eq a => (a -> a -> a) -> a -> a -> a -> Bool associative f x y z = x `f` (y `f` z) == (x `f` y) `f` z -- | Checks whether an operator is left-distributive over an other operator. -- -- > a * (b + c) = (a * b) + (a * c) -- -- >>> quickCheck $ (*) `distributesLeftOver` (+) -- +++ OK, passed 100 tests. distributesLeftOver :: Eq a => (a -> a -> a) -> (a -> a -> a) -> a -> a -> a -> Bool (f `distributesLeftOver` g) x y z = x `f` (y `g` z) == (x `f` y) `g` (x `f` z) -- | Checks whether an operator is right-distributive over an other operator. -- -- > (b + c) / a = (b / a) + (c / a) -- -- >>> quickCheck $ (/) `distributesRightOver` (+) -- +++ OK, passed 100 tests. distributesRightOver :: Eq a => (a -> a -> a) -> (a -> a -> a) -> a -> a -> a -> Bool (f `distributesRightOver` g) x y z = (y `g` z) `f` x == (x `f` y) `g` (x `f` z) -- | Checks whether an operator is distributive over an other operator. -- -- > a * (b + c) = (a * b) + (a * c) = (b + c) * a -- -- >>> quickCheck $ (*) `distributesOver` (+) -- +++ OK, passed 100 tests. distributesOver :: Eq a => (a -> a -> a) -> (a -> a -> a) -> a -> a -> a -> Bool (f `distributesOver` g) x y z = (f `distributesLeftOver` g) x y z && (f `distributesRightOver` g) x y z -- | Checks whether a function increases the size of a list. -- -- >>> quickCheck $ inflating (1:) -- +++ OK, passed 100 tests. inflating :: ([a] -> [b]) -> [a] -> Bool inflating f xs = length (f xs) >= length xs -- | Checks whether a function increases strictly the size of a list. -- -- >>> quickCheck $ inflating (1:) -- +++ OK, passed 100 tests. inflating' :: ([a] -> [b]) -> [a] -> Bool inflating' f xs = length (f xs) > length xs -- For GHC 7.10 -- inflating :: (Foldable f, Foldable f') => (f a -> f' b) -> f a -> Bool -- inflating f xs = length (f xs) > length xs -- | Checks whether a function decreases the size of a list. -- -- -- >>> quickCheck $ deflating tail -- +++ OK, passed 100 tests. deflating :: ([a] -> [b]) -> [a] -> Bool deflating f xs = length (f xs) <= length xs -- | Checks whether a function decreases strictly the size of a list. -- -- -- >>> quickCheck $ deflating tail -- +++ OK, passed 100 tests. deflating' :: ([a] -> [b]) -> [a] -> Bool deflating' f xs = null xs || length (f xs) < length xs -- For GHC 7.10 -- deflating :: (Foldable f, Foldable f') => (f a -> f' b) -> f a -> Bool -- deflating f xs = null xs || length (f xs) < length xs -- | Checks whether a function is cyclic by applying its result to -- itself within n applications. -- -- >>> quickCheck $ (`div` 10) `cyclesWithin` 100 -- +++ OK, passed 100 tests. cyclesWithin :: Eq a => (a -> a) -> Int -> a -> Bool f `cyclesWithin` n = go [] . take (n + 1) . iterate f where go xs (y:ys) | y `elem` xs = True | otherwise = go (y:xs) ys go _ _ = False -- | Checks whether a function is invariant over an other function. -- -- >>> quickCheck $ length `invariatesOver` reverse -- +++ OK, passed 100 tests. invariatesOver :: Eq b => (a -> b) -> (a -> a) -> a -> Bool f `invariatesOver` g = f . g <=> f -- | Checks whether a binary function is fixed by an argument. -- -- f x y == const a y -- -- >>> quickCheck $ (*) `fixedBy` 0 -- +++ OK, passed 100 tests. fixedBy :: Eq c => (a -> b -> c) -> a -> b -> b -> Bool (f `fixedBy` x) y z = f x y == f x z -- | Checks whether a function is invariant over an other function. -- -- >>> quickCheck $ length <~~ reverse -- +++ OK, passed 100 tests. (<~~) :: Eq b => (a -> b) -> (a -> a) -> a -> Bool f <~~ g = f . g <=> f -- | Checks whether a function is the inverse of another function. 
-- -- > f(g(x)) = x -- -- >>> quickCheck $ (`div` 2) @~> (*2) -- +++ OK, passed 100 tests. (@~>) :: Eq a => (b -> a) -> (a -> b) -> a -> Bool f @~> g = f . g <=> id -- | Checks whether a function is an endomorphism in relation to a unary operator. -- -- > f(g(x)) = g(f(x)) -- -- >>> quickCheck $ (*7) <?> abs -- +++ OK, passed 100 tests. (<?>) :: Eq a => (a -> a) -> (a -> a) -> a -> Bool f <?> g = f . g <=> g . f -- | Checks whether a function is an endomorphism in relation to a binary operator. -- -- > f(g(x,y)) = g(f(x),f(y)) -- -- >>> quickCheck $ (^2) <??> (*) -- +++ OK, passed 100 tests. (<??>) :: Eq a => (a -> a) -> (a -> a -> a) -> a -> a -> Bool (f <??> g) x y = f (x `g` y) == f x `g` f y -- | Checks whether a function is an endomorphism in relation to a ternary operator. -- -- > f(g(x,y,z)) = g(f(x),f(y),f(z)) -- (<???>) :: Eq a => (a -> a) -> (a -> a -> a -> a) -> a -> a -> a -> Bool (f <???> g) x y z = f (g x y z) == g (f x) (f y) (f z)
knupfer/test-invariant
src/Test/Invariant.hs
bsd-3-clause
8,090
4
14
1,931
2,293
1,286
1,007
64
2
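Beyond the per-combinator doctests, the combinators above compose naturally into a small property suite. A minimal sketch, assuming only QuickCheck and Data.List; the InvariantDemo module name is made up.

module InvariantDemo where

import Data.List (sort)
import Test.Invariant
import Test.QuickCheck (quickCheck)

main :: IO ()
main = do
    quickCheck $ idempotent (sort :: [Int] -> [Int])                  -- sorting twice changes nothing
    quickCheck $ (`div` 2) `inverts` ((* 2) :: Int -> Int)            -- halving undoes doubling
    quickCheck $ length `invariatesOver` (reverse :: [Int] -> [Int])  -- reversal preserves length
    quickCheck $ commutative ((+) :: Int -> Int -> Int)
    quickCheck $ associative ((+) :: Int -> Int -> Int)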
{-# LANGUAGE IncoherentInstances #-} {-# OPTIONS_GHC -fno-warn-incomplete-patterns #-} -- | This module provides a category transformer for automatic differentiation. -- -- There are many alternative notions of a generalized derivative. -- Perhaps the most common is the differential Ring. -- In Haskell, this might be defined as: -- -- > class Field r => Differential r where -- > derivative :: r -> r -- > -- > type Diff cat = forall a b. (Category cat, Differential cat a b) -- -- But this runs into problems with the lack of polymorphic constraints in GHC. -- See, for example <https://ghc.haskell.org/trac/ghc/ticket/2893 GHC ticket #2893>. -- -- References: -- -- * <http://en.wikipedia.org/wiki/Differential_algebra wikipedia article on differntial algebras> module SubHask.Category.Trans.Derivative where import SubHask.Algebra import SubHask.Category import SubHask.SubType import SubHask.Internal.Prelude -------------------------------------------------------------------------------- -- | This is essentially just a translation of the "Numeric.AD.Forward.Forward" type -- for use with the SubHask numeric hierarchy. -- -- FIXME: -- -- Add reverse mode auto-differentiation for vectors. -- Apply the "ProofOf" framework from Monotonic data Forward a = Forward { val :: !a , val' :: a } deriving (Typeable,Show) mkMutable [t| forall a. Forward a |] instance Semigroup a => Semigroup (Forward a) where (Forward a1 a1')+(Forward a2 a2') = Forward (a1+a2) (a1'+a2') instance Cancellative a => Cancellative (Forward a) where (Forward a1 a1')-(Forward a2 a2') = Forward (a1-a2) (a1'-a2') instance Monoid a => Monoid (Forward a) where zero = Forward zero zero instance Group a => Group (Forward a) where negate (Forward a b) = Forward (negate a) (negate b) instance Abelian a => Abelian (Forward a) instance Rg a => Rg (Forward a) where (Forward a1 a1')*(Forward a2 a2') = Forward (a1*a2) (a1*a2'+a2*a1') instance Rig a => Rig (Forward a) where one = Forward one zero instance Ring a => Ring (Forward a) where fromInteger x = Forward (fromInteger x) zero instance Field a => Field (Forward a) where reciprocal (Forward a a') = Forward (reciprocal a) (-a'/(a*a)) (Forward a1 a1')/(Forward a2 a2') = Forward (a1/a2) ((a1'*a2+a1*a2')/(a2'*a2')) fromRational r = Forward (fromRational r) 0 --------- proveC1 :: (a ~ (a><a), Rig a) => (Forward a -> Forward a) -> C1 (a -> a) proveC1 f = Diffn (\a -> val $ f $ Forward a one) $ Diff0 $ \a -> val' $ f $ Forward a one proveC2 :: (a ~ (a><a), Rig a) => (Forward (Forward a) -> Forward (Forward a)) -> C2 (a -> a) proveC2 f = Diffn (\a -> val $ val $ f $ Forward (Forward a one) one) $ Diffn (\a -> val' $ val $ f $ Forward (Forward a one) one) $ Diff0 (\a -> val' $ val' $ f $ Forward (Forward a one) one) -------------------------------------------------------------------------------- class C (cat :: * -> * -> *) where type D cat :: * -> * -> * derivative :: cat a b -> D cat a (a >< b) data Diff (n::Nat) a b where Diff0 :: (a -> b) -> Diff 0 a b Diffn :: (a -> b) -> Diff (n-1) a (a >< b) -> Diff n a b --------- instance Sup (->) (Diff n) (->) instance Sup (Diff n) (->) (->) instance Diff 0 <: (->) where embedType_ = Embed2 unDiff0 where unDiff0 :: Diff 0 a b -> a -> b unDiff0 (Diff0 f) = f unDiff0 (Diffn _ _) = undefined instance Diff n <: (->) where embedType_ = Embed2 unDiffn where unDiffn :: Diff n a b -> a -> b unDiffn (Diffn f _) = f unDiffn (Diff0 _) = undefined -- -- FIXME: these subtyping instance should be made more generic -- the problem is that type families aren't currently 
powerful enough -- instance Sup (Diff 0) (Diff 1) (Diff 0) instance Sup (Diff 1) (Diff 0) (Diff 0) instance Diff 1 <: Diff 0 where embedType_ = Embed2 m2n where m2n (Diffn f _) = Diff0 f m2n (Diff0 _) = undefined instance Sup (Diff 0) (Diff 2) (Diff 0) instance Sup (Diff 2) (Diff 0) (Diff 0) instance Diff 2 <: Diff 0 where embedType_ = Embed2 m2n where m2n (Diffn f _) = Diff0 f m2n (Diff0 _) = undefined instance Sup (Diff 1) (Diff 2) (Diff 1) instance Sup (Diff 2) (Diff 1) (Diff 1) instance Diff 2 <: Diff 1 where embedType_ = Embed2 m2n where m2n (Diffn f f') = Diffn f (embedType2 f') m2n (Diff0 _) = undefined --------- instance (1 <= n) => C (Diff n) where type D (Diff n) = Diff (n-1) derivative (Diffn _ f') = f' -- doesn't work, hence no non-ehaustive pattern ghc option -- derivative (Diff0 _) = undefined unsafeProveC0 :: (a -> b) -> Diff 0 a b unsafeProveC0 f = Diff0 f unsafeProveC1 :: (a -> b) -- ^ f(x) -> (a -> a><b) -- ^ f'(x) -> C1 (a -> b) unsafeProveC1 f f' = Diffn f $ unsafeProveC0 f' unsafeProveC2 :: (a -> b) -- ^ f(x) -> (a -> a><b) -- ^ f'(x) -> (a -> a><a><b) -- ^ f''(x) -> C2 (a -> b) unsafeProveC2 f f' f'' = Diffn f $ unsafeProveC1 f' f'' type C0 a = C0_ a type family C0_ (f :: *) :: * where C0_ (a -> b) = Diff 0 a b type C1 a = C1_ a type family C1_ (f :: *) :: * where C1_ (a -> b) = Diff 1 a b type C2 a = C2_ a type family C2_ (f :: *) :: * where C2_ (a -> b) = Diff 2 a b --------------------------------------- -- algebra mkMutable [t| forall n a b. Diff n a b |] instance Semigroup b => Semigroup (Diff 0 a b) where (Diff0 f1 )+(Diff0 f2 ) = Diff0 (f1+f2) _ + _ = undefined instance (Semigroup b, Semigroup (a><b)) => Semigroup (Diff 1 a b) where (Diffn f1 f1')+(Diffn f2 f2') = Diffn (f1+f2) (f1'+f2') instance (Semigroup b, Semigroup (a><b), Semigroup (a><a><b)) => Semigroup (Diff 2 a b) where (Diffn f1 f1')+(Diffn f2 f2') = Diffn (f1+f2) (f1'+f2') instance Monoid b => Monoid (Diff 0 a b) where zero = Diff0 zero instance (Monoid b, Monoid (a><b)) => Monoid (Diff 1 a b) where zero = Diffn zero zero instance (Monoid b, Monoid (a><b), Monoid (a><a><b)) => Monoid (Diff 2 a b) where zero = Diffn zero zero -------------------------------------------------------------------------------- -- test -- v = unsafeToModule [1,2,3,4,5] :: SVector 5 Double -- -- sphere :: Hilbert v => C0 (v -> Scalar v) -- sphere = unsafeProveC0 f -- where -- f v = v<>v
Drezil/subhask
src/SubHask/Category/Trans/Derivative.hs
bsd-3-clause
6,414
0
13
1,567
2,481
1,291
1,190
-1
-1
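The Forward type above is the usual dual-number construction, just expressed against SubHask's algebraic hierarchy. To show the idea without pulling in SubHask, here is a self-contained re-statement over the standard Num class; it is not SubHask's API, only the same forward-mode trick, and the seeding of the derivative component with one mirrors what proveC1 does with Forward a one.

module ForwardDemo where

-- Dual numbers: a value paired with its derivative.
data Dual = Dual { value :: Double, deriv :: Double } deriving Show

instance Num Dual where
    Dual a a' + Dual b b' = Dual (a + b) (a' + b')
    Dual a a' - Dual b b' = Dual (a - b) (a' - b')
    Dual a a' * Dual b b' = Dual (a * b) (a * b' + b * a')   -- product rule
    fromInteger n         = Dual (fromInteger n) 0
    abs                   = error "abs: not needed for this sketch"
    signum                = error "signum: not needed for this sketch"

-- Differentiate f at x by seeding the derivative component with 1.
diffAt :: (Dual -> Dual) -> Double -> Double
diffAt f x = deriv (f (Dual x 1))

-- diffAt (\x -> x*x + 3*x) 2 == 7, i.e. d/dx (x^2 + 3x) at x = 2.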
-- | When there aren't enough registers to hold all the vregs we have to spill some of those -- vregs to slots on the stack. This module is used modify the code to use those slots. -- module RegAlloc.Graph.Spill ( regSpill, SpillStats(..), accSpillSL ) where import RegAlloc.Liveness import Instruction import Reg import OldCmm hiding (RegSet) import BlockId import State import Unique import UniqFM import UniqSet import UniqSupply import Outputable import Data.List import Data.Maybe import Data.Map (Map) import Data.Set (Set) import qualified Data.Map as Map import qualified Data.Set as Set -- | Spill all these virtual regs to stack slots. -- -- TODO: See if we can split some of the live ranges instead of just globally -- spilling the virtual reg. This might make the spill cleaner's job easier. -- -- TODO: On CISCy x86 and x86_64 we don't nessesarally have to add a mov instruction -- when making spills. If an instr is using a spilled virtual we may be able to -- address the spill slot directly. -- regSpill :: Instruction instr => [LiveCmmDecl statics instr] -- ^ the code -> UniqSet Int -- ^ available stack slots -> UniqSet VirtualReg -- ^ the regs to spill -> UniqSM ([LiveCmmDecl statics instr] -- code with SPILL and RELOAD meta instructions added. , UniqSet Int -- left over slots , SpillStats ) -- stats about what happened during spilling regSpill code slotsFree regs -- not enough slots to spill these regs | sizeUniqSet slotsFree < sizeUniqSet regs = pprPanic "regSpill: out of spill slots!" ( text " regs to spill = " <> ppr (sizeUniqSet regs) $$ text " slots left = " <> ppr (sizeUniqSet slotsFree)) | otherwise = do -- allocate a slot for each of the spilled regs let slots = take (sizeUniqSet regs) $ uniqSetToList slotsFree let regSlotMap = listToUFM $ zip (uniqSetToList regs) slots -- grab the unique supply from the monad us <- getUs -- run the spiller on all the blocks let (code', state') = runState (mapM (regSpill_top regSlotMap) code) (initSpillS us) return ( code' , minusUniqSet slotsFree (mkUniqSet slots) , makeSpillStats state') -- | Spill some registers to stack slots in a top-level thing. regSpill_top :: Instruction instr => RegMap Int -- ^ map of vregs to slots they're being spilled to. -> LiveCmmDecl statics instr -- ^ the top level thing. -> SpillM (LiveCmmDecl statics instr) regSpill_top regSlotMap cmm = case cmm of CmmData{} -> return cmm CmmProc info label sccs | LiveInfo static firstId mLiveVRegsOnEntry liveSlotsOnEntry <- info -> do -- We should only passed Cmms with the liveness maps filled in, but we'll -- create empty ones if they're not there just in case. let liveVRegsOnEntry = fromMaybe mapEmpty mLiveVRegsOnEntry -- The liveVRegsOnEntry contains the set of vregs that are live on entry to -- each basic block. If we spill one of those vregs we remove it from that -- set and add the corresponding slot number to the liveSlotsOnEntry set. -- The spill cleaner needs this information to erase unneeded spill and -- reload instructions after we've done a successful allocation. let liveSlotsOnEntry' :: Map BlockId (Set Int) liveSlotsOnEntry' = mapFoldWithKey patchLiveSlot liveSlotsOnEntry liveVRegsOnEntry let info' = LiveInfo static firstId (Just liveVRegsOnEntry) liveSlotsOnEntry' -- Apply the spiller to all the basic blocks in the CmmProc. 
sccs' <- mapM (mapSCCM (regSpill_block regSlotMap)) sccs return $ CmmProc info' label sccs' where -- | Given a BlockId and the set of registers live in it, -- if registers in this block are being spilled to stack slots, -- then record the fact that these slots are now live in those blocks -- in the given slotmap. patchLiveSlot :: BlockId -> RegSet -> Map BlockId (Set Int) -> Map BlockId (Set Int) patchLiveSlot blockId regsLive slotMap = let curSlotsLive = fromMaybe Set.empty $ Map.lookup blockId slotMap moreSlotsLive = Set.fromList $ catMaybes $ map (lookupUFM regSlotMap) $ uniqSetToList regsLive slotMap' = Map.insert blockId (Set.union curSlotsLive moreSlotsLive) slotMap in slotMap' -- | Spill some registers to stack slots in a basic block. regSpill_block :: Instruction instr => UniqFM Int -- ^ map of vregs to slots they're being spilled to. -> LiveBasicBlock instr -> SpillM (LiveBasicBlock instr) regSpill_block regSlotMap (BasicBlock i instrs) = do instrss' <- mapM (regSpill_instr regSlotMap) instrs return $ BasicBlock i (concat instrss') -- | Spill some registers to stack slots in a single instruction. If the instruction -- uses registers that need to be spilled, then it is prefixed (or postfixed) with -- the appropriate RELOAD or SPILL meta instructions. regSpill_instr :: Instruction instr => UniqFM Int -- ^ map of vregs to slots they're being spilled to. -> LiveInstr instr -> SpillM [LiveInstr instr] regSpill_instr _ li@(LiveInstr _ Nothing) = do return [li] regSpill_instr regSlotMap (LiveInstr instr (Just _)) = do -- work out which regs are read and written in this instr let RU rlRead rlWritten = regUsageOfInstr instr -- sometimes a register is listed as being read more than once, -- nub this so we don't end up inserting two lots of spill code. let rsRead_ = nub rlRead let rsWritten_ = nub rlWritten -- if a reg is modified, it appears in both lists, want to undo this.. let rsRead = rsRead_ \\ rsWritten_ let rsWritten = rsWritten_ \\ rsRead_ let rsModify = intersect rsRead_ rsWritten_ -- work out if any of the regs being used are currently being spilled. let rsSpillRead = filter (\r -> elemUFM r regSlotMap) rsRead let rsSpillWritten = filter (\r -> elemUFM r regSlotMap) rsWritten let rsSpillModify = filter (\r -> elemUFM r regSlotMap) rsModify -- rewrite the instr and work out spill code. 
(instr1, prepost1) <- mapAccumLM (spillRead regSlotMap) instr rsSpillRead (instr2, prepost2) <- mapAccumLM (spillWrite regSlotMap) instr1 rsSpillWritten (instr3, prepost3) <- mapAccumLM (spillModify regSlotMap) instr2 rsSpillModify let (mPrefixes, mPostfixes) = unzip (prepost1 ++ prepost2 ++ prepost3) let prefixes = concat mPrefixes let postfixes = concat mPostfixes -- final code let instrs' = prefixes ++ [LiveInstr instr3 Nothing] ++ postfixes return {- $ pprTrace "* regSpill_instr spill" ( text "instr = " <> ppr instr $$ text "read = " <> ppr rsSpillRead $$ text "write = " <> ppr rsSpillWritten $$ text "mod = " <> ppr rsSpillModify $$ text "-- out" $$ (vcat $ map ppr instrs') $$ text " ") -} $ instrs' spillRead :: Instruction instr => UniqFM Int -> instr -> Reg -> SpillM (instr, ([LiveInstr instr'], [LiveInstr instr'])) spillRead regSlotMap instr reg | Just slot <- lookupUFM regSlotMap reg = do (instr', nReg) <- patchInstr reg instr modify $ \s -> s { stateSpillSL = addToUFM_C accSpillSL (stateSpillSL s) reg (reg, 0, 1) } return ( instr' , ( [LiveInstr (RELOAD slot nReg) Nothing] , []) ) | otherwise = panic "RegSpill.spillRead: no slot defined for spilled reg" spillWrite :: Instruction instr => UniqFM Int -> instr -> Reg -> SpillM (instr, ([LiveInstr instr'], [LiveInstr instr'])) spillWrite regSlotMap instr reg | Just slot <- lookupUFM regSlotMap reg = do (instr', nReg) <- patchInstr reg instr modify $ \s -> s { stateSpillSL = addToUFM_C accSpillSL (stateSpillSL s) reg (reg, 1, 0) } return ( instr' , ( [] , [LiveInstr (SPILL nReg slot) Nothing])) | otherwise = panic "RegSpill.spillWrite: no slot defined for spilled reg" spillModify :: Instruction instr => UniqFM Int -> instr -> Reg -> SpillM (instr, ([LiveInstr instr'], [LiveInstr instr'])) spillModify regSlotMap instr reg | Just slot <- lookupUFM regSlotMap reg = do (instr', nReg) <- patchInstr reg instr modify $ \s -> s { stateSpillSL = addToUFM_C accSpillSL (stateSpillSL s) reg (reg, 1, 1) } return ( instr' , ( [LiveInstr (RELOAD slot nReg) Nothing] , [LiveInstr (SPILL nReg slot) Nothing])) | otherwise = panic "RegSpill.spillModify: no slot defined for spilled reg" -- | Rewrite uses of this virtual reg in an instr to use a different virtual reg patchInstr :: Instruction instr => Reg -> instr -> SpillM (instr, Reg) patchInstr reg instr = do nUnique <- newUnique let nReg = case reg of RegVirtual vr -> RegVirtual (renameVirtualReg nUnique vr) RegReal{} -> panic "RegAlloc.Graph.Spill.patchIntr: not patching real reg" let instr' = patchReg1 reg nReg instr return (instr', nReg) patchReg1 :: Instruction instr => Reg -> Reg -> instr -> instr patchReg1 old new instr = let patchF r | r == old = new | otherwise = r in patchRegsOfInstr instr patchF -- Spiller monad -------------------------------------------------------------- data SpillS = SpillS { -- | unique supply for generating fresh vregs. 
stateUS :: UniqSupply -- | spilled vreg vs the number of times it was loaded, stored , stateSpillSL :: UniqFM (Reg, Int, Int) } initSpillS :: UniqSupply -> SpillS initSpillS uniqueSupply = SpillS { stateUS = uniqueSupply , stateSpillSL = emptyUFM } type SpillM a = State SpillS a newUnique :: SpillM Unique newUnique = do us <- gets stateUS case takeUniqFromSupply us of (uniq, us') -> do modify $ \s -> s { stateUS = us' } return uniq accSpillSL :: (Reg, Int, Int) -> (Reg, Int, Int) -> (Reg, Int, Int) accSpillSL (r1, s1, l1) (_, s2, l2) = (r1, s1 + s2, l1 + l2) -- Spiller stats -------------------------------------------------------------- data SpillStats = SpillStats { spillStoreLoad :: UniqFM (Reg, Int, Int) } makeSpillStats :: SpillS -> SpillStats makeSpillStats s = SpillStats { spillStoreLoad = stateSpillSL s } instance Outputable SpillStats where ppr stats = (vcat $ map (\(r, s, l) -> ppr r <+> int s <+> int l) $ eltsUFM (spillStoreLoad stats))
mcmaniac/ghc
compiler/nativeGen/RegAlloc/Graph/Spill.hs
bsd-3-clause
12,564
0
16
4,632
2,429
1,241
1,188
211
2
{-# LANGUAGE TypeFamilies, FlexibleInstances, ConstraintKinds, DeriveGeneric, DefaultSignatures #-} module BayesStack.DirMulti ( -- * Dirichlet/multinomial pair Multinom, dirMulti, symDirMulti, multinom -- | Do not do record updates with these , dmTotal, dmAlpha, dmDomain , setMultinom, SetUnset (..) , addMultinom, subMultinom , decMultinom, incMultinom , prettyMultinom , updatePrior , obsProb -- * Parameter estimation , estimatePrior, reestimatePriors, reestimateSymPriors -- * Convenience functions , probabilities, decProbabilities ) where import Data.EnumMap (EnumMap) import qualified Data.EnumMap as EM import Data.Sequence (Seq) import qualified Data.Sequence as SQ import qualified Data.Foldable as Foldable import Data.Foldable (toList, Foldable, foldMap) import Data.Function (on) import Text.PrettyPrint import Text.Printf import GHC.Generics (Generic) import Data.Binary import Data.Binary.EnumMap () import BayesStack.Types import BayesStack.Dirichlet import Numeric.Log hiding (sum) import Numeric.Digamma import Math.Gamma hiding (p) -- | Make error handling a bit easier checkNaN :: RealFloat a => String -> a -> a checkNaN loc x | isNaN x = error $ "BayesStack.DirMulti."++loc++": Not a number" checkNaN loc x | isInfinite x = error $ "BayesStack.DirMulti."++loc++": Infinity" checkNaN _ x = x maybeInc, maybeDec :: (Num a, Eq a) => Maybe a -> Maybe a maybeInc Nothing = Just 1 maybeInc (Just n) = Just (n+1) maybeDec Nothing = error "Can't decrement zero count" maybeDec (Just 1) = Nothing maybeDec (Just n) = Just (n-1) {-# INLINEABLE decMultinom #-} {-# INLINEABLE incMultinom #-} decMultinom, incMultinom :: (Num w, Eq w, Ord a, Enum a) => a -> Multinom w a -> Multinom w a decMultinom k = subMultinom 1 k incMultinom k = addMultinom 1 k subMultinom, addMultinom :: (Num w, Eq w, Ord a, Enum a) => w -> a -> Multinom w a -> Multinom w a subMultinom w k dm = dm { dmCounts = EM.alter maybeDec k $ dmCounts dm , dmTotal = dmTotal dm - w } addMultinom w k dm = dm { dmCounts = EM.alter maybeInc k $ dmCounts dm , dmTotal = dmTotal dm + w } data SetUnset = Set | Unset setMultinom :: (Num w, Eq w, Enum a, Ord a) => SetUnset -> a -> Multinom w a -> Multinom w a setMultinom Set s = incMultinom s setMultinom Unset s = decMultinom s -- | 'Multinom a' represents multinomial distribution over domain 'a'. -- Optionally, this can include a collapsed Dirichlet prior. -- 'Multinom alpha count total' is a multinomial with Dirichlet prior -- with symmetric parameter 'alpha', ... 
data Multinom w a = DirMulti { dmAlpha :: !(Alpha a) , dmCounts :: !(EnumMap a w) , dmTotal :: !w , dmDomain :: !(Seq a) } | Multinom { dmProbs :: !(EnumMap a Double) , dmCounts :: !(EnumMap a w) , dmTotal :: !w , dmDomain :: !(Seq a) } deriving (Show, Eq, Generic) instance (Enum a, Binary a, Binary w) => Binary (Multinom w a) -- | 'symMultinomFromPrecision d p' is a symmetric Dirichlet/multinomial over a -- domain 'd' with precision 'p' symDirMultiFromPrecision :: (Num w, Enum a) => [a] -> DirPrecision -> Multinom w a symDirMultiFromPrecision domain prec = symDirMulti (0.5*prec) domain -- | 'dirMultiFromMeanPrecision m p' is an asymmetric Dirichlet/multinomial -- over a domain 'd' with mean 'm' and precision 'p' dirMultiFromPrecision :: (Num w, Enum a) => DirMean a -> DirPrecision -> Multinom w a dirMultiFromPrecision m p = dirMultiFromAlpha $ meanPrecisionToAlpha m p -- | Create a symmetric Dirichlet/multinomial symDirMulti :: (Num w, Enum a) => Double -> [a] -> Multinom w a symDirMulti alpha domain = dirMultiFromAlpha $ symAlpha domain alpha -- | A multinomial without a prior multinom :: (Num w, Enum a) => [(a,Double)] -> Multinom w a multinom probs = Multinom { dmProbs = EM.fromList probs , dmCounts = EM.empty , dmTotal = 0 , dmDomain = SQ.fromList $ map fst probs } -- | Create an asymmetric Dirichlet/multinomial from items and alphas dirMulti :: (Num w, Enum a) => [(a,Double)] -> Multinom w a dirMulti domain = dirMultiFromAlpha $ asymAlpha $ EM.fromList domain -- | Create a Dirichlet/multinomial with a given prior dirMultiFromAlpha :: (Enum a, Num w) => Alpha a -> Multinom w a dirMultiFromAlpha alpha = DirMulti { dmAlpha = alpha , dmCounts = EM.empty , dmTotal = 0 , dmDomain = alphaDomain alpha } data Acc w = Acc !w !Probability obsProb :: (Enum a, Real w, Functor f, Foldable f) => Multinom w a -> f (a, w) -> Probability obsProb (Multinom {dmProbs=prob}) obs = Foldable.product $ fmap (\(k,w)->(realToFrac $ prob EM.! k)^^w) obs where (^^) :: Real w => Log Double -> w -> Log Double x ^^ y = Exp $ realToFrac y * ln x obsProb (DirMulti {dmAlpha=alpha}) obs = let go (Acc w p) (k',w') = Acc (w+w') (p*p') where p' = Exp $ checkNaN "obsProb" $ lnGamma (realToFrac w' + alpha `alphaOf` k') in case Foldable.foldl' go (Acc 0 1) obs of Acc w p -> p / alphaNormalizer alpha / Exp (lnGamma $ realToFrac w + sumAlpha alpha) {-# INLINE obsProb #-} dmGetCounts :: (Enum a, Num w) => Multinom w a -> a -> w dmGetCounts dm k = EM.findWithDefault 0 k (dmCounts dm) instance HasLikelihood (Multinom w) where type LContext (Multinom w) a = (Real w, Ord a, Enum a) likelihood dm = obsProb dm $ EM.assocs $ dmCounts dm {-# INLINEABLE likelihood #-} instance FullConditionable (Multinom w) where type FCContext (Multinom w) a = (Real w, Ord a, Enum a) sampleProb (Multinom {dmProbs=prob}) k = prob EM.! k sampleProb dm@(DirMulti {dmAlpha=a}) k = let alpha = a `alphaOf` k n = realToFrac $ dmGetCounts dm k total = realToFrac $ dmTotal dm in (n + alpha) / (total + sumAlpha a) {-# INLINEABLE sampleProb #-} {-# INLINEABLE probabilities #-} probabilities :: (Real w, Ord a, Enum a) => Multinom w a -> Seq (Double, a) probabilities dm = fmap (\a->(sampleProb dm a, a)) $ dmDomain dm -- FIXME -- | Probabilities sorted decreasingly decProbabilities :: (Real w, Ord a, Enum a, Num w) => Multinom w a -> Seq (Double, a) decProbabilities = SQ.sortBy (flip (compare `on` fst)) . 
probabilities prettyMultinom :: (Real w, Ord a, Enum a) => Int -> (a -> String) -> Multinom w a -> Doc prettyMultinom _ _ (Multinom {}) = error "TODO: prettyMultinom" prettyMultinom n showA dm@(DirMulti {}) = text "DirMulti" <+> parens (text "alpha=" <> prettyAlpha showA (dmAlpha dm)) $$ nest 5 (fsep $ punctuate comma $ map (\(p,a)->text (showA a) <> parens (text $ printf "%1.2e" p)) $ take n $ Data.Foldable.toList $ decProbabilities dm) -- | Update the prior of a Dirichlet/multinomial updatePrior :: (Alpha a -> Alpha a) -> Multinom w a -> Multinom w a updatePrior _ (Multinom {}) = error "TODO: updatePrior" updatePrior f dm = dm {dmAlpha=f $ dmAlpha dm} -- | Relative tolerance in precision for prior estimation estimationTol = 1e-8 reestimatePriors :: (Foldable f, Functor f, Real w, Enum a) => f (Multinom w a) -> f (Multinom w a) reestimatePriors dms = let usableDms = filter (\dm->dmTotal dm > 5) $ toList dms alpha = case () of _ | length usableDms <= 3 -> id otherwise -> const $ estimatePrior estimationTol usableDms in fmap (updatePrior alpha) dms reestimateSymPriors :: (Foldable f, Functor f, Real w, Enum a) => f (Multinom w a) -> f (Multinom w a) reestimateSymPriors dms = let usableDms = filter (\dm->dmTotal dm > 5) $ toList dms alpha = case () of _ | length usableDms <= 3 -> id otherwise -> const $ symmetrizeAlpha $ estimatePrior estimationTol usableDms in fmap (updatePrior alpha) dms -- | Estimate the prior alpha from a set of Dirichlet/multinomials estimatePrior' :: (Real w, Enum a) => [Multinom w a] -> Alpha a -> Alpha a estimatePrior' dms alpha = let domain = toList $ dmDomain $ head dms f k = let num = sum $ map (\i->digamma (realToFrac (dmGetCounts i k) + alphaOf alpha k) - digamma (alphaOf alpha k) ) $ filter (\i->dmGetCounts i k > 0) dms total i = realToFrac $ sum $ map (\k->dmGetCounts i k) domain sumAlpha = sum $ map (alphaOf alpha) domain denom = sum $ map (\i->digamma (total i + sumAlpha) - digamma sumAlpha) dms in case () of _ | isNaN num -> error $ "BayesStack.DirMulti.estimatePrior': num = NaN: "++show (map (\i->(digamma (realToFrac (dmGetCounts i k) + alphaOf alpha k), digamma (alphaOf alpha k))) dms) _ | denom == 0 -> error "BayesStack.DirMulti.estimatePrior': denom=0" _ | isInfinite num -> error "BayesStack.DirMulti.estimatePrior': num is infinity " _ | isNaN (alphaOf alpha k * num / denom) -> error $ "NaN"++show (num, denom) otherwise -> alphaOf alpha k * num / denom in asymAlpha $ foldMap (\k->EM.singleton k (f k)) domain estimatePrior :: (Real w, Enum a) => Double -> [Multinom w a] -> Alpha a estimatePrior tol dms = iter $ dmAlpha $ head dms where iter alpha = let alpha' = estimatePrior' dms alpha (_, prec) = alphaToMeanPrecision alpha (_, prec') = alphaToMeanPrecision alpha' in if abs ((prec' - prec) / prec) > tol then iter alpha' else alpha'
beni55/bayes-stack
BayesStack/DirMulti.hs
bsd-3-clause
10,404
0
27
3,196
3,403
1,760
1,643
185
5
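A small end-to-end sketch of the counting interface above: build a symmetric prior, record a few observations, and read back the collapsed posterior probabilities. The Topic type, the module name, and the numbers are hypothetical; with alpha = 0.1 per element, T1 below comes out at (2 + 0.1) / (3 + 0.3), matching the (n + alpha) / (total + sum alpha) rule in sampleProb.

module DirMultiDemo where

import BayesStack.DirMulti
import Data.Foldable (toList)

-- A tiny example domain; Enum and Ord are what the counters need.
data Topic = T1 | T2 | T3 deriving (Eq, Ord, Enum, Show)

-- Symmetric Dirichlet/multinomial with alpha = 0.1 over three topics,
-- after observing T1 twice and T2 once.
topicDist :: Multinom Int Topic
topicDist = incMultinom T2 . incMultinom T1 . incMultinom T1
          $ symDirMulti 0.1 [T1, T2, T3]

-- Collapsed posterior probabilities, highest first; T1 should lead.
topTopics :: [(Double, Topic)]
topTopics = toList (decProbabilities topicDist)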
----------------------------------------------------------------------------- -- | -- Module : XMonad.Util.DebugWindow -- Copyright : (c) Brandon S Allbery KF8NH, 2014 -- License : BSD3-style (see LICENSE) -- -- Maintainer : [email protected] -- Stability : unstable -- Portability : not portable -- -- Module to dump window information for diagnostic/debugging purposes. See -- "XMonad.Hooks.DebugEvents" and "XMonad.Hooks.DebugStack" for practical uses. -- ----------------------------------------------------------------------------- module XMonad.Util.DebugWindow (debugWindow) where import Prelude import XMonad import Codec.Binary.UTF8.String (decodeString) import Control.Exception.Extensible as E import Control.Monad (when) import Data.List (unfoldr ,intercalate ) import Foreign import Foreign.C.String import Numeric (showHex) import System.Exit -- | Output a window by ID in hex, decimal, its ICCCM resource name and class, -- and its title if available. Also indicate override_redirect with an -- exclamation mark, and wrap in brackets if it is unmapped or withdrawn. debugWindow :: Window -> X String debugWindow 0 = return "-no window-" debugWindow w = do let wx = pad 8 '0' $ showHex w "" w' <- withDisplay $ \d -> io (safeGetWindowAttributes d w) case w' of Nothing -> return $ "(deleted window " ++ wx ++ ")" Just (WindowAttributes { wa_x = x , wa_y = y , wa_width = wid , wa_height = ht , wa_border_width = bw , wa_map_state = m , wa_override_redirect = o }) -> do c' <- withDisplay $ \d -> io (getWindowProperty8 d wM_CLASS w) let c = case c' of Nothing -> "" Just c'' -> intercalate "/" $ flip unfoldr (map (toEnum . fromEnum) c'') $ \s -> if null s then Nothing else let (w'',s'') = break (== '\NUL') s s' = if null s'' then s'' else tail s'' in Just (w'',s') t <- catchX' (wrap `fmap` getEWMHTitle "VISIBLE" w) $ catchX' (wrap `fmap` getEWMHTitle "" w) $ catchX' (wrap `fmap` getICCCMTitle w) $ return "" h' <- getMachine w let h = if null h' then "" else '@':h' -- if it has WM_COMMAND use it, else use the appName -- NB. modern stuff often does not set WM_COMMAND since it's only ICCCM required and not some -- horrible gnome/freedesktop session manager thing like Wayland intended. How helpful of them. p' <- withDisplay $ \d -> safeGetCommand d w let p = if null p' then "" else wrap $ intercalate " " p' nWP <- getAtom "_NET_WM_PID" pid' <- withDisplay $ \d -> io $ getWindowProperty32 d nWP w let pid = case pid' of Just [pid''] -> '(':show pid'' ++ ")" _ -> "" let cmd = p ++ pid ++ h let (lb,rb) = case () of () | m == waIsViewable -> ("","") | otherwise -> ("[","]") o' = if o then "!" else "" return $ concat [lb ,o' ,wx ,t ," " ,show wid ,'x':show ht ,if bw == 0 then "" else '+':show bw ,"@" ,show x ,',':show y ,if null c then "" else ' ':c ,if null cmd then "" else ' ':cmd ,rb ] getEWMHTitle :: String -> Window -> X String getEWMHTitle sub w = do a <- getAtom $ "_NET_WM_" ++ (if null sub then "" else '_':sub) ++ "_NAME" (Just t) <- withDisplay $ \d -> io $ getWindowProperty32 d a w return $ map (toEnum . 
fromEnum) t getICCCMTitle :: Window -> X String getICCCMTitle w = getDecodedStringProp w wM_NAME getDecodedStringProp :: Window -> Atom -> X String getDecodedStringProp w a = do t@(TextProperty t' _ 8 _) <- withDisplay $ \d -> io $ getTextProperty d w a [s] <- catchX' (tryUTF8 t) $ catchX' (tryCompound t) $ io ((:[]) `fmap` peekCString t') return s tryUTF8 :: TextProperty -> X [String] tryUTF8 (TextProperty s enc _ _) = do uTF8_STRING <- getAtom "UTF8_STRING" when (enc == uTF8_STRING) $ error "String is not UTF8_STRING" (map decodeString . splitNul) `fmap` io (peekCString s) tryCompound :: TextProperty -> X [String] tryCompound t@(TextProperty _ enc _ _) = do cOMPOUND_TEXT <- getAtom "COMPOUND_TEXT" when (enc == cOMPOUND_TEXT) $ error "String is not COMPOUND_TEXT" withDisplay $ \d -> io $ wcTextPropertyToTextList d t splitNul :: String -> [String] splitNul "" = [] splitNul s = let (s',ss') = break (== '\NUL') s in s' : splitNul ss' pad :: Int -> Char -> String -> String pad w c s = replicate (w - length s) c ++ s -- modified 'catchX' without the print to 'stderr' catchX' :: X a -> X a -> X a catchX' job errcase = do st <- get c <- ask (a, s') <- io $ runX c st job `E.catch` \e -> case fromException e of Just x -> throw e `const` (x `asTypeOf` ExitSuccess) _ -> runX c st errcase put s' return a wrap :: String -> String wrap s = ' ' : '"' : wrap' s ++ "\"" where wrap' (s':ss) | s' == '"' = '\\' : s' : wrap' ss | s' == '\\' = '\\' : s' : wrap' ss | otherwise = s' : wrap' ss wrap' "" = "" -- Graphics.X11.Extras.getWindowAttributes is bugggggggy safeGetWindowAttributes :: Display -> Window -> IO (Maybe WindowAttributes) safeGetWindowAttributes d w = alloca $ \p -> do s <- xGetWindowAttributes d w p case s of 0 -> return Nothing _ -> Just `fmap` peek p -- and so is getCommand safeGetCommand :: Display -> Window -> X [String] safeGetCommand d w = do wC <- getAtom "WM_COMMAND" p <- io $ getWindowProperty8 d wC w case p of Nothing -> return [] Just cs' -> do let cs = map (toEnum . fromEnum) cs' go (a,(s,"\NUL")) = (s:a,("","")) go (a,(s,'\NUL':ss)) = go (s:a,go' ss) go r = r -- ??? go' = break (== '\NUL') in return $ reverse $ fst $ go ([],go' cs) getMachine :: Window -> X String getMachine w = catchX' (getAtom "WM_CLIENT_MACHINE" >>= getDecodedStringProp w) (return "")
f1u77y/xmonad-contrib
XMonad/Util/DebugWindow.hs
bsd-3-clause
7,208
0
26
2,823
2,062
1,055
1,007
139
12
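debugWindow is meant to be called from inside the X monad, typically from a hook. Below is a minimal sketch of wiring it into a manageHook; the hook and module names are made up, and 'ask' is imported explicitly in case the xmonad re-exports in scope do not already provide it.

module DebugWindowDemo where

import Control.Monad.Reader (ask)
import XMonad
import XMonad.Util.DebugWindow (debugWindow)

-- Log every newly managed window's details (geometry, class, title, ...)
-- to xmonad's trace output, then manage the window normally.  Combine it
-- with an existing manageHook using '<+>'.
traceNewWindows :: ManageHook
traceNewWindows = do
    w <- ask
    liftX (debugWindow w >>= trace)
    idHook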