| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses 15 values | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k |
import System.Directory (getDirectoryContents, setCurrentDirectory)
import Criterion.Main
import qualified Data.Text.IO as TIO
import Hasmin
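-- Benchmark driver: read every stylesheet in the benchmark directory and
-- measure minifying the whole collection with Criterion.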
main :: IO ()
main = do
setCurrentDirectory "../hasmin-benchmarks/bnch"
xs <- getDirectoryContents "."
ys <- traverse TIO.readFile (filter (flip notElem [".", ".."]) xs)
defaultMain [bgroup "minification" [bench "a" $ nf (map minifyCSS) ys]]
| contivero/hasmin | benchmarks/Benchmarks.hs | bsd-3-clause | 400 | 0 | 14 | 63 | 133 | 68 | 65 | 10 | 1 |
{-# OPTIONS -fno-warn-unused-imports #-}
#include "HsConfigure.h"
-- #hide
module Data.Time.LocalTime.TimeOfDay
(
-- * Time of day
TimeOfDay(..),midnight,midday,makeTimeOfDayValid,
utcToLocalTimeOfDay,localToUTCTimeOfDay,
timeToTimeOfDay,timeOfDayToTime,
dayFractionToTimeOfDay,timeOfDayToDayFraction
) where
import Data.Int
import Data.Time.LocalTime.TimeZone
import Data.Time.Calendar.Private
import Data.Time.Clock
import Control.DeepSeq
import Data.Typeable
import Data.Fixed
#if LANGUAGE_Rank2Types
import Data.Data
#endif
-- | Time of day as represented in hour, minute and second (with picoseconds), typically used to express local time of day.
data TimeOfDay = TimeOfDay {
-- | range 0 - 23
todHour :: Int,
-- | range 0 - 59
todMin :: Int,
	-- | Note that 0 <= todSec < 61, accommodating leap seconds.
-- Any local minute may have a leap second, since leap seconds happen in all zones simultaneously
todSec :: Pico
} deriving (Eq,Ord
#if LANGUAGE_DeriveDataTypeable
#if LANGUAGE_Rank2Types
#if HAS_DataPico
,Data
#endif
#endif
#endif
)
instance NFData TimeOfDay where
rnf (TimeOfDay h m s) = h `deepseq` m `deepseq` s `seq` () -- FIXME: Data.Fixed had no NFData instances yet at time of writing
instance Typeable TimeOfDay where
typeOf _ = mkTyConApp (mkTyCon "Data.Time.LocalTime.TimeOfDay.TimeOfDay") []
-- | Hour zero
midnight :: TimeOfDay
midnight = TimeOfDay 0 0 0
-- | Hour twelve
midday :: TimeOfDay
midday = TimeOfDay 12 0 0
instance Show TimeOfDay where
show (TimeOfDay h m s) = (show2 (Just '0') h) ++ ":" ++ (show2 (Just '0') m) ++ ":" ++ (show2Fixed (Just '0') s)
makeTimeOfDayValid :: Int -> Int -> Pico -> Maybe TimeOfDay
makeTimeOfDayValid h m s = do
_ <- clipValid 0 23 h
_ <- clipValid 0 59 m
_ <- clipValid 0 60.999999999999 s
return (TimeOfDay h m s)
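-- As a quick sketch (not a doctest), out-of-range components yield 'Nothing':
--
-- > makeTimeOfDayValid 23 59 60.5  -- Just a leap-second time 23:59:60.5
-- > makeTimeOfDayValid 24 0 0      -- Nothing (hour out of range)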
-- | Convert a ToD in UTC to a ToD in some timezone, together with a day adjustment.
utcToLocalTimeOfDay :: TimeZone -> TimeOfDay -> (Int64,TimeOfDay)
utcToLocalTimeOfDay zone (TimeOfDay h m s) = (fromIntegral (div h' 24),TimeOfDay (mod h' 24) (mod m' 60) s) where
m' = m + timeZoneMinutes zone
h' = h + (div m' 60)
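-- As a quick sketch (not a doctest), a UTC+2 zone pushes a late-evening UTC
-- time into the next day:
--
-- > utcToLocalTimeOfDay (minutesToTimeZone 120) (TimeOfDay 23 30 0)
-- >   -- (1, TimeOfDay 1 30 0)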
-- | Convert a ToD in some timezone to a ToD in UTC, together with a day adjustment.
localToUTCTimeOfDay :: TimeZone -> TimeOfDay -> (Int64,TimeOfDay)
localToUTCTimeOfDay zone = utcToLocalTimeOfDay (minutesToTimeZone (negate (timeZoneMinutes zone)))
posixDayLength :: DiffTime
posixDayLength = fromIntegral 86400
-- | Get a TimeOfDay given a time since midnight.
-- Time more than 24h will be converted to leap-seconds.
timeToTimeOfDay :: DiffTime -> TimeOfDay
timeToTimeOfDay dt | dt >= posixDayLength = TimeOfDay 23 59 (60 + (realToFrac (dt - posixDayLength)))
timeToTimeOfDay dt = TimeOfDay (fromIntegral h) (fromIntegral m) s where
s' = realToFrac dt
s = mod' s' 60
m' = div' s' 60
m = mod' m' 60
h = div' m' 60
-- | Find out how much time since midnight a given TimeOfDay is.
timeOfDayToTime :: TimeOfDay -> DiffTime
timeOfDayToTime (TimeOfDay h m s) = ((fromIntegral h) * 60 + (fromIntegral m)) * 60 + (realToFrac s)
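-- Sketch (not a doctest): for ordinary, non-leap-second values the two
-- conversions are mutually inverse:
--
-- > timeOfDayToTime (TimeOfDay 1 30 0)  -- 5400s
-- > timeToTimeOfDay 5400                -- 01:30:00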
-- | Get a TimeOfDay given the fraction of a day since midnight.
dayFractionToTimeOfDay :: Rational -> TimeOfDay
dayFractionToTimeOfDay df = timeToTimeOfDay (realToFrac (df * 86400))
-- | Get the fraction of a day since midnight given a TimeOfDay.
timeOfDayToDayFraction :: TimeOfDay -> Rational
timeOfDayToDayFraction tod = realToFrac (timeOfDayToTime tod) / realToFrac posixDayLength
| takano-akio/time | Data/Time/LocalTime/TimeOfDay.hs | bsd-3-clause | 3,461 | 14 | 13 | 586 | 887 | 479 | 408 | 57 | 1 |
module Sesyrel.Expression.Ratio (
Ratio,
Rational,
RealInfinite(..),
numerator,
denominator,
(%)) where
import Sesyrel.Expression.RealInfinite
import Prelude hiding (Rational)
import qualified Data.Ratio as R (numerator, denominator, (%))
data Ratio a = !a :% !a
numerator, denominator :: Ratio a -> a
numerator (x :% _) = x
denominator (_ :% y) = y
type Rational = Ratio Integer
instance (Num a, Eq a, Show a) => Show (Ratio a) where
show (0 :% 0) = "NaN"
show (1 :% 0) = "+Infinity"
show ((-1) :% 0) = "-Infinity"
show (x :% y) = show x ++ " % " ++ show y
instance Integral a => Num (Ratio a) where
(p1 :% 0) + (p2 :% 0) = (p1 + p2) % 0
(p1 :% 0) + _ = p1 :% 0
_ + (p2 :% 0) = p2 :% 0
(p1 :% q1) + (p2 :% q2) = (p1 * q2 + q1 * p2) % (q1 * q2)
(p1 :% 0) - (p2 :% 0) = (p1 - p2) % 0
(p1 :% 0) - _ = p1 :% 0
_ - (p2 :% 0) = p2 :% 0
(p1 :% q1) - (p2 :% q2) = (p1 * q2 - q1 * p2) % (q1 * q2)
(p1 :% q1) * (p2 :% q2) = (p1 * p2) % (q1 * q2)
negate (p :% q) = (-p) :% q
abs (p :% q) = abs p :% q
signum (p :% _) = signum p :% 1
fromInteger i = fromInteger i :% 1
instance Integral a => Fractional (Ratio a) where
(p1 :% q1) / (p2 :% q2) = (p1 * q2) % (q1 * p2)
recip (0 :% x) = signum x :% 0
recip (p :% q) = (q * signum p) :% abs p
fromRational r = fromInteger (R.numerator r) % fromInteger (R.denominator r)
instance (Num a, Eq a) => Eq (Ratio a) where
(0 :% 0) == _ = False
_ == (0 :% 0) = False
(p1 :% q1) == (p2 :% q2) = (p1 == p2) && (q1 == q2)
(0 :% 0) /= _ = True
_ /= (0 :% 0) = True
(p1 :% q1) /= (p2 :% q2) = (p1 /= p2) || (q1 /= q2)
instance (Num a, Ord a) => Ord (Ratio a) where
compare (p1 :% q1) (p2 :% q2) = compare (p1 * q2) (q1 * p2)
instance Integral a => Real (Ratio a) where
toRational (p :% q) = toInteger p R.% toInteger q
instance Integral a => RealInfinite (Ratio a) where
plusInfinity = 1 :% 0
infixl 7 %
(%) :: Integral a => a -> a -> Ratio a
x % y = reduce (x :% y)
reduce :: Integral a => Ratio a -> Ratio a
reduce (x :% 0) = signum x :% 0
reduce (x :% y) = let q = gcd x y in (signum y * (x `div` q)) :% (abs y `div` q)
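-- A brief sketch of the API (not a doctest): '%' reduces its arguments, and a
-- zero denominator encodes signed infinities and NaN:
--
-- > 4 % 6 :: Rational      -- 2 % 3
-- > (1 % 0) + (1 % 0)      -- +Infinity
-- > (0 % 0) == (0 % 0)     -- False (NaN equals nothing)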
| balodja/sesyrel | src/Sesyrel/Expression/Ratio.hs | bsd-3-clause | 2,131 | 0 | 11 | 583 | 1,309 | 685 | 624 | 62 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_format_feature_flags2 - device extension
--
-- == VK_KHR_format_feature_flags2
--
-- [__Name String__]
-- @VK_KHR_format_feature_flags2@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 361
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_get_physical_device_properties2@
--
-- [__Deprecation state__]
--
-- - /Promoted/ to
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#versions-1.3-promotions Vulkan 1.3>
--
-- [__Contact__]
--
-- - Lionel Landwerlin
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_format_feature_flags2] @llandwerlin%0A<<Here describe the issue or question you have about the VK_KHR_format_feature_flags2 extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2021-07-01
--
-- [__Interactions and External Dependencies__]
--
-- - Promoted to Vulkan 1.3 Core
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Lionel Landwerlin, Intel
--
-- - Jason Ekstrand, Intel
--
-- - Tobias Hector, AMD
--
-- - Spencer Fricke, Samsung Electronics
--
-- - Graeme Leese, Broadcom
--
-- - Jan-Harald Fredriksen, ARM
--
-- == Description
--
-- This extension adds a new 'FormatFeatureFlagBits2KHR' 64bits format
-- feature flag type to extend the existing
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FormatFeatureFlagBits' which
-- is limited to 31 flags. At the time of this writing 29 bits of
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FormatFeatureFlagBits' are
-- already used.
--
-- Because
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.FormatProperties2'
-- is already defined to extend the Vulkan 1.0
-- 'Vulkan.Core10.DeviceInitialization.getPhysicalDeviceFormatProperties'
-- entry point, this extension defines a new 'FormatProperties3KHR' to
-- extend the 'Vulkan.Core10.DeviceInitialization.FormatProperties'.
--
-- On top of replicating all the bits from
-- 'Vulkan.Core10.Enums.FormatFeatureFlagBits.FormatFeatureFlagBits',
-- 'FormatFeatureFlagBits2KHR' adds the following bits :
--
-- - 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR'
-- and
-- 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR'
-- indicate that an implementation supports respectively reading and
-- writing a given 'Vulkan.Core10.Enums.Format.Format' through storage
-- operations without specifying the format in the shader.
--
-- - 'Vulkan.Core13.Enums.FormatFeatureFlags2.FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR'
-- indicates that an implementation supports depth comparison performed
-- by @OpImage*Dref@ instructions on a given
-- 'Vulkan.Core10.Enums.Format.Format'. Previously the result of
-- executing a @OpImage*Dref*@ instruction on an image view, where the
-- @format@ was not one of the depth\/stencil formats with a depth
-- component, was undefined. This bit clarifies on which formats such
-- instructions can be used.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.FormatProperties2':
--
-- - 'FormatProperties3KHR'
--
-- == New Enums
--
-- - 'FormatFeatureFlagBits2KHR'
--
-- == New Bitmasks
--
-- - 'FormatFeatureFlags2KHR'
--
-- == New Enum Constants
--
-- - 'KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME'
--
-- - 'KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR'
--
-- == Promotion to Vulkan 1.3
--
-- Functionality in this extension is included in core Vulkan 1.3, with the
-- KHR suffix omitted. The original type, enum and command names are still
-- available as aliases of the core functionality.
--
-- == Version History
--
-- - Revision 1, 2020-07-21 (Lionel Landwerlin)
--
-- - Initial draft
--
-- == See Also
--
-- 'FormatFeatureFlagBits2KHR', 'FormatFeatureFlags2KHR',
-- 'FormatProperties3KHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_format_feature_flags2 Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_format_feature_flags2 ( pattern STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR
, FormatFeatureFlags2KHR
, FormatFeatureFlagBits2KHR
, FormatProperties3KHR
, KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION
, pattern KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION
, KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME
, pattern KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME
) where
import Data.String (IsString)
import Vulkan.Core13.Enums.FormatFeatureFlags2 (FormatFeatureFlagBits2)
import Vulkan.Core13.Enums.FormatFeatureFlags2 (FormatFeatureFlags2)
import Vulkan.Core13.Promoted_From_VK_KHR_format_feature_flags2 (FormatProperties3)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_FORMAT_PROPERTIES_3))
-- No documentation found for TopLevel "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR"
pattern STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR = STRUCTURE_TYPE_FORMAT_PROPERTIES_3
-- No documentation found for TopLevel "VkFormatFeatureFlags2KHR"
type FormatFeatureFlags2KHR = FormatFeatureFlags2
-- No documentation found for TopLevel "VkFormatFeatureFlagBits2KHR"
type FormatFeatureFlagBits2KHR = FormatFeatureFlagBits2
-- No documentation found for TopLevel "VkFormatProperties3KHR"
type FormatProperties3KHR = FormatProperties3
type KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION"
pattern KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION = 1
type KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME = "VK_KHR_format_feature_flags2"
-- No documentation found for TopLevel "VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME"
pattern KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME = "VK_KHR_format_feature_flags2"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_KHR_format_feature_flags2.hs | bsd-3-clause | 6,902 | 0 | 8 | 1,312 | 352 | 274 | 78 | -1 | -1 |
module Main where
import Riot
import Data.Aeson
-- Just (TAPI {champions = fromList [Tchampions {botMmEnabled = False, freeToPlay = True, botEnabled = False, active = False, id = 4.0, rankedPlayEnabled = False}]})
main = do
print $ (decode test :: Maybe TAPI)
test = "{\"champions\":[{\"botMmEnabled\":false,\"id\":4,\"rankedPlayEnabled\":false,\"botEnabled\":false,\"active\":false,\"freeToPlay\":true}]}"
| mxswd/json-sampler | examples/Main.hs | bsd-3-clause | 412 | 0 | 9 | 50 | 42 | 24 | 18 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-|
Yesod.Test is a pragmatic framework for testing web applications built
using wai and persistent.
By pragmatic I may also mean 'dirty'. Its main goal is to encourage integration
and system testing of web applications by making everything /easy to test/.
Your tests are like browser sessions that keep track of cookies and the last
visited page. You can perform assertions on the content of HTML responses,
using CSS selectors to explore the document more easily.
You can also easily build requests using forms present in the current page.
This is very useful for testing web applications built in yesod, for example,
where your forms may have field names generated by the framework or a randomly
generated CSRF token input.
Your database is also directly available so you can use 'runDB' to set up
backend pre-conditions, or to assert that your session is having the desired effect.
-}
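-- A minimal sketch of a test suite built from this module's combinators
-- (@App@, @HomeR@ and @app@ stand for a user-supplied foundation type, a
-- route on it and a value of that type; they are not defined here):
--
-- > import Test.Hspec (hspec)
-- >
-- > main :: IO ()
-- > main = hspec $ yesodSpec app $
-- >     ydescribe "the home page" $
-- >         yit "loads successfully" $ do
-- >             get HomeR
-- >             statusIs 200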
module Yesod.Test
( -- * Declaring and running your test suite
yesodSpec
, YesodSpec
, yesodSpecWithSiteGenerator
, yesodSpecApp
, YesodExample
, YesodExampleData(..)
, TestApp
, YSpec
, testApp
, YesodSpecTree (..)
, ydescribe
, yit
-- * Making requests
-- | You can construct requests with the 'RequestBuilder' monad, which lets you
-- set the URL and add parameters, headers, and files. Helper functions are provided to
-- lookup fields by label and to add the current CSRF token from your forms.
-- Once built, the request can be executed with the 'request' method.
--
-- Convenience functions like 'get' and 'post' build and execute common requests.
, get
, post
, postBody
, request
, addRequestHeader
, setMethod
, addPostParam
, addGetParam
, addFile
, setRequestBody
, RequestBuilder
, setUrl
-- *** Adding fields by label
-- | Yesod can auto generate field names, so you are never sure what
-- the argument name should be for each one of your inputs when constructing
-- your requests. What you do know is the /label/ of the field.
-- These functions let you add parameters to your request based
-- on currently displayed label names.
, byLabel
, fileByLabel
-- *** CSRF Tokens
-- | In order to prevent CSRF exploits, yesod-form adds a hidden input
-- to your forms with the name "_token". This token is a randomly generated,
-- per-session value.
--
-- In order to prevent your forms from being rejected in tests, use one of
-- these functions to add the token to your request.
, addToken
, addToken_
, addTokenFromCookie
, addTokenFromCookieNamedToHeaderNamed
-- * Assertions
, assertEqual
, assertHeader
, assertNoHeader
, statusIs
, bodyEquals
, bodyContains
, htmlAllContain
, htmlAnyContain
, htmlNoneContain
, htmlCount
-- * Grab information
, getTestYesod
, getResponse
, getRequestCookies
-- * Debug output
, printBody
, printMatches
-- * Utils for building your own assertions
-- | Please consider generalizing and contributing the assertions you write.
, htmlQuery
, parseHTML
, withResponse
) where
import qualified Test.Hspec.Core.Spec as Hspec
import qualified Data.List as DL
import qualified Data.ByteString.Char8 as BS8
import Data.ByteString (ByteString)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.ByteString.Lazy.Char8 as BSL8
import qualified Test.HUnit as HUnit
import qualified Network.HTTP.Types as H
import qualified Network.Socket.Internal as Sock
import Data.CaseInsensitive (CI)
import Network.Wai
import Network.Wai.Test hiding (assertHeader, assertNoHeader, request)
import qualified Control.Monad.Trans.State as ST
import Control.Monad.IO.Class
import System.IO
import Yesod.Test.TransversingCSS
import Yesod.Core
import qualified Data.Text.Lazy as TL
import Data.Text.Lazy.Encoding (encodeUtf8, decodeUtf8)
import Text.XML.Cursor hiding (element)
import qualified Text.XML.Cursor as C
import qualified Text.HTML.DOM as HD
import Control.Monad.Trans.Writer
import qualified Data.Map as M
import qualified Web.Cookie as Cookie
import qualified Blaze.ByteString.Builder as Builder
import Data.Time.Clock (getCurrentTime)
import Control.Applicative ((<$>))
-- | The state used in a single test case defined using 'yit'
--
-- Since 1.2.4
data YesodExampleData site = YesodExampleData
{ yedApp :: !Application
, yedSite :: !site
, yedCookies :: !Cookies
, yedResponse :: !(Maybe SResponse)
}
-- | A single test case, to be run with 'yit'.
--
-- Since 1.2.0
type YesodExample site = ST.StateT (YesodExampleData site) IO
-- | Mapping from cookie name to value.
--
-- Since 1.2.0
type Cookies = M.Map ByteString Cookie.SetCookie
-- | Corresponds to hspec\'s 'Spec'.
--
-- Since 1.2.0
type YesodSpec site = Writer [YesodSpecTree site] ()
-- | Internal data structure, corresponding to hspec\'s 'YesodSpecTree'.
--
-- Since 1.2.0
data YesodSpecTree site
= YesodSpecGroup String [YesodSpecTree site]
| YesodSpecItem String (YesodExample site ())
-- | Get the foundation value used for the current test.
--
-- Since 1.2.0
getTestYesod :: YesodExample site site
getTestYesod = fmap yedSite ST.get
-- | Get the most recently provided response value, if available.
--
-- Since 1.2.0
getResponse :: YesodExample site (Maybe SResponse)
getResponse = fmap yedResponse ST.get
data RequestBuilderData site = RequestBuilderData
{ rbdPostData :: RBDPostData
, rbdResponse :: (Maybe SResponse)
, rbdMethod :: H.Method
, rbdSite :: site
, rbdPath :: [T.Text]
, rbdGets :: H.Query
, rbdHeaders :: H.RequestHeaders
}
data RBDPostData = MultipleItemsPostData [RequestPart]
| BinaryPostData BSL8.ByteString
-- | Request parts let us discern regular key/values from files sent in the request.
data RequestPart
= ReqKvPart T.Text T.Text
| ReqFilePart T.Text FilePath BSL8.ByteString T.Text
-- | The 'RequestBuilder' state monad constructs a URL encoded string of arguments
-- to send with your requests. Some of the functions that run on it use the current
-- response to analyze the forms that the server is expecting to receive.
type RequestBuilder site = ST.StateT (RequestBuilderData site) IO
-- | Start describing a test suite, keeping cookies and a reference to the tested 'Application'
-- and 'ConnectionPool'.
ydescribe :: String -> YesodSpec site -> YesodSpec site
ydescribe label yspecs = tell [YesodSpecGroup label $ execWriter yspecs]
yesodSpec :: YesodDispatch site
=> site
-> YesodSpec site
-> Hspec.Spec
yesodSpec site yspecs =
Hspec.fromSpecList $ map unYesod $ execWriter yspecs
where
unYesod (YesodSpecGroup x y) = Hspec.specGroup x $ map unYesod y
unYesod (YesodSpecItem x y) = Hspec.specItem x $ do
app <- toWaiAppPlain site
ST.evalStateT y YesodExampleData
{ yedApp = app
, yedSite = site
, yedCookies = M.empty
, yedResponse = Nothing
}
-- | Same as yesodSpec, but instead of taking an already built site it
-- takes an action which produces the site for each test.
yesodSpecWithSiteGenerator :: YesodDispatch site
=> IO site
-> YesodSpec site
-> Hspec.Spec
yesodSpecWithSiteGenerator getSiteAction yspecs =
Hspec.fromSpecList $ map (unYesod getSiteAction) $ execWriter yspecs
where
unYesod getSiteAction' (YesodSpecGroup x y) = Hspec.specGroup x $ map (unYesod getSiteAction') y
unYesod getSiteAction' (YesodSpecItem x y) = Hspec.specItem x $ do
site <- getSiteAction'
app <- toWaiAppPlain site
ST.evalStateT y YesodExampleData
{ yedApp = app
, yedSite = site
, yedCookies = M.empty
, yedResponse = Nothing
}
-- | Same as yesodSpec, but instead of taking a site it
-- takes an action which produces the 'Application' for each test.
-- This lets you use your middleware from makeApplication
yesodSpecApp :: YesodDispatch site
=> site
-> IO Application
-> YesodSpec site
-> Hspec.Spec
yesodSpecApp site getApp yspecs =
Hspec.fromSpecList $ map unYesod $ execWriter yspecs
where
unYesod (YesodSpecGroup x y) = Hspec.specGroup x $ map unYesod y
unYesod (YesodSpecItem x y) = Hspec.specItem x $ do
app <- getApp
ST.evalStateT y YesodExampleData
{ yedApp = app
, yedSite = site
, yedCookies = M.empty
, yedResponse = Nothing
}
-- | Describe a single test that keeps cookies, and a reference to the last response.
yit :: String -> YesodExample site () -> YesodSpec site
yit label example = tell [YesodSpecItem label example]
-- Performs a given action using the last response. Use this to create
-- response-level assertions
withResponse' :: MonadIO m
=> (state -> Maybe SResponse)
-> (SResponse -> ST.StateT state m a)
-> ST.StateT state m a
withResponse' getter f = maybe err f . getter =<< ST.get
where err = failure "There was no response, you should make a request"
-- | Performs a given action using the last response. Use this to create
-- response-level assertions
withResponse :: (SResponse -> YesodExample site a) -> YesodExample site a
withResponse = withResponse' yedResponse
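-- For example, a custom response-level assertion could be sketched as:
--
-- > withResponse $ \ res -> liftIO $ print (simpleStatus res)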
-- | Parse an HTML document (such as a response body) into a 'Cursor' that can be queried.
-- Check for usage examples in this module's source.
parseHTML :: HtmlLBS -> Cursor
parseHTML html = fromDocument $ HD.parseLBS html
-- | Query the last response using CSS selectors, returns a list of matched fragments
htmlQuery' :: MonadIO m
=> (state -> Maybe SResponse)
-> Query
-> ST.StateT state m [HtmlLBS]
htmlQuery' getter query = withResponse' getter $ \ res ->
case findBySelector (simpleBody res) query of
Left err -> failure $ query <> " did not parse: " <> T.pack (show err)
Right matches -> return $ map (encodeUtf8 . TL.pack) matches
-- | Query the last response using CSS selectors, returns a list of matched fragments
htmlQuery :: Query -> YesodExample site [HtmlLBS]
htmlQuery = htmlQuery' yedResponse
-- | Asserts that the two given values are equal.
assertEqual :: (Eq a) => String -> a -> a -> YesodExample site ()
assertEqual msg a b = liftIO $ HUnit.assertBool msg (a == b)
-- | Assert the last response status is as expected.
statusIs :: Int -> YesodExample site ()
statusIs number = withResponse $ \ SResponse { simpleStatus = s } ->
liftIO $ flip HUnit.assertBool (H.statusCode s == number) $ concat
[ "Expected status was ", show number
, " but received status was ", show $ H.statusCode s
]
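-- A typical use, sketched after a 'get':
--
-- > get HomeR
-- > statusIs 200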
-- | Assert the given header key/value pair was returned.
assertHeader :: CI BS8.ByteString -> BS8.ByteString -> YesodExample site ()
assertHeader header value = withResponse $ \ SResponse { simpleHeaders = h } ->
case lookup header h of
Nothing -> failure $ T.pack $ concat
[ "Expected header "
, show header
, " to be "
, show value
, ", but it was not present"
]
Just value' -> liftIO $ flip HUnit.assertBool (value == value') $ concat
[ "Expected header "
, show header
, " to be "
, show value
, ", but received "
, show value'
]
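-- For example (header values are illustrative):
--
-- > assertHeader "Content-Type" "text/html; charset=utf-8"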
-- | Assert the given header was not included in the response.
assertNoHeader :: CI BS8.ByteString -> YesodExample site ()
assertNoHeader header = withResponse $ \ SResponse { simpleHeaders = h } ->
case lookup header h of
Nothing -> return ()
Just s -> failure $ T.pack $ concat
[ "Unexpected header "
, show header
, " containing "
, show s
]
-- | Assert the last response is exactly equal to the given text. This is
-- useful for testing API responses.
bodyEquals :: String -> YesodExample site ()
bodyEquals text = withResponse $ \ res ->
liftIO $ HUnit.assertBool ("Expected body to equal " ++ text) $
(simpleBody res) == encodeUtf8 (TL.pack text)
-- | Assert the last response has the given text. The check is performed using the response
-- body in full text form.
bodyContains :: String -> YesodExample site ()
bodyContains text = withResponse $ \ res ->
liftIO $ HUnit.assertBool ("Expected body to contain " ++ text) $
(simpleBody res) `contains` text
contains :: BSL8.ByteString -> String -> Bool
contains a b = DL.isInfixOf b (TL.unpack $ decodeUtf8 a)
-- | Queries the HTML using a CSS selector, and all matched elements must contain
-- the given string.
htmlAllContain :: Query -> String -> YesodExample site ()
htmlAllContain query search = do
matches <- htmlQuery query
case matches of
[] -> failure $ "Nothing matched css query: " <> query
_ -> liftIO $ HUnit.assertBool ("Not all "++T.unpack query++" contain "++search) $
DL.all (DL.isInfixOf search) (map (TL.unpack . decodeUtf8) matches)
-- | Queries the HTML using a CSS selector, and passes if any matched
-- element contains the given string.
--
-- Since 0.3.5
htmlAnyContain :: Query -> String -> YesodExample site ()
htmlAnyContain query search = do
matches <- htmlQuery query
case matches of
[] -> failure $ "Nothing matched css query: " <> query
_ -> liftIO $ HUnit.assertBool ("None of "++T.unpack query++" contain "++search) $
DL.any (DL.isInfixOf search) (map (TL.unpack . decodeUtf8) matches)
-- | Queries the HTML using a CSS selector, and fails if any matched
-- element contains the given string (in other words, it is the logical
-- inverse of 'htmlAnyContain').
--
-- Since 1.2.2
htmlNoneContain :: Query -> String -> YesodExample site ()
htmlNoneContain query search = do
matches <- htmlQuery query
case DL.filter (DL.isInfixOf search) (map (TL.unpack . decodeUtf8) matches) of
[] -> return ()
found -> failure $ "Found " <> T.pack (show $ length found) <>
" instances of " <> T.pack search <> " in " <> query <> " elements"
-- | Performs a CSS query on the last response and asserts that the number of
-- matched elements is as expected.
htmlCount :: Query -> Int -> YesodExample site ()
htmlCount query count = do
matches <- fmap DL.length $ htmlQuery query
liftIO $ flip HUnit.assertBool (matches == count)
("Expected "++(show count)++" elements to match "++T.unpack query++", found "++(show matches))
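-- For example (sketch, assuming the page renders exactly three matching items):
--
-- > get HomeR
-- > htmlCount "ul li" 3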
-- | Outputs the last response body to stderr (so it doesn't get captured by hspec).
printBody :: YesodExample site ()
printBody = withResponse $ \ SResponse { simpleBody = b } ->
liftIO $ BSL8.hPutStrLn stderr b
-- | Performs a CSS query and prints the matches to stderr.
printMatches :: Query -> YesodExample site ()
printMatches query = do
matches <- htmlQuery query
liftIO $ hPutStrLn stderr $ show matches
-- | Add a parameter with the given name and value to the request body.
addPostParam :: T.Text -> T.Text -> RequestBuilder site ()
addPostParam name value =
ST.modify $ \rbd -> rbd { rbdPostData = (addPostData (rbdPostData rbd)) }
where addPostData (BinaryPostData _) = error "Trying to add post param to binary content."
addPostData (MultipleItemsPostData posts) =
MultipleItemsPostData $ ReqKvPart name value : posts
-- | Add a parameter with the given name and value to the query string.
addGetParam :: T.Text -> T.Text -> RequestBuilder site ()
addGetParam name value = ST.modify $ \rbd -> rbd
{ rbdGets = (TE.encodeUtf8 name, Just $ TE.encodeUtf8 value)
: rbdGets rbd
}
-- | Add a file to be posted with the current request.
--
-- Adding a file will automatically change your request content-type to be multipart/form-data.
--
-- ==== __Examples__
--
-- > request $ do
-- > addFile "profile_picture" "static/img/picture.png" "img/png"
addFile :: T.Text -- ^ The parameter name for the file.
-> FilePath -- ^ The path to the file.
-> T.Text -- ^ The MIME type of the file, e.g. "image/png".
-> RequestBuilder site ()
addFile name path mimetype = do
contents <- liftIO $ BSL8.readFile path
ST.modify $ \rbd -> rbd { rbdPostData = (addPostData (rbdPostData rbd) contents) }
where addPostData (BinaryPostData _) _ = error "Trying to add file after setting binary content."
addPostData (MultipleItemsPostData posts) contents =
MultipleItemsPostData $ ReqFilePart name path contents mimetype : posts
-- This looks up the name of a field based on the contents of the label pointing to it.
nameFromLabel :: T.Text -> RequestBuilder site T.Text
nameFromLabel label = do
mres <- fmap rbdResponse ST.get
res <-
case mres of
Nothing -> failure "nameFromLabel: No response available"
Just res -> return res
let
body = simpleBody res
mlabel = parseHTML body
$// C.element "label"
>=> contentContains label
mfor = mlabel >>= attribute "for"
contentContains x c
| x `T.isInfixOf` T.concat (c $// content) = [c]
| otherwise = []
case mfor of
for:[] -> do
let mname = parseHTML body
$// attributeIs "id" for
>=> attribute "name"
case mname of
"":_ -> failure $ T.concat
[ "Label "
, label
, " resolved to id "
, for
, " which was not found. "
]
name:_ -> return name
[] -> failure $ "No input with id " <> for
[] ->
case filter (/= "") $ mlabel >>= (child >=> C.element "input" >=> attribute "name") of
[] -> failure $ "No label contained: " <> label
name:_ -> return name
_ -> failure $ "More than one label contained " <> label
(<>) :: T.Text -> T.Text -> T.Text
(<>) = T.append
-- How does this work for the alternate <label><input></label> syntax?
-- | Finds the @\<label>@ with the given value, finds its corresponding @\<input>@, then adds a parameter
-- for that input to the request body.
--
-- ==== __Examples__
--
-- Given this HTML, we want to submit @f1=Michael@ to the server:
--
-- > <form method="POST">
-- > <label for="user">Username</label>
-- > <input id="user" name="f1" />
-- > </form>
--
-- You can set this parameter like so:
--
-- > request $ do
-- > byLabel "Username" "Michael"
--
-- This function also supports the implicit label syntax, in which
-- the @\<input>@ is nested inside the @\<label>@ rather than specified with @for@:
--
-- > <form method="POST">
-- > <label>Username <input name="f1"> </label>
-- > </form>
byLabel :: T.Text -- ^ The text contained in the @\<label>@.
-> T.Text -- ^ The value to set the parameter to.
-> RequestBuilder site ()
byLabel label value = do
name <- nameFromLabel label
addPostParam name value
-- | Finds the @\<label>@ with the given value, finds its corresponding @\<input>@, then adds a file for that input to the request body.
--
-- ==== __Examples__
--
-- Given this HTML, we want to submit a file with the parameter name @f1@ to the server:
--
-- > <form method="POST">
-- > <label for="imageInput">Please submit an image</label>
-- > <input id="imageInput" type="file" name="f1" accept="image/*">
-- > </form>
--
-- You can set this parameter like so:
--
-- > request $ do
-- > fileByLabel "Please submit an image" "static/img/picture.png" "img/png"
--
-- This function also supports the implicit label syntax, in which
-- the @\<input>@ is nested inside the @\<label>@ rather than specified with @for@:
--
-- > <form method="POST">
-- > <label>Please submit an image <input type="file" name="f1"> </label>
-- > </form>
fileByLabel :: T.Text -- ^ The text contained in the @\<label>@.
-> FilePath -- ^ The path to the file.
-> T.Text -- ^ The MIME type of the file, e.g. "image/png".
-> RequestBuilder site ()
fileByLabel label path mime = do
name <- nameFromLabel label
addFile name path mime
-- | Looks up the hidden input named "_token" and adds its value to the params.
-- Receives a CSS selector that should resolve to the form element containing the token.
--
-- ==== __Examples__
--
-- > request $ do
-- > addToken_ "#formID"
addToken_ :: Query -> RequestBuilder site ()
addToken_ scope = do
matches <- htmlQuery' rbdResponse $ scope <> "input[name=_token][type=hidden][value]"
case matches of
[] -> failure $ "No CSRF token found in the current page"
element:[] -> addPostParam "_token" $ head $ attribute "value" $ parseHTML element
_ -> failure $ "More than one CSRF token found in the page"
-- | For responses that display a single form, just look up the only CSRF token available.
--
-- ==== __Examples__
--
-- > request $ do
-- > addToken
addToken :: RequestBuilder site ()
addToken = addToken_ ""
-- | Calls 'addTokenFromCookieNamedToHeaderNamed' with the 'defaultCsrfCookieName' and 'defaultCsrfHeaderName'.
--
-- Use this function if you're using the CSRF middleware from "Yesod.Core" and haven't customized the cookie or header name.
--
-- ==== __Examples__
--
-- > request $ do
-- > addTokenFromCookie
--
-- Since 1.4.3.2
addTokenFromCookie :: RequestBuilder site ()
addTokenFromCookie = addTokenFromCookieNamedToHeaderNamed defaultCsrfCookieName defaultCsrfHeaderName
-- | Looks up the CSRF token stored in the cookie with the given name and adds it to the request headers. An error is thrown if the cookie can't be found.
--
-- Use this function if you're using the CSRF middleware from "Yesod.Core" and have customized the cookie or header name.
--
-- See "Yesod.Core.Handler" for details on this approach to CSRF protection.
--
-- ==== __Examples__
--
-- > import Data.CaseInsensitive (CI)
-- > request $ do
-- > addTokenFromCookieNamedToHeaderNamed "cookieName" (CI "headerName")
--
-- Since 1.4.3.2
addTokenFromCookieNamedToHeaderNamed :: ByteString -- ^ The name of the cookie
-> CI ByteString -- ^ The name of the header
-> RequestBuilder site ()
addTokenFromCookieNamedToHeaderNamed cookieName headerName = do
cookies <- getRequestCookies
case M.lookup cookieName cookies of
Just csrfCookie -> addRequestHeader (headerName, Cookie.setCookieValue csrfCookie)
Nothing -> failure $ T.concat
[ "addTokenFromCookieNamedToHeaderNamed failed to lookup CSRF cookie with name: "
, T.pack $ show cookieName
, ". Cookies were: "
, T.pack $ show cookies
]
-- | Returns the 'Cookies' from the most recent request. If a request hasn't been made, an error is raised.
--
-- ==== __Examples__
--
-- > request $ do
-- > cookies <- getRequestCookies
-- > liftIO $ putStrLn $ "Cookies are: " ++ show cookies
--
-- Since 1.4.3.2
getRequestCookies :: RequestBuilder site Cookies
getRequestCookies = do
requestBuilderData <- ST.get
headers <- case simpleHeaders <$> rbdResponse requestBuilderData of
Just h -> return h
Nothing -> failure "getRequestCookies: No request has been made yet; the cookies can't be looked up."
return $ M.fromList $ map (\c -> (Cookie.setCookieName c, c)) (parseSetCookies headers)
-- | Perform a POST request to @url@.
--
-- ==== __Examples__
--
-- > post HomeR
post :: (Yesod site, RedirectUrl site url)
=> url
-> YesodExample site ()
post url = request $ do
setMethod "POST"
setUrl url
-- | Perform a POST request to @url@ with the given body.
--
-- ==== __Examples__
--
-- > postBody HomeR "foobar"
--
-- > import Data.Aeson
-- > postBody HomeR (encode $ object ["age" .= (1 :: Integer)])
postBody :: (Yesod site, RedirectUrl site url)
=> url
-> BSL8.ByteString
-> YesodExample site ()
postBody url body = request $ do
setMethod "POST"
setUrl url
setRequestBody body
-- | Perform a GET request to @url@.
--
-- ==== __Examples__
--
-- > get HomeR
--
-- > get ("http://google.com" :: Text)
get :: (Yesod site, RedirectUrl site url)
=> url
-> YesodExample site ()
get url = request $ do
setMethod "GET"
setUrl url
-- | Sets the HTTP method used by the request.
--
-- ==== __Examples__
--
-- > request $ do
-- > setMethod "POST"
--
-- > import Network.HTTP.Types.Method
-- > request $ do
-- > setMethod methodPut
setMethod :: H.Method -> RequestBuilder site ()
setMethod m = ST.modify $ \rbd -> rbd { rbdMethod = m }
-- | Sets the URL used by the request.
--
-- ==== __Examples__
--
-- > request $ do
-- > setUrl HomeR
--
-- > request $ do
-- > setUrl ("http://google.com/" :: Text)
setUrl :: (Yesod site, RedirectUrl site url)
=> url
-> RequestBuilder site ()
setUrl url' = do
site <- fmap rbdSite ST.get
eurl <- runFakeHandler
M.empty
(const $ error "Yesod.Test: No logger available")
site
(toTextUrl url')
url <- either (error . show) return eurl
let (urlPath, urlQuery) = T.break (== '?') url
ST.modify $ \rbd -> rbd
{ rbdPath =
case DL.filter (/="") $ H.decodePathSegments $ TE.encodeUtf8 urlPath of
("http:":_:rest) -> rest
("https:":_:rest) -> rest
x -> x
, rbdGets = rbdGets rbd ++ H.parseQuery (TE.encodeUtf8 urlQuery)
}
-- | Simple way to set HTTP request body
--
-- ==== __ Examples__
--
-- > request $ do
-- > setRequestBody "foobar"
--
-- > import Data.Aeson
-- > request $ do
-- > setRequestBody $ encode $ object ["age" .= (1 :: Integer)]
setRequestBody :: (Yesod site)
=> BSL8.ByteString
-> RequestBuilder site ()
setRequestBody body = ST.modify $ \rbd -> rbd { rbdPostData = BinaryPostData body }
-- | Adds the given header to the request; see "Network.HTTP.Types.Header" for creating 'Header's.
--
-- ==== __Examples__
--
-- > import Network.HTTP.Types.Header
-- > request $ do
-- > addRequestHeader (hUserAgent, "Chrome/41.0.2228.0")
addRequestHeader :: H.Header -> RequestBuilder site ()
addRequestHeader header = ST.modify $ \rbd -> rbd
{ rbdHeaders = header : rbdHeaders rbd
}
-- | The general interface for performing requests. 'request' takes a 'RequestBuilder',
-- constructs a request, and executes it.
--
-- The 'RequestBuilder' allows you to build up attributes of the request, like the
-- headers, parameters, and URL of the request.
--
-- ==== __Examples__
--
-- > request $ do
-- > addToken
-- > byLabel "First Name" "Felipe"
-- > setMethod "PUT"
-- > setUrl NameR
request :: Yesod site
=> RequestBuilder site ()
-> YesodExample site ()
request reqBuilder = do
YesodExampleData app site oldCookies mRes <- ST.get
RequestBuilderData {..} <- liftIO $ ST.execStateT reqBuilder RequestBuilderData
{ rbdPostData = MultipleItemsPostData []
, rbdResponse = mRes
, rbdMethod = "GET"
, rbdSite = site
, rbdPath = []
, rbdGets = []
, rbdHeaders = []
}
let path
| null rbdPath = "/"
| otherwise = TE.decodeUtf8 $ Builder.toByteString $ H.encodePathSegments rbdPath
-- expire cookies and filter them for the current path. TODO: support max age
currentUtc <- liftIO getCurrentTime
let cookies = M.filter (checkCookieTime currentUtc) oldCookies
cookiesForPath = M.filter (checkCookiePath path) cookies
let req = case rbdPostData of
MultipleItemsPostData x ->
if DL.any isFile x
then (multipart x)
else singlepart
BinaryPostData _ -> singlepart
where singlepart = makeSinglepart cookiesForPath rbdPostData rbdMethod rbdHeaders path rbdGets
multipart x = makeMultipart cookiesForPath x rbdMethod rbdHeaders path rbdGets
-- let maker = case rbdPostData of
-- MultipleItemsPostData x ->
-- if DL.any isFile x
-- then makeMultipart
-- else makeSinglepart
-- BinaryPostData _ -> makeSinglepart
-- let req = maker cookiesForPath rbdPostData rbdMethod rbdHeaders path rbdGets
response <- liftIO $ runSession (srequest req
{ simpleRequest = (simpleRequest req)
{ httpVersion = H.http11
}
}) app
let newCookies = parseSetCookies $ simpleHeaders response
cookies' = M.fromList [(Cookie.setCookieName c, c) | c <- newCookies] `M.union` cookies
ST.put $ YesodExampleData app site cookies' (Just response)
where
isFile (ReqFilePart _ _ _ _) = True
isFile _ = False
checkCookieTime t c = case Cookie.setCookieExpires c of
Nothing -> True
Just t' -> t < t'
checkCookiePath url c =
case Cookie.setCookiePath c of
Nothing -> True
Just x -> x `BS8.isPrefixOf` TE.encodeUtf8 url
-- For building the multi-part requests
boundary :: String
boundary = "*******noneedtomakethisrandom"
separator = BS8.concat ["--", BS8.pack boundary, "\r\n"]
makeMultipart :: M.Map a0 Cookie.SetCookie
-> [RequestPart]
-> H.Method
-> [H.Header]
-> T.Text
-> H.Query
-> SRequest
makeMultipart cookies parts method extraHeaders urlPath urlQuery =
SRequest simpleRequest' (simpleRequestBody' parts)
where simpleRequestBody' x =
BSL8.fromChunks [multiPartBody x]
simpleRequest' = mkRequest
[ ("Cookie", cookieValue)
, ("Content-Type", contentTypeValue)]
method extraHeaders urlPath urlQuery
cookieValue = Builder.toByteString $ Cookie.renderCookies cookiePairs
cookiePairs = [ (Cookie.setCookieName c, Cookie.setCookieValue c)
| c <- map snd $ M.toList cookies ]
contentTypeValue = BS8.pack $ "multipart/form-data; boundary=" ++ boundary
multiPartBody parts =
BS8.concat $ separator : [BS8.concat [multipartPart p, separator] | p <- parts]
multipartPart (ReqKvPart k v) = BS8.concat
[ "Content-Disposition: form-data; "
, "name=\"", TE.encodeUtf8 k, "\"\r\n\r\n"
, TE.encodeUtf8 v, "\r\n"]
multipartPart (ReqFilePart k v bytes mime) = BS8.concat
[ "Content-Disposition: form-data; "
, "name=\"", TE.encodeUtf8 k, "\"; "
, "filename=\"", BS8.pack v, "\"\r\n"
, "Content-Type: ", TE.encodeUtf8 mime, "\r\n\r\n"
, BS8.concat $ BSL8.toChunks bytes, "\r\n"]
-- For building the regular non-multipart requests
makeSinglepart :: M.Map a0 Cookie.SetCookie
-> RBDPostData
-> H.Method
-> [H.Header]
-> T.Text
-> H.Query
-> SRequest
makeSinglepart cookies rbdPostData method extraHeaders urlPath urlQuery =
SRequest simpleRequest' (simpleRequestBody' rbdPostData)
where
simpleRequest' = (mkRequest
[ ("Cookie", cookieValue)
, ("Content-Type", "application/x-www-form-urlencoded")]
method extraHeaders urlPath urlQuery)
simpleRequestBody' (MultipleItemsPostData x) =
BSL8.fromChunks $ return $ TE.encodeUtf8 $ T.intercalate "&"
$ map singlepartPart x
simpleRequestBody' (BinaryPostData x) = x
cookieValue = Builder.toByteString $ Cookie.renderCookies cookiePairs
cookiePairs = [ (Cookie.setCookieName c, Cookie.setCookieValue c)
| c <- map snd $ M.toList cookies ]
singlepartPart (ReqFilePart _ _ _ _) = ""
singlepartPart (ReqKvPart k v) = T.concat [k,"=",v]
-- General request making
mkRequest headers method extraHeaders urlPath urlQuery = defaultRequest
{ requestMethod = method
, remoteHost = Sock.SockAddrInet 1 2
, requestHeaders = headers ++ extraHeaders
, rawPathInfo = TE.encodeUtf8 urlPath
, pathInfo = H.decodePathSegments $ TE.encodeUtf8 urlPath
, rawQueryString = H.renderQuery False urlQuery
, queryString = urlQuery
}
parseSetCookies :: [H.Header] -> [Cookie.SetCookie]
parseSetCookies headers = map (Cookie.parseSetCookie . snd) $ DL.filter (("Set-Cookie"==) . fst) $ headers
-- Yes, just a shortcut
failure :: (MonadIO a) => T.Text -> a b
failure reason = (liftIO $ HUnit.assertFailure $ T.unpack reason) >> error ""
type TestApp site = (site, Middleware)
testApp :: site -> Middleware -> TestApp site
testApp site middleware = (site, middleware)
type YSpec site = Hspec.SpecWith (TestApp site)
instance YesodDispatch site => Hspec.Example (ST.StateT (YesodExampleData site) IO a) where
type Arg (ST.StateT (YesodExampleData site) IO a) = TestApp site
evaluateExample example params action =
Hspec.evaluateExample
(action $ \(site, middleware) -> do
app <- toWaiAppPlain site
_ <- ST.evalStateT example YesodExampleData
{ yedApp = middleware app
, yedSite = site
, yedCookies = M.empty
, yedResponse = Nothing
}
return ())
params
($ ())
| Daniel-Diaz/yesod | yesod-test/Yesod/Test.hs | mit | 33,635 | 0 | 19 | 8,389 | 6,720 | 3,603 | 3,117 | 542 | 9 |
module Queens (boardString, canAttack) where
type Square = (Int, Int)
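-- Render the 8x8 board as text, row by row, marking the white and black
-- queens (when given) and empty squares with underscores.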
boardString :: Maybe Square -> Maybe Square -> String
boardString whiteQueen blackQueen = unlines $ map (unwords . squares) [0..7]
where
squares row = map (\ col -> symbol $ Just (row, col)) [0..7]
symbol square
| square == whiteQueen = "W"
| square == blackQueen = "B"
| otherwise = "_"
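-- Two queens can attack each other when they share a row, a column, or a
-- diagonal (equal absolute row and column offsets).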
canAttack :: Square -> Square -> Bool
canAttack (x1, y1) (x2, y2) =
x1 == x2 || y1 == y2 || abs (x1 - x2) == abs (y1 - y2)
| Bugfry/exercises | exercism/haskell/queen-attack/src/Queens.hs | mit | 514 | 0 | 12 | 125 | 230 | 121 | 109 | 12 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
-- the base concept of location
-- spatial properties and relations
-- vector geometry could be represented as well-known text (http://en.wikipedia.org/wiki/Well-known_text)
-- better alternative encoding would be GeoJSON (http://www.macwright.org/2015/03/23/geojson-second-bite.html)
-- (c) Werner Kuhn
-- latest change: February 6, 2016
module Location where
-- locations are values of spatial attributes
-- unifying vector and raster representations
-- behavior is defined by spatial relations (add more as needed)
-- places are objects, not values, i.e. they do not belong here
class LOCATIONS location coord where
positionIn :: Position coord -> location coord -> Bool
-- contains :: location coord -> location coord -> Bool
-- distance :: location coord -> location coord -> coord
-- the number of dimensions
-- Int would be too general; it could be Nat, but that comes with no obvious choice of library
-- also, there are no computations on the number of dimensions
data Dimension = D1 | D2 | D3 deriving (Eq, Ord, Show)
errorDim = "different dimensions"
-- spatial reference systems
-- for now just an enumeration
-- possibly use http://en.wikipedia.org/wiki/SRID later
data SRS = WGS84 | Local deriving (Eq, Show)
errorSRS = "different spatial reference systems"
-- positions in any space
-- controlling for dimension
-- turns out similar to http://hackage.haskell.org/package/hgeometry
data Position coord = Position [coord] Dimension SRS deriving (Eq, Show)
instance LOCATIONS Position Int where
positionIn (Position clist1 dim1 srs1) (Position clist2 dim2 srs2)
| dim1 /= dim2 = error errorDim
| srs1 /= srs2 = error errorSRS
| otherwise = clist1 == clist2
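-- For illustration (using the test values defined at the end of this module):
--
-- > positionIn p11 p11  -- True
-- > positionIn p11 p12  -- False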
{- contains (Position clist1 dim1 srs1) (Position clist2 dim2 srs2)
| dim1 /= dim2 = error errorDim
| srs1 /= srs2 = error errorSRS
| otherwise = clist1 == clist2
distance (Position clist1 dim1 srs1) (Position clist2 dim2 srs2)
| dim1 /= dim2 = error errorDim
| srs1 /= srs2 = error errorSRS
| otherwise = sum (map abs (zipWith (-) clist2 clist1)) -- in discrete spaces, use manhattan metric
-}
{- euclidean, for Floating coord
-- float coords will need a more sophisticated Eq implementation (with tolerance, or Id based)
distance (Position clist1 dim1 srs1) (Position clist2 dim2 srs2) =
let sqr a = a*a
in if (srid1==srid2 && d1==d2) then sqrt (sum (map sqr (zipWith (-) c2 c1))) else error "different spaces"
-}
-- bounding boxes
-- parameterization does NOT guarantee the same dimension and SRS in both positions!
data MBR coord = MBR (Position coord) (Position coord) deriving (Eq, Show)
instance LOCATIONS MBR Int where
positionIn (Position clist1 dim1 srs1) (MBR (Position clist2 dim2 srs2) (Position clist3 dim3 srs3))
| (dim1 /= dim2) || (dim2 /= dim3) = error errorDim
| (srs1 /= srs2) || (srs2 /= srs3) = error errorSRS
| otherwise = error "not yet implemented"
{- contains (MBR (Position clist1 dim1 srs1) (Position clist2 dim2 srs2)) (MBR (Position clist3 dim3 srs3) (Position clist4 dim4 srs4))
| dim1 /= dim2 = error errorDim
| srs1 /= srs2 = error errorSRS
| otherwise = error "not yet implemented"
distance (MBR (Position clist1 dim1 srs1) (Position clist2 dim2 srs2)) (MBR (Position clist3 dim3 srs3) (Position clist4 dim4 srs4))
| dim1 /= dim2 = error errorDim
| srs1 /= srs2 = error errorSRS
| otherwise = sum (map abs (zipWith (-) clist2 clist1)) -- in discrete spaces, use manhattan metric
-}
-- converting positions to tuples (only 2-tuples for now)
-- needed as array indices (in Field), but may be useful otherwise
-- SRID intentionally dropped, can be added if needed
pos2Tup2 :: Position coord -> (coord, coord)
pos2Tup2 (Position c D2 s) = (c!!0,c!!1)
-- TESTS
p11, p12, p21, p22 :: Position Int
p11 = Position [1, 1] D2 Local
p12 = Position [1, 2] D2 Local
p21 = Position [2, 1] D2 Local
p22 = Position [2, 2] D2 Local
p11t = pos2Tup2 p11
p12t = pos2Tup2 p12
p21t = pos2Tup2 p21
p22t = pos2Tup2 p22
p3, p4 :: Position Float
p3 = Position [1.0, 2.0] D2 Local
p4 = Position [2.0, 1.0] D2 Local
mbr1 :: MBR Int
mbr1 = MBR p11 p22
mbr2 :: MBR Float
mbr2 = MBR p3 p4
--lt1 = show (distance p11 p22)
--lt2 = show (distance p3 p4)
| liangcun/ConceptsOfSpatialInformation | extras/CoreConceptsHs Backup/Location.hs | apache-2.0 | 4,261 | 0 | 11 | 773 | 649 | 357 | 292 | 39 | 1 |
{-| Converts a configuration state into a Ssconf map.
As TemplateHaskell requires that splices be defined in a separate
module, we combine all the TemplateHaskell functionality that HTools
needs in this module (except the one for unittests).
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.WConfd.Ssconf
( SSConf(..)
, emptySSConf
, mkSSConf
) where
import Control.Arrow ((&&&), first, second)
import Data.Foldable (Foldable(..), toList)
import Data.List (partition)
import Data.Maybe (mapMaybe)
import qualified Data.Map as M
import qualified Text.JSON as J
import Ganeti.BasicTypes
import Ganeti.Config
import Ganeti.Constants
import Ganeti.JSON
import Ganeti.Objects
import Ganeti.Ssconf
import Ganeti.Utils
import Ganeti.Types
eqPair :: (String, String) -> String
eqPair (x, y) = x ++ "=" ++ y
mkSSConfHvparams :: Cluster -> [(Hypervisor, [String])]
mkSSConfHvparams cluster = map (id &&& hvparams) [minBound..maxBound]
where
hvparams :: Hypervisor -> [String]
hvparams h = maybe [] hvparamsStrings
$ lookupContainer Nothing h (clusterHvparams cluster)
-- | Convert a collection of hypervisor parameters to strings in the form
-- @key=value@.
hvparamsStrings :: HvParams -> [String]
hvparamsStrings =
map (eqPair . second hvparamShow) . M.toList . fromContainer
-- | Convert a hypervisor parameter in its JSON representation to a String.
-- Strings, numbers and booleans are just printed (without quotes), booleans
-- printed as @True@/@False@ and other JSON values (should they exist) as
-- their JSON representations.
hvparamShow :: J.JSValue -> String
hvparamShow (J.JSString s) = J.fromJSString s
hvparamShow (J.JSRational _ r) = J.showJSRational r []
hvparamShow (J.JSBool b) = show b
hvparamShow x = J.encode x
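-- For example (sketch):
--
-- > hvparamShow (J.JSBool True)                   -- "True"
-- > hvparamShow (J.JSString (J.toJSString "kvm")) -- "kvm"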
mkSSConf :: ConfigData -> SSConf
mkSSConf cdata = SSConf . M.fromList $
[ (SSClusterName, return $ clusterClusterName cluster)
, (SSClusterTags, toList $ tagsOf cluster)
, (SSFileStorageDir, return $ clusterFileStorageDir cluster)
, (SSSharedFileStorageDir, return $ clusterSharedFileStorageDir cluster)
, (SSGlusterStorageDir, return $ clusterGlusterStorageDir cluster)
, (SSMasterCandidates, mapLines nodeName mcs)
, (SSMasterCandidatesIps, mapLines nodePrimaryIp mcs)
, (SSMasterCandidatesCerts, mapLines eqPair . toPairs
. clusterCandidateCerts $ cluster)
, (SSMasterIp, return $ clusterMasterIp cluster)
, (SSMasterNetdev, return $ clusterMasterNetdev cluster)
, (SSMasterNetmask, return . show $ clusterMasterNetmask cluster)
, (SSMasterNode, return
. genericResult (const "NO MASTER") nodeName
. getNode cdata $ clusterMasterNode cluster)
, (SSNodeList, mapLines nodeName nodes)
, (SSNodePrimaryIps, mapLines (spcPair . (nodeName &&& nodePrimaryIp))
nodes )
, (SSNodeSecondaryIps, mapLines (spcPair . (nodeName &&& nodeSecondaryIp))
nodes )
, (SSNodeVmCapable, mapLines (eqPair . (nodeName &&& show . nodeVmCapable))
nodes)
, (SSOfflineNodes, mapLines nodeName offline )
, (SSOnlineNodes, mapLines nodeName online )
, (SSPrimaryIpFamily, return . show . ipFamilyToRaw
. clusterPrimaryIpFamily $ cluster)
, (SSInstanceList, niceSort . mapMaybe instName
. toList . configInstances $ cdata)
, (SSReleaseVersion, return releaseVersion)
, (SSHypervisorList, mapLines hypervisorToRaw
. clusterEnabledHypervisors $ cluster)
, (SSMaintainNodeHealth, return . show . clusterMaintainNodeHealth
$ cluster)
, (SSUidPool, mapLines formatUidRange . clusterUidPool $ cluster)
, (SSNodegroups, mapLines (spcPair . (groupUuid &&& groupName))
nodeGroups)
, (SSNetworks, mapLines (spcPair . (networkUuid
&&& (fromNonEmpty . networkName)))
. configNetworks $ cdata)
, (SSEnabledUserShutdown, return . show . clusterEnabledUserShutdown
$ cluster)
, (SSSshPorts, mapLines (eqPair . (nodeName
&&& getSshPort cdata)) nodes)
] ++
map (first hvparamsSSKey) (mkSSConfHvparams cluster)
where
mapLines :: (Foldable f) => (a -> String) -> f a -> [String]
mapLines f = map f . toList
spcPair (x, y) = x ++ " " ++ y
toPairs = M.assocs . fromContainer
cluster = configCluster cdata
mcs = getMasterOrCandidates cdata
nodes = niceSortKey nodeName . toList $ configNodes cdata
(offline, online) = partition nodeOffline nodes
nodeGroups = niceSortKey groupName . toList $ configNodegroups cdata
-- This will return the empty string only for the situation where the
-- configuration is corrupted and no nodegroup can be found for that node.
getSshPort :: ConfigData -> Node -> String
getSshPort cfg node = maybe "" (show . ndpSshPort)
$ getNodeNdParams cfg node
| bitemyapp/ganeti | src/Ganeti/WConfd/Ssconf.hs | bsd-2-clause | 6,454 | 0 | 17 | 1,535 | 1,250 | 691 | 559 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Tests for lock allocation.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Locking.Allocation
( testLocking_Allocation
, TestLock
, TestOwner
, requestSucceeded
) where
import qualified Data.Foldable as F
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
import qualified Text.JSON as J
import Test.QuickCheck
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHelper
import Ganeti.BasicTypes
import Ganeti.Locking.Allocation
import Ganeti.Locking.Types
{-
Ganeti.Locking.Allocation is polymorphic in the types of locks
and lock owners. So we can use much simpler types here than Ganeti's
real locks and lock owners, knowing that polymorphic functions cannot
exploit the simplicity of the types they're dealing with.
-}
data TestOwner = TestOwner Int deriving (Ord, Eq, Show)
instance Arbitrary TestOwner where
arbitrary = TestOwner <$> choose (0, 2)
data TestLock = TestBigLock
| TestCollectionLockA
| TestLockA Int
| TestCollectionLockB
| TestLockB Int
deriving (Ord, Eq, Show, Read)
instance Arbitrary TestLock where
arbitrary = frequency [ (1, elements [ TestBigLock
, TestCollectionLockA
, TestCollectionLockB
])
, (2, TestLockA <$> choose (0, 2))
, (2, TestLockB <$> choose (0, 2))
]
instance Lock TestLock where
lockImplications (TestLockA _) = [TestCollectionLockA, TestBigLock]
lockImplications (TestLockB _) = [TestCollectionLockB, TestBigLock]
lockImplications TestBigLock = []
lockImplications _ = [TestBigLock]
{-
All states of a LockAllocation ever available outside the
Ganeti.Locking.Allocation module must be constructed by starting
with emptyAllocation and applying the exported functions.
-}
instance Arbitrary OwnerState where
arbitrary = elements [OwnShared, OwnExclusive]
instance Arbitrary a => Arbitrary (LockRequest a) where
arbitrary = LockRequest <$> arbitrary <*> genMaybe arbitrary
data UpdateRequest b a = UpdateRequest b [LockRequest a]
| FreeLockRequest b
deriving Show
instance (Arbitrary a, Arbitrary b) => Arbitrary (UpdateRequest a b) where
arbitrary =
frequency [ (4, UpdateRequest <$> arbitrary <*> (choose (1, 4) >>= vector))
, (1, FreeLockRequest <$> arbitrary)
]
-- | Transform an UpdateRequest into the corresponding state transformer.
asAllocTrans :: (Lock a, Ord b, Show b)
=> LockAllocation a b -> UpdateRequest b a -> LockAllocation a b
asAllocTrans state (UpdateRequest owner updates) =
fst $ updateLocks owner updates state
asAllocTrans state (FreeLockRequest owner) = freeLocks state owner
-- | Fold a sequence of requests to transform a lock allocation onto the empty
-- allocation. As we consider all exported LockAllocation transformers, any
-- LockAllocation definable is obtained in this way.
foldUpdates :: (Lock a, Ord b, Show b)
=> [UpdateRequest b a] -> LockAllocation a b
foldUpdates = foldl asAllocTrans emptyAllocation
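-- A minimal sketch (not part of the original test suite) of building such a
-- state by hand, using only the exported transformers: owner 0 acquires
-- TestBigLock exclusively, starting from the empty allocation.  The name
-- 'exampleAllocation' is purely illustrative.
exampleAllocation :: LockAllocation TestLock TestOwner
exampleAllocation =
  fst $ updateLocks (TestOwner 0) [requestExclusive TestBigLock] emptyAllocation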
instance (Arbitrary a, Lock a, Arbitrary b, Ord b, Show b)
=> Arbitrary (LockAllocation a b) where
arbitrary = foldUpdates <$> (choose (0, 8) >>= vector)
-- | Basic property of locking: the exclusive locks of one user
-- are disjoint from any locks of any other user.
prop_LocksDisjoint :: Property
prop_LocksDisjoint =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
let aExclusive = M.keysSet . M.filter (== OwnExclusive) $ listLocks a state
bAll = M.keysSet $ listLocks b state
in counterexample
(show a ++ "'s exclusive lock" ++ " is not respected by " ++ show b)
(S.null $ S.intersection aExclusive bAll)
-- | Verify that the list of active locks indeed contains all locks that
-- are owned by someone.
prop_LockslistComplete :: Property
prop_LockslistComplete =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . M.null . listLocks a)) $ \state ->
counterexample "All owned locks must be mentioned in the all-locks list" $
let allLocks = listAllLocks state in
all (`elem` allLocks) (M.keys $ listLocks a state)
-- | Verify that the list of all locks with states is contained in the list
-- of all locks.
prop_LocksAllOwnersSubsetLockslist :: Property
prop_LocksAllOwnersSubsetLockslist =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
counterexample "The list of all active locks must contain all locks mentioned\
\ in the locks state" $
S.isSubsetOf (S.fromList . map fst $ listAllLocksOwners state)
(S.fromList $ listAllLocks state)
-- | Verify that all locks of all owners are mentioned in the list of all locks'
-- owner's state.
prop_LocksAllOwnersComplete :: Property
prop_LocksAllOwnersComplete =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . M.null . listLocks a)) $ \state ->
counterexample "Owned locks must be mentioned in list of all locks' state" $
let allLocksState = listAllLocksOwners state
in flip all (M.toList $ listLocks a state) $ \(lock, ownership) ->
elem (a, ownership) . fromMaybe [] $ lookup lock allLocksState
-- | Verify that all lock owners mentioned in the list of all locks' owner's
-- state actually own their lock.
prop_LocksAllOwnersSound :: Property
prop_LocksAllOwnersSound =
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . null . listAllLocksOwners)) $ \state ->
counterexample "All locks mentioned in listAllLocksOwners must be owned by\
\ the mentioned owner" .
flip all (listAllLocksOwners state) $ \(lock, owners) ->
flip all owners $ \(owner, ownership) -> holdsLock owner lock ownership state
-- | Verify that exclusive group locks are honored, i.e., verify that if someone
-- holds a lock, then no one else can hold an exclusive lock on an
-- implied lock.
prop_LockImplicationX :: Property
prop_LockImplicationX =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
let bExclusive = M.keysSet . M.filter (== OwnExclusive) $ listLocks b state
in counterexample "Others cannot have an exclusive lock on an implied lock" .
flip all (M.keys $ listLocks a state) $ \lock ->
flip all (lockImplications lock) $ \impliedlock ->
not $ S.member impliedlock bExclusive
-- | Verify that shared group locks are honored, i.e., verify that if someone
-- holds an exclusive lock, then no one else can hold any form of lock on an
-- implied lock.
prop_LockImplicationS :: Property
prop_LockImplicationS =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
let aExclusive = M.keys . M.filter (== OwnExclusive) $ listLocks a state
bAll = M.keysSet $ listLocks b state
in counterexample "Others cannot hold locks implied by an exclusive lock" .
flip all aExclusive $ \lock ->
flip all (lockImplications lock) $ \impliedlock ->
not $ S.member impliedlock bAll
-- | Verify that locks can only be modified by updates of the owner.
prop_LocksStable :: Property
prop_LocksStable =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary `suchThat` (/= a)) $ \b ->
forAll (arbitrary :: Gen [LockRequest TestLock]) $ \request ->
let (state', _) = updateLocks b request state
in (listLocks a state ==? listLocks a state')
-- | Verify that a given request is satisfied in the list of owned locks
requestSucceeded :: Ord a => M.Map a OwnerState -> LockRequest a -> Bool
requestSucceeded owned (LockRequest lock status) = M.lookup lock owned == status
-- | Verify that lock updates are atomic, i.e., either we get all the required
-- locks, or the state is completely unchanged.
prop_LockupdateAtomic :: Property
prop_LockupdateAtomic =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen [LockRequest TestLock]) $ \request ->
let (state', result) = updateLocks a request state
in if result == Ok S.empty
then counterexample
("Update succeeded, but in final state " ++ show state'
++ "not all locks are as requested")
$ let owned = listLocks a state'
in all (requestSucceeded owned) request
else counterexample
("Update failed, but state changed to " ++ show state')
(state == state')
-- | Verify that releasing a lock always succeeds.
prop_LockReleaseSucceeds :: Property
prop_LockReleaseSucceeds =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen TestLock) $ \lock ->
let (_, result) = updateLocks a [requestRelease lock] state
     in counterexample
          ("Releasing a lock has to succeed unconditionally, but got "
++ show result)
(isOk result)
-- | Verify the property that only the blocking owners prevent
-- lock allocation. We deliberately go for the expensive variant
-- of restraining by suchThat, as otherwise the number of cases actually
-- covered is too small.
prop_BlockSufficient :: Property
prop_BlockSufficient =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen TestLock) $ \lock ->
forAll (elements [ [requestShared lock]
, [requestExclusive lock]]) $ \request ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (genericResult (const False) (not . S.null)
. snd . updateLocks a request)) $ \state ->
let (_, result) = updateLocks a request state
blockedOn = genericResult (const S.empty) id result
in counterexample "After all blockers release, a request must succeed"
. isOk . snd . updateLocks a request $ F.foldl freeLocks state blockedOn
-- | Verify the property that every blocking owner is necessary, i.e., even
-- if we only keep the locks of one of the blocking owners, the request still
-- will be blocked. We deliberately use the expensive variant of restraining
-- to ensure good coverage. To make sure the request can always be blocked
-- by two owners, for a shared request we request two different locks.
prop_BlockNecessary :: Property
prop_BlockNecessary =
forAll (arbitrary :: Gen TestOwner) $ \a ->
forAll (arbitrary :: Gen TestLock) $ \lock ->
forAll (arbitrary `suchThat` (/= lock)) $ \lock' ->
forAll (elements [ [requestShared lock, requestShared lock']
, [requestExclusive lock]]) $ \request ->
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (genericResult (const False) ((>= 2) . S.size)
. snd . updateLocks a request)) $ \state ->
let (_, result) = updateLocks a request state
blockers = genericResult (const S.empty) id result
in counterexample "Each blocker alone must block the request"
. flip all (S.elems blockers) $ \blocker ->
(==) (Ok $ S.singleton blocker) . snd . updateLocks a request
. F.foldl freeLocks state
$ S.filter (/= blocker) blockers
instance J.JSON TestOwner where
showJSON (TestOwner x) = J.showJSON x
readJSON = (>>= return . TestOwner) . J.readJSON
instance J.JSON TestLock where
showJSON = J.showJSON . show
readJSON = (>>= return . read) . J.readJSON
-- | Verify that for LockAllocation we have readJSON . showJSON = Ok.
prop_ReadShow :: Property
prop_ReadShow =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
J.readJSON (J.showJSON state) ==? J.Ok state
-- | Verify that the list of lock owners is complete.
prop_OwnerComplete :: Property
prop_OwnerComplete =
forAll (arbitrary :: Gen (LockAllocation TestLock TestOwner)) $ \state ->
foldl freeLocks state (lockOwners state) ==? emptyAllocation
-- | Verify that each owner actually owns a lock.
prop_OwnerSound :: Property
prop_OwnerSound =
forAll ((arbitrary :: Gen (LockAllocation TestLock TestOwner))
`suchThat` (not . null . lockOwners)) $ \state ->
counterexample "All subjects listed as owners must own at least one lock"
. flip all (lockOwners state) $ \owner ->
not . M.null $ listLocks owner state
-- | Verify that for LockRequest we have readJSON . showJSON = Ok.
prop_ReadShowRequest :: Property
prop_ReadShowRequest =
forAll (arbitrary :: Gen (LockRequest TestLock)) $ \state ->
J.readJSON (J.showJSON state) ==? J.Ok state
testSuite "Locking/Allocation"
[ 'prop_LocksDisjoint
, 'prop_LockslistComplete
, 'prop_LocksAllOwnersSubsetLockslist
, 'prop_LocksAllOwnersComplete
, 'prop_LocksAllOwnersSound
, 'prop_LockImplicationX
, 'prop_LockImplicationS
, 'prop_LocksStable
, 'prop_LockupdateAtomic
, 'prop_LockReleaseSucceeds
, 'prop_BlockSufficient
, 'prop_BlockNecessary
, 'prop_ReadShow
, 'prop_OwnerComplete
, 'prop_OwnerSound
, 'prop_ReadShowRequest
]
| mbakke/ganeti | test/hs/Test/Ganeti/Locking/Allocation.hs | bsd-2-clause | 14,925 | 0 | 27 | 3,125 | 3,385 | 1,812 | 1,573 | 230 | 2 |
{-# LANGUAGE BangPatterns, ForeignFunctionInterface, MultiParamTypeClasses, CPP #-}
-----------------------------------------------------------------------------
--
-- Module : Data.Digest.Pure.MD5
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable, requires bang patterns and ByteString
-- Tested with : GHC-6.8.1
--
-- | It is suggested you use the 'crypto-api' class-based interface to access the MD5 algorithm.
-- Either rely on type inference or provide an explicit type:
--
-- @
-- hashFileStrict = liftM hash' . B.readFile
-- hashFileLazyBS = liftM hash . L.readFile
-- @
--
-----------------------------------------------------------------------------
module Data.Digest.Pure.MD5
(
-- * Types
MD5Context
, MD5Digest
-- * Static data
, md5InitialContext
-- * Functions
, md5
, md5Update
, md5Finalize
-- * Crypto-API interface
, Hash(..)
) where
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Unsafe (unsafeDrop, unsafeUseAsCString)
import Data.ByteString.Internal
import Data.Bits
import Data.List hiding ((!!))
import Data.Word
import Foreign.Storable
import Foreign.Ptr (castPtr)
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import qualified Data.Serialize.Get as G
import qualified Data.Serialize.Put as P
import qualified Data.Serialize as S
import Crypto.Classes (Hash(..), hash)
import Data.Tagged
import Numeric
import Prelude hiding ((!!))
-- | Block size in bits
md5BlockSize :: Int
md5BlockSize = 512
blockSizeBytes :: Int
blockSizeBytes = md5BlockSize `div` 8
-- | The raw internal hash state: four 32-bit accumulator words.
data MD5Partial = MD5Par {-# UNPACK #-} !Word32 {-# UNPACK #-} !Word32 {-# UNPACK #-} !Word32 {-# UNPACK #-} !Word32
deriving (Ord, Eq)
-- | The type for intermediate results (from md5Update): the running hash
-- state together with the total number of bytes processed so far.
data MD5Context = MD5Ctx { mdPartial :: {-# UNPACK #-} !MD5Partial,
mdTotalLen :: {-# UNPACK #-} !Word64 }
-- |After finalizing a context, using md5Finalize, a new type
-- is returned to prevent 're-finalizing' the structure.
data MD5Digest = MD5Digest MD5Partial deriving (Eq, Ord)
-- | The initial context to use when calling md5Update for the first time
md5InitialContext :: MD5Context
md5InitialContext = MD5Ctx (MD5Par h0 h1 h2 h3) 0
where
h0 = 0x67452301
h1 = 0xEFCDAB89
h2 = 0x98BADCFE
h3 = 0x10325476
-- | Processes a lazy ByteString and returns the md5 digest.
-- This is probably what you want.
md5 :: L.ByteString -> MD5Digest
md5 = hash
-- | Closes an MD5 context, thus producing the digest.
md5Finalize :: MD5Context -> B.ByteString -> MD5Digest
md5Finalize !(MD5Ctx par !totLen) end =
let totLen' = 8*(totLen + fromIntegral l) :: Word64
padBS = P.runPut ( do
P.putByteString end
P.putWord8 0x80
mapM_ P.putWord8 (replicate lenZeroPad 0)
P.putWord64le totLen' )
in MD5Digest $ blockAndDo par padBS
where
l = B.length end
lenZeroPad = if (l + 1) <= blockSizeBytes - 8
then (blockSizeBytes - 8) - (l + 1)
else (2 * blockSizeBytes - 8) - (l + 1)
-- | Alters the MD5Context with a partial digest of the data.
--
-- The input bytestring MUST be a multiple of the blockSize
-- or bad things can happen (incorrect digest results)!
md5Update :: MD5Context -> B.ByteString -> MD5Context
md5Update ctx bs
| B.length bs `rem` blockSizeBytes /= 0 = error "Invalid use of hash update routine (see crypto-api Hash class semantics)"
| otherwise =
let bs' = if isAligned bs then bs else B.copy bs -- copying has been measured as a net win on my x86 system
new = blockAndDo (mdPartial ctx) bs'
in ctx { mdPartial = new, mdTotalLen = mdTotalLen ctx + fromIntegral (B.length bs) }
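-- A usage sketch (hypothetical helper, not part of the exported API): hash
-- data incrementally by folding block-aligned chunks through md5Update and
-- closing the context with md5Finalize.  Every chunk in 'blocks' is assumed
-- to be a multiple of the 64-byte block size, as md5Update requires; only
-- the trailing fragment may have arbitrary length.
md5Incremental :: [B.ByteString] -> B.ByteString -> MD5Digest
md5Incremental blocks trailing =
  md5Finalize (foldl md5Update md5InitialContext blocks) trailing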
blockAndDo :: MD5Partial -> B.ByteString -> MD5Partial
blockAndDo !ctx bs
| B.length bs == 0 = ctx
| otherwise =
let !new = performMD5Update ctx bs
in blockAndDo new (unsafeDrop blockSizeBytes bs)
{-# INLINE blockAndDo #-}
-- Assumes ByteString length == blockSizeBytes, will fold the
-- context across calls to applyMD5Rounds.
performMD5Update :: MD5Partial -> B.ByteString -> MD5Partial
performMD5Update !par@(MD5Par !a !b !c !d) !bs =
let MD5Par a' b' c' d' = applyMD5Rounds par bs
in MD5Par (a' + a) (b' + b) (c' + c) (d' + d)
{-# INLINE performMD5Update #-}
isAligned :: ByteString -> Bool
isAligned (PS _ off _) = off `rem` 4 == 0
applyMD5Rounds :: MD5Partial -> ByteString -> MD5Partial
applyMD5Rounds (MD5Par a b c d) w = {-# SCC "applyMD5Rounds" #-}
let -- Round 1
! r0 = ff a b c d (w!! 0) 7 3614090360
! r1 = ff d r0 b c (w!! 1) 12 3905402710
! r2 = ff c r1 r0 b (w!! 2) 17 606105819
! r3 = ff b r2 r1 r0 (w!! 3) 22 3250441966
! r4 = ff r0 r3 r2 r1 (w!! 4) 7 4118548399
! r5 = ff r1 r4 r3 r2 (w!! 5) 12 1200080426
! r6 = ff r2 r5 r4 r3 (w!! 6) 17 2821735955
! r7 = ff r3 r6 r5 r4 (w!! 7) 22 4249261313
! r8 = ff r4 r7 r6 r5 (w!! 8) 7 1770035416
! r9 = ff r5 r8 r7 r6 (w!! 9) 12 2336552879
!r10 = ff r6 r9 r8 r7 (w!!10) 17 4294925233
!r11 = ff r7 r10 r9 r8 (w!!11) 22 2304563134
!r12 = ff r8 r11 r10 r9 (w!!12) 7 1804603682
!r13 = ff r9 r12 r11 r10 (w!!13) 12 4254626195
!r14 = ff r10 r13 r12 r11 (w!!14) 17 2792965006
!r15 = ff r11 r14 r13 r12 (w!!15) 22 1236535329
-- Round 2
!r16 = gg r12 r15 r14 r13 (w!! 1) 5 4129170786
!r17 = gg r13 r16 r15 r14 (w!! 6) 9 3225465664
!r18 = gg r14 r17 r16 r15 (w!!11) 14 643717713
!r19 = gg r15 r18 r17 r16 (w!! 0) 20 3921069994
!r20 = gg r16 r19 r18 r17 (w!! 5) 5 3593408605
!r21 = gg r17 r20 r19 r18 (w!!10) 9 38016083
!r22 = gg r18 r21 r20 r19 (w!!15) 14 3634488961
!r23 = gg r19 r22 r21 r20 (w!! 4) 20 3889429448
!r24 = gg r20 r23 r22 r21 (w!! 9) 5 568446438
!r25 = gg r21 r24 r23 r22 (w!!14) 9 3275163606
!r26 = gg r22 r25 r24 r23 (w!! 3) 14 4107603335
!r27 = gg r23 r26 r25 r24 (w!! 8) 20 1163531501
!r28 = gg r24 r27 r26 r25 (w!!13) 5 2850285829
!r29 = gg r25 r28 r27 r26 (w!! 2) 9 4243563512
!r30 = gg r26 r29 r28 r27 (w!! 7) 14 1735328473
!r31 = gg r27 r30 r29 r28 (w!!12) 20 2368359562
-- Round 3
!r32 = hh r28 r31 r30 r29 (w!! 5) 4 4294588738
!r33 = hh r29 r32 r31 r30 (w!! 8) 11 2272392833
!r34 = hh r30 r33 r32 r31 (w!!11) 16 1839030562
!r35 = hh r31 r34 r33 r32 (w!!14) 23 4259657740
!r36 = hh r32 r35 r34 r33 (w!! 1) 4 2763975236
!r37 = hh r33 r36 r35 r34 (w!! 4) 11 1272893353
!r38 = hh r34 r37 r36 r35 (w!! 7) 16 4139469664
!r39 = hh r35 r38 r37 r36 (w!!10) 23 3200236656
!r40 = hh r36 r39 r38 r37 (w!!13) 4 681279174
!r41 = hh r37 r40 r39 r38 (w!! 0) 11 3936430074
!r42 = hh r38 r41 r40 r39 (w!! 3) 16 3572445317
!r43 = hh r39 r42 r41 r40 (w!! 6) 23 76029189
!r44 = hh r40 r43 r42 r41 (w!! 9) 4 3654602809
!r45 = hh r41 r44 r43 r42 (w!!12) 11 3873151461
!r46 = hh r42 r45 r44 r43 (w!!15) 16 530742520
!r47 = hh r43 r46 r45 r44 (w!! 2) 23 3299628645
-- Round 4
!r48 = ii r44 r47 r46 r45 (w!! 0) 6 4096336452
!r49 = ii r45 r48 r47 r46 (w!! 7) 10 1126891415
!r50 = ii r46 r49 r48 r47 (w!!14) 15 2878612391
!r51 = ii r47 r50 r49 r48 (w!! 5) 21 4237533241
!r52 = ii r48 r51 r50 r49 (w!!12) 6 1700485571
!r53 = ii r49 r52 r51 r50 (w!! 3) 10 2399980690
!r54 = ii r50 r53 r52 r51 (w!!10) 15 4293915773
!r55 = ii r51 r54 r53 r52 (w!! 1) 21 2240044497
!r56 = ii r52 r55 r54 r53 (w!! 8) 6 1873313359
!r57 = ii r53 r56 r55 r54 (w!!15) 10 4264355552
!r58 = ii r54 r57 r56 r55 (w!! 6) 15 2734768916
!r59 = ii r55 r58 r57 r56 (w!!13) 21 1309151649
!r60 = ii r56 r59 r58 r57 (w!! 4) 6 4149444226
!r61 = ii r57 r60 r59 r58 (w!!11) 10 3174756917
!r62 = ii r58 r61 r60 r59 (w!! 2) 15 718787259
!r63 = ii r59 r62 r61 r60 (w!! 9) 21 3951481745
in MD5Par r60 r63 r62 r61
where
f !x !y !z = (x .&. y) .|. ((complement x) .&. z)
{-# INLINE f #-}
g !x !y !z = (x .&. z) .|. (y .&. (complement z))
{-# INLINE g #-}
h !x !y !z = (x `xor` y `xor` z)
{-# INLINE h #-}
i !x !y !z = y `xor` (x .|. (complement z))
{-# INLINE i #-}
ff a_ b_ c_ d_ !x s ac = {-# SCC "ff" #-}
let !a' = f b_ c_ d_ + x + ac + a_
!a'' = rotateL a' s
in a'' + b_
{-# INLINE ff #-}
gg a_ b_ c_ d_ !x s ac = {-# SCC "gg" #-}
let !a' = g b_ c_ d_ + x + ac + a_
!a'' = rotateL a' s
in a'' + b_
{-# INLINE gg #-}
hh a_ b_ c_ d_ !x s ac = {-# SCC "hh" #-}
let !a' = h b_ c_ d_ + x + ac + a_
!a'' = rotateL a' s
in a'' + b_
{-# INLINE hh #-}
ii a_ b_ c_ d_ !x s ac = {-# SCC "ii" #-}
let !a' = i b_ c_ d_ + x + ac + a_
!a'' = rotateL a' s
in a'' + b_
{-# INLINE ii #-}
(!!) word32s pos = getNthWord pos word32s
{-# INLINE (!!) #-}
{-# INLINE applyMD5Rounds #-}
#ifdef FastWordExtract
getNthWord n b = inlinePerformIO (unsafeUseAsCString b (flip peekElemOff n . castPtr))
#else
getNthWord :: Int -> B.ByteString -> Word32
getNthWord n = right . G.runGet G.getWord32le . B.drop (n * sizeOf (undefined :: Word32))
where
right x = case x of Right y -> y
_ -> error "Missing Case for getNthWord"
#endif
{-# INLINE getNthWord #-}
----- Some quick and dirty instances follow -----
instance Show MD5Digest where
show (MD5Digest h) = show h
instance Show MD5Partial where
show (MD5Par a b c d) =
let bs = runPut $ putWord32be d >> putWord32be c >> putWord32be b >> putWord32be a
in foldl' (\str w -> let e = showHex w str
in if length e < length str + 2
then '0':e
else e) "" (L.unpack bs)
instance Binary MD5Digest where
put (MD5Digest p) = put p
get = do
p <- get
return $ MD5Digest p
instance Binary MD5Context where
put (MD5Ctx p l) = put p >> putWord64be l
get = do p <- get
l <- getWord64be
return $ MD5Ctx p l
instance Binary MD5Partial where
put (MD5Par a b c d) = putWord32le a >> putWord32le b >> putWord32le c >> putWord32le d
get = do a <- getWord32le
b <- getWord32le
c <- getWord32le
d <- getWord32le
return $ MD5Par a b c d
instance S.Serialize MD5Digest where
put (MD5Digest p) = S.put p
get = do
p <- S.get
return $ MD5Digest p
instance S.Serialize MD5Context where
put (MD5Ctx p l) = S.put p >>
P.putWord64be l
get = do p <- S.get
l <- G.getWord64be
return $ MD5Ctx p l
instance S.Serialize MD5Partial where
put (MD5Par a b c d) = P.putWord32le a >> P.putWord32le b >> P.putWord32le c >> P.putWord32le d
get = do a <- G.getWord32le
b <- G.getWord32le
c <- G.getWord32le
d <- G.getWord32le
return $ MD5Par a b c d
instance Hash MD5Context MD5Digest where
outputLength = Tagged 128
blockLength = Tagged 512
initialCtx = md5InitialContext
updateCtx = md5Update
finalize = md5Finalize
| sordina/PureMD5Improvements | Data/Digest/Pure/MD5.hs | bsd-3-clause | 12,396 | 20 | 16 | 4,243 | 4,027 | 2,029 | 1,998 | 234 | 2 |
{-# LANGUAGE CPP, FlexibleContexts #-}
#include "fusion-phases.h"
-- | Definition of the PArray type, and functions that work on it. The PArray
-- type is a PData with an array length. The functions we export from this
-- module are just wrappers for the PD functions from Data.Array.Parallel.PArray.PRepr.
--
-- TODO: Check inconsistent use of INLINE pragmas.
-- Most have INLINE_PA, but bpermutePD and nfPD have plain INLINE
--
module Data.Array.Parallel.PArray.Base (
PArray(..),
lengthPA#,
dataPA#,
-- These functions have corresponding members in the PR class
-- from Data.Array.Parallel.PArray.PData.
emptyPA,
replicatePA#,
replicatelPA#,
repeatPA#,
indexPA#,
extractPA#,
bpermutePA#,
appPA#,
applPA#,
packByTagPA#,
combine2PA#,
updatePA#,
fromListPA#, fromListPA,
nfPA,
)
where
import Data.Array.Parallel.Lifted.Unboxed (elementsSegd#)
import Data.Array.Parallel.PArray.PData
import Data.Array.Parallel.PArray.PRepr
import Data.Array.Parallel.Base (Tag)
import qualified Data.Array.Parallel.Unlifted as U
import GHC.Exts (Int#, Int(..), (+#), (*#))
import SpecConstr
-- | Lifted\/bulk parallel arrays
-- This contains the array length, along with the element data.
--
{-# ANN type PArray NoSpecConstr #-}
data PArray a = PArray Int# (PData a)
-- | Take the length field of a PArray.
lengthPA# :: PArray a -> Int#
{-# INLINE_PA lengthPA# #-}
lengthPA# (PArray n# _) = n#
-- | Take the data field of a PArray.
dataPA# :: PArray a -> PData a
{-# INLINE_PA dataPA# #-}
dataPA# (PArray _ d) = d
-- PA Wrappers ----------------------------------------------------------------
-- These wrappers work on PArrays. As the PArray contains a PData, we can
-- can just pass this to the corresponding PD function from
-- Data.Array.Parallel.PArray.PRepr. However, as a PData doesn't contain
-- the array length, we need to do the length calculations here.
--
-- Note: There are some more operator# functions that work on PArrays in
-- "Data.Array.Parallel.PArray.DataInstances". The ones there have
-- a similar shape but need to know about the underlying representation
-- constructors.
--
emptyPA :: PA a => PArray a
{-# INLINE_PA emptyPA #-}
emptyPA
= PArray 0# emptyPD
replicatePA# :: PA a => Int# -> a -> PArray a
{-# INLINE_PA replicatePA# #-}
replicatePA# n# x
= PArray n# (replicatePD n# x)
replicatelPA# :: PA a => U.Segd -> PArray a -> PArray a
{-# INLINE_PA replicatelPA# #-}
replicatelPA# segd (PArray _ xs)
= PArray (elementsSegd# segd) (replicatelPD segd xs)
repeatPA# :: PA a => Int# -> PArray a -> PArray a
{-# INLINE_PA repeatPA# #-}
repeatPA# m# (PArray n# xs)
= PArray (m# *# n#) (repeatPD m# n# xs)
indexPA# :: PA a => PArray a -> Int# -> a
{-# INLINE_PA indexPA# #-}
indexPA# (PArray _ xs) i#
= indexPD xs i#
extractPA# :: PA a => PArray a -> Int# -> Int# -> PArray a
{-# INLINE_PA extractPA# #-}
extractPA# (PArray _ xs) i# n#
= PArray n# (extractPD xs i# n#)
bpermutePA# :: PA a => PArray a -> Int# -> U.Array Int -> PArray a
{-# INLINE bpermutePA# #-}
bpermutePA# (PArray _ xs) n# is
= PArray n# (bpermutePD xs n# is)
appPA# :: PA a => PArray a -> PArray a -> PArray a
{-# INLINE_PA appPA# #-}
appPA# (PArray m# xs) (PArray n# ys)
= PArray (m# +# n#) (appPD xs ys)
applPA# :: PA a => U.Segd -> U.Segd -> PArray a -> U.Segd -> PArray a -> PArray a
{-# INLINE_PA applPA# #-}
applPA# segd is (PArray m# xs) js (PArray n# ys)
= PArray (m# +# n#) (applPD segd is xs js ys)
packByTagPA# :: PA a => PArray a -> Int# -> U.Array Tag -> Int# -> PArray a
{-# INLINE_PA packByTagPA# #-}
packByTagPA# (PArray _ xs) n# tags t#
= PArray n# (packByTagPD xs n# tags t#)
combine2PA# :: PA a => Int# -> U.Sel2 -> PArray a -> PArray a -> PArray a
{-# INLINE_PA combine2PA# #-}
combine2PA# n# sel (PArray _ as) (PArray _ bs)
= PArray n# (combine2PD n# sel as bs)
updatePA# :: PA a => PArray a -> U.Array Int -> PArray a -> PArray a
{-# INLINE_PA updatePA# #-}
updatePA# (PArray n# xs) is (PArray _ ys)
= PArray n# (updatePD xs is ys)
fromListPA# :: PA a => Int# -> [a] -> PArray a
{-# INLINE_PA fromListPA# #-}
fromListPA# n# xs
= PArray n# (fromListPD n# xs)
fromListPA :: PA a => [a] -> PArray a
{-# INLINE fromListPA #-}
fromListPA xs
= case length xs of
I# n# -> fromListPA# n# xs
nfPA :: PA a => PArray a -> ()
{-# INLINE nfPA #-}
nfPA (PArray _ xs)
= nfPD xs
| mainland/dph | dph-lifted-copy/Data/Array/Parallel/PArray/Base.hs | bsd-3-clause | 4,423 | 0 | 11 | 896 | 1,221 | 643 | 578 | -1 | -1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE OverloadedStrings #-}
module Generate.JavaScript.Builder (stmtsToText) where
import qualified Data.List as List
import Data.Monoid ((<>))
import qualified Data.Text.Lazy as LazyText
import Data.Text.Lazy.Builder
import Data.Text.Lazy.Builder.Int (decimal)
import Data.Text.Lazy.Builder.RealFloat (realFloat)
import Language.ECMAScript3.Syntax
import Prelude hiding (lines)
-- CONVERT TO LAZY TEXT
stmtsToText :: [Statement a] -> LazyText.Text
stmtsToText stmts =
toLazyText (fromStmtBlock "" stmts)
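-- A hypothetical example (not exported): rendering a single expression
-- statement, with the annotation type parameter instantiated to ().
exampleStmtText :: LazyText.Text
exampleStmtText = stmtsToText [ExprStmt () (IntLit () 42)]  -- yields "42;\n"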
-- HELPERS
deeper :: Builder -> Builder
deeper indent =
"\t" <> indent
commaSep :: [Builder] -> Builder
commaSep builders =
mconcat (List.intersperse ", " builders)
commaNewlineSep :: Builder -> [Builder] -> Builder
commaNewlineSep indent builders =
mconcat (List.intersperse (",\n" <> deeper indent) builders)
-- STATEMENTS
fromStmtBlock :: Builder -> [Statement a] -> Builder
fromStmtBlock indent stmts =
mconcat (map (fromStmt indent) stmts)
fromStmt :: Builder -> Statement a -> Builder
fromStmt indent statement =
case statement of
BlockStmt _ stmts ->
fromStmtBlock indent stmts
EmptyStmt _ ->
mempty
ExprStmt _ expr ->
indent <> snd (fromExpr indent Whatever expr) <> ";\n"
IfStmt _ condition thenStmt elseStmt ->
mconcat
[ indent, "if (", snd (fromExpr indent Whatever condition), ") {\n"
, fromStmt (deeper indent) thenStmt
, indent, "} else {\n"
, fromStmt (deeper indent) elseStmt
, indent, "}\n"
]
IfSingleStmt _ condition thenStmt ->
mconcat
[ indent, "if (", snd (fromExpr indent Whatever condition), ") {\n"
, fromStmt (deeper indent) thenStmt
, indent, "}\n"
]
SwitchStmt _ expr clauses ->
mconcat
[ indent, "switch (", snd (fromExpr indent Whatever expr), ") {\n"
, mconcat (map (fromClause (deeper indent)) clauses)
, indent, "}\n"
]
WhileStmt _ expr stmt ->
mconcat
[ indent, "while (", snd (fromExpr indent Whatever expr), ") {\n"
, fromStmt (deeper indent) stmt
, indent, "}\n"
]
DoWhileStmt _ stmt expr ->
mconcat
[ indent, "do {\n"
, fromStmt (deeper indent) stmt
, indent, "} while(", snd (fromExpr indent Whatever expr), ");\n"
]
BreakStmt _ Nothing ->
indent <> "break;\n"
BreakStmt _ (Just label) ->
indent <> "break " <> fromId label <> ";\n"
ContinueStmt _ Nothing ->
indent <> "continue;\n"
ContinueStmt _ (Just label) ->
indent <> "continue " <> fromId label <> ";\n"
LabelledStmt _ label stmt ->
mconcat
[ indent, fromId label, ":\n"
, fromStmt indent stmt
]
ForInStmt _ _ _ _ ->
error "TODO"
ForStmt _ _ _ _ _ ->
error "TODO"
TryStmt _ _ _ _ ->
error "TODO"
ThrowStmt _ expr ->
indent <> "throw " <> snd (fromExpr indent Whatever expr) <> ";\n"
ReturnStmt _ Nothing ->
indent <> "return;\n"
ReturnStmt _ (Just expr) ->
indent <> "return " <> snd (fromExpr indent Whatever expr) <> ";\n"
WithStmt _ _ _ ->
error "TODO"
VarDeclStmt _ [] ->
mempty
VarDeclStmt _ decls ->
indent <> "var " <> commaNewlineSep indent (map (fromVarDecl indent) decls) <> ";\n"
FunctionStmt _ name args stmts ->
indent <> "function " <> fromId name <> "(" <> commaSep (map fromId args) <> ") {\n"
<>
fromStmtBlock (deeper indent) stmts
<>
indent <> "}\n"
-- SWITCH CLAUSES
fromClause :: Builder -> CaseClause a -> Builder
fromClause indent clause =
case clause of
CaseClause _ expr stmts ->
indent <> "case " <> snd (fromExpr indent Whatever expr) <> ":\n"
<> fromStmtBlock (deeper indent) stmts
CaseDefault _ stmts ->
indent <> "default:\n"
<> fromStmtBlock (deeper indent) stmts
-- ID
fromId :: Id a -> Builder
fromId (Id _ name) =
fromString name
-- VAR DECLS
fromVarDecl :: Builder -> VarDecl a -> Builder
fromVarDecl indent (VarDecl _ (Id _ name) maybeExpr) =
case maybeExpr of
Nothing ->
fromString name
Just expr ->
fromString name <> " = " <> snd (fromExpr indent Whatever expr)
-- EXPRESSIONS
data Lines = One | Many deriving (Eq)
merge :: Lines -> Lines -> Lines
merge a b =
if a == Many || b == Many then Many else One
linesMap :: (a -> (Lines, b)) -> [a] -> (Bool, [b])
linesMap func xs =
let
pairs =
map func xs
in
( any ((==) Many . fst) pairs
, map snd pairs
)
data Grouping = Atomic | Whatever
parensFor :: Grouping -> Builder -> Builder
parensFor grouping builder =
case grouping of
Atomic ->
"(" <> builder <> ")"
Whatever ->
builder
fromExpr :: Builder -> Grouping -> Expression a -> (Lines, Builder)
fromExpr indent grouping expression =
case expression of
StringLit _ string ->
(One, quoted string)
RegexpLit _ _ _ _ ->
error "TODO"
NumLit _ n ->
(One, realFloat n)
IntLit _ n ->
(One, decimal n)
BoolLit _ True ->
(One, "true")
BoolLit _ False ->
(One, "false")
NullLit _ ->
(One, "null")
ArrayLit _ exprs ->
(,) Many $
let
(anyMany, builders) =
linesMap (fromExpr indent Whatever) exprs
in
if anyMany then
"[\n"
<> deeper indent
<> commaNewlineSep indent builders
<> "\n" <> indent <> "]"
else
"[" <> commaSep builders <> "]"
ObjectLit _ fields ->
(,) Many $
let
deeperIndent =
deeper indent
(anyMany, builders) =
linesMap (fromField deeperIndent) fields
in
if anyMany then
"{\n"
<> deeperIndent
<> commaNewlineSep indent builders
<> "\n" <> indent <> "}"
else
"{" <> commaSep builders <> "}"
ThisRef _ ->
(One, "this")
VarRef _ name ->
(One, fromId name)
DotRef _ expr (Id _ name) ->
makeDot indent expr name
BracketRef _ expr bracketedExpr ->
makeBracketed indent expr bracketedExpr
NewExpr _ _ _ ->
error "TODO"
PrefixExpr _ op expr ->
let
(lines, builder) =
fromExpr indent Atomic expr
in
( lines
, parensFor grouping (fromPrefix op <> builder)
)
UnaryAssignExpr _ _ _ ->
error "TODO"
InfixExpr _ op leftExpr rightExpr ->
let
(leftLines, left) =
fromExpr indent Atomic leftExpr
(rightLines, right) =
fromExpr indent Atomic rightExpr
in
( merge leftLines rightLines
, parensFor grouping (left <> fromInfix op <> right)
)
CondExpr _ condExpr thenExpr elseExpr ->
let
condB = snd (fromExpr indent Atomic condExpr)
thenB = snd (fromExpr indent Atomic thenExpr)
elseB = snd (fromExpr indent Atomic elseExpr)
in
( Many
, parensFor grouping (condB <> " ? " <> thenB <> " : " <> elseB)
)
AssignExpr _ op lValue expr ->
let
(leftLines, left) =
fromLValue indent lValue
(rightLines, right) =
fromExpr indent Whatever expr
in
( merge leftLines rightLines
, parensFor grouping (left <> fromAssign op <> right)
)
ListExpr _ _ ->
error "TODO"
CallExpr _ function args ->
(,) Many $
let
deeperIndent =
deeper indent
funcB =
snd (fromExpr indent Atomic function)
(anyMany, argsB) =
linesMap (fromExpr deeperIndent Whatever) args
in
if anyMany then
funcB <> "(\n" <> deeperIndent <> commaNewlineSep indent argsB <> ")"
else
funcB <> "(" <> commaSep argsB <> ")"
FuncExpr _ maybeName args stmts ->
(,) Many $
"function " <> maybe mempty fromId maybeName <> "(" <> commaSep (map fromId args) <> ") {\n"
<>
fromStmtBlock (deeper indent) stmts
<>
indent <> "}"
-- FIELDS
fromField :: Builder -> (Prop a, Expression a) -> (Lines, Builder)
fromField indent (prop, expr) =
let
(lines, builder) =
fromExpr indent Whatever expr
in
( lines
, fromProp prop <> ": " <> builder
)
fromProp :: Prop a -> Builder
fromProp prop =
case prop of
PropId _ name ->
fromId name
PropString _ string ->
quoted string
PropNum _ n ->
decimal n
-- STRINGS
quoted :: String -> Builder
quoted string =
fromString ('\'' : foldr escapeCons "'" string)
-- https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Grammar_and_types#String_literals
escapeCons :: Char -> String -> String
escapeCons char rest =
case char of
'\b' -> '\\' : 'b' : rest
'\f' -> '\\' : 'f' : rest
'\n' -> '\\' : 'n' : rest
'\r' -> '\\' : 'r' : rest
'\t' -> '\\' : 't' : rest
'\v' -> '\\' : 'v' : rest
'\"' -> '\\' : '"' : rest
'\'' -> '\\' : '\'' : rest
'\\' -> '\\' : '\\' : rest
_ -> char : rest
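-- For example, quoted "it's" builds the JavaScript literal 'it\'s', and
-- quoted "a\nb" builds 'a\nb' with the newline escaped.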
-- VALUES
fromLValue :: Builder -> LValue a -> (Lines, Builder)
fromLValue indent lValue =
case lValue of
LVar _ name ->
(One, fromString name)
LDot _ expr field ->
makeDot indent expr field
LBracket _ expr bracketedExpr ->
makeBracketed indent expr bracketedExpr
makeDot :: Builder -> Expression a -> String -> (Lines, Builder)
makeDot indent expr field =
let
(lines, builder) =
fromExpr indent Atomic expr
in
(lines, builder <> "." <> fromString field)
makeBracketed :: Builder -> Expression a -> Expression a -> (Lines, Builder)
makeBracketed indent expr bracketedExpr =
let
(lines, builder) =
fromExpr indent Atomic expr
(bracketedLines, bracketedBuilder) =
fromExpr indent Whatever bracketedExpr
in
( merge lines bracketedLines
, builder <> "[" <> bracketedBuilder <> "]"
)
-- OPERATORS
fromPrefix :: PrefixOp -> Builder
fromPrefix op =
case op of
PrefixLNot -> "!"
PrefixBNot -> "~"
PrefixPlus -> "+"
PrefixMinus -> "-"
PrefixTypeof -> "typeof "
PrefixVoid -> "void "
PrefixDelete -> "delete "
fromAssign :: AssignOp -> Builder
fromAssign op =
case op of
OpAssign -> " = "
OpAssignAdd -> " += "
OpAssignSub -> " -= "
OpAssignMul -> " *= "
OpAssignDiv -> " /= "
OpAssignMod -> " %= "
OpAssignLShift -> " <<= "
OpAssignSpRShift -> " >>= "
OpAssignZfRShift -> " >>>= "
OpAssignBAnd -> " &= "
OpAssignBXor -> " ^= "
OpAssignBOr -> " |= "
fromInfix :: InfixOp -> Builder
fromInfix op =
case op of
OpLT -> " < "
OpLEq -> " <= "
OpGT -> " > "
OpGEq -> " >= "
OpIn -> " in "
OpInstanceof -> " instanceof "
OpEq -> " == "
OpNEq -> " != "
OpStrictEq -> " === "
    OpStrictNEq -> " !== "
OpLAnd -> " && "
OpLOr -> " || "
OpMul -> " * "
OpDiv -> " / "
OpMod -> " % "
OpSub -> " - "
OpLShift -> " << "
OpSpRShift -> " >> "
OpZfRShift -> " >>> "
OpBAnd -> " & "
OpBXor -> " ^ "
OpBOr -> " | "
OpAdd -> " + "
| mgold/Elm | src/Generate/JavaScript/Builder.hs | bsd-3-clause | 11,559 | 0 | 17 | 3,805 | 3,575 | 1,802 | 1,773 | 346 | 25 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "dist/dist-sandbox-261cd265/build/System/Posix/Semaphore.hs" #-}
{-# LINE 1 "System/Posix/Semaphore.hsc" #-}
{-# LINE 2 "System/Posix/Semaphore.hsc" #-}
{-# LANGUAGE Safe #-}
{-# LINE 6 "System/Posix/Semaphore.hsc" #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Posix.Semaphore
-- Copyright : (c) Daniel Franke 2007
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (requires POSIX)
--
-- POSIX named semaphore support.
--
-----------------------------------------------------------------------------
module System.Posix.Semaphore
(OpenSemFlags(..), Semaphore(),
semOpen, semUnlink, semWait, semTryWait, semThreadWait,
semPost, semGetValue)
where
{-# LINE 27 "System/Posix/Semaphore.hsc" #-}
{-# LINE 28 "System/Posix/Semaphore.hsc" #-}
import Foreign.C
import Foreign.ForeignPtr hiding (newForeignPtr)
import Foreign.Concurrent
import Foreign.Marshal
import Foreign.Ptr
import Foreign.Storable
import System.Posix.Types
import Control.Concurrent
import Data.Bits
data OpenSemFlags = OpenSemFlags { semCreate :: Bool,
-- ^ If true, create the semaphore if it
-- does not yet exist.
semExclusive :: Bool
-- ^ If true, throw an exception if the
-- semaphore already exists.
}
newtype Semaphore = Semaphore (ForeignPtr ())
-- | Open a named semaphore with the given name, flags, mode, and initial
-- value.
semOpen :: String -> OpenSemFlags -> FileMode -> Int -> IO Semaphore
semOpen name flags mode value =
let cflags = (if semCreate flags then 64 else 0) .|.
{-# LINE 54 "System/Posix/Semaphore.hsc" #-}
(if semExclusive flags then 128 else 0)
{-# LINE 55 "System/Posix/Semaphore.hsc" #-}
semOpen' cname =
do sem <- throwErrnoPathIfNull "semOpen" name $
sem_open cname (toEnum cflags) mode (toEnum value)
fptr <- newForeignPtr sem (finalize sem)
return $ Semaphore fptr
finalize sem = throwErrnoPathIfMinus1_ "semOpen" name $
sem_close sem in
withCAString name semOpen'
-- | Delete the semaphore with the given name.
semUnlink :: String -> IO ()
semUnlink name = withCAString name semUnlink'
where semUnlink' cname = throwErrnoPathIfMinus1_ "semUnlink" name $
sem_unlink cname
-- | Lock the semaphore, blocking until it becomes available. Since this
-- is done through a system call, this will block the *entire runtime*,
-- not just the current thread. If this is not the behaviour you want,
-- use semThreadWait instead.
semWait :: Semaphore -> IO ()
semWait (Semaphore fptr) = withForeignPtr fptr semWait'
where semWait' sem = throwErrnoIfMinus1Retry_ "semWait" $
sem_wait sem
-- | Attempt to lock the semaphore without blocking. Immediately return
-- False if it is not available.
semTryWait :: Semaphore -> IO Bool
semTryWait (Semaphore fptr) = withForeignPtr fptr semTrywait'
where semTrywait' sem = do res <- sem_trywait sem
(if res == 0 then return True
else do errno <- getErrno
(if errno == eINTR
then semTrywait' sem
else if errno == eAGAIN
then return False
else throwErrno "semTrywait"))
-- | Poll the semaphore until it is available, then lock it. Unlike
-- semWait, this will block only the current thread rather than the
-- entire process.
semThreadWait :: Semaphore -> IO ()
semThreadWait sem = do res <- semTryWait sem
(if res then return ()
else ( do { yield; semThreadWait sem } ))
-- | Unlock the semaphore.
semPost :: Semaphore -> IO ()
semPost (Semaphore fptr) = withForeignPtr fptr semPost'
where semPost' sem = throwErrnoIfMinus1Retry_ "semPost" $
sem_post sem
-- | Return the semaphore's current value.
semGetValue :: Semaphore -> IO Int
semGetValue (Semaphore fptr) = withForeignPtr fptr semGetValue'
where semGetValue' sem = alloca (semGetValue_ sem)
semGetValue_ :: Ptr () -> Ptr CInt -> IO Int
semGetValue_ sem ptr = do throwErrnoIfMinus1Retry_ "semGetValue" $
sem_getvalue sem ptr
cint <- peek ptr
return $ fromEnum cint
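-- An illustrative usage sketch (hypothetical, not part of this module): open
-- or create a named semaphore with initial value 1 and use it to serialise an
-- IO action across processes.  Exception safety and semUnlink cleanup are
-- omitted for brevity.
withNamedSem :: String -> IO a -> IO a
withNamedSem name act = do
  sem <- semOpen name (OpenSemFlags True False) 0o600 1
  semThreadWait sem
  result <- act
  semPost sem
  return result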
foreign import ccall safe "sem_open"
sem_open :: CString -> CInt -> CMode -> CUInt -> IO (Ptr ())
foreign import ccall safe "sem_close"
sem_close :: Ptr () -> IO CInt
foreign import ccall safe "sem_unlink"
sem_unlink :: CString -> IO CInt
foreign import ccall safe "sem_wait"
sem_wait :: Ptr () -> IO CInt
foreign import ccall safe "sem_trywait"
sem_trywait :: Ptr () -> IO CInt
foreign import ccall safe "sem_post"
sem_post :: Ptr () -> IO CInt
foreign import ccall safe "sem_getvalue"
sem_getvalue :: Ptr () -> Ptr CInt -> IO Int
| phischu/fragnix | tests/packages/scotty/System.Posix.Semaphore.hs | bsd-3-clause | 5,466 | 0 | 16 | 1,633 | 1,009 | 528 | 481 | 81 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Distribution.Types.ForeignLibOption(
ForeignLibOption(..)
) where
import Prelude ()
import Distribution.Compat.Prelude
import Text.PrettyPrint
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Text
data ForeignLibOption =
-- | Merge in all dependent libraries (i.e., use
-- @ghc -shared -static@ rather than just record
-- the dependencies, ala @ghc -shared -dynamic@).
-- This option is compulsory on Windows and unsupported
-- on other platforms.
ForeignLibStandalone
deriving (Generic, Show, Read, Eq, Typeable, Data)
instance Text ForeignLibOption where
disp ForeignLibStandalone = text "standalone"
parse = Parse.choice [
do _ <- Parse.string "standalone" ; return ForeignLibStandalone
]
instance Binary ForeignLibOption
| mydaum/cabal | Cabal/Distribution/Types/ForeignLibOption.hs | bsd-3-clause | 882 | 0 | 12 | 160 | 146 | 84 | 62 | 17 | 0 |
-- A start sequence byte (0xE0) followed by an invalid continuation:
bad = "ð."
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/parser/unicode/utf8_022.hs | bsd-3-clause | 81 | 0 | 4 | 15 | 7 | 4 | 3 | 1 | 1 |
{-
import Gui
import Game
import GoNetwork ( startServer, startClient )
import Board ( Cell(..), opposite )
import System.Environment
import Data.Char ( toLower )
import Data.Lens.Lazy ( (^=), (^.) )
import Control.Monad ( unless )
getBoardSize :: IO Int
getBoardSize = do putStrLn "Choose board size: s - Small (9), m - Medium (13), l - Large (19):"
choice <- getLine
case map toLower choice of
s | s `elem` ["small", "s", "9"] -> return 9
| s `elem` ["medium", "m", "13"] -> return 13
| s `elem` ["large", "l", "19"] -> return 19
| otherwise -> do putStrLn "Wrong! Try again"
getBoardSize
getColor :: IO Player
getColor = do putStrLn "Choose your side: b - Black, w - White"
choice <- getLine
case map toLower choice of
s | s `elem` ["b", "black"] -> return PBlack
| s `elem` ["w", "white"] -> return PWhite
| otherwise -> do putStrLn "Wrong! Try again"
getColor
menuLoop :: GameOptions -> [String] -> IO ()
menuLoop options args = do
putStrLn $ "Options: " ++ showOptions options
putStrLn "1. Single player"
putStrLn "2. Multiplayer"
putStrLn "3. Host LAN game"
putStrLn "4. Join LAN game"
putStrLn "5. Set board size"
putStrLn "6. Set your side"
putStrLn "7. Exit"
choice <- getLine
case choice of
"2" -> do startMultiplayer options
menuLoop options args
"3" -> do startServer options
menuLoop options args
"4" -> do putStrLn "Enter server ip/hostname (default: localhost):"
host <- getLine
startClient $ if host == "" then "localhost" else host
menuLoop options args
"5" -> do newBoardSize <- getBoardSize
let newOptions = boardSize ^= newBoardSize $ options
menuLoop newOptions args
"6" -> do newColor <- getColor
let newOptions = playerColor ^= newColor $ options
menuLoop newOptions args
"7" -> return ()
_ -> menuLoop options args
startMultiplayer :: GameOptions -> IO ()
startMultiplayer opts = do
st <- initGame opts
handleTurns st Black
handleTurns :: GameState -> Cell -> IO ()
handleTurns st color = do
showStats st
putStrLn $ show color ++ " moves: "
move <- readMove st
newSt <- makeMove st move
unless (newSt^.gameOver) $ handleTurns newSt $ opposite color
main :: IO ()
main = do
args <- getArgs
if "--console" `elem` args then menuLoop defaultOptions args
else mainWithGui defaultOptions args
-}
import Control.Wire
import Prelude hiding ((.), id)
import System.Console.ANSI
import Data.Maybe
import Control.Applicative ((<$>))
control whenInhibited whenProduced wire = loop wire clockSession
where
loop w' session' = do
(mx, w, session) <- stepSession w' session' ()
case mx of
Left ex -> whenInhibited ex
Right x -> whenProduced x
loop w session
foreign import ccall unsafe "conio.h getch" c_getch :: IO Char
foreign import ccall unsafe "conio.h kbhit" c_kbhit :: IO Bool
keyPressed = do isKey <- c_kbhit
if isKey then Just <$> c_getch
else return Nothing
pressedKeyMaybe = mkFixM $
\_ _ -> Right <$> keyPressed
main = control return (putStrLn . show) $
when (/= Nothing) . pressedKeyMaybe
| xarts19/GoHaskell | Main.hs | mit | 3,886 | 0 | 12 | 1,459 | 253 | 133 | 120 | 21 | 2 |
-- Haskell implementation of Nim --
-- Amman Vedi - Haskell Coursework 2 - 2012 --
-- basic operation of the gameplay implementation
-- game state -> player moves -> new game state -> show new game state
-- loops recursively until the game state returns a [0,0,0] list
import System.IO
import Data.Char
import Data.List
import Text.Show
import Prelude
initial_board = [3,4,5]
-- main method loads up the game with the default board --
-- also draws the initial game state --
main = do putStrLn " "
display_game initial_board 3
nim initial_board
-- main nim game --
-- runs recursively until the game is empty "[0,0,0]", then returns --
nim :: [Int] -> IO [Int]
nim [] = return []
nim [0,0,0] = return []
nim xs = do
a <- p1 xs
display_game a 1
b <- p2 a
display_game b 2
nim b
-- take the player 1 input and return the updated state --
-- Current Game State -> update game state -> New Game State --
p1 :: [Int] -> IO [Int]
p1 xs = do
putStrLn " "
putStrLn "Player 1 Enter The Row From Which You Would Like To Take"
p1r <- readLn
putStrLn " "
putStrLn "Player 1 Enter The Amount You Would Like To Take"
p1t <- readLn
putStrLn " "
putStrLn "Nim Game Status: "
putStrLn " "
return $ update_game xs p1r p1t
-- take the player 2 input and return the updated state --
-- Current Game State -> update game state -> New Game State --
p2 :: [Int] -> IO [Int]
p2 [0,0,0] = return []
p2 xs = do
putStrLn " "
putStrLn "Player 2 Enter The Row From Which You Would Like To Take"
p2r <- readLn
putStrLn " "
putStrLn "Player 2 Enter The Amount You Would Like To Take"
p2t <- readLn
putStrLn " "
putStrLn "Nim Game Status: "
putStrLn " "
return $ update_game xs p2r p2t
-- display the game state board--
-- Current Game Board -> Which Player -> System Output --
display_game :: [Int] -> Int -> IO ()
display_game [] _ = return()
display_game xs player | sum xs == 0 && player == 1 = putStrLn $ "winner is player 1"
| sum xs == 0 && player == 2 = putStrLn $ "winner is player 2"
display_game xs player = do
putStrLn $ show (length xs) ++ " -> " ++ replicate (last xs) '|'
display_game (init xs) 0
--update the game state --
--Current game state -> user options (row and how many to take) -> new state --
update_game :: [Int] -> Int -> Int -> [Int]
update_game [] _ _ = []
update_game [0,0,0] _ _ = []
update_game (x:xs) row take_amnt | sum(x:xs) == 0 = []
| row == 1 = x - take_amnt:xs
| row == 2 = x : head(xs) - take_amnt : tail(xs)
| row == 3 = x : head(xs) : [last(xs) - take_amnt]
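-- Hypothetical worked example (not used by the game): taking 2 sticks from
-- row 2 of the initial board leaves [3,2,5].
example_update :: Bool
example_update = update_game initial_board 2 2 == [3,2,5]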
| ammanvedi/haskell-nim-game | nim.hs | mit | 2,995 | 0 | 12 | 1,026 | 760 | 368 | 392 | 57 | 1 |
{-# LANGUAGE ImplicitParams #-}
module Main where
import Control.Lens
import Control.Monad
import qualified Data.ByteString.Char8 as BS
import qualified Data.Map as M
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Yaml.Pretty as Y
import System.IO
import qualified Text.PrettyPrint.ANSI.Leijen as Ppr
import qualified Text.Trifecta as P
import qualified Formura.Annotation as A
import Formura.Annotation.Boundary
import Formura.Annotation.Representation
import Formura.CommandLineOption
import Formura.NumericalConfig
import Formura.OrthotopeMachine.Graph
import Formura.OrthotopeMachine.Translate (genOMProgram)
import Formura.OrthotopeMachine.Manifestation (genMMProgram)
import qualified Formura.Parser as P
import Formura.Desugar
import Formura.Compiler
import Formura.Syntax
import Formura.MPICxx.Language (TargetLanguage(..), targetLanguage)
import qualified Formura.MPICxx.Translate as C
import qualified Formura.MPIFortran.Translate as F
main :: IO ()
main = do
opts <- getCommandLineOption
let ?commandLineOption = opts
mapM_ process (opts ^. inputFilenames)
process :: WithCommandLineOption => FilePath -> IO ()
process fn = do
mprog <- P.parseFromFileEx (P.runP $ P.program <* P.eof) fn
case mprog of
P.Failure doc -> Ppr.displayIO stdout $ Ppr.renderPretty 0.8 80 $ doc <> Ppr.linebreak
P.Success prog -> codegen prog
codegen :: WithCommandLineOption => Program -> IO ()
codegen sugarcoated_prog = do
prog <- desugar sugarcoated_prog
omProg <- genOMProgram prog
when (?commandLineOption ^. verbose) $ do
putStrLn "## Debug print: global environment of the simulation"
print (omProg ^. omGlobalEnvironment)
putStrLn ""
putStrLn "## Debug print: simulation state"
print (omProg ^. omStateSignature)
putStrLn ""
putStrLn "## Debug print: init graph"
mapM_ pprNode $ M.toList (omProg ^. omInitGraph)
putStrLn ""
putStrLn "## Debug print: step graph"
mapM_ pprNode $ M.toList (omProg ^. omStepGraph)
putStrLn ""
mmProg <- genMMProgram omProg
when (?commandLineOption ^. verbose) $ do
putStrLn "## Debug print: manifested init graph"
mapM_ pprMMNode $ M.toList (mmProg ^. omInitGraph)
putStrLn ""
putStrLn "## Debug print: manifested step graph"
mapM_ pprMMNode $ M.toList (mmProg ^. omStepGraph)
putStrLn ""
  putStrLn $ "Target language is: " ++ show targetLanguage
case targetLanguage of
MPICxx -> C.genCxxFiles prog mmProg
MPIFortran -> F.genFortranFiles prog mmProg
pprNode :: (OMNodeID, OMNode) -> IO ()
pprNode (i,n) = do
let r = case A.toMaybe (n ^. A.annotation) of
Just Manifest -> "M"
_ -> " "
varName = case A.toMaybe (n ^. A.annotation) of
Just (SourceName n1) -> n1
_ -> ""
putStrLn $ unwords [r , take 8 $ varName ++ repeat ' ', show (i,n)]
pprMMNode :: (OMNodeID, MMNode) -> IO ()
pprMMNode (i,n) = do
let
varName = case A.toMaybe (n ^. A.annotation) of
Just (SourceName n1) -> n1
_ -> ""
Just (Boundary bdy) = A.toMaybe $ n^.A.annotation
putStrLn $ unwords [take 8 $ varName ++ repeat ' ', show (i,n),show bdy]
| nushio3/formura | exe-src/formura.hs | mit | 3,398 | 0 | 15 | 817 | 1,005 | 515 | 490 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
import Yesod
data App = App
mkYesod "App" [parseRoutes|
/ HomeR GET
|]
instance Yesod App
getHomeR = defaultLayout $ do
setTitle "My Page Title"
toWidget [lucius| h1 { color: green; } |]
addScriptRemote "https://ajax.googleapis.com/ajax/libs/jquery/1.6.2/jquery.min.js"
toWidget
[julius|
$(function() {
$("h1").click(function(){
alert("You clicked on the heading!");
});
});
|]
toWidgetHead
[hamlet|
<meta name=keywords content="some sample keywords">
|]
toWidget
[hamlet|
<h1>Here's one way of including content
|]
[whamlet|<h2>Here's another |]
toWidgetBody
[julius|
alert("This is included in the body itself");
|]
main = warp 3000 App
| michalc/haskell-experiments | yes.hs | mit | 1,000 | 0 | 8 | 333 | 122 | 69 | 53 | 22 | 1 |
module Rebase.GHC.IO.Encoding
(
module GHC.IO.Encoding
)
where
import GHC.IO.Encoding
| nikita-volkov/rebase | library/Rebase/GHC/IO/Encoding.hs | mit | 89 | 0 | 5 | 12 | 23 | 16 | 7 | 4 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Diagrams.Plots.Line
( line
, LineOpts
, lineshape
) where
import Diagrams.Prelude
import Data.Default
import Control.Lens (makeLenses, (^.))
import Data.Maybe
import Diagrams.Plots.Types
import Diagrams.Plots.Utils (hasNaN)
data LineOpts = LineOpts
{ _lineshape :: Char
}
makeLenses ''LineOpts
instance Default LineOpts where
def = LineOpts
{ _lineshape = 'o'
}
line :: (PlotData m1 a1, PlotData m2 a2) => m1 a1 -> m2 a2 -> LineOpts -> PlotFn
line xs ys opt mapX mapY | hasNaN xy = error "Line: Found NaN"
| otherwise = [l]
where
l = lwO 1 . fromVertices . map p2 . mapMaybe (runMap pMap) $ xy
xy = zip (getValues xs) $ getValues ys
pMap = compose mapX mapY
| kaizhang/haskell-plot | src/Diagrams/Plots/Line.hs | mit | 792 | 0 | 11 | 211 | 261 | 139 | 122 | 23 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds, KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE PolyKinds #-}
--{-# LANGUAGE ScopedTypeVariables #-}
--{-# LANGUAGE FunctionalDependencies #-}
--{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE CPP #-}
module DeBruijn where
--import Prelude hiding (last, lookup, (-))
import Data.HList
import Data.HList.HArray
import Data.Proxy
import Nats
--import Nats
--import qualified GHC.TypeLits as TL
-- unifies two lists, one of which is a prefix of the other
-- App :: DB l1 (t1 -> t2) -> DB l2 t1 -> DB (Unify l1 l2) t2
-- might actually work now with closed type families
type family Unify (l1 :: [*]) (l2 :: [*]) :: [*] where
Unify '[] '[] = '[]
Unify '[] (t ': l) = t ': l
Unify (t ': l) '[] = t ': l
Unify (t ': l1) (t ': l2) = t ': (Unify l1 l2)
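-- Worked examples (informal): Unify '[Int] '[Int, Bool] reduces to
-- '[Int, Bool] via the last two equations, while Unify '[Int] '[Bool]
-- matches no equation and stays stuck.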
-- this might work, but the current approach is nicer
-- App :: (Prefix l1 l, Prefix l2 l) => DB l1 (t1 -> t2) -> DB l2 t1 -> DB l t2
class Prefix (p :: [*]) (l :: [*]) where
prefix :: HList l -> HList p
instance Prefix '[] l where
prefix _ = HNil
instance (Prefix p l) => Prefix (t ': p) (t ': l) where
prefix (HCons t l) = HCons t (prefix l)
-- Typed lambda calculus where variables are encoded by De Bruijn indices.
-- The "l" parameter is the environement type, a simple list.
-- The "l" parameter is the environment type, a simple list.
-- Ed Kmett's Bound package might provide some interesting ideas.
data DB (l :: [*]) (t :: *) where
Val :: t -> DB l t
Var :: Var l t -> DB l t
VarN :: (HLookupByHNat n l) => Proxy n -> DB l (HLookupByHNatR n l)
App :: DB l (t1 -> t2) -> DB l t1 -> DB l t2
Lam :: DB (b ': l) t -> DB l (b -> t)
-- Taken from Stephanie Weirich's implementation:
-- http://www.cs.ox.ac.uk/projects/gip/school/tc.hs
data Var (l :: [*]) (t :: *) where
ZVar :: Var (t ': l) t
SVar :: Var l t -> Var (s ': l) t
lookupVar :: HList l -> Var l t -> t
lookupVar (HCons t _) ZVar = t
lookupVar (HCons _ l) (SVar v) = lookupVar l v
--lookup HNil ZVar = error "Unreachable pattern."
--lookup HNil (SVar _) = error "Unreachable pattern."
-- Either GHC can't tell this is impossible,
-- or it must be possible because of bottom.
lookupVar HNil _ = error "Unreachable pattern."
eval' :: HList l -> DB l t -> t
eval' _ (Val t) = t
eval' l (Var v) = lookupVar l v
eval' l (VarN v) = hLookupByHNat v l
eval' l (App f x) = (eval' l f) (eval' l x)
eval' l (Lam b) = \x -> eval' (HCons x l) b
eval = eval' HNil
-- some utilities for actually writing terms
-- some de Bruijn indices
var0 = ZVar
var1 = SVar var0
var2 = SVar var1
var3 = SVar var2
v0 = Var var0
v1 = Var var1
v2 = Var var2
v3 = Var var3
#define VAR(n) (VarN (Proxy::Proxy (ToHNat n)))
--lift f = App (Val f)
app2 f x y = App (App f x) y
--lift2 f = app2 (Val f)
plus = Val (+)
times = Val (*)
times2 = Lam $ app2 plus v0 VAR(0)
six = eval $ App times2 (Val 3)
mk_pair = Lam $ Lam $ Lam $ app2 v0 v2 v1
first = Lam $ Lam $ v1
second = Lam $ Lam $ v0
get_fst = Lam $ App v0 first
pair12 = app2 mk_pair (Val 1) (Val 2)
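-- A small extra example (hypothetical, not in the original file): applying
-- the Church-encoded pair above to 'plus' sums its components, so this
-- evaluates to 3.
pairSum = eval $ App pair12 plus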
| vladfi1/hs-misc | DeBruijn.hs | mit | 3,242 | 0 | 9 | 712 | 1,047 | 565 | 482 | 64 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGAnimatedAngle
(getBaseVal, getAnimVal, SVGAnimatedAngle(..),
gTypeSVGAnimatedAngle)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGAnimatedAngle.baseVal Mozilla SVGAnimatedAngle.baseVal documentation>
getBaseVal :: (MonadDOM m) => SVGAnimatedAngle -> m SVGAngle
getBaseVal self
= liftDOM ((self ^. js "baseVal") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGAnimatedAngle.animVal Mozilla SVGAnimatedAngle.animVal documentation>
getAnimVal :: (MonadDOM m) => SVGAnimatedAngle -> m SVGAngle
getAnimVal self
= liftDOM ((self ^. js "animVal") >>= fromJSValUnchecked)
| ghcjs/jsaddle-dom | src/JSDOM/Generated/SVGAnimatedAngle.hs | mit | 1,586 | 0 | 10 | 183 | 398 | 252 | 146 | 25 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module Hpack.Syntax.DefaultsSpec (spec) where
import Helper
import Data.Aeson.Config.FromValueSpec hiding (spec)
import Data.Aeson.Config.FromValue
import Hpack.Syntax.Defaults
defaultsGithub :: String -> String -> String -> [FilePath] -> Defaults
defaultsGithub owner repo ref path = DefaultsGithub $ Github owner repo ref path
spec :: Spec
spec = do
describe "isValidOwner" $ do
it "rejects the empty string" $ do
isValidOwner "" `shouldBe` False
it "accepts valid owner names" $ do
isValidOwner "Foo-Bar-23" `shouldBe` True
it "rejects dots" $ do
isValidOwner "foo.bar" `shouldBe` False
it "rejects multiple consecutive hyphens" $ do
isValidOwner "foo--bar" `shouldBe` False
it "rejects hyphens at the beginning" $ do
isValidOwner "-foo" `shouldBe` False
it "rejects hyphens at the end" $ do
isValidOwner "foo-" `shouldBe` False
describe "isValidRepo" $ do
it "rejects the empty string" $ do
isValidRepo "" `shouldBe` False
it "rejects ." $ do
isValidRepo "." `shouldBe` False
it "rejects .." $ do
isValidRepo ".." `shouldBe` False
it "accepts underscores" $ do
isValidRepo "foo_bar" `shouldBe` True
it "accepts dots" $ do
isValidRepo "foo.bar" `shouldBe` True
it "accepts hyphens" $ do
isValidRepo "foo-bar" `shouldBe` True
describe "fromValue" $ do
context "when parsing Defaults" $ do
let
left :: String -> Result Defaults
left = Left
context "with Object" $ do
it "fails when neither github nor local is present" $ do
[yaml|
defaults:
foo: one
bar: two
library: {}
|] `shouldDecodeTo` left "Error while parsing $ - neither key \"github\" nor key \"local\" present"
it "accepts Defaults from GitHub" $ do
[yaml|
github: sol/hpack
ref: 0.1.0
path: defaults.yaml
|] `shouldDecodeTo_` defaultsGithub "sol" "hpack" "0.1.0" ["defaults.yaml"]
it "rejects invalid owner names" $ do
[yaml|
github: ../hpack
ref: 0.1.0
path: defaults.yaml
|] `shouldDecodeTo` left "Error while parsing $.github - invalid owner name \"..\""
it "rejects invalid repository names" $ do
[yaml|
github: sol/..
ref: 0.1.0
path: defaults.yaml
|] `shouldDecodeTo` left "Error while parsing $.github - invalid repository name \"..\""
it "rejects invalid Git references" $ do
[yaml|
github: sol/hpack
ref: ../foo/bar
path: defaults.yaml
|] `shouldDecodeTo` left "Error while parsing $.ref - invalid Git reference \"../foo/bar\""
it "rejects \\ in path" $ do
[yaml|
github: sol/hpack
ref: 0.1.0
path: hpack\defaults.yaml
|] `shouldDecodeTo` left "Error while parsing $.path - rejecting '\\' in \"hpack\\\\defaults.yaml\", please use '/' to separate path components"
it "rejects : in path" $ do
[yaml|
github: sol/hpack
ref: 0.1.0
path: foo:bar.yaml
|] `shouldDecodeTo` left "Error while parsing $.path - rejecting ':' in \"foo:bar.yaml\""
it "rejects absolute paths" $ do
[yaml|
github: sol/hpack
ref: 0.1.0
path: /defaults.yaml
|] `shouldDecodeTo` left "Error while parsing $.path - rejecting absolute path \"/defaults.yaml\""
it "rejects .. in path" $ do
[yaml|
github: sol/hpack
ref: 0.1.0
path: ../../defaults.yaml
|] `shouldDecodeTo` left "Error while parsing $.path - rejecting \"..\" in \"../../defaults.yaml\""
context "with String" $ do
it "accepts Defaults from GitHub" $ do
[yaml|
sol/[email protected]
|] `shouldDecodeTo_` defaultsGithub "sol" "hpack" "0.1.0" [".hpack", "defaults.yaml"]
it "rejects invalid owner names" $ do
[yaml|
../[email protected]
|] `shouldDecodeTo` left "Error while parsing $ - invalid owner name \"..\""
it "rejects invalid repository names" $ do
[yaml|
sol/[email protected]
|] `shouldDecodeTo` left "Error while parsing $ - invalid repository name \"..\""
it "rejects invalid Git references" $ do
[yaml|
sol/pack@../foo/bar
|] `shouldDecodeTo` left "Error while parsing $ - invalid Git reference \"../foo/bar\""
it "rejects missing Git reference" $ do
[yaml|
sol/hpack
|] `shouldDecodeTo` left "Error while parsing $ - missing Git reference for \"sol/hpack\", the expected format is owner/repo@ref"
context "with neither Object nor String" $ do
it "fails" $ do
[yaml|
10
|] `shouldDecodeTo` left "Error while parsing $ - expected Object or String, but encountered Number"
| sol/hpack | test/Hpack/Syntax/DefaultsSpec.hs | mit | 5,095 | 0 | 21 | 1,592 | 869 | 441 | 428 | 89 | 1 |
data Person =
Person {name :: String
, age :: Int }
deriving (Eq, Show)
jm = Person "julie" 108
ca = Person "chris" 16
| candu/haskellbook | ch11/personRecord.hs | mit | 132 | 0 | 8 | 38 | 53 | 29 | 24 | 6 | 1
{-|
Module : Control.Flower
Description : Modern, readable, directional Haskell
== Use
>>> import Control.Flower
== Rationale
> Mathematics, rightly viewed, possesses not only truth,
> but supreme beauty -- a beauty cold and austere, like that of sculpture,
> without appeal to any part of our weaker nature, without the gorgeous
> trappings of painting or music, yet sublimely pure, and capable of a stern
> perfection such as only the greatest art can show. The true spirit of delight,
> the exaltation, the sense of being more than Man, which is the touchstone of
> the highest excellence, is to be found in mathematics as surely as poetry.
> - Bertrand Russell, "The Study of Mathematics"
Inspired by the wonderful @Flow@ package, Flower provides directional operators
for many common Haskell functions.
With the pipe operator ('|>') proliferating through OCaml, F#, and Elixir,
it's becoming clear which way the wind is blowing. A dataflow model is very
natural to functional programming.
Thinking in Haskell is multidimensional, reading forwards and backwards,
and through levels of abstraction. This is extremely powerful, but does introduce
a learning curve (in grade school, when starting with Haskell, or both).
Here, instead of 'Prelude.$', we use '<|', or reversed with '|>'. Instead of
'Prelude.<$>', we use '<$', and reversed '$>'. Many of the combinators are
built up from meaningful character combinations. One such example is lifting
(the 'liftA*' family), written '<$**' and read as 'f <$ a <* b <* c'.
Please note that 'Control.Flower' exposes combinators that conflict with the
standard 'Prelude'.
=== Teaching
Teaching concepts becomes simpler when a visual aid is provided. Many of the operators
are made up of simpler symbols, much in the same way as the @Lens@ library.
One common challenge when teaching Haskell is showing what an applicative
or a monad "means". By using a progressive, modular picture of each abstraction,
we help build the intuition.
=== Reading
A focus on a single direction of data flow makes code easy to follow.
=== Simplify
All `lift`s (`fmap`, `liftA*` and `liftM*`) are unified as `lift*`.
-}
module Control.Flower (
-- * Basic data flow
module Control.Flower.Apply,
module Control.Flower.Compose,
-- * Functors
module Control.Flower.Functor,
module Control.Flower.Applicative,
module Control.Flower.Monad
) where
import Control.Flower.Apply
import Control.Flower.Compose
import Control.Flower.Functor
import Control.Flower.Applicative
import Control.Flower.Monad
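-- An illustrative sketch of the intended reading (assuming '<|' behaves like
-- 'Prelude.$' and '|>' is its flipped form, with the usual fixities):
--
--   show <| negate <| 5     -- "-5"
--   5 |> negate |> show     -- "-5"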
| expede/flower | src/Control/Flower.hs | mit | 2,552 | 0 | 5 | 412 | 76 | 53 | 23 | 11 | 0 |
{-
**************************************************************
* Filename : AutomatonInterface.hs *
* Author : Markus Forsberg *
* [email protected] *
* Last Modified : 6 July, 2001 *
* Lines : 58 *
**************************************************************
-}
module FST.AutomatonInterface ( compileNFA,
minimize,
complete,
determinize,
compile,
Automaton,
-- states,
-- isFinal,
initial,
-- finals,
-- transitionList,
-- transitions,
showAutomaton,
module FST.RegTypes,
module FST.AutomatonTypes,
numberOfStates,
numberOfTransitions
) where
import FST.Automaton
import FST.AutomatonTypes
import qualified FST.MinimalBrzozowski as M
import FST.Complete
import qualified FST.Deterministic as D
import qualified FST.LBFA as L
import FST.RegTypes
compileNFA :: Ord a => Reg a -> Sigma a -> State -> Automaton a
compileNFA reg sigma s = L.compileToAutomaton reg sigma s
minimize :: Ord a => Automaton a -> Automaton a
minimize automaton = M.minimize automaton
determinize :: Ord a => Automaton a -> Automaton a
determinize automaton = D.determinize automaton
compile :: Ord a => Reg a -> Sigma a -> State -> Automaton a
compile reg sigma s = minimize $ L.compileToAutomaton reg sigma s
initial :: Automaton a -> State
initial automaton = head $ initials automaton
numberOfStates :: Ord a => Automaton a -> Int
numberOfStates auto = length $ states auto
numberOfTransitions :: Ord a => Automaton a -> Int
numberOfTransitions auto = sum [length (transitionList auto s) |
s <- states auto]
| SAdams601/ParRegexSearch | test/fst-0.9.0.1/FST/AutomatonInterface.hs | mit | 2,191 | 0 | 9 | 906 | 398 | 208 | 190 | 34 | 1 |
module First where
import Data.Monoid (Monoid(mempty,mappend))
import Data.Maybe (isNothing)
class BoolLike a where
falsy :: a -> Bool
bempty :: a
instance BoolLike [a] where
bempty = []
falsy = null
newtype First a = MkFirst { getFirst :: a } deriving (Show, Eq)
instance BoolLike a => Monoid (First a) where
mempty = MkFirst bempty
mappend (MkFirst l) (MkFirst r) = MkFirst (if falsy l then r else l)
-- For completeness:
instance BoolLike (Maybe a) where
bempty = Nothing
falsy = isNothing
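-- For example, with the list instance above:
--
--   getFirst (MkFirst "" `mappend` MkFirst "hello")   -- "hello"
--   getFirst (MkFirst "hi" `mappend` MkFirst "hello") -- "hi"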
| olivierverdier/Norvigs-Spelling-Corrector | src/First.hs | mit | 533 | 0 | 9 | 124 | 194 | 109 | 85 | 16 | 0 |
-- A set of functions for determining metrics of a cluster in the format [(x, y, c)]
-- Depends on the statistics-linreg package for its linear regression algorithm; hackage.haskell.org/package/statistics-linreg
-- Real documentation coming soon...
module TimepixData.ClusterProperties
( clusterProperties
) where
import qualified Data.Vector.Unboxed as U
import Statistics.LinearRegression
-- This is only used internally: the c values are not needed, and Double coordinates are used rather
-- than Int to avoid littering the code with fromIntegral calls
type Pixels = [(Double, Double)]
-- Returns a tuple of all of a cluster's properties:
-- (centroid, radius, numberPixels, density, squiggliness)
clusterProperties :: [(Int, Int, Float)] -> ((Double, Double), Double, Int, Double, Double)
clusterProperties cluster =
let pixels = [ (fromIntegral x, fromIntegral y) | (x, y, c) <- cluster ] in
(findCentroid pixels, findRadius pixels, findNumPixels pixels, findDensity pixels, findSquiggliness pixels)
-- A few helper functions
distance :: (Double, Double) -> (Double, Double) -> Double
distance (x1, y1) (x2, y2) = sqrt((x2 - x1) ^ 2 + (y2 - y1) ^ 2)
pointLineDistance :: (Double, Double) -> (Double, Double) -> Double
pointLineDistance (x, y) (m, c) = abs (m * x - y + c) / sqrt (1 + m ^ 2)
mean :: (Fractional a) => [a] -> a
mean xs = sum xs / fromIntegral (length xs)
-- Definitions of various properties of a cluster
findCentroid :: Pixels -> (Double, Double)
findCentroid pixels = (mean (map fst pixels), mean (map snd pixels))
findRadius :: Pixels -> Double
findRadius pixels = maximum [ distance pixel centroid | pixel <- pixels ]
where centroid = findCentroid pixels
findNumPixels :: Pixels -> Int
findNumPixels pixels = length pixels
findDensity :: Pixels -> Double
findDensity pixels
| area == 0 = 1
| otherwise = fromIntegral numPixels / area
where
area = radius^2 * pi
radius = findRadius pixels
numPixels = findNumPixels pixels
findSquiggliness :: Pixels -> Double
findSquiggliness pixels
-- If all x values or all y values are the same, the blob is a straight line, so has 0 squiggliness
| all (== head xs) (tail xs) = 0.0
| all (== head ys) (tail ys) = 0.0
-- Otherwise, calculate a line of best fit...
| otherwise = let
-- x and y values need to be in a vector form for the regression algorithm to work
vxs = U.fromList xs
vys = U.fromList ys
(intercept, gradient) = linearRegressionTLS vxs vys in
-- Find the mean distance between hit pixels and the LoBF
mean [ pointLineDistance pixel (gradient, intercept) | pixel <- pixels ]
where
(xs, ys) = unzip pixels
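-- An illustrative example (hypothetical pixel values): for a 2x2 square of hits,
--   clusterProperties [(0,0,1.0),(1,0,1.0),(0,1,1.0),(1,1,1.0)]
-- yields a centroid of (0.5,0.5), a radius of sqrt 0.5 and a pixel count of 4.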
| InstituteForResearchInSchools/FunctionalTimepixAnalysis | TimepixData/ClusterProperties.hs | mit | 2,675 | 0 | 12 | 524 | 736 | 399 | 337 | 39 | 1 |
{-# LANGUAGE OverloadedStrings, RankNTypes, GADTs, FlexibleContexts #-}
module Models.Events
( module Models.Events.Types
, module Models.Events.Projections
, module Models.Events.Database
)
where
import Models.Events.Database
import Models.Events.Projections
import Models.Events.Types
import Models.Projections
| CarstenKoenig/MyWebSite | src/Models/Events.hs | mit | 323 | 0 | 5 | 38 | 53 | 36 | 17 | 9 | 0 |
module Chapter14ListTest where
--import Test.Hspec
import Test.QuickCheck
import Data.List (sort)
-- for any list you apply sort to
-- this property should hold
listOrdered :: (Ord a) => [a] -> Bool
listOrdered xs =
snd $ foldr go (Nothing, True) xs
where
go _ status@(_, False) = status
go y (Nothing, t) = (Just y, t)
go y (Just x, _) = (Just y, x >= y)
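-- For example, listOrdered [1,2,3] is True, while listOrdered [2,1] is False.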
prop_sortedList :: (Ord a) => [a] -> Bool
prop_sortedList list = listOrdered $ sort list
main :: IO ()
main = verboseCheck (prop_sortedList :: [Int] -> Bool)
| brodyberg/Notes | addition/Chapter14ListTest.hs | mit | 546 | 0 | 9 | 126 | 213 | 119 | 94 | 13 | 3 |
module Rebase.Control.Monad.Trans.State.Strict
(
module Control.Monad.Trans.State.Strict
)
where
import Control.Monad.Trans.State.Strict
| nikita-volkov/rebase | library/Rebase/Control/Monad/Trans/State/Strict.hs | mit | 140 | 0 | 5 | 12 | 29 | 22 | 7 | 4 | 0 |
module Main where
import qualified ImportSort.Sort as S
main :: IO ()
main = getContents >>= putStrLn . S.sortImport
| joshuaclayton/import-sort | app/Main.hs | mit | 119 | 0 | 6 | 21 | 37 | 22 | 15 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module CryptoSpec (main, spec) where
import Test.Hspec
import Test.QuickCheck
import Prelude as P
import Data.Maybe
import Data.ByteString as BS
import Data.ByteString.Arbitrary
import Data.Serialize as DS
import Data.Byteable
import Crypto.Curve25519
import Crypto.Random
import Crypto.Random.AESCtr as AESCtr
import Network.BitSmuggler.Utils
import Network.BitSmuggler.Crypto as Crypto
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "handshake" $ do
it "handshake created by client is understandable by server" $
property $ serverDecryptsHandshake
describe "encryption-decryption" $ do
it "encrypted message matches decrypted" $
property $ encryptedMatchesDecrypted
return ()
serverDecryptsHandshake :: ArbByteString -> Bool
serverDecryptsHandshake abs = isJust readHandshake && (P.snd $ fromJust readHandshake) == message
where
message = fromABS abs
readHandshake = tryReadHandshake serverSkWord handshakeCipher
handshakeCipher = encryptHandshake (clientCrypto, repr) iv message
((clientCrypto, repr), (_, serverSkWord), iv) = cryptoFromSeed "le fixed seed" -- $ fromABS entropySeed
encryptedMatchesDecrypted :: ArbByteString -> Bool
encryptedMatchesDecrypted abs
= decrypt serverCrypto (encrypt clientCrypto iv message) == Just message
&& decrypt clientCrypto (encrypt serverCrypto iv message) == Just message
where
message = fromABS abs
((clientCrypto, _), (serverCrypto, _), iv) = cryptoFromSeed "FIXED" -- $ fromABS entropySeed
-- sets up the encrypt/decrypt functions and values to test one encrypt-decrypt round trip
cryptoFromSeed entropySeed = ((clientCrypto, repr), (serverCrypto, serverSkWord), iv)
where
rng :: AESRNG
rng = cprgCreate $ createTestEntropyPool entropySeed
(ivBytes, next) = cprgGenerate Crypto.ivLen rng
(skBytes, next2) = cprgGenerate Crypto.keySize next
iv = fromRight $ DS.decode ivBytes :: Entropy
serverSkWord = (fromRight $ DS.decode skBytes :: Key)
serverSk = fromBytes $ toBytes serverSkWord
serverPk = derivePublicKey serverSk
serverPkWord = (fromRight $ DS.decode (toBytes serverPk) :: Key)
(clientCrypto, repr) = makeClientEncryption serverPkWord rng
serverCrypto = makeServerEncryption serverSkWord repr
| danoctavian/bit-smuggler | BitSmuggler/test/unit/CryptoSpec.hs | gpl-2.0 | 2,320 | 0 | 12 | 409 | 586 | 320 | 266 | 50 | 1 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : ./ExtModal/Logic_ExtModal.hs
Description : Instance of class Logic for ExtModal
Copyright : DFKI GmbH 2009
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (imports Logic)
Instance of class Logic for ExtModal
-}
module ExtModal.Logic_ExtModal where
import ExtModal.AS_ExtModal
import ExtModal.ExtModalSign
import ExtModal.ATC_ExtModal ()
import ExtModal.Parse_AS
import ExtModal.StatAna
import ExtModal.MorphismExtension
import ExtModal.Sublogic
import CASL.AS_Basic_CASL
import CASL.Logic_CASL
import CASL.MapSentence
import CASL.Morphism
import CASL.Parse_AS_Basic
import CASL.Sign
import CASL.SimplifySen
import CASL.Sublogic
import CASL.SymbolMapAnalysis
import CASL.SymbolParser
import CASL.Taxonomy
import CASL.ToDoc
import Logic.Logic
import Common.DocUtils
import qualified Common.Lib.MapSet as MapSet
import qualified Data.Set as Set
data ExtModal = ExtModal deriving Show
instance Language ExtModal where
description _ = unlines
[ "ExtModal is the 'extended modal logic' extension of CASL. "
, "Syntax for ordinary modalities, multi-modal logic, dynamic "
, "logic, graded modal logic, hybrid logic, CTL* and mu-calculus "
, "is provided. Specific modal logics can be obtained via "
, "restrictions to sublanguages."
]
type ExtModalSign = Sign EM_FORMULA EModalSign
type ExtModalMorph = Morphism EM_FORMULA EModalSign MorphExtension
type ExtModalFORMULA = FORMULA EM_FORMULA
instance SignExtension EModalSign where
isSubSignExtension = isSubEModalSign
instance Syntax ExtModal EM_BASIC_SPEC Symbol SYMB_ITEMS SYMB_MAP_ITEMS where
parse_basic_spec ExtModal = Just $ basicSpec ext_modal_reserved_words
parse_symb_items ExtModal = Just $ symbItems ext_modal_reserved_words
parse_symb_map_items ExtModal =
Just $ symbMapItems ext_modal_reserved_words
-- Simplification of formulas - simplifySen for ExtFORMULA
simEMSen :: Sign EM_FORMULA EModalSign -> EM_FORMULA -> EM_FORMULA
simEMSen sign = mapExtForm (simplifySen frmTypeAna simEMSen sign)
correctTarget :: Morphism f EModalSign m -> Morphism f EModalSign m
correctTarget m = m
{ mtarget = correctSign $ mtarget m
, msource = correctSign $ msource m }
instance Sentences ExtModal ExtModalFORMULA ExtModalSign ExtModalMorph Symbol
where
map_sen ExtModal morph = return . mapSen mapEMform morph
simplify_sen ExtModal = simplifySen frmTypeAna simEMSen . setRevSortRel
print_named ExtModal = printTheoryFormula
print_sign ExtModal sig = let e = extendedInfo sig in pretty sig
{ opMap = diffOpMapSet (opMap sig) $ flexOps e
, predMap = Set.fold (`MapSet.delete` nomPType)
(diffMapSet (predMap sig) $ flexPreds e) $ nominals e
}
sym_of ExtModal = symOf
symKind ExtModal = show . pretty . symbolKind . symbType
symmap_of ExtModal = morphismToSymbMap
sym_name ExtModal = symName
instance StaticAnalysis ExtModal EM_BASIC_SPEC ExtModalFORMULA SYMB_ITEMS
SYMB_MAP_ITEMS ExtModalSign ExtModalMorph Symbol RawSymbol where
basic_analysis ExtModal = Just basicEModalAnalysis
stat_symb_map_items ExtModal = statSymbMapItems
stat_symb_items ExtModal = statSymbItems
symbol_to_raw ExtModal = symbolToRaw
id_to_raw ExtModal = idToRaw
matches ExtModal = CASL.Morphism.matches
empty_signature ExtModal = emptySign emptyEModalSign
signature_union ExtModal sgn = return . addSig addEModalSign sgn
intersection ExtModal sgn = return . interSig interEModalSign sgn
signatureDiff ExtModal sgn = return . diffSig diffEModalSign sgn
final_union ExtModal = finalUnion addEModalSign
morphism_union ExtModal = plainMorphismUnion addEModalSign
is_subsig ExtModal = isSubSig isSubEModalSign
subsig_inclusion ExtModal = sigInclusion emptyMorphExtension
generated_sign ExtModal s = fmap correctTarget
. generatedSign emptyMorphExtension s
cogenerated_sign ExtModal s = fmap correctTarget
. cogeneratedSign emptyMorphExtension s
induced_from_morphism ExtModal =
inducedFromMorphismExt inducedEMsign
(constMorphExt emptyMorphExtension)
induced_from_to_morphism ExtModal =
inducedFromToMorphismExt inducedEMsign
(constMorphExt emptyMorphExtension)
(\ _ _ -> return emptyMorphExtension) isSubEModalSign diffEModalSign
theory_to_taxonomy ExtModal = convTaxo
instance Logic ExtModal ExtModalSL EM_BASIC_SPEC ExtModalFORMULA SYMB_ITEMS
SYMB_MAP_ITEMS ExtModalSign ExtModalMorph Symbol RawSymbol () where
stability ExtModal = Testing
all_sublogics ExtModal = sublogics_all $ foleml : concat sublogicsDim
sublogicDimensions ExtModal = sDims sublogicsDim
parseSublogic ExtModal = parseSL $ Just . parseSublog
empty_proof_tree ExtModal = ()
instance MinSL Sublogic EM_FORMULA where
minSL = minSublogicOfEM
instance ProjForm Sublogic EM_FORMULA where
projForm _ = Just . ExtFORMULA
instance ProjSigItem Sublogic EM_SIG_ITEM EM_FORMULA where
projSigItems _ s = (Just $ Ext_SIG_ITEMS s, [])
instance ProjBasic Sublogic EM_BASIC_ITEM EM_SIG_ITEM EM_FORMULA where
projBasicItems _ b = (Just $ Ext_BASIC_ITEMS b, [])
instance MinSL Sublogic EM_SIG_ITEM where
minSL = comp_list . minSLExtSigItem
instance MinSL Sublogic EM_BASIC_ITEM where
minSL = minSublogicEMBasic
instance MinSL Sublogic EModalSign where
minSL = minSublogicEMSign
instance NameSL Sublogic where
nameSL = sublogName
| spechub/Hets | ExtModal/Logic_ExtModal.hs | gpl-2.0 | 5,840 | 0 | 17 | 1,195 | 1,153 | 593 | 560 | 112 | 1 |
{-# LANGUAGE ForeignFunctionInterface #-}
module OpenGL.Helpers
(
module Linear,
localMatrix,
statevarLocal,
enableLocal,
disableLocal,
setProjModV,
setModelView,
setProjection,
color4,
multMat4,
rotateX,
rotateY,
rotateZ,
scaleXYZ,
module OpenGL.Helpers.GL1D,
module OpenGL.Helpers.GL2D,
module OpenGL.Helpers.GL3D,
) where
import MyPrelude
import Linear
import qualified Graphics.Rendering.OpenGL as GL
import Graphics.Rendering.OpenGL (($=))
import Graphics.Rendering.OpenGL.Raw
import Graphics.Rendering.GLU.Raw
import OpenGL.Helpers.GL1D
import OpenGL.Helpers.GL2D
import OpenGL.Helpers.GL3D
import Control.Monad.Trans
import Foreign.Marshal.Array
setProjModV :: Mat4 -> Mat4 -> IO ()
setProjModV proj modv = do
glMatrixMode gl_PROJECTION
glLoadIdentity
multMat4 proj
glMatrixMode gl_MODELVIEW
glLoadIdentity
multMat4 modv
localMatrix :: MonadIO m => m a -> m a
localMatrix ma = do
liftIO $ glPushMatrix
a <- ma
liftIO $ glPopMatrix
return a
-- todo: only rely on OpenGLRaw. create
-- localMatrix, localPrimitive
-- | set 'var' to the given value while running the action, restoring the previous value afterwards
statevarLocal :: MonadIO m => a -> GL.StateVar a -> m b -> m b
statevarLocal v' var mb = do
v <- liftIO $ GL.get var
    liftIO $ var $= v'
b <- mb
liftIO $ var $= v
return b
-- | set enabled in 'ma', else unchanged
enableLocal :: MonadIO m => GL.StateVar GL.Capability -> m a -> m a
enableLocal var =
statevarLocal GL.Enabled var
-- | set disabled in 'ma', else unchanged
disableLocal :: MonadIO m => GL.StateVar GL.Capability -> m a -> m a
disableLocal var =
statevarLocal GL.Disabled var
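-- For example (a sketch; 'GL.blend' is just one capability this could toggle):
--
--   drawWithBlending action = enableLocal GL.blend action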
-- | sets ModelView matrix
setModelView :: MonadIO m => m ()
setModelView = do
liftIO $ do
GL.matrixMode $= GL.Modelview 0
-- | sets Projection matrix
setProjection :: MonadIO m => m ()
setProjection = do
liftIO $ do
GL.matrixMode $= GL.Projection
-- | set current rgba color
color4 :: MonadIO m => GL.GLfloat -> GL.GLfloat -> GL.GLfloat -> GL.GLfloat -> m ()
color4 r g b a = do
liftIO $ GL.currentColor $= GL.Color4 r g b a
-- | rotating radians around x-axis
rotateX :: MonadIO m => Double -> m ()
rotateX rad = liftIO $
GL.rotate (rad * 57.295780) $ GL.Vector3 1 0 0
-- | rotating radians around y-axis
rotateY :: MonadIO m => Double -> m ()
rotateY rad = liftIO $
GL.rotate (rad * 57.295780) $ GL.Vector3 0 1 0
-- | rotating radians around z-axis
rotateZ :: MonadIO m => Double -> m ()
rotateZ rad = liftIO $
GL.rotate (rad * 57.295780) $ GL.Vector3 0 0 1
-- | scales x,y,z with value
scaleXYZ :: MonadIO m => Double -> m ()
scaleXYZ v =
liftIO $ GL.scale v v v
multMat4 :: Mat4 -> IO ()
multMat4 mat =
let Mat4 x0 x1 x2 x3
y0 y1 y2 y3
z0 z1 z2 z3
w0 w1 w2 w3 = mat
in withArray [c x0, c x1, c x2, c x3,
c y0, c y1, c y2, c y3,
c z0, c z1, c z2, c z3,
c w0, c w1, c w2, c w3] $ \ptr -> glMultMatrixd ptr
where
c = realToFrac
| karamellpelle/grid | designer/source/OpenGL/Helpers.hs | gpl-3.0 | 3,135 | 0 | 12 | 838 | 1,030 | 522 | 508 | 92 | 1 |
join :: Monoid w => Writer w (Writer w a) -> Writer w a
join (Writer ((Writer (a, w')), w)) = Writer (a, w `mappend` w')
| hmemcpy/milewski-ctfp-pdf | src/content/3.4/code/haskell/snippet16.hs | gpl-3.0 | 120 | 0 | 11 | 25 | 82 | 43 | 39 | 2 | 1
module A1Nat where
-- This is our simple Natural numbers data type.
--
-- S is a successor, Z is zero.
-- 1 is S Z (the successor of zero).
-- 2 is S (S Z).
-- And so on.
data Nat = Z -- Zero
| S Nat -- Successor
deriving Show -- This is a special syntax for automatically giving us
                       -- the implementation of a typeclass. Here we derive
-- the Show typeclass, which lets us convert values of
-- our Nat type into Strings to print e.g. in GHCi.
-- Some terminology real quick: "data" is the keyword to make algebraic data
-- types. Data types in Haskell are algebraic because they can be sums
-- (alternations) or products (combinations).
-- data D = A | B
-- data D e f = G H I
-- Both of these are perfectly acceptable. In here D is the type, and above
-- Nat is our type. A, B, and G are data constructors. In our type, Z and
-- S are data constructors. This means that they are *values* not *types*.
-- 3 is a *value* of *type* Int. This is an important distinction. H and I are
-- the types of the arguments to G, like our S takes a Nat. That means you
-- can't write S 2 or S "Hallo!", because 2 and "Hallo!" are not Nats. You can
-- however write S Z, denoting the successor of zero, i.e. the natural number
-- 1.
-- Now let's get hacking! First of we want a notion of equality for our number
-- system.
instance Eq Nat where
-- This means that we are making our Nat type an instance of the Eq type
  -- class, which defines equality (==) and inequality (/=).
--
-- I've given you the patterns you need to match. But you'll have to figure
-- out the code yourself. (==) has the type a -> a -> Bool, where 'a' is an
-- instance of Eq. Here we are going to define how Nat is an instance of Eq.
-- So here it effectively has the type Nat -> Nat -> Bool.
Z == Z = undefined -- Is Z (zero) equal to Z?
(S x) == (S y) = undefined -- Is one number equal to a different number?
-- Hint: Use recursion!
_ == _ = undefined -- This is what we call the base case, or the
-- catch all. '_' means we don't bind the value
-- to a variable at all.
  -- Note that we did not have to implement inequality (/=) at all. Why is
  -- this? Because Haskell's implementation of Eq is quite clever. It is simply
  -- the mutually recursive definition:
  -- a == b = not (a /= b)
  -- a /= b = not (a == b)
-- These are the default definitions that you get. They don't make much sense by
-- themselves, as they will never terminate. But what they mean in practice is
-- that if you implement one, you get the other for free, since it is
-- automatically "the opposite of the other one".
--
-- The syntax is simple enough here. "value == value". (==) here is an
-- operator. Operators are just functions. We could write it prefix:
-- (==) Z Z = ...
-- Any function may be written infix or prefix. It's a matter of taste and
-- convenience.
-- Now let's get going and write some functions!
--
-- We'll introduce syntax as we go. But let's explain type signatures straight
-- away. If we have a function 'f' which takes an Int and returns an Int, we
-- say that f has type Int -> Int. "Has type" is written "::".
-- f :: Int -> Int
-- This means we must feed f an Int to get us an Int.
--
-- Types always start with an uppercase letter. If you use lowercase letters,
-- you are using type variables instead. This can be used for polymorphism:
-- f :: a_type -> another_type -> a_type -> another_type
-- f _ y _ = y
-- Here we need to give f three values. Two of them have to be the same type
-- (a_type), but that type can be any type, like Int or String. The middle one
-- can be any type, including the same as the a_type arguments.
--
-- You'll observe that we bind it to 'y'. y here is just a variable (not
-- a *type variable*, but a variable on the value level). It matches anything
-- of value another_type. Consider this:
-- f :: Nat -> String
-- f Z = "Zero"
-- f (S Z) = "One"
-- f a = "Something else:" ++ show a
-- Here we match Z specifically, then we match (S Z) specifically, then we
-- match anything of value Nat as a. It's just like the basecase above where
-- we matched anything as _, but here we also bind it to a named variable that
-- we can use in the function body.
--
-- We could call it almost what we like, but we prefer very short variable
-- names because of the short scope of their existence.
--
-- We usually write short type variables too, like "f :: a -> b -> c -> b".
isZ :: Nat -> Bool
-- We'll start off with a very simple function -- isZ. Is the argument given
-- in Zero?
isZ Z = undefined -- As you can see, I'm really spoonfeeding you here.
-- Is Z equal to Z, do you think?
isZ _ = undefined -- Another base case, because we only care about Z.
toNat :: Int -> Nat
-- Now let's add some Int interop.
--
-- In this function we are using a new syntax you have not seen yet.
--
-- f | p = x
-- | p2 = y
--     | otherwise = z
--
-- The parts between the '|'s and '='s are guards, i.e. simple boolean
-- expressions. If p, then x. If p2, then y. Otherwise (base case) z.
--
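-- Here is a tiny example of guards in action (not one of the exercises):
--
--   sign :: Int -> String
--   sign n | n > 0     = "positive"
--          | n < 0     = "negative"
--          | otherwise = "zero"
--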
-- This function will take an Int and give you a Nat.
-- toNat 0 = Z
-- toNat 1 = S Z
-- toNat 2 = S (S Z)
-- And so on.
--
-- Numbers below 0 should result in Z.
toNat n | n > 0 = undefined
| otherwise = undefined
fromNat :: Nat -> Int
-- Now let's go the other way around. Here we don't have to worry about n < Z,
-- because that can't happen.
fromNat Z = undefined
fromNat (S n) = undefined
predNat :: Nat -> Nat
-- predNat takes a Nat and gives us its predecessor.
--
-- predNat S (S (S N)) = S (S N)
-- Like that. Remember that our number system doesn't go any lower than Z.
predNat Z = undefined
predNat (S n) = undefined
succNat :: Nat -> Nat
-- And here's the successor function for symmetry. Hint: if you think it's too
-- simple to be true, you probably have the answer to this one.
succNat = undefined
plus :: Nat -> Nat -> Nat
-- And now some classic arithmetic. Let's start out with addition.
-- Hint: Recursion! Of course recursion. Always recursion.
Z `plus` y = undefined
(S x) `plus` y = undefined
times :: Nat -> Nat -> Nat
-- Next up is multiplication.
Z `times` _ = undefined
(S x) `times` y = undefined -- Hint: You probably want to use plus here.
powerOf :: Nat -> Nat -> Nat
-- Power of. The first argument is the base, the second is the exponent.
_ `powerOf` Z = undefined
b `powerOf` (S e) = undefined
minus :: Nat -> Nat -> Nat
-- Subtraction.
Z `minus` _ = undefined
x `minus` Z = undefined
(S x) `minus` (S y) = undefined
lteNat :: Nat -> Nat -> Bool
-- Moving onto ordering. This is the less-than-or-equal-to function for Nats.
-- Is the left-hand side argument (lhs) smaller than or equal to the
-- right-hand side (rhs) argument?
Z `lteNat` y = undefined
x `lteNat` Z = undefined
(S x) `lteNat` (S y) = undefined
ltNat :: Nat -> Nat -> Bool
-- The less-than function. Hint: You probably want to use lteNat and succNat.
ltNat x y = undefined
gteNat :: Nat -> Nat -> Bool
-- The greater-than-or-equal-to function. Hint: Reuse!
gteNat x y = undefined
gtNat :: Nat -> Nat -> Bool
-- The greater-than function.
gtNat x y = undefined
minNat :: Nat -> Nat -> Nat
-- The minimum function. Given two Nats, which is the smallest one?
minNat Z _ = undefined
minNat _ Z = undefined
minNat (S x) (S y) = undefined
maxNat :: Nat -> Nat -> Nat
-- And now the maximum.
maxNat Z y = undefined
maxNat x Z = undefined
maxNat (S x) (S y) = undefined
instance Ord Nat where
-- Of course all those order functions could be given to us a lot more
-- cheaply. We could just make our data type an instance of the Ord type
-- class. This gives us all of those functions, and more, for free! All we
-- need to do is to implement the compare function.
--
-- Compare takes two Nats and returns one of three values: EQ, LT or GT. EQ
-- means that the values are equal, LT that lhs is smaller than rhs, and GT
-- the opposite of LT.
compare Z Z = undefined
compare Z (S y) = undefined
compare (S x) Z = undefined
compare (S x) (S y) = undefined
-- Of course, while we're being honest here, we don't *really* have to do
  -- *any* of this. We can just derive Ord completely for free, like we did
-- with Show. But it was a fun exercise, right? The functions you get from
-- Ord are listed here in the documentation:
-- https://hackage.haskell.org/package/base-4.8.1.0/docs/Prelude.html#t:Ord
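  -- For example, a quick throwaway type gets Show, Eq and Ord all at once with:
  --   data Size = Small | Medium | Large deriving (Show, Eq, Ord)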
fact :: Nat -> Nat
-- Let's do a couple more. First the factorial. The factorial is the product
-- of all the positive integers up to and including the argument.
--
-- fact (S (S (S Z))) = S (S (S (S (S (S Z)))))
-- Or, if we used Ints it would look like this.
-- fact 3 = 6
-- Because 1 * 2 * 3 = 6. Get it? Good. Now implement it!
fact Z = undefined
fact (S n) = undefined
fib :: Nat -> Nat
-- No tutorial is complete without "Hallo, world!". But in functional
-- programming, we tend to implement the fibonacci sequence instead! The nice
-- thing about the fibonacci sequence is that its mathematical definition maps
-- pretty perfectly to Haskell.
--
-- The definition is as follows:
-- fib n = fib (n - 1) + fib (n - 2)
-- With two seeds:
-- fib 0 = 0
-- fib 1 = 1
--
-- Now you give it a go, using our Nat type.
fib Z = undefined
fib (S Z) = undefined
fib (S (S n)) = undefined
| alexander-b/thug-beginners | lessonA/A1Nat.hs | gpl-3.0 | 9,661 | 0 | 9 | 2,392 | 969 | 581 | 388 | 65 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Tasks.Tasks.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified task from the task list.
--
-- /See:/ <https://developers.google.com/google-apps/tasks/firstapp Tasks API Reference> for @tasks.tasks.delete@.
module Network.Google.Resource.Tasks.Tasks.Delete
(
-- * REST Resource
TasksDeleteResource
-- * Creating a Request
, tasksDelete
, TasksDelete
-- * Request Lenses
, tdTaskList
, tdTask
) where
import Network.Google.AppsTasks.Types
import Network.Google.Prelude
-- | A resource alias for @tasks.tasks.delete@ method which the
-- 'TasksDelete' request conforms to.
type TasksDeleteResource =
"tasks" :>
"v1" :>
"lists" :>
Capture "tasklist" Text :>
"tasks" :>
Capture "task" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes the specified task from the task list.
--
-- /See:/ 'tasksDelete' smart constructor.
data TasksDelete = TasksDelete'
{ _tdTaskList :: !Text
, _tdTask :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TasksDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tdTaskList'
--
-- * 'tdTask'
tasksDelete
:: Text -- ^ 'tdTaskList'
-> Text -- ^ 'tdTask'
-> TasksDelete
tasksDelete pTdTaskList_ pTdTask_ =
TasksDelete'
{ _tdTaskList = pTdTaskList_
, _tdTask = pTdTask_
}
-- | Task list identifier.
tdTaskList :: Lens' TasksDelete Text
tdTaskList
= lens _tdTaskList (\ s a -> s{_tdTaskList = a})
-- | Task identifier.
tdTask :: Lens' TasksDelete Text
tdTask = lens _tdTask (\ s a -> s{_tdTask = a})
instance GoogleRequest TasksDelete where
type Rs TasksDelete = ()
type Scopes TasksDelete =
'["https://www.googleapis.com/auth/tasks"]
requestClient TasksDelete'{..}
= go _tdTaskList _tdTask (Just AltJSON)
appsTasksService
where go
= buildClient (Proxy :: Proxy TasksDeleteResource)
mempty
| rueshyna/gogol | gogol-apps-tasks/gen/Network/Google/Resource/Tasks/Tasks/Delete.hs | mpl-2.0 | 2,873 | 0 | 14 | 705 | 385 | 231 | 154 | 60 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DialogFlow.Projects.Locations.SecuritySettings.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Create security settings in the specified location.
--
-- /See:/ <https://cloud.google.com/dialogflow/ Dialogflow API Reference> for @dialogflow.projects.locations.securitySettings.create@.
module Network.Google.Resource.DialogFlow.Projects.Locations.SecuritySettings.Create
(
-- * REST Resource
ProjectsLocationsSecuritySettingsCreateResource
-- * Creating a Request
, projectsLocationsSecuritySettingsCreate
, ProjectsLocationsSecuritySettingsCreate
-- * Request Lenses
, plsscParent
, plsscXgafv
, plsscUploadProtocol
, plsscAccessToken
, plsscUploadType
, plsscPayload
, plsscCallback
) where
import Network.Google.DialogFlow.Types
import Network.Google.Prelude
-- | A resource alias for @dialogflow.projects.locations.securitySettings.create@ method which the
-- 'ProjectsLocationsSecuritySettingsCreate' request conforms to.
type ProjectsLocationsSecuritySettingsCreateResource
=
"v3" :>
Capture "parent" Text :>
"securitySettings" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GoogleCloudDialogflowCxV3SecuritySettings
:>
Post '[JSON]
GoogleCloudDialogflowCxV3SecuritySettings
-- | Create security settings in the specified location.
--
-- /See:/ 'projectsLocationsSecuritySettingsCreate' smart constructor.
data ProjectsLocationsSecuritySettingsCreate =
ProjectsLocationsSecuritySettingsCreate'
{ _plsscParent :: !Text
, _plsscXgafv :: !(Maybe Xgafv)
, _plsscUploadProtocol :: !(Maybe Text)
, _plsscAccessToken :: !(Maybe Text)
, _plsscUploadType :: !(Maybe Text)
, _plsscPayload :: !GoogleCloudDialogflowCxV3SecuritySettings
, _plsscCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsSecuritySettingsCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plsscParent'
--
-- * 'plsscXgafv'
--
-- * 'plsscUploadProtocol'
--
-- * 'plsscAccessToken'
--
-- * 'plsscUploadType'
--
-- * 'plsscPayload'
--
-- * 'plsscCallback'
projectsLocationsSecuritySettingsCreate
:: Text -- ^ 'plsscParent'
-> GoogleCloudDialogflowCxV3SecuritySettings -- ^ 'plsscPayload'
-> ProjectsLocationsSecuritySettingsCreate
projectsLocationsSecuritySettingsCreate pPlsscParent_ pPlsscPayload_ =
ProjectsLocationsSecuritySettingsCreate'
{ _plsscParent = pPlsscParent_
, _plsscXgafv = Nothing
, _plsscUploadProtocol = Nothing
, _plsscAccessToken = Nothing
, _plsscUploadType = Nothing
, _plsscPayload = pPlsscPayload_
, _plsscCallback = Nothing
}
-- | Required. The location to create an SecuritySettings for. Format:
-- \`projects\/\/locations\/\`.
plsscParent :: Lens' ProjectsLocationsSecuritySettingsCreate Text
plsscParent
= lens _plsscParent (\ s a -> s{_plsscParent = a})
-- | V1 error format.
plsscXgafv :: Lens' ProjectsLocationsSecuritySettingsCreate (Maybe Xgafv)
plsscXgafv
= lens _plsscXgafv (\ s a -> s{_plsscXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plsscUploadProtocol :: Lens' ProjectsLocationsSecuritySettingsCreate (Maybe Text)
plsscUploadProtocol
= lens _plsscUploadProtocol
(\ s a -> s{_plsscUploadProtocol = a})
-- | OAuth access token.
plsscAccessToken :: Lens' ProjectsLocationsSecuritySettingsCreate (Maybe Text)
plsscAccessToken
= lens _plsscAccessToken
(\ s a -> s{_plsscAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plsscUploadType :: Lens' ProjectsLocationsSecuritySettingsCreate (Maybe Text)
plsscUploadType
= lens _plsscUploadType
(\ s a -> s{_plsscUploadType = a})
-- | Multipart request metadata.
plsscPayload :: Lens' ProjectsLocationsSecuritySettingsCreate GoogleCloudDialogflowCxV3SecuritySettings
plsscPayload
= lens _plsscPayload (\ s a -> s{_plsscPayload = a})
-- | JSONP
plsscCallback :: Lens' ProjectsLocationsSecuritySettingsCreate (Maybe Text)
plsscCallback
= lens _plsscCallback
(\ s a -> s{_plsscCallback = a})
instance GoogleRequest
ProjectsLocationsSecuritySettingsCreate
where
type Rs ProjectsLocationsSecuritySettingsCreate =
GoogleCloudDialogflowCxV3SecuritySettings
type Scopes ProjectsLocationsSecuritySettingsCreate =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow"]
requestClient
ProjectsLocationsSecuritySettingsCreate'{..}
= go _plsscParent _plsscXgafv _plsscUploadProtocol
_plsscAccessToken
_plsscUploadType
_plsscCallback
(Just AltJSON)
_plsscPayload
dialogFlowService
where go
= buildClient
(Proxy ::
Proxy
ProjectsLocationsSecuritySettingsCreateResource)
mempty
| brendanhay/gogol | gogol-dialogflow/gen/Network/Google/Resource/DialogFlow/Projects/Locations/SecuritySettings/Create.hs | mpl-2.0 | 6,198 | 0 | 17 | 1,383 | 784 | 458 | 326 | 125 | 1 |
module Data.GI.CodeGen.Code
( Code(..)
, ModuleInfo(..)
, ModuleFlag(..)
, BaseCodeGen
, CodeGen
, ExcCodeGen
, CGError(..)
, genCode
, evalCodeGen
, writeModuleTree
, listModuleTree
, codeToText
, transitiveModuleDeps
, minBaseVersion
, BaseVersion(..)
, showBaseVersion
, registerNSDependency
, qualified
, getDeps
, recurseWithAPIs
, handleCGExc
, describeCGError
, notImplementedError
, badIntroError
, missingInfoError
, indent
, bline
, line
, blank
, group
, hsBoot
, submodule
, setLanguagePragmas
, setGHCOptions
, setModuleFlags
, setModuleMinBase
, exportToplevel
, exportModule
, exportDecl
, exportMethod
, exportProperty
, exportSignal
, findAPI
, getAPI
, findAPIByName
, getAPIs
, getC2HMap
, config
, currentModule
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
import Data.Monoid (Monoid(..))
#endif
import Control.Monad.Reader
import Control.Monad.State.Strict
import Control.Monad.Except
import qualified Data.Foldable as F
import Data.Maybe (fromMaybe, catMaybes)
import Data.Monoid ((<>))
import Data.Sequence (Seq, ViewL ((:<)), (><), (|>), (<|))
import qualified Data.Map.Strict as M
import qualified Data.Sequence as S
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import System.Directory (createDirectoryIfMissing)
import System.FilePath (joinPath, takeDirectory)
import Data.GI.CodeGen.API (API, Name(..))
import Data.GI.CodeGen.Config (Config(..))
import {-# SOURCE #-} Data.GI.CodeGen.CtoHaskellMap (cToHaskellMap,
Hyperlink)
import Data.GI.CodeGen.GtkDoc (CRef)
import Data.GI.CodeGen.ModulePath (ModulePath(..), dotModulePath, (/.))
import Data.GI.CodeGen.Type (Type(..))
import Data.GI.CodeGen.Util (tshow, terror, padTo, utf8WriteFile)
import Data.GI.CodeGen.ProjectInfo (authors, license, maintainers)
data Code
= NoCode -- ^ No code
| Line Text -- ^ A single line, indented to current indentation
| Indent Code -- ^ Indented region
| Sequence (Seq Code) -- ^ The basic sequence of code
| Group Code -- ^ A grouped set of lines
deriving (Eq, Show)
instance Monoid Code where
mempty = NoCode
NoCode `mappend` NoCode = NoCode
x `mappend` NoCode = x
NoCode `mappend` x = x
(Sequence a) `mappend` (Sequence b) = Sequence (a >< b)
(Sequence a) `mappend` b = Sequence (a |> b)
a `mappend` (Sequence b) = Sequence (a <| b)
a `mappend` b = Sequence (a <| b <| S.empty)
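-- For example, @Line "a" <> Line "b"@ becomes a two-element 'Sequence', and
-- appending 'NoCode' on either side leaves the other operand untouched.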
type Deps = Set.Set Text
-- | Subsection of the haddock documentation where the export should
-- be located.
type HaddockSection = Text
-- | Symbol to export.
type SymbolName = Text
-- | Possible exports for a given module. Every export type
-- constructor has two parameters: the section of the haddocks where
-- it should appear, and the symbol name to export in the export list
-- of the module.
data Export = Export {
exportType :: ExportType -- ^ Which kind of export.
, exportSymbol :: SymbolName -- ^ Actual symbol to export.
} deriving (Show, Eq, Ord)
-- | Possible types of exports.
data ExportType = ExportTypeDecl -- ^ A type declaration.
| ExportToplevel -- ^ An export in no specific section.
| ExportMethod HaddockSection -- ^ A method for a struct/union, etc.
| ExportProperty HaddockSection -- ^ A property for an object/interface.
| ExportSignal HaddockSection -- ^ A signal for an object/interface.
| ExportModule -- ^ Reexport of a whole module.
deriving (Show, Eq, Ord)
-- | Information on a generated module.
data ModuleInfo = ModuleInfo {
modulePath :: ModulePath -- ^ Full module name: ["Gtk", "Label"].
, moduleCode :: Code -- ^ Generated code for the module.
, bootCode :: Code -- ^ Interface going into the .hs-boot file.
, submodules :: M.Map Text ModuleInfo -- ^ Indexed by the relative
-- module name.
, moduleDeps :: Deps -- ^ Set of dependencies for this module.
, moduleExports :: Seq Export -- ^ Exports for the module.
, qualifiedImports :: Set.Set ModulePath -- ^ Qualified (source) imports
, modulePragmas :: Set.Set Text -- ^ Set of language pragmas for the module.
, moduleGHCOpts :: Set.Set Text -- ^ GHC options for compiling the module.
, moduleFlags :: Set.Set ModuleFlag -- ^ Flags for the module.
, moduleDoc :: Maybe Text -- ^ Documentation for the module.
, moduleMinBase :: BaseVersion -- ^ Minimal version of base the
-- module will work on.
}
-- | Flags for module code generation.
data ModuleFlag = ImplicitPrelude -- ^ Use the standard prelude,
-- instead of the haskell-gi-base short one.
deriving (Show, Eq, Ord)
-- | Minimal version of base supported by a given module.
data BaseVersion = Base47 -- ^ 4.7.0
| Base48 -- ^ 4.8.0
deriving (Show, Eq, Ord)
-- | A `Text` representation of the given base version bound.
showBaseVersion :: BaseVersion -> Text
showBaseVersion Base47 = "4.7"
showBaseVersion Base48 = "4.8"
-- | Generate the empty module.
emptyModule :: ModulePath -> ModuleInfo
emptyModule m = ModuleInfo { modulePath = m
, moduleCode = NoCode
, bootCode = NoCode
, submodules = M.empty
, moduleDeps = Set.empty
, moduleExports = S.empty
, qualifiedImports = Set.empty
, modulePragmas = Set.empty
, moduleGHCOpts = Set.empty
, moduleFlags = Set.empty
, moduleDoc = Nothing
, moduleMinBase = Base47
}
-- | Information for the code generator.
data CodeGenConfig = CodeGenConfig {
hConfig :: Config -- ^ Ambient config.
, loadedAPIs :: M.Map Name API -- ^ APIs available to the generator.
, c2hMap :: M.Map CRef Hyperlink -- ^ Map from C references
-- to Haskell symbols.
}
data CGError = CGErrorNotImplemented Text
| CGErrorBadIntrospectionInfo Text
| CGErrorMissingInfo Text
deriving (Show)
type BaseCodeGen excType a =
ReaderT CodeGenConfig (StateT ModuleInfo (ExceptT excType IO)) a
-- | The code generator monad, for generators that cannot throw
-- errors. The fact that they cannot throw errors is encoded in the
-- forall, which disallows any operation on the error, except
-- discarding it or passing it along without inspecting. This last
-- operation is useful in order to allow embedding `CodeGen`
-- computations inside `ExcCodeGen` computations, while disallowing
-- the opposite embedding without explicit error handling.
type CodeGen a = forall e. BaseCodeGen e a
-- | Code generators that can throw errors.
type ExcCodeGen a = BaseCodeGen CGError a
-- | Run a `CodeGen` with given `Config` and initial `ModuleInfo`,
-- returning either the resulting exception, or the result and final
-- state of the codegen.
runCodeGen :: BaseCodeGen e a -> CodeGenConfig -> ModuleInfo ->
IO (Either e (a, ModuleInfo))
runCodeGen cg cfg state = runExceptT (runStateT (runReaderT cg cfg) state)
-- | This is useful when we plan to run a subgenerator, and `mconcat` the
-- result to the original structure later.
cleanInfo :: ModuleInfo -> ModuleInfo
cleanInfo info = info { moduleCode = NoCode, submodules = M.empty,
bootCode = NoCode, moduleExports = S.empty,
qualifiedImports = Set.empty,
moduleDoc = Nothing, moduleMinBase = Base47 }
-- | Run the given code generator using the state and config of an
-- ambient CodeGen, but without adding the generated code to
-- `moduleCode`, instead returning it explicitly.
recurseCG :: BaseCodeGen e a -> BaseCodeGen e (a, Code)
recurseCG cg = do
cfg <- ask
oldInfo <- get
-- Start the subgenerator with no code and no submodules.
let info = cleanInfo oldInfo
liftIO (runCodeGen cg cfg info) >>= \case
Left e -> throwError e
Right (r, new) -> put (mergeInfoState oldInfo new) >>
return (r, moduleCode new)
-- | Like `recurseCG`, but giving explicitly the set of loaded APIs and C to
-- Haskell map for the subgenerator.
recurseWithAPIs :: M.Map Name API -> CodeGen () -> CodeGen ()
recurseWithAPIs apis cg = do
cfg <- ask
oldInfo <- get
-- Start the subgenerator with no code and no submodules.
let info = cleanInfo oldInfo
cfg' = cfg {loadedAPIs = apis,
c2hMap = cToHaskellMap (M.toList apis)}
liftIO (runCodeGen cg cfg' info) >>= \case
Left e -> throwError e
Right (_, new) -> put (mergeInfo oldInfo new)
-- | Merge everything but the generated code for the two given `ModuleInfo`.
mergeInfoState :: ModuleInfo -> ModuleInfo -> ModuleInfo
mergeInfoState oldState newState =
let newDeps = Set.union (moduleDeps oldState) (moduleDeps newState)
newSubmodules = M.unionWith mergeInfo (submodules oldState) (submodules newState)
newExports = moduleExports oldState <> moduleExports newState
newImports = qualifiedImports oldState <> qualifiedImports newState
newPragmas = Set.union (modulePragmas oldState) (modulePragmas newState)
newGHCOpts = Set.union (moduleGHCOpts oldState) (moduleGHCOpts newState)
newFlags = Set.union (moduleFlags oldState) (moduleFlags newState)
newBoot = bootCode oldState <> bootCode newState
newDoc = moduleDoc oldState <> moduleDoc newState
newMinBase = max (moduleMinBase oldState) (moduleMinBase newState)
in oldState {moduleDeps = newDeps, submodules = newSubmodules,
moduleExports = newExports, qualifiedImports = newImports,
modulePragmas = newPragmas,
moduleGHCOpts = newGHCOpts, moduleFlags = newFlags,
bootCode = newBoot, moduleDoc = newDoc,
moduleMinBase = newMinBase }
-- | Merge the infos, including code too.
mergeInfo :: ModuleInfo -> ModuleInfo -> ModuleInfo
mergeInfo oldInfo newInfo =
let info = mergeInfoState oldInfo newInfo
in info { moduleCode = moduleCode oldInfo <> moduleCode newInfo }
-- | Add the given submodule to the list of submodules of the current
-- module.
addSubmodule :: Text -> ModuleInfo -> ModuleInfo -> ModuleInfo
addSubmodule modName submodule current = current { submodules = M.insertWith mergeInfo modName submodule (submodules current)}
-- | Run the given CodeGen in order to generate a single submodule of the
-- current module. Note that we do not generate the submodule if the
-- code generator generated no code and the module does not have
-- submodules.
submodule' :: Text -> BaseCodeGen e () -> BaseCodeGen e ()
submodule' modName cg = do
cfg <- ask
oldInfo <- get
let info = emptyModule (modulePath oldInfo /. modName)
liftIO (runCodeGen cg cfg info) >>= \case
Left e -> throwError e
Right (_, smInfo) -> if moduleCode smInfo == NoCode &&
M.null (submodules smInfo)
then return ()
else modify' (addSubmodule modName smInfo)
-- | Run the given CodeGen in order to generate a submodule (specified
-- as an ordered list) of the current module.
submodule :: ModulePath -> BaseCodeGen e () -> BaseCodeGen e ()
submodule (ModulePath []) cg = cg
submodule (ModulePath (m:ms)) cg = submodule' m (submodule (ModulePath ms) cg)
-- | Try running the given `action`, and if it fails run `fallback`
-- instead.
handleCGExc :: (CGError -> CodeGen a) -> ExcCodeGen a -> CodeGen a
handleCGExc fallback
action = do
cfg <- ask
oldInfo <- get
let info = cleanInfo oldInfo
liftIO (runCodeGen action cfg info) >>= \case
Left e -> fallback e
Right (r, newInfo) -> do
put (mergeInfo oldInfo newInfo)
return r
-- | Return the currently loaded set of dependencies.
getDeps :: CodeGen Deps
getDeps = moduleDeps <$> get
-- | Return the ambient configuration for the code generator.
config :: CodeGen Config
config = hConfig <$> ask
-- | Return the name of the current module.
currentModule :: CodeGen Text
currentModule = do
s <- get
return (dotWithPrefix (modulePath s))
-- | Return the list of APIs available to the generator.
getAPIs :: CodeGen (M.Map Name API)
getAPIs = loadedAPIs <$> ask
-- | Return the C -> Haskell available to the generator.
getC2HMap :: CodeGen (M.Map CRef Hyperlink)
getC2HMap = c2hMap <$> ask
-- | Due to the `forall` in the definition of `CodeGen`, if we want to
-- run the monad transformer stack until we get an `IO` action, our
-- only option is ignoring the possible error code from
-- `runExceptT`. This is perfectly safe, since there is no way to
-- construct a computation in the `CodeGen` monad that throws an
-- exception, due to the higher rank type.
unwrapCodeGen :: CodeGen a -> CodeGenConfig -> ModuleInfo ->
IO (a, ModuleInfo)
unwrapCodeGen cg cfg info =
runCodeGen cg cfg info >>= \case
Left _ -> error "unwrapCodeGen:: The impossible happened!"
Right (r, newInfo) -> return (r, newInfo)
-- | Like `evalCodeGen`, but discard the resulting output value.
genCode :: Config -> M.Map Name API ->
ModulePath -> CodeGen () -> IO ModuleInfo
genCode cfg apis mPath cg = snd <$> evalCodeGen cfg apis mPath cg
-- | Run a code generator, and return the information for the
-- generated module together with the return value of the generator.
evalCodeGen :: Config -> M.Map Name API ->
ModulePath -> CodeGen a -> IO (a, ModuleInfo)
evalCodeGen cfg apis mPath cg = do
let initialInfo = emptyModule mPath
cfg' = CodeGenConfig {hConfig = cfg, loadedAPIs = apis,
c2hMap = cToHaskellMap (M.toList apis)}
unwrapCodeGen cg cfg' initialInfo
-- | Mark the given dependency as used by the module.
registerNSDependency :: Text -> CodeGen ()
registerNSDependency name = do
deps <- getDeps
unless (Set.member name deps) $ do
let newDeps = Set.insert name deps
modify' $ \s -> s {moduleDeps = newDeps}
-- | Return the transitive set of dependencies, i.e. the union of
-- those of the module and (transitively) its submodules.
transitiveModuleDeps :: ModuleInfo -> Deps
transitiveModuleDeps minfo =
Set.unions (moduleDeps minfo
: map transitiveModuleDeps (M.elems $ submodules minfo))
-- | Given a module name and a symbol in the module (including a
-- proper namespace), return a qualified name for the symbol.
qualified :: ModulePath -> Name -> CodeGen Text
qualified mp (Name ns s) = do
cfg <- config
-- Make sure the module is listed as a dependency.
when (modName cfg /= ns) $
registerNSDependency ns
minfo <- get
if mp == modulePath minfo
then return s
else do
qm <- qualifiedImport mp
return (qm <> "." <> s)
-- | Import the given module name qualified (as a source import if the
-- namespace is the same as the current one), and return the name
-- under which the module was imported.
qualifiedImport :: ModulePath -> CodeGen Text
qualifiedImport mp = do
modify' $ \s -> s {qualifiedImports = Set.insert mp (qualifiedImports s)}
return (qualifiedModuleName mp)
-- | Construct a simplified version of the module name, suitable for a
-- qualified import.
qualifiedModuleName :: ModulePath -> Text
qualifiedModuleName (ModulePath [ns, "Objects", o]) = ns <> "." <> o
qualifiedModuleName (ModulePath [ns, "Interfaces", i]) = ns <> "." <> i
qualifiedModuleName (ModulePath [ns, "Structs", s]) = ns <> "." <> s
qualifiedModuleName (ModulePath [ns, "Unions", u]) = ns <> "." <> u
qualifiedModuleName mp = dotModulePath mp
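-- For example, @qualifiedModuleName (ModulePath ["Gtk", "Objects", "Label"])@
-- gives @"Gtk.Label"@, while a path outside the recognised categories falls
-- back to the plain dotted form.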
-- | Return the minimal base version supported by the module and all
-- its submodules.
minBaseVersion :: ModuleInfo -> BaseVersion
minBaseVersion minfo =
maximum (moduleMinBase minfo
: map minBaseVersion (M.elems $ submodules minfo))
-- | Give a friendly textual description of the error for presenting
-- to the user.
describeCGError :: CGError -> Text
describeCGError (CGErrorNotImplemented e) = "Not implemented: " <> tshow e
describeCGError (CGErrorBadIntrospectionInfo e) = "Bad introspection data: " <> tshow e
describeCGError (CGErrorMissingInfo e) = "Missing info: " <> tshow e
notImplementedError :: Text -> ExcCodeGen a
notImplementedError s = throwError $ CGErrorNotImplemented s
badIntroError :: Text -> ExcCodeGen a
badIntroError s = throwError $ CGErrorBadIntrospectionInfo s
missingInfoError :: Text -> ExcCodeGen a
missingInfoError s = throwError $ CGErrorMissingInfo s
findAPI :: Type -> CodeGen (Maybe API)
findAPI TError = Just <$> findAPIByName (Name "GLib" "Error")
findAPI (TInterface n) = Just <$> findAPIByName n
findAPI _ = return Nothing
-- | Find the API associated with a given type. If the API cannot be
-- found this raises an `error`.
getAPI :: Type -> CodeGen API
getAPI t = findAPI t >>= \case
Just a -> return a
Nothing -> terror ("Could not resolve type \"" <> tshow t <> "\".")
findAPIByName :: Name -> CodeGen API
findAPIByName n@(Name ns _) = do
apis <- getAPIs
case M.lookup n apis of
Just api -> return api
Nothing ->
terror $ "couldn't find API description for " <> ns <> "." <> name n
-- | Add some code to the current generator.
tellCode :: Code -> CodeGen ()
tellCode c = modify' (\s -> s {moduleCode = moduleCode s <> c})
-- | Print out a (newline-terminated) line.
line :: Text -> CodeGen ()
line = tellCode . Line
-- | Print out the given line both to the normal module, and to the
-- HsBoot file.
bline :: Text -> CodeGen ()
bline l = hsBoot (line l) >> line l
-- | A blank line
blank :: CodeGen ()
blank = line ""
-- | Increase the indent level for code generation.
indent :: BaseCodeGen e a -> BaseCodeGen e a
indent cg = do
(x, code) <- recurseCG cg
tellCode (Indent code)
return x
-- | Group a set of related code.
group :: BaseCodeGen e a -> BaseCodeGen e a
group cg = do
(x, code) <- recurseCG cg
tellCode (Group code)
blank
return x
-- | Write the given code into the .hs-boot file for the current module.
hsBoot :: BaseCodeGen e a -> BaseCodeGen e a
hsBoot cg = do
(x, code) <- recurseCG cg
modify' (\s -> s{bootCode = bootCode s <> code})
return x
-- | Add an export to the current module.
export :: Export -> CodeGen ()
export e =
modify' $ \s -> s{moduleExports = moduleExports s |> e}
-- | Reexport a whole module.
exportModule :: SymbolName -> CodeGen ()
exportModule m = export (Export ExportModule m)
-- | Export a toplevel (i.e. belonging to no section) symbol.
exportToplevel :: SymbolName -> CodeGen ()
exportToplevel t = export (Export ExportToplevel t)
-- | Add a type declaration-related export.
exportDecl :: SymbolName -> CodeGen ()
exportDecl d = export (Export ExportTypeDecl d)
-- | Add a method export under the given section.
exportMethod :: HaddockSection -> SymbolName -> CodeGen ()
exportMethod s n = export (Export (ExportMethod s) n)
-- | Add a property-related export under the given section.
exportProperty :: HaddockSection -> SymbolName -> CodeGen ()
exportProperty s n = export (Export (ExportProperty s) n)
-- | Add a signal-related export under the given section.
exportSignal :: HaddockSection -> SymbolName -> CodeGen ()
exportSignal s n = export (Export (ExportSignal s) n)
-- | Set the language pragmas for the current module.
setLanguagePragmas :: [Text] -> CodeGen ()
setLanguagePragmas ps =
modify' $ \s -> s{modulePragmas = Set.fromList ps}
-- | Set the GHC options for compiling this module (in an OPTIONS_GHC pragma).
setGHCOptions :: [Text] -> CodeGen ()
setGHCOptions opts =
modify' $ \s -> s{moduleGHCOpts = Set.fromList opts}
-- | Set the given flags for the module.
setModuleFlags :: [ModuleFlag] -> CodeGen ()
setModuleFlags flags =
modify' $ \s -> s{moduleFlags = Set.fromList flags}
-- | Set the minimum base version supported by the current module.
setModuleMinBase :: BaseVersion -> CodeGen ()
setModuleMinBase v =
modify' $ \s -> s{moduleMinBase = max v (moduleMinBase s)}
-- | Return a text representation of the `Code`.
codeToText :: Code -> Text
codeToText c = T.concat $ str 0 c []
where
str :: Int -> Code -> [Text] -> [Text]
str _ NoCode cont = cont
str n (Line s) cont = paddedLine n s : cont
str n (Indent c) cont = str (n + 1) c cont
str n (Sequence s) cont = deseq n (S.viewl s) cont
str n (Group c) cont = str n c cont
deseq _ S.EmptyL cont = cont
deseq n (c :< cs) cont = str n c (deseq n (S.viewl cs) cont)
-- | Pad a line to the given number of leading spaces, and add a
-- newline at the end.
paddedLine :: Int -> Text -> Text
paddedLine n s = T.replicate (n * 4) " " <> s <> "\n"
-- | Put a (padded) comma at the end of the text.
comma :: Text -> Text
comma s = padTo 40 s <> ","
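-- For instance (illustrative): @paddedLine 1 "x"@ yields @"    x\n"@, and
-- @comma "x"@ yields @"x"@ padded out to a fixed width by @padTo 40@ and
-- followed by a trailing comma.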
-- | Format the list of exported modules.
formatExportedModules :: [Export] -> Maybe Text
formatExportedModules [] = Nothing
formatExportedModules exports =
Just . T.concat . map ( paddedLine 1
. comma
. ("module " <>)
. exportSymbol)
. filter ((== ExportModule) . exportType) $ exports
-- | Format the toplevel exported symbols.
formatToplevel :: [Export] -> Maybe Text
formatToplevel [] = Nothing
formatToplevel exports =
Just . T.concat . map (paddedLine 1 . comma . exportSymbol)
. filter ((== ExportToplevel) . exportType) $ exports
-- | Format the type declarations section.
formatTypeDecls :: [Export] -> Maybe Text
formatTypeDecls exports =
let exportedTypes = filter ((== ExportTypeDecl) . exportType) exports
in if exportedTypes == []
then Nothing
else Just . T.unlines $ [ "-- * Exported types"
, T.concat . map ( paddedLine 1
. comma
. exportSymbol )
$ exportedTypes ]
-- | A subsection name, with an optional anchor name.
data Subsection = Subsection { subsectionTitle :: Text
, subsectionAnchor :: Maybe Text
} deriving (Eq, Show, Ord)
-- | A subsection with the given title, whose anchor is @prefix:title@.
subsecWithPrefix :: Text -> Text -> Subsection
subsecWithPrefix prefix title =
Subsection { subsectionTitle = title
, subsectionAnchor = Just (prefix <> ":" <> title) }
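-- For example (illustrative): @subsecWithPrefix "method" "new"@ is
-- @Subsection { subsectionTitle = "new", subsectionAnchor = Just "method:new" }@.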
-- | Format a given section made of subsections.
formatSection :: Text -> (Export -> Maybe (Subsection, SymbolName)) ->
[Export] -> Maybe Text
formatSection section filter exports =
if M.null exportedSubsections
then Nothing
else Just . T.unlines $ [" -- * " <> section
, ( T.unlines
. map formatSubsection
. M.toList ) exportedSubsections]
where
filteredExports :: [(Subsection, SymbolName)]
filteredExports = catMaybes (map filter exports)
exportedSubsections :: M.Map Subsection (Set.Set SymbolName)
exportedSubsections = foldr extract M.empty filteredExports
extract :: (Subsection, SymbolName) -> M.Map Subsection (Set.Set Text)
-> M.Map Subsection (Set.Set Text)
extract (subsec, m) secs =
M.insertWith Set.union subsec (Set.singleton m) secs
formatSubsection :: (Subsection, Set.Set SymbolName) -> Text
formatSubsection (subsec, symbols) =
T.unlines [ "-- ** " <> case subsectionAnchor subsec of
Just anchor -> subsectionTitle subsec <>
" #" <> anchor <> "#"
Nothing -> subsectionTitle subsec
, ( T.concat
. map (paddedLine 1 . comma)
. Set.toList ) symbols]
-- | Format the list of methods.
formatMethods :: [Export] -> Maybe Text
formatMethods = formatSection "Methods" toMethod
where toMethod :: Export -> Maybe (Subsection, SymbolName)
toMethod (Export (ExportMethod s) m) =
Just (subsecWithPrefix "method" s, m)
toMethod _ = Nothing
-- | Format the list of properties.
formatProperties :: [Export] -> Maybe Text
formatProperties = formatSection "Properties" toProperty
where toProperty :: Export -> Maybe (Subsection, SymbolName)
toProperty (Export (ExportProperty s) m) =
Just (subsecWithPrefix "attr" s, m)
toProperty _ = Nothing
-- | Format the list of signals.
formatSignals :: [Export] -> Maybe Text
formatSignals = formatSection "Signals" toSignal
where toSignal :: Export -> Maybe (Subsection, SymbolName)
toSignal (Export (ExportSignal s) m) =
Just (subsecWithPrefix "signal" s, m)
toSignal _ = Nothing
-- | Format the given export list. This is just the inside of the
-- parentheses.
formatExportList :: [Export] -> Text
formatExportList exports =
T.unlines . catMaybes $ [ formatExportedModules exports
, formatToplevel exports
, formatTypeDecls exports
, formatMethods exports
, formatProperties exports
, formatSignals exports ]
-- | Write down the list of language pragmas.
languagePragmas :: [Text] -> Text
languagePragmas [] = ""
languagePragmas ps = "{-# LANGUAGE " <> T.intercalate ", " ps <> " #-}\n"
-- | Write down the list of GHC options.
ghcOptions :: [Text] -> Text
ghcOptions [] = ""
ghcOptions opts = "{-# OPTIONS_GHC " <> T.intercalate ", " opts <> " #-}\n"
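-- As an illustration: @languagePragmas ["OverloadedStrings", "LambdaCase"]@
-- produces @"{-# LANGUAGE OverloadedStrings, LambdaCase #-}\n"@, an empty
-- list produces the empty string, and 'ghcOptions' behaves analogously for
-- the @OPTIONS_GHC@ pragma.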
-- | Standard fields for every module.
standardFields :: Text
standardFields = T.unlines [ "Copyright : " <> authors
, "License : " <> license
, "Maintainer : " <> maintainers ]
-- | The haddock header for the module, including optionally a description.
moduleHaddock :: Maybe Text -> Text
moduleHaddock Nothing = T.unlines ["{- |", standardFields <> "-}"]
moduleHaddock (Just description) = T.unlines ["{- |", standardFields,
description, "-}"]
-- | Generic module prelude. We reexport all of the submodules.
modulePrelude :: Text -> [Export] -> [Text] -> Text
modulePrelude name [] [] = "module " <> name <> " () where\n"
modulePrelude name exports [] =
"module " <> name <> "\n ( "
<> formatExportList exports
<> " ) where\n"
modulePrelude name [] reexportedModules =
"module " <> name <> "\n ( "
<> formatExportList (map (Export ExportModule) reexportedModules)
<> " ) where\n\n"
<> T.unlines (map ("import " <>) reexportedModules)
modulePrelude name exports reexportedModules =
"module " <> name <> "\n ( "
<> formatExportList (map (Export ExportModule) reexportedModules)
<> "\n"
<> formatExportList exports
<> " ) where\n\n"
<> T.unlines (map ("import " <>) reexportedModules)
-- | Code for loading the needed dependencies. One needs to give the
-- prefix for the namespace currently being generated: modules with
-- this prefix will be imported as {-# SOURCE #-} imports, and all other
-- modules will be imported normally.
importDeps :: ModulePath -> [ModulePath] -> Text
importDeps _ [] = ""
importDeps (ModulePath prefix) deps = T.unlines . map toImport $ deps
where toImport :: ModulePath -> Text
toImport dep = let impSt = if importSource dep
then "import {-# SOURCE #-} qualified "
else "import qualified "
in impSt <> dotWithPrefix dep <>
" as " <> qualifiedModuleName dep
importSource :: ModulePath -> Bool
importSource (ModulePath [_, "Callbacks"]) = False
importSource (ModulePath mp) = take (length prefix) mp == prefix
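-- A hedged example (the module names are made up): with prefix
-- @ModulePath ["Gtk"]@, a dependency @ModulePath ["Gtk", "Objects", "Widget"]@
-- is rendered as
-- @import {-# SOURCE #-} qualified GI.Gtk.Objects.Widget as Gtk.Widget@,
-- while a dependency from a different namespace gets an ordinary qualified
-- import.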
-- | Standard imports.
moduleImports :: Text
moduleImports = T.unlines [
"import Data.GI.Base.ShortPrelude"
, "import qualified Data.GI.Base.ShortPrelude as SP"
, "import qualified Data.GI.Base.Overloading as O"
, "import qualified Prelude as P"
, ""
, "import qualified Data.GI.Base.Attributes as GI.Attributes"
, "import qualified Data.GI.Base.ManagedPtr as B.ManagedPtr"
, "import qualified Data.GI.Base.GError as B.GError"
, "import qualified Data.GI.Base.GVariant as B.GVariant"
, "import qualified Data.GI.Base.GParamSpec as B.GParamSpec"
, "import qualified Data.GI.Base.CallStack as B.CallStack"
, "import qualified Data.Text as T"
, "import qualified Data.ByteString.Char8 as B"
, "import qualified Data.Map as Map"
, "import qualified Foreign.Ptr as FP" ]
-- | Like `dotModulePath`, but add a "GI." prefix.
dotWithPrefix :: ModulePath -> Text
dotWithPrefix mp = dotModulePath ("GI" <> mp)
-- | Write to disk the code for a module, under the given base
-- directory. Does not write submodules recursively; for that, use
-- `writeModuleTree`.
writeModuleInfo :: Bool -> Maybe FilePath -> ModuleInfo -> IO ()
writeModuleInfo verbose dirPrefix minfo = do
let submodulePaths = map (modulePath) (M.elems (submodules minfo))
-- We reexport any submodules.
submoduleExports = map dotWithPrefix submodulePaths
fname = modulePathToFilePath dirPrefix (modulePath minfo) ".hs"
dirname = takeDirectory fname
code = codeToText (moduleCode minfo)
pragmas = languagePragmas (Set.toList $ modulePragmas minfo)
optionsGHC = ghcOptions (Set.toList $ moduleGHCOpts minfo)
prelude = modulePrelude (dotWithPrefix $ modulePath minfo)
(F.toList (moduleExports minfo))
submoduleExports
imports = if ImplicitPrelude `Set.member` moduleFlags minfo
then ""
else moduleImports
pkgRoot = ModulePath (take 1 (modulePathToList $ modulePath minfo))
deps = importDeps pkgRoot (Set.toList $ qualifiedImports minfo)
haddock = moduleHaddock (moduleDoc minfo)
when verbose $ putStrLn ((T.unpack . dotWithPrefix . modulePath) minfo
++ " -> " ++ fname)
createDirectoryIfMissing True dirname
utf8WriteFile fname (T.unlines [pragmas, optionsGHC, haddock,
prelude, imports, deps, code])
when (bootCode minfo /= NoCode) $ do
let bootFName = modulePathToFilePath dirPrefix (modulePath minfo) ".hs-boot"
utf8WriteFile bootFName (genHsBoot minfo)
-- | Generate the .hs-boot file for the given module.
genHsBoot :: ModuleInfo -> Text
genHsBoot minfo =
"module " <> (dotWithPrefix . modulePath) minfo <> " where\n\n" <>
moduleImports <> "\n" <>
codeToText (bootCode minfo)
-- | Construct the filename corresponding to the given module.
modulePathToFilePath :: Maybe FilePath -> ModulePath -> FilePath -> FilePath
modulePathToFilePath dirPrefix (ModulePath mp) ext =
joinPath (fromMaybe "" dirPrefix : "GI" : map T.unpack mp) ++ ext
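-- For instance (assuming a POSIX path separator):
-- @modulePathToFilePath (Just "out") (ModulePath ["Gtk", "Objects", "Widget"]) ".hs"@
-- evaluates to @"out/GI/Gtk/Objects/Widget.hs"@.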
-- | Write down the code for a module and its submodules to disk under
-- the given base directory. It returns the list of written modules.
writeModuleTree :: Bool -> Maybe FilePath -> ModuleInfo -> IO [Text]
writeModuleTree verbose dirPrefix minfo = do
submodulePaths <- concat <$> forM (M.elems (submodules minfo))
(writeModuleTree verbose dirPrefix)
writeModuleInfo verbose dirPrefix minfo
return $ (dotWithPrefix (modulePath minfo) : submodulePaths)
-- | Return the list of modules `writeModuleTree` would write, without
-- actually writing anything to disk.
listModuleTree :: ModuleInfo -> [Text]
listModuleTree minfo =
let submodulePaths = concatMap listModuleTree (M.elems (submodules minfo))
in dotWithPrefix (modulePath minfo) : submodulePaths
| ford-prefect/haskell-gi | lib/Data/GI/CodeGen/Code.hs | lgpl-2.1 | 32,696 | 0 | 16 | 8,584 | 7,483 | 3,982 | 3,501 | -1 | -1 |
{-# OPTIONS_GHC -Wall #-}
module HW04 where
import Data.List
newtype Poly a = P [a]
-- Exercise 1 -----------------------------------------
x :: Num a => Poly a
x = P [0, 1]
-- Exercise 2 ----------------------------------------
instance (Num a, Eq a) => Eq (Poly a) where
(==) (P as) (P bs) = (==) as bs
-- Exercise 3 -----------------------------------------
instance (Num a, Eq a, Show a) => Show (Poly a) where
show (P as)
| length as == 0 = "0"
| length as == 1 && as == [0] = "0"
| otherwise = intercalate " + " $ filter (/= "") $ map getTerm $ reverse $ zip as [0..]
where getTerm :: (Num a, Eq a, Show a) => (a, Integer) -> String
getTerm (c, e)
| c == 0 = ""
| e == 0 = show c
| otherwise = coefficient ++ exponent'
where coefficient = case c of 1 -> ""
-1 -> "-"
_ -> show c
exponent' = case e of 1 -> "x"
_ -> "x^" ++ show e
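-- A couple of illustrative cases (doctest-style, not part of the original
-- exercise text):
--
-- >>> show (P [1, 0, 2 :: Integer])
-- "2x^2 + 1"
-- >>> show (P ([] :: [Integer]))
-- "0"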
-- Exercise 4 -----------------------------------------
plus :: Num a => Poly a -> Poly a -> Poly a
plus (P as) (P bs) = P $ zipWith (+) padded_as padded_bs
where padded_as = if (length as) < (length bs) then (as ++ zero_pad) else as
padded_bs = if (length as) > (length bs) then (bs ++ zero_pad) else bs
zero_pad = flip replicate 0 $ abs $ (-) (length as) (length bs)
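-- e.g. (illustrative): plus (P [1, 2]) (P [3, 4, 5]) == P [4, 6, 5]; the
-- shorter coefficient list is zero-padded before the pointwise sum.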
-- Exercise 5 -----------------------------------------
times :: Num a => Poly a -> Poly a -> Poly a
times (P as) (P bs) = sum polynomials
where zero_pads = map (flip replicate 0) [0..]
multiplied_values = map (\a -> map (*a) bs) as
polynomials = map (P) $ zipWith (++) zero_pads multiplied_values
-- Exercise 6 -----------------------------------------
instance Num a => Num (Poly a) where
(+) = plus
(*) = times
negate = (times) (P [-1])
fromInteger = P . (:[]) . fromInteger
-- No meaningful definitions exist
abs = undefined
signum = undefined
-- Exercise 7 -----------------------------------------
applyP :: Num a => Poly a -> a -> a
applyP (P as) value = sum $ zipWith (\a b -> a * b) as (iterate (* value) 1)
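-- For instance: applyP (P [1, 2, 3]) 2 evaluates 1 + 2x + 3x^2 at x = 2,
-- giving 1 + 4 + 12 = 17.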
-- Exercise 8 -----------------------------------------
class Num a => Differentiable a where
deriv :: a -> a
nderiv :: Int -> a -> a
nderiv n f
| n == 0 = f
| otherwise = nderiv (n - 1) (deriv f)
-- Exercise 9 -----------------------------------------
instance (Num a, Enum a) => Differentiable (Poly a) where
deriv (P as) = P $ drop 1 $ zipWith (*) as [0..]
| redongjun/haskellschool | homework/HW04.hs | unlicense | 2,652 | 0 | 14 | 812 | 1,025 | 533 | 492 | 50 | 3 |
-- module with tools for combinatoric formulae
module Maths.Combinatorics.Sums
( sumOfSquares,
squarePyramid
) where
-- the sum of squares of a finite list of numbers
sumOfSquares :: Num a => [a] -> a
sumOfSquares xs = sum [x^2 | x <- xs]
-- the n'th square pyramidal number
-- see http://en.wikipedia.org/wiki/Square_pyramidal_number
squarePyramid :: Int -> Int
squarePyramid n = (2*n^3 + 3*n^2 + n) `div` 6 -- '/' would require Fractional Int
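-- For example: squarePyramid 3 == (2*27 + 3*9 + 3) `div` 6 == 14, which
-- agrees with sumOfSquares [1..3] == 1 + 4 + 9 == 14.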
| mathemage/htools | Maths/Combinatorics/Formulae.hs | apache-2.0 | 453 | 0 | 12 | 80 | 115 | 65 | 50 | 7 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Test.Pos.Chain.Block.Arbitrary
( HeaderAndParams (..)
, BlockHeaderList (..)
, genMainBlockHeader
, genMainBlockBody
, genMainBlockBodyForSlot
, genMainBlock
, genHeaderAndParams
, genStubbedBHL
) where
import qualified Prelude
import Universum
import qualified Data.List as List
import qualified Data.Set as Set
import Formatting (bprint, build, (%))
import qualified Formatting.Buildable as Buildable
import System.Random (Random, mkStdGen, randomR)
import Test.QuickCheck (Arbitrary (..), Gen, choose, suchThat)
import Test.QuickCheck.Arbitrary.Generic (genericArbitrary,
genericShrink)
import Pos.Binary.Class (biSize)
import Pos.Chain.Block (ConsensusEraLeaders (..), HeaderHash,
headerLastSlotInfo, mkMainBlock, mkMainBlockExplicit)
import qualified Pos.Chain.Block as Block
import qualified Pos.Chain.Block.Slog.LastBlkSlots as LastBlkSlots
import qualified Pos.Chain.Delegation as Core
import Pos.Chain.Genesis (GenesisHash (..))
import Pos.Chain.Update (ConsensusEra (..),
ObftConsensusStrictness (..))
import Pos.Core (BlockCount (..), EpochOrSlot (..), SlotId (..),
getEpochOrSlot, localSlotIndexMaxBound,
localSlotIndexMinBound)
import qualified Pos.Core as Core
import Pos.Core.Attributes (areAttributesKnown)
import Pos.Core.Chrono (OldestFirst (..))
import Pos.Core.Slotting (LocalSlotIndex (..), SlotCount (..),
epochOrSlotToSlot)
import Pos.Crypto (ProtocolMagic, PublicKey, SecretKey, createPsk,
toPublic)
import Test.Pos.Chain.Delegation.Arbitrary (genDlgPayload)
import Test.Pos.Chain.Genesis.Dummy (dummyEpochSlots,
dummyGenesisHash)
import Test.Pos.Chain.Ssc.Arbitrary (SscPayloadDependsOnSlot (..),
genSscPayload, genSscPayloadForSlot)
import Test.Pos.Chain.Txp.Arbitrary (genTxPayload)
import Test.Pos.Chain.Update.Arbitrary (genUpdatePayload)
import Test.Pos.Core.Arbitrary (genSlotId)
newtype BodyDependsOnSlot body = BodyDependsOnSlot
{ genBodyDepsOnSlot :: SlotId -> Gen body
}
------------------------------------------------------------------------------------------
-- Arbitrary instances for Blockchain related types
------------------------------------------------------------------------------------------
instance Arbitrary Block.BlockHeader where
arbitrary = genericArbitrary
shrink = genericShrink
instance Arbitrary Block.BlockSignature where
arbitrary = genericArbitrary
shrink = genericShrink
------------------------------------------------------------------------------------------
-- GenesisBlockchain
------------------------------------------------------------------------------------------
instance Arbitrary Block.GenesisExtraHeaderData where
arbitrary = genericArbitrary
shrink = genericShrink
instance Arbitrary Block.GenesisExtraBodyData where
arbitrary = genericArbitrary
shrink = genericShrink
instance Arbitrary Block.GenesisBlockHeader where
arbitrary = genericArbitrary
shrink = genericShrink
instance Arbitrary Block.GenesisProof where
arbitrary = genericArbitrary
shrink = genericShrink
instance Arbitrary Block.GenesisConsensusData where
arbitrary = genericArbitrary
shrink = genericShrink
instance Arbitrary (BodyDependsOnSlot Block.GenesisBody) where
arbitrary = pure $ BodyDependsOnSlot $ \_ -> arbitrary
instance Arbitrary Block.GenesisBody where
arbitrary = genericArbitrary
shrink = genericShrink
instance Arbitrary Block.GenesisBlock where
arbitrary = Block.mkGenesisBlock
<$> arbitrary
<*> (maybe (Left dummyGenesisHash) Right <$> arbitrary)
<*> arbitrary
<*> arbitrary
shrink = genericShrink
------------------------------------------------------------------------------------------
-- MainBlockchain
------------------------------------------------------------------------------------------
-- | Generate a 'MainBlockHeader' given a parent hash, difficulty and body.
genMainBlockHeader
:: ProtocolMagic
-> HeaderHash
-> Core.ChainDifficulty
-> Block.MainBody
-> Gen Block.MainBlockHeader
genMainBlockHeader pm prevHash difficulty body =
Block.mkMainHeaderExplicit pm <$> pure prevHash
<*> pure difficulty
<*> genSlotId dummyEpochSlots
<*> arbitrary -- SecretKey
<*> pure Nothing
<*> pure body
<*> arbitrary
instance Arbitrary Block.MainBlockHeader where
arbitrary = do
prevHash <- arbitrary
difficulty <- arbitrary
body <- arbitrary
pm <- arbitrary
genMainBlockHeader pm prevHash difficulty body
shrink = genericShrink
instance Arbitrary Block.MainExtraHeaderData where
arbitrary = genericArbitrary
shrink = genericShrink
instance Arbitrary Block.MainExtraBodyData where
arbitrary = genericArbitrary
shrink = genericShrink
instance Arbitrary Block.MainProof where
arbitrary = genericArbitrary
shrink Block.MainProof {..} =
[Block.MainProof txp mpcp prxp updp
| (txp, mpcp, prxp, updp) <-
shrink (mpTxProof, mpMpcProof, mpProxySKsProof, mpUpdateProof)
]
instance Arbitrary Block.MainConsensusData where
arbitrary = genericArbitrary
shrink = genericShrink
instance Arbitrary Block.MainToSign where
arbitrary = genericArbitrary
shrink = genericShrink
-- | In the main blockchain's body, the number of transactions must be the
-- same as the number of transaction witnesses.
--
-- Furthermore, for every transaction in index i of the list, the length of
-- its output list must be the same as the length of the i-th item in the
-- TxDistribution list.
--
-- Because of this, the Arbitrary instance for @Body
-- MainBlockchain@ ensures that for every transaction generated, a
-- transaction witness is generated as well, and the lengths of its list of
-- outputs must also be the same as the length of its corresponding
-- TxDistribution item.
{-# ANN module ("HLint: ignore Reduce duplication" :: Text) #-}
genMainBlockBody
:: ProtocolMagic
-> Core.EpochIndex -- ^ For the delegation payload.
-> Gen Block.MainBody
genMainBlockBody pm epoch =
Block.MainBody <$> genTxPayload pm
<*> genSscPayload pm
<*> genDlgPayload pm epoch
<*> genUpdatePayload pm
genMainBlockBodyForSlot
:: ProtocolMagic
-> SlotId
-> Gen Block.MainBody
genMainBlockBodyForSlot pm slotId = do
txpPayload <- genTxPayload pm
sscPayload <- genSscPayloadForSlot pm slotId
dlgPayload <- genDlgPayload pm (Core.siEpoch slotId)
updPayload <- genUpdatePayload pm
pure $ Block.MainBody txpPayload sscPayload dlgPayload updPayload
instance Arbitrary (BodyDependsOnSlot Block.MainBody) where
arbitrary = pure $ BodyDependsOnSlot $ \slotId -> do
txPayload <- arbitrary
generator <- genPayloadDependsOnSlot <$> arbitrary
mpcData <- generator slotId
pm <- arbitrary
dlgPayload <- genDlgPayload pm $ Core.siEpoch slotId
mpcUpload <- arbitrary
return $ Block.MainBody txPayload mpcData dlgPayload mpcUpload
instance Arbitrary Block.MainBody where
arbitrary = genericArbitrary
shrink mb =
[ Block.MainBody txp sscp dlgp updp
| (txp, sscp, dlgp, updp) <-
shrink (mb ^. Block.mbTxPayload,
mb ^. Block.mbSscPayload,
mb ^. Block.mbDlgPayload,
mb ^. Block.mbUpdatePayload)
]
-- | Generate a main block (slot is chosen arbitrarily).
-- You choose the previous header hash.
genMainBlock
:: ProtocolMagic
-> HeaderHash
-> Core.ChainDifficulty
-> Gen Block.MainBlock
genMainBlock pm prevHash difficulty = do
bv <- arbitrary
sv <- arbitrary
slot <- genSlotId dummyEpochSlots
sk <- arbitrary
body <- genMainBlockBodyForSlot pm slot
pure $ mkMainBlockExplicit pm bv sv prevHash difficulty slot sk Nothing body
instance Arbitrary Block.MainBlock where
arbitrary = do
slot <- arbitrary
pm <- arbitrary
bv <- arbitrary
sv <- arbitrary
prevHeader <- maybe (Left dummyGenesisHash) Right <$> arbitrary
sk <- arbitrary
BodyDependsOnSlot {..} <- arbitrary :: Gen (BodyDependsOnSlot Block.MainBody)
body <- genBodyDepsOnSlot slot
pure $ mkMainBlock pm bv sv prevHeader slot sk Nothing body
shrink = genericShrink
instance Buildable (Block.BlockHeader, PublicKey) where
build (block, key) =
bprint
( build%"\n"%
build%"\n"
) block key
newtype BlockHeaderList = BHL
{ getHeaderList :: ([Block.BlockHeader], [PublicKey])
} deriving (Eq)
instance Show BlockHeaderList where
show = toString . unlines . map pretty . uncurry zip . getHeaderList
-- | Generation of an arbitrary, valid headerchain along with a list of leaders
-- for each epoch.
--
-- Because 'verifyHeaders' assumes the head of the list is the most recent
-- block, this function is tail-recursive: while keeping track of the current
-- block and epoch/slot, it adds the most recent one to the head of the
-- header list it'll return when done.
--
-- The @[Either SecretKey (SecretKey, SecretKey)]@ type is for
-- determining what kind of signature the slot's block will have. If it's
-- @Left sk@, it'll be a simple 'BlockSignature'; if it's @Right (issuerSK,
-- delegateSK)@, it will be a proxy signature, using a heavyweight proxy
-- secret key issued by @issuerSK@ and delegated to @delegateSK@ for the
-- block's epoch.
--
-- Beware that
-- * genesis blocks have no leaders, and that
-- * if an epoch is `n` slots long, every `n+1`-th block will be of the
-- genesis kind.
recursiveHeaderGen
:: ProtocolMagic
-> ConsensusEra
-> GenesisHash
-> Bool -- ^ Whether to create genesis block before creating main block for 0th slot
-> [Either SecretKey (SecretKey, SecretKey)]
-> [SlotId]
-> [Block.BlockHeader]
-> Gen [Block.BlockHeader]
recursiveHeaderGen pm
era
gHash
genesis
(eitherOfLeader : leaders)
(SlotId{..} : rest)
blockchain
| genesis && era == Original && Core.getSlotIndex siSlot == 0 = do
gBody <- arbitrary
let pHeader = maybe (Left gHash) Right ((fmap fst . uncons) blockchain)
gHeader = Block.BlockHeaderGenesis $ Block.mkGenesisHeader pm pHeader siEpoch gBody
mHeader <- genMainHeader (Just gHeader)
recursiveHeaderGen pm era gHash True leaders rest (mHeader : gHeader : blockchain)
| otherwise = do
curHeader <- genMainHeader ((fmap fst . uncons) blockchain)
recursiveHeaderGen pm era gHash True leaders rest (curHeader : blockchain)
where
genMainHeader prevHeader = do
body <- arbitrary
extraHData <- arbitrary
-- These two values may not be used at all. If the slot in question
-- will have a simple signature, laziness will prevent them from
-- being calculated. Otherwise, they'll be the proxy secret key's ω.
let slotId = SlotId siEpoch siSlot
(leader, proxySK) = case eitherOfLeader of
Left sk -> (sk, Nothing)
Right (issuerSK, delegateSK) ->
let delegatePK = toPublic delegateSK
proxy = ( createPsk pm issuerSK delegatePK (Core.HeavyDlgIndex siEpoch)
, toPublic issuerSK)
in (delegateSK, Just proxy)
pure $ Block.BlockHeaderMain $
Block.mkMainHeader pm (maybe (Left gHash) Right prevHeader) slotId leader proxySK body extraHData
recursiveHeaderGen _ _ _ _ [] _ b = return b
recursiveHeaderGen _ _ _ _ _ [] b = return b
-- | Maximum start epoch in block header verification tests
bhlMaxStartingEpoch :: Integral a => a
bhlMaxStartingEpoch = 1000000
-- | Amount of full epochs in block header verification tests
bhlEpochs :: Integral a => a
bhlEpochs = 2
-- | This type is used to generate a blockchain, as well as a list of leaders
-- for every slot with which the chain will be paired. The leaders are in
-- reverse order to the chain - the list goes from first to last leader. This
-- is used in a `verifyHeader` test.
--
-- Note that every non-empty blockchain has at least one epoch, which may be
-- complete or incomplete. To simulate this behavior, two random numbers are
-- generated: one that stands for the number of complete epochs we have, and
-- the other for the number of incomplete slots of the last epoch, which, in
-- this instance, must exist.
--
-- A blockchain with only complete epochs is a subset of some blockchain with
-- one incomplete epoch, so if the former is desired, a simple list
-- `takeWhile` of the list this instance generates will be enough.
--
-- Note that a leader is generated for each slot.
-- (Not exactly a leader - see previous comment)
instance Arbitrary BlockHeaderList where
arbitrary = do
pm <- arbitrary
era <- arbitrary
genStubbedBHL pm era
genStubbedBHL
:: ProtocolMagic
-> ConsensusEra
-> Gen BlockHeaderList
genStubbedBHL pm era = do
incompleteEpochSize <- choose (1, dummyEpochSlots - 1)
let slot = SlotId 0 localSlotIndexMinBound
generateBHL pm era dummyGenesisHash True slot (dummyEpochSlots * bhlEpochs + incompleteEpochSize)
generateBHL
:: ProtocolMagic
-> ConsensusEra
-> GenesisHash
-> Bool -- ^ Whether to create genesis block before creating main
-- block for 0th slot
-> SlotId -- ^ Start slot
-> SlotCount -- ^ Slot count
-> Gen BlockHeaderList
generateBHL pm era gHash createInitGenesis startSlot slotCount = BHL <$> do
leadersList <- genLeaderKeyList $ fromIntegral slotCount
let actualLeaders = map (toPublic . either identity (view _1)) leadersList
slotIdsRange =
take (fromIntegral slotCount) $
map (Core.unflattenSlotId dummyEpochSlots)
[Core.flattenSlotId dummyEpochSlots startSlot ..]
(, actualLeaders) <$>
recursiveHeaderGen
pm
era
gHash
createInitGenesis
leadersList
slotIdsRange
[]
-- Generate a list of unique leader keys of the specified length. The keys need
-- to be unique so that block validation doesn't fail when validating ObftLenient.
genLeaderKeyList :: Int -> Gen [Either SecretKey (SecretKey, SecretKey)]
genLeaderKeyList count =
loop 0 []
where
loop :: Int -> [Either SecretKey (SecretKey, SecretKey)] -> Gen [Either SecretKey (SecretKey, SecretKey)]
loop n !acc
| n >= count = pure acc
| otherwise = do
key <- correctLeaderGen
-- New keys that are already present in the list are discarded.
if key `elem` acc
then loop n acc
else loop (n + 1) (key : acc)
correctLeaderGen :: Gen (Either SecretKey (SecretKey, SecretKey))
correctLeaderGen =
-- We don't want to create blocks with self-signed psks
let issDelDiff (Left _) = True
issDelDiff (Right (i,d)) = i /= d
in arbitrary `suchThat` issDelDiff
-- | This type is used to generate a valid blockheader and associated header
-- verification params. With regards to the block header function
-- 'Pos.Types.Blocks.Functions.verifyHeader', the blockheaders that may be
-- part of the verification parameters are guaranteed to be valid, as are the
-- slot leaders and the current slot.
data HeaderAndParams = HeaderAndParams
{ hapHeader :: Block.BlockHeader
, hapParams :: Block.VerifyHeaderParams
} deriving (Eq, Show)
-- This generator produces a header and a set of params for testing that header.
genHeaderAndParams :: ProtocolMagic -> ConsensusEra -> Gen HeaderAndParams
genHeaderAndParams pm era = do
-- This Int is used as a seed to randomly choose a slot down below
seed <- arbitrary :: Gen Int
-- If the blkSecurityParam is too low (ie < 10) then ObftLenient is likely
-- to fail.
blkSecurityParam <- BlockCount <$> choose (10, 50)
slotsPerEpoch <- SlotCount . (getBlockCount blkSecurityParam *) <$> choose (2, 10)
startSlot <- SlotId <$> choose (0, bhlMaxStartingEpoch)
<*> (UnsafeLocalSlotIndex <$> choose (0, fromIntegral (getSlotCount slotsPerEpoch) - 1))
-- Create up to 10 slots, and trim them later.
slotCount <- choose (2, 10)
headers <- reverse . fst . getHeaderList
<$> generateBHL pm era dummyGenesisHash True startSlot slotCount
-- 'skip' is the random number of headers that should be skipped in
-- the header chain. This ensures different parts of it are chosen
-- each time.
skip <- choose (0, length headers - 2)
let (prev, header) =
case take 2 $ drop skip headers of
[h] -> (Nothing, h)
[h1, h2] -> (Just h1, h2)
[] -> error "[BlockSpec] empty headerchain"
_ -> error "[BlockSpec] the headerchain doesn't have enough headers"
-- A helper function. Given integers 'x' and 'y', it chooses a
-- random integer in the interval [x, y]
betweenXAndY :: Random a => a -> a -> a
betweenXAndY x y = fst . randomR (x, y) $ mkStdGen seed
-- One of the fields in the 'VerifyHeaderParams' type is 'Just
-- SlotId'. The following binding is where it is calculated.
randomSlotBeforeThisHeader =
case header of
-- If the header is of the genesis kind, this field is
-- not needed.
Block.BlockHeaderGenesis _ -> Nothing
-- If it's a main blockheader, then a valid "current"
-- SlotId for testing is any with an epoch greater than
-- the header's epoch and with any slot index, or any in
-- the same epoch but with a greater or equal slot index
-- than the header.
Block.BlockHeaderMain h ->
let (SlotId e s) = view Block.headerSlotL h
rndEpoch :: Core.EpochIndex
rndEpoch = betweenXAndY e maxBound
rndSlotIdx :: LocalSlotIndex
rndSlotIdx = if rndEpoch > e
then betweenXAndY localSlotIndexMinBound (localSlotIndexMaxBound slotsPerEpoch)
else betweenXAndY s (localSlotIndexMaxBound slotsPerEpoch)
rndSlot = SlotId rndEpoch rndSlotIdx
in Just rndSlot
hasUnknownAttributes =
not . areAttributesKnown $
case header of
Block.BlockHeaderGenesis h -> h ^. Block.gbhExtra . Block.gehAttributes
Block.BlockHeaderMain h -> h ^. Block.gbhExtra . Block.mehAttributes
thisEpochLeaderSchedule :: Maybe [Core.AddressHash PublicKey]
thisEpochLeaderSchedule =
toList <$> mkEpochLeaderSchedule era (getEpochOrSlot header) headers
params = Block.VerifyHeaderParams
{ Block.vhpPrevHeader = prev
, Block.vhpCurrentSlot = randomSlotBeforeThisHeader
, Block.vhpLeaders = case era of
Original -> OriginalLeaders <$> thisEpochLeaderSchedule
OBFT ObftStrict -> ObftStrictLeaders <$> thisEpochLeaderSchedule
OBFT ObftLenient ->
pure $ ObftLenientLeaders
(Set.fromList $ mapMaybe (fmap Core.addressHash . Block.headerLeaderKey) headers)
blkSecurityParam
(LastBlkSlots.updateMany (LastBlkSlots.create (fromIntegral $ getBlockCount blkSecurityParam)) . OldestFirst $ mapMaybe (headerLastSlotInfo slotsPerEpoch) headers)
, Block.vhpMaxSize = Just (biSize header)
, Block.vhpVerifyNoUnknown = not hasUnknownAttributes
, Block.vhpConsensusEra = era
}
return $ HeaderAndParams header params
-- Pad the head of a list of block headers to generate a list that is long enough
-- to index correctly during validation. Use the EpochOrSlot of the target
-- BlockHeader and the header index to calculate the number of fake leader
-- keys to prepend to the leader key list.
mkEpochLeaderSchedule :: ConsensusEra -> EpochOrSlot -> [Block.BlockHeader] -> Maybe (NonEmpty (Core.AddressHash PublicKey))
mkEpochLeaderSchedule era eos hdrs =
case List.elemIndex eos (map getEpochOrSlot hdrs) of
Nothing -> Nothing
Just idx ->
let count = prependCount idx in
nonEmpty .
(if count >= 0
then (replicate count fakeLeaderKey ++)
else List.drop (- count - extra)
)
$ mapMaybe (fmap Core.addressHash . Block.headerLeaderKey) hdrs
where
fakeLeaderKey :: Core.AddressHash PublicKey
fakeLeaderKey = Core.unsafeAddressHash ("fake leader key" :: ByteString)
prependCount :: Int -> Int
prependCount idx =
fromIntegral (getSlotIndex . siSlot $ epochOrSlotToSlot eos) - idx
-- Need this because in the validation code, the indexing for the slot
-- leader schedule starts at 1 for the Original chain (due to the epoch
-- boundary block) but at 0 for ObftStrict.
extra :: Int
extra =
case era of
Original -> 1
OBFT ObftStrict -> 0
OBFT ObftLenient ->
-- This should never happen.
Prelude.error "Test.Pos.Chain.Block.Arbitrary.mkEpochLeaderSchedule ObftLenient"
-- | A lot of the work to generate a valid sequence of blockheaders has
-- already been done in the 'Arbitrary' instance of the 'BlockHeaderList'
-- type, so it is used here and at most 3 blocks are taken from the generated
-- list.
instance Arbitrary HeaderAndParams where
arbitrary = do
pm <- arbitrary
era <- arbitrary
genHeaderAndParams pm era
| input-output-hk/cardano-sl | chain/test/Test/Pos/Chain/Block/Arbitrary.hs | apache-2.0 | 23,124 | 0 | 25 | 6,268 | 4,132 | 2,215 | 1,917 | -1 | -1 |
{-# LANGUAGE BangPatterns, RankNTypes, GeneralizedNewtypeDeriving, ScopedTypeVariables #-}
module LDPC.Array.Decode where
import Data.Array.Matrix
import Data.Bit
import Data.Array
import ECC
decoder :: Int -> M Bit -> V Double -> V Bit
decoder = ldpc
ldpc :: forall d. (Floating d, Ord d) => Int -> M Bit -> V d -> V Bit
ldpc maxIterations a orig_lam = fmap hard $ loop 0 orig_ne orig_lam
where
orig_ne :: M d
orig_ne = fmap (const 0) a
loop :: Int -> M d -> V d -> V d
loop !n ne lam
| all (== 0) (elems ans) = lam
| n >= maxIterations = orig_lam
| otherwise = loop (n+1) ne' lam'
where
c_hat :: V Bit
c_hat = fmap hard lam
ans :: M Bit
ans = a `mm` columnM c_hat
ne' :: M d
ne' = ne // [ ((m,n), -2 * atanh (product
[ tanh (- ((lam ! j - ne ! (m,j)) / 2))
| j <- indices lam
, j /= n
, a ! (m,j) == 1
]))
| (m,n) <- indices ne
, a ! (m,n) == 1
]
lam' :: V d
lam' = accum (+) orig_lam [ (n,a) | ((_,n),a) <- assocs ne' ]
min_decoder :: Int -> M Bit -> V Double -> V Bit
min_decoder = min_ldpc
min_ldpc :: forall d . (Floating d, Ord d) => Int -> M Bit -> V d -> V Bit
min_ldpc maxIterations a orig_lam = fmap hard $ loop 0 orig_ne orig_lam
where
orig_ne :: M d
orig_ne = fmap (const 0) a
loop :: Int -> M d -> V d -> V d
loop !n ne lam
| all (== 0) (elems ans) = lam
| n >= maxIterations = orig_lam
| otherwise = loop (n+1) ne' lam'
where
c_hat :: V Bit
c_hat = fmap hard lam
ans :: M Bit
ans = a `mm` columnM c_hat
ne' :: M d
ne' = ne // [ ((m,n), -0.75 * foldr1 min'
[ - (lam ! j - ne ! (m,j))
| j <- indices lam
, j /= n
, a ! (m,j) == 1
])
| (m,n) <- indices ne
, a ! (m,n) == 1
]
lam' :: V d
lam' = accum (+) orig_lam [ (n,a) | ((_,n),a) <- assocs ne' ]
min' :: (Num a, Ord a) => a -> a -> a
min' x y = signum x * signum y * min (abs x) (abs y)
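-- e.g. min' 3 (-5) == -3: the magnitude is the smaller of the two absolute
-- values and the sign is the product of the two signs.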
| ku-fpg/ldpc | src/LDPC/Array/Decode.hs | bsd-2-clause | 2,431 | 0 | 25 | 1,092 | 1,037 | 537 | 500 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, CApiFFI #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.IO
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- The standard IO library.
--
-----------------------------------------------------------------------------
module System.IO (
-- * The IO monad
IO,
fixIO,
-- * Files and handles
FilePath,
Handle, -- abstract, instance of: Eq, Show.
-- | GHC note: a 'Handle' will be automatically closed when the garbage
-- collector detects that it has become unreferenced by the program.
-- However, relying on this behaviour is not generally recommended:
-- the garbage collector is unpredictable. If possible, use
-- an explicit 'hClose' to close 'Handle's when they are no longer
-- required. GHC does not currently attempt to free up file
-- descriptors when they have run out, it is your responsibility to
-- ensure that this doesn't happen.
-- ** Standard handles
-- | Three handles are allocated during program initialisation,
-- and are initially open.
stdin, stdout, stderr,
-- * Opening and closing files
-- ** Opening files
withFile,
openFile,
IOMode(ReadMode,WriteMode,AppendMode,ReadWriteMode),
-- ** Closing files
hClose,
-- ** Special cases
-- | These functions are also exported by the "Prelude".
readFile,
writeFile,
appendFile,
-- ** File locking
-- $locking
-- * Operations on handles
-- ** Determining and changing the size of a file
hFileSize,
hSetFileSize,
-- ** Detecting the end of input
hIsEOF,
isEOF,
-- ** Buffering operations
BufferMode(NoBuffering,LineBuffering,BlockBuffering),
hSetBuffering,
hGetBuffering,
hFlush,
-- ** Repositioning handles
hGetPosn,
hSetPosn,
HandlePosn, -- abstract, instance of: Eq, Show.
hSeek,
SeekMode(AbsoluteSeek,RelativeSeek,SeekFromEnd),
hTell,
-- ** Handle properties
hIsOpen, hIsClosed,
hIsReadable, hIsWritable,
hIsSeekable,
-- ** Terminal operations (not portable: GHC only)
hIsTerminalDevice,
hSetEcho,
hGetEcho,
-- ** Showing handle state (not portable: GHC only)
hShow,
-- * Text input and output
-- ** Text input
hWaitForInput,
hReady,
hGetChar,
hGetLine,
hLookAhead,
hGetContents,
-- ** Text output
hPutChar,
hPutStr,
hPutStrLn,
hPrint,
-- ** Special cases for standard input and output
-- | These functions are also exported by the "Prelude".
interact,
putChar,
putStr,
putStrLn,
print,
getChar,
getLine,
getContents,
readIO,
readLn,
-- * Binary input and output
withBinaryFile,
openBinaryFile,
hSetBinaryMode,
hPutBuf,
hGetBuf,
hGetBufSome,
hPutBufNonBlocking,
hGetBufNonBlocking,
-- * Temporary files
openTempFile,
openBinaryTempFile,
openTempFileWithDefaultPermissions,
openBinaryTempFileWithDefaultPermissions,
#if defined(HaLVM_TARGET_OS)
setXenPutStr,
setXenGetChar,
#endif
-- * Unicode encoding\/decoding
-- | A text-mode 'Handle' has an associated 'TextEncoding', which
-- is used to decode bytes into Unicode characters when reading,
-- and encode Unicode characters into bytes when writing.
--
-- The default 'TextEncoding' is the same as the default encoding
-- on your system, which is also available as 'localeEncoding'.
-- (GHC note: on Windows, we currently do not support double-byte
-- encodings; if the console\'s code page is unsupported, then
-- 'localeEncoding' will be 'latin1'.)
--
-- Encoding and decoding errors are always detected and reported,
-- except during lazy I/O ('hGetContents', 'getContents', and
-- 'readFile'), where a decoding error merely results in
-- termination of the character stream, as with other I/O errors.
hSetEncoding,
hGetEncoding,
-- ** Unicode encodings
TextEncoding,
latin1,
utf8, utf8_bom,
utf16, utf16le, utf16be,
utf32, utf32le, utf32be,
localeEncoding,
char8,
mkTextEncoding,
-- * Newline conversion
-- | In Haskell, a newline is always represented by the character
-- '\n'. However, in files and external character streams, a
-- newline may be represented by another character sequence, such
-- as '\r\n'.
--
-- A text-mode 'Handle' has an associated 'NewlineMode' that
    -- specifies how to translate newline characters.  The
-- 'NewlineMode' specifies the input and output translation
-- separately, so that for instance you can translate '\r\n'
-- to '\n' on input, but leave newlines as '\n' on output.
--
-- The default 'NewlineMode' for a 'Handle' is
-- 'nativeNewlineMode', which does no translation on Unix systems,
-- but translates '\r\n' to '\n' and back on Windows.
--
-- Binary-mode 'Handle's do no newline translation at all.
--
hSetNewlineMode,
Newline(..), nativeNewline,
NewlineMode(..),
noNewlineTranslation, universalNewlineMode, nativeNewlineMode,
) where
import Control.Exception.Base
import Data.Bits
import Data.Maybe
import Foreign.C.Error
#ifdef mingw32_HOST_OS
import Foreign.C.String
#endif
import Foreign.C.Types
import System.Posix.Internals
import System.Posix.Types
import GHC.Base
import GHC.List
import GHC.IO hiding ( bracket, onException )
import GHC.IO.IOMode
import GHC.IO.Handle.FD
import qualified GHC.IO.FD as FD
import GHC.IO.Handle
import GHC.IO.Handle.Text ( hGetBufSome, hPutStrLn )
import GHC.IO.Exception ( userError )
import GHC.IO.Encoding
import Text.Read
import GHC.Show
import GHC.MVar
-- -----------------------------------------------------------------------------
-- Standard IO
#ifdef HaLVM_TARGET_OS
import GHC.IORef
import System.IO.Unsafe(unsafePerformIO)
data XenIO = XenIO {
xenPutStr :: String -> IO ()
, xenGetChar :: IO Char
}
{-# NOINLINE xenOps #-}
xenOps :: IORef XenIO
xenOps = unsafePerformIO $ newIORef XenIO {
xenPutStr = \ _ -> return ()
, xenGetChar = fail "System.IO.xenGetChar not set in xenOps!"
}
withXenOp :: (XenIO -> f) -> IO f
withXenOp p = do
ops <- readIORef xenOps
return (p ops)
setXenPutStr :: (String -> IO ()) -> IO ()
setXenPutStr f = atomicModifyIORef xenOps
(\ ops -> (ops{ xenPutStr = f }, ()))
setXenGetChar :: IO Char -> IO ()
setXenGetChar f = atomicModifyIORef xenOps
(\ ops -> (ops{ xenGetChar = f }, ()))
#endif
-- | Write a character to the standard output device
-- (same as 'hPutChar' 'stdout').
putChar :: Char -> IO ()
#ifdef HaLVM_TARGET_OS
putChar c = withXenOp xenPutStr >>= \ f -> f [c]
#else
putChar c = hPutChar stdout c
#endif
-- | Write a string to the standard output device
-- (same as 'hPutStr' 'stdout').
putStr :: String -> IO ()
#ifdef HaLVM_TARGET_OS
putStr s = withXenOp xenPutStr >>= \f -> f s
#else
putStr s = hPutStr stdout s
#endif
-- | The same as 'putStr', but adds a newline character.
putStrLn :: String -> IO ()
#ifdef HaLVM_TARGET_OS
putStrLn s = withXenOp xenPutStr >>= (\f -> (f s >> f "\n"))
#else
putStrLn s = hPutStrLn stdout s
#endif
-- | The 'print' function outputs a value of any printable type to the
-- standard output device.
-- Printable types are those that are instances of class 'Show'; 'print'
-- converts values to strings for output using the 'show' operation and
-- adds a newline.
--
-- For example, a program to print the first 20 integers and their
-- powers of 2 could be written as:
--
-- > main = print ([(n, 2^n) | n <- [0..19]])
print :: Show a => a -> IO ()
print x = putStrLn (show x)
-- | Read a character from the standard input device
-- (same as 'hGetChar' 'stdin').
getChar :: IO Char
#ifdef HaLVM_TARGET_OS
getChar = withXenOp xenGetChar >>= id
#else
getChar = hGetChar stdin
#endif
-- | Read a line from the standard input device
-- (same as 'hGetLine' 'stdin').
getLine :: IO String
#ifdef HaLVM_TARGET_OS
getLine = do get <- withXenOp xenGetChar
let loop = get >>= \ c -> case c of
'\n' -> return []
_ -> (c:) `fmap` loop
loop
#else
getLine = hGetLine stdin
#endif
-- | The 'getContents' operation returns all user input as a single string,
-- which is read lazily as it is needed
-- (same as 'hGetContents' 'stdin').
getContents :: IO String
#ifdef HaLVM_TARGET_OS
getContents = do get <- withXenOp xenGetChar
let loop = do c <- get
rest <- loop
return (c:rest)
loop
#else
getContents = hGetContents stdin
#endif
-- | The 'interact' function takes a function of type @String->String@
-- as its argument. The entire input from the standard input device is
-- passed to this function as its argument, and the resulting string is
-- output on the standard output device.
interact :: (String -> String) -> IO ()
interact f = do s <- getContents
putStr (f s)
-- | The 'readFile' function reads a file and
-- returns the contents of the file as a string.
-- The file is read lazily, on demand, as with 'getContents'.
readFile :: FilePath -> IO String
readFile name = openFile name ReadMode >>= hGetContents
-- | The computation 'writeFile' @file str@ function writes the string @str@,
-- to the file @file@.
writeFile :: FilePath -> String -> IO ()
writeFile f txt = withFile f WriteMode (\ hdl -> hPutStr hdl txt)
-- | The computation 'appendFile' @file str@ function appends the string @str@,
-- to the file @file@.
--
-- Note that 'writeFile' and 'appendFile' write a literal string
-- to a file. To write a value of any printable type, as with 'print',
-- use the 'show' function to convert the value to a string first.
--
-- > main = appendFile "squares" (show [(x,x*x) | x <- [0,0.1..2]])
appendFile :: FilePath -> String -> IO ()
appendFile f txt = withFile f AppendMode (\ hdl -> hPutStr hdl txt)
-- | The 'readLn' function combines 'getLine' and 'readIO'.
readLn :: Read a => IO a
readLn = do l <- getLine
r <- readIO l
return r
-- | The 'readIO' function is similar to 'read' except that it signals
-- parse failure to the 'IO' monad instead of terminating the program.
readIO :: Read a => String -> IO a
readIO s = case (do { (x,t) <- reads s ;
("","") <- lex t ;
return x }) of
[x] -> return x
[] -> ioError (userError "Prelude.readIO: no parse")
_ -> ioError (userError "Prelude.readIO: ambiguous parse")
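-- For example, @readIO "42" :: IO Int@ yields 42, whereas
-- @readIO "forty-two" :: IO Int@ raises an 'IOError' whose description is
-- \"Prelude.readIO: no parse\" rather than terminating the program.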
-- | The Unicode encoding of the current locale
--
-- This is the initial locale encoding: if it has been subsequently changed by
-- 'GHC.IO.Encoding.setLocaleEncoding' this value will not reflect that change.
localeEncoding :: TextEncoding
localeEncoding = initLocaleEncoding
-- | Computation 'hReady' @hdl@ indicates whether at least one item is
-- available for input from handle @hdl@.
--
-- This operation may fail with:
--
-- * 'System.IO.Error.isEOFError' if the end of file has been reached.
hReady :: Handle -> IO Bool
hReady h = hWaitForInput h 0
-- | Computation 'hPrint' @hdl t@ writes the string representation of @t@
-- given by the 'shows' function to the file or channel managed by @hdl@
-- and appends a newline.
--
-- This operation may fail with:
--
-- * 'System.IO.Error.isFullError' if the device is full; or
--
-- * 'System.IO.Error.isPermissionError' if another system resource limit would be exceeded.
hPrint :: Show a => Handle -> a -> IO ()
hPrint hdl = hPutStrLn hdl . show
-- | @'withFile' name mode act@ opens a file using 'openFile' and passes
-- the resulting handle to the computation @act@. The handle will be
-- closed on exit from 'withFile', whether by normal termination or by
-- raising an exception. If closing the handle raises an exception, then
-- this exception will be raised by 'withFile' rather than any exception
-- raised by 'act'.
withFile :: FilePath -> IOMode -> (Handle -> IO r) -> IO r
withFile name mode = bracket (openFile name mode) hClose
-- | @'withBinaryFile' name mode act@ opens a file using 'openBinaryFile'
-- and passes the resulting handle to the computation @act@. The handle
-- will be closed on exit from 'withBinaryFile', whether by normal
-- termination or by raising an exception.
withBinaryFile :: FilePath -> IOMode -> (Handle -> IO r) -> IO r
withBinaryFile name mode = bracket (openBinaryFile name mode) hClose
-- ---------------------------------------------------------------------------
-- fixIO
fixIO :: (a -> IO a) -> IO a
fixIO k = do
m <- newEmptyMVar
ans <- unsafeInterleaveIO (takeMVar m)
result <- k ans
putMVar m result
return result
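-- A small usage sketch: @fixIO (\xs -> return (1 : xs))@ produces the
-- lazily self-referential list @1 : 1 : 1 : ...@, so @take 3@ of the
-- result is @[1, 1, 1]@.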
-- NOTE: we do our own explicit black holing here, because GHC's lazy
-- blackholing isn't enough. In an infinite loop, GHC may run the IO
-- computation a few times before it notices the loop, which is wrong.
--
-- NOTE2: the explicit black-holing with an IORef ran into trouble
-- with multiple threads (see #5421), so now we use an MVar. I'm
-- actually wondering whether we should use readMVar rather than
-- takeMVar, just in case it ends up being executed multiple times,
-- but even then it would have to be masked to protect against async
-- exceptions. Ugh. What we really need here is an IVar, or an
-- atomic readMVar, or even STM. All these seem like overkill.
--
-- See also System.IO.Unsafe.unsafeFixIO.
--
-- | The function creates a temporary file in ReadWrite mode.
-- The created file isn\'t deleted automatically, so you need to delete it manually.
--
-- The file is created with permissions such that only the current
-- user can read\/write it.
--
-- With some exceptions (see below), the file will be created securely
-- in the sense that an attacker should not be able to cause
-- openTempFile to overwrite another file on the filesystem using your
-- credentials, by putting symbolic links (on Unix) in the place where
-- the temporary file is to be created. On Unix the @O_CREAT@ and
-- @O_EXCL@ flags are used to prevent this attack, but note that
-- @O_EXCL@ is sometimes not supported on NFS filesystems, so if you
-- rely on this behaviour it is best to use local filesystems only.
--
openTempFile :: FilePath -- ^ Directory in which to create the file
-> String -- ^ File name template. If the template is \"foo.ext\" then
-- the created file will be \"fooXXX.ext\" where XXX is some
-- random number.
-> IO (FilePath, Handle)
openTempFile tmp_dir template
= openTempFile' "openTempFile" tmp_dir template False 0o600
-- | Like 'openTempFile', but opens the file in binary mode. See 'openBinaryFile' for more comments.
openBinaryTempFile :: FilePath -> String -> IO (FilePath, Handle)
openBinaryTempFile tmp_dir template
= openTempFile' "openBinaryTempFile" tmp_dir template True 0o600
-- | Like 'openTempFile', but uses the default file permissions
openTempFileWithDefaultPermissions :: FilePath -> String
-> IO (FilePath, Handle)
openTempFileWithDefaultPermissions tmp_dir template
= openTempFile' "openTempFileWithDefaultPermissions" tmp_dir template False 0o666
-- | Like 'openBinaryTempFile', but uses the default file permissions
openBinaryTempFileWithDefaultPermissions :: FilePath -> String
-> IO (FilePath, Handle)
openBinaryTempFileWithDefaultPermissions tmp_dir template
= openTempFile' "openBinaryTempFileWithDefaultPermissions" tmp_dir template True 0o666
openTempFile' :: String -> FilePath -> String -> Bool -> CMode
-> IO (FilePath, Handle)
openTempFile' loc tmp_dir template binary mode = findTempName
where
-- We split off the last extension, so we can use .foo.ext files
-- for temporary files (hidden on Unix OSes). Unfortunately we're
-- below filepath in the hierarchy here.
(prefix,suffix) =
case break (== '.') $ reverse template of
-- First case: template contains no '.'s. Just re-reverse it.
(rev_suffix, "") -> (reverse rev_suffix, "")
-- Second case: template contains at least one '.'. Strip the
-- dot from the prefix and prepend it to the suffix (if we don't
-- do this, the unique number will get added after the '.' and
-- thus be part of the extension, which is wrong.)
(rev_suffix, '.':rest) -> (reverse rest, '.':reverse rev_suffix)
-- Otherwise, something is wrong, because (break (== '.')) should
-- always return a pair with either the empty string or a string
-- beginning with '.' as the second component.
_ -> errorWithoutStackTrace "bug in System.IO.openTempFile"
findTempName = do
rs <- rand_string
let filename = prefix ++ rs ++ suffix
filepath = tmp_dir `combine` filename
r <- openNewFile filepath binary mode
case r of
FileExists -> findTempName
OpenNewError errno -> ioError (errnoToIOError loc errno Nothing (Just tmp_dir))
NewFileCreated fd -> do
(fD,fd_type) <- FD.mkFD fd ReadWriteMode Nothing{-no stat-}
False{-is_socket-}
True{-is_nonblock-}
enc <- getLocaleEncoding
h <- mkHandleFromFD fD fd_type filepath ReadWriteMode False{-set non-block-} (Just enc)
return (filepath, h)
where
-- XXX bits copied from System.FilePath, since that's not available here
combine a b
| null b = a
| null a = b
| last a == pathSeparator = a ++ b
| otherwise = a ++ [pathSeparator] ++ b
-- int rand(void) from <stdlib.h>, limited by RAND_MAX (small value, 32768)
foreign import capi "stdlib.h rand" c_rand :: IO CInt
-- build a long string of digits
rand_string :: IO String
rand_string = do
r1 <- c_rand
r2 <- c_rand
return $ show r1 ++ show r2
data OpenNewFileResult
= NewFileCreated CInt
| FileExists
| OpenNewError Errno
openNewFile :: FilePath -> Bool -> CMode -> IO OpenNewFileResult
openNewFile filepath binary mode = do
let oflags1 = rw_flags .|. o_EXCL
binary_flags
| binary = o_BINARY
| otherwise = 0
oflags = oflags1 .|. binary_flags
fd <- withFilePath filepath $ \ f ->
c_open f oflags mode
if fd < 0
then do
errno <- getErrno
case errno of
_ | errno == eEXIST -> return FileExists
#ifdef mingw32_HOST_OS
-- If c_open throws EACCES on windows, it could mean that filepath is a
-- directory. In this case, we want to return FileExists so that the
-- enclosing openTempFile can try again instead of failing outright.
-- See bug #4968.
_ | errno == eACCES -> do
withCString filepath $ \path -> do
-- There is a race here: the directory might have been moved or
-- deleted between the c_open call and the next line, but there
-- doesn't seem to be any direct way to detect that the c_open call
-- failed because of an existing directory.
exists <- c_fileExists path
return $ if exists
then FileExists
else OpenNewError errno
#endif
_ -> return (OpenNewError errno)
else return (NewFileCreated fd)
#ifdef mingw32_HOST_OS
foreign import ccall "file_exists" c_fileExists :: CString -> IO Bool
#endif
-- XXX Should use filepath library
pathSeparator :: Char
#ifdef mingw32_HOST_OS
pathSeparator = '\\'
#else
pathSeparator = '/'
#endif
-- XXX Copied from GHC.Handle
std_flags, output_flags, rw_flags :: CInt
std_flags = o_NONBLOCK .|. o_NOCTTY
output_flags = std_flags .|. o_CREAT
rw_flags = output_flags .|. o_RDWR
-- $locking
-- Implementations should enforce as far as possible, at least locally to the
-- Haskell process, multiple-reader single-writer locking on files.
-- That is, /there may either be many handles on the same file which manage input, or just one handle on the file which manages output/. If any
-- open or semi-closed handle is managing a file for output, no new
-- handle can be allocated for that file. If any open or semi-closed
-- handle is managing a file for input, new handles can only be allocated
-- if they do not manage output. Whether two files are the same is
-- implementation-dependent, but they should normally be the same if they
-- have the same absolute path name and neither has been renamed, for
-- example.
--
-- /Warning/: the 'readFile' operation holds a semi-closed handle on
-- the file until the entire contents of the file have been consumed.
-- It follows that an attempt to write to a file (using 'writeFile', for
-- example) that was earlier opened by 'readFile' will usually result in
-- failure with 'System.IO.Error.isAlreadyInUseError'.
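--
-- An illustrative sketch of that hazard (the file name below is
-- hypothetical):
--
-- > do contents <- readFile "shared.txt"
-- >    writeFile "shared.txt" contents   -- usually fails: the reading
-- >                                      -- handle is still semi-closed
--
-- Forcing the whole contents first lets the semi-closed handle reach EOF
-- and close, after which the write can proceed:
--
-- > do contents <- readFile "shared.txt"
-- >    length contents `seq` writeFile "shared.txt" contents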
| GaloisInc/halvm-ghc | libraries/base/System/IO.hs | bsd-3-clause | 21,742 | 0 | 22 | 5,465 | 2,857 | 1,639 | 1,218 | 236 | 5 |
{-# LANGUAGE TypeFamilies #-}
module Aws.Swf.Commands.RecordActivityTaskHeartbeat
where
import Aws.Core (AsMemoryResponse (..),
ResponseConsumer (..), Transaction)
import qualified Aws.Core.Sign as S (ServiceConfiguration)
import qualified Aws.Core.Sign3 as SIG3 (signRequest)
import Aws.Core.SignClass (SignQuery (..))
import Aws.Swf.Response (SwfMetadata, jsonConsumer,
swfResponseConsumer)
import Aws.Swf.Sign (swfRequest)
import Control.Applicative ((<$>))
import Control.Monad (mzero)
import Data.Aeson (FromJSON (..), ToJSON (..),
Value (Object), object, (.:), (.=))
import Data.ByteString as B
import qualified Data.Text as T
target :: B.ByteString
target = "com.amazonaws.swf.service.model.SimpleWorkflowService.RecordActivityTaskHeartbeat"
data RecordActivityTaskHeartbeat =
RecordActivityTaskHeartbeat { details :: T.Text,
taskToken :: T.Text }
deriving Show
data RecordActivityTaskHeartbeatResponse =
RecordActivityTaskHeartbeatResponse { cancelRequested :: Bool }
deriving Show
instance ToJSON RecordActivityTaskHeartbeat where
toJSON (RecordActivityTaskHeartbeat details taskToken) =
object [ "details" .= details,
"taskToken" .= taskToken ]
instance FromJSON RecordActivityTaskHeartbeatResponse where
parseJSON (Object o) = RecordActivityTaskHeartbeatResponse <$> o .: "cancelRequested"
parseJSON _ = mzero
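-- For reference (the field values below are made up): the ToJSON instance
-- above produces a request payload shaped like
--
-- > {"details":"heartbeat details","taskToken":"AAAAKg..."}
--
-- and the FromJSON instance expects a response shaped like
--
-- > {"cancelRequested":false}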
instance SignQuery RecordActivityTaskHeartbeat where
type ServiceConfiguration RecordActivityTaskHeartbeat = S.ServiceConfiguration
signQuery req = SIG3.signRequest $ swfRequest target $ toJSON req
instance ResponseConsumer RecordActivityTaskHeartbeat RecordActivityTaskHeartbeatResponse where
type ResponseMetadata RecordActivityTaskHeartbeatResponse = SwfMetadata
responseConsumer _ mref = swfResponseConsumer mref $ \rsp -> jsonConsumer rsp
instance Transaction RecordActivityTaskHeartbeat RecordActivityTaskHeartbeatResponse
instance AsMemoryResponse RecordActivityTaskHeartbeatResponse where
type MemoryResponse RecordActivityTaskHeartbeatResponse = RecordActivityTaskHeartbeatResponse
loadToMemory = return
| RayRacine/aws | Aws/Swf/Commands/RecordActivityTaskHeartbeat.hs | bsd-3-clause | 2,387 | 0 | 9 | 539 | 437 | 259 | 178 | -1 | -1 |
{-# LANGUAGE BangPatterns, DeriveDataTypeable, DeriveGeneric, FlexibleInstances, MultiParamTypeClasses, OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module GTFS.Realtime.Internal.Com.Google.Transit.Realtime.Alert.Cause (Cause(..)) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified GHC.Generics as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data Cause = UNKNOWN_CAUSE
| OTHER_CAUSE
| TECHNICAL_PROBLEM
| STRIKE
| DEMONSTRATION
| ACCIDENT
| HOLIDAY
| WEATHER
| MAINTENANCE
| CONSTRUCTION
| POLICE_ACTIVITY
| MEDICAL_EMERGENCY
deriving (Prelude'.Read, Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data, Prelude'.Generic)
instance P'.Mergeable Cause
instance Prelude'.Bounded Cause where
minBound = UNKNOWN_CAUSE
maxBound = MEDICAL_EMERGENCY
instance P'.Default Cause where
defaultValue = UNKNOWN_CAUSE
toMaybe'Enum :: Prelude'.Int -> P'.Maybe Cause
toMaybe'Enum 1 = Prelude'.Just UNKNOWN_CAUSE
toMaybe'Enum 2 = Prelude'.Just OTHER_CAUSE
toMaybe'Enum 3 = Prelude'.Just TECHNICAL_PROBLEM
toMaybe'Enum 4 = Prelude'.Just STRIKE
toMaybe'Enum 5 = Prelude'.Just DEMONSTRATION
toMaybe'Enum 6 = Prelude'.Just ACCIDENT
toMaybe'Enum 7 = Prelude'.Just HOLIDAY
toMaybe'Enum 8 = Prelude'.Just WEATHER
toMaybe'Enum 9 = Prelude'.Just MAINTENANCE
toMaybe'Enum 10 = Prelude'.Just CONSTRUCTION
toMaybe'Enum 11 = Prelude'.Just POLICE_ACTIVITY
toMaybe'Enum 12 = Prelude'.Just MEDICAL_EMERGENCY
toMaybe'Enum _ = Prelude'.Nothing
instance Prelude'.Enum Cause where
fromEnum UNKNOWN_CAUSE = 1
fromEnum OTHER_CAUSE = 2
fromEnum TECHNICAL_PROBLEM = 3
fromEnum STRIKE = 4
fromEnum DEMONSTRATION = 5
fromEnum ACCIDENT = 6
fromEnum HOLIDAY = 7
fromEnum WEATHER = 8
fromEnum MAINTENANCE = 9
fromEnum CONSTRUCTION = 10
fromEnum POLICE_ACTIVITY = 11
fromEnum MEDICAL_EMERGENCY = 12
toEnum
= P'.fromMaybe
(Prelude'.error
"hprotoc generated code: toEnum failure for type GTFS.Realtime.Internal.Com.Google.Transit.Realtime.Alert.Cause")
. toMaybe'Enum
succ UNKNOWN_CAUSE = OTHER_CAUSE
succ OTHER_CAUSE = TECHNICAL_PROBLEM
succ TECHNICAL_PROBLEM = STRIKE
succ STRIKE = DEMONSTRATION
succ DEMONSTRATION = ACCIDENT
succ ACCIDENT = HOLIDAY
succ HOLIDAY = WEATHER
succ WEATHER = MAINTENANCE
succ MAINTENANCE = CONSTRUCTION
succ CONSTRUCTION = POLICE_ACTIVITY
succ POLICE_ACTIVITY = MEDICAL_EMERGENCY
succ _
= Prelude'.error "hprotoc generated code: succ failure for type GTFS.Realtime.Internal.Com.Google.Transit.Realtime.Alert.Cause"
pred OTHER_CAUSE = UNKNOWN_CAUSE
pred TECHNICAL_PROBLEM = OTHER_CAUSE
pred STRIKE = TECHNICAL_PROBLEM
pred DEMONSTRATION = STRIKE
pred ACCIDENT = DEMONSTRATION
pred HOLIDAY = ACCIDENT
pred WEATHER = HOLIDAY
pred MAINTENANCE = WEATHER
pred CONSTRUCTION = MAINTENANCE
pred POLICE_ACTIVITY = CONSTRUCTION
pred MEDICAL_EMERGENCY = POLICE_ACTIVITY
pred _
= Prelude'.error "hprotoc generated code: pred failure for type GTFS.Realtime.Internal.Com.Google.Transit.Realtime.Alert.Cause"
instance P'.Wire Cause where
wireSize ft' enum = P'.wireSize ft' (Prelude'.fromEnum enum)
wirePut ft' enum = P'.wirePut ft' (Prelude'.fromEnum enum)
wireGet 14 = P'.wireGetEnum toMaybe'Enum
wireGet ft' = P'.wireGetErr ft'
wireGetPacked 14 = P'.wireGetPackedEnum toMaybe'Enum
wireGetPacked ft' = P'.wireGetErr ft'
instance P'.GPB Cause
instance P'.MessageAPI msg' (msg' -> Cause) Cause where
getVal m' f' = f' m'
instance P'.ReflectEnum Cause where
reflectEnum
= [(1, "UNKNOWN_CAUSE", UNKNOWN_CAUSE), (2, "OTHER_CAUSE", OTHER_CAUSE), (3, "TECHNICAL_PROBLEM", TECHNICAL_PROBLEM),
(4, "STRIKE", STRIKE), (5, "DEMONSTRATION", DEMONSTRATION), (6, "ACCIDENT", ACCIDENT), (7, "HOLIDAY", HOLIDAY),
(8, "WEATHER", WEATHER), (9, "MAINTENANCE", MAINTENANCE), (10, "CONSTRUCTION", CONSTRUCTION),
(11, "POLICE_ACTIVITY", POLICE_ACTIVITY), (12, "MEDICAL_EMERGENCY", MEDICAL_EMERGENCY)]
reflectEnumInfo _
= P'.EnumInfo
(P'.makePNF (P'.pack ".transit_realtime.Alert.Cause") ["GTFS", "Realtime", "Internal"]
["Com", "Google", "Transit", "Realtime", "Alert"]
"Cause")
["GTFS", "Realtime", "Internal", "Com", "Google", "Transit", "Realtime", "Alert", "Cause.hs"]
[(1, "UNKNOWN_CAUSE"), (2, "OTHER_CAUSE"), (3, "TECHNICAL_PROBLEM"), (4, "STRIKE"), (5, "DEMONSTRATION"), (6, "ACCIDENT"),
(7, "HOLIDAY"), (8, "WEATHER"), (9, "MAINTENANCE"), (10, "CONSTRUCTION"), (11, "POLICE_ACTIVITY"), (12, "MEDICAL_EMERGENCY")]
Prelude'.False
instance P'.TextType Cause where
tellT = P'.tellShow
getT = P'.getRead | romanofski/gtfsbrisbane | src/GTFS/Realtime/Internal/Com/Google/Transit/Realtime/Alert/Cause.hs | bsd-3-clause | 4,883 | 0 | 11 | 855 | 1,264 | 703 | 561 | 114 | 1 |
module NativeInfo (module System.Info) where
import System.Info
| OS2World/DEV-UTIL-HUGS | oldlib/NativeInfo.hs | bsd-3-clause | 64 | 0 | 5 | 7 | 17 | 11 | 6 | 2 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module ETA.CodeGen.ArgRep
(ArgRep(..),
toArgRep,
jrepType,
isNonV,
idArgRep,
idPrimRep,
primRepFieldType_maybe,
primRepFieldType,
ftArgRep,
typeArgRep,
repFieldTypes,
repFieldType_maybe,
contextLoad,
contextStore,
slowCallPattern,
argRepFt
) where
import ETA.BasicTypes.Id
import ETA.Types.Type
import ETA.Types.TyCon ( PrimRep(..) )
import ETA.BasicTypes.BasicTypes ( RepArity )
-- import ETA.Main.DynFlags
import ETA.Debug
import Data.Maybe
import ETA.CodeGen.Rts
import ETA.Util
import Codec.JVM
import Data.Monoid ((<>))
import Data.Text (Text)
data ArgRep = P -- StgClosure
| N -- int-sized non-ptr
| V -- Void
| L -- long
| F -- float
| D -- double
| O -- Java object pointer
deriving (Eq, Show)
instance Outputable ArgRep where
ppr = str . show
toArgRep :: PrimRep -> ArgRep
toArgRep VoidRep = V
toArgRep PtrRep = P
toArgRep IntRep = N
toArgRep WordRep = N
toArgRep AddrRep = O
toArgRep Int64Rep = L
toArgRep Word64Rep = L
toArgRep FloatRep = F
toArgRep DoubleRep = D
toArgRep BoolRep = N
toArgRep CharRep = N
toArgRep ByteRep = N
toArgRep ShortRep = N
toArgRep (ObjectRep _) = O
toArgRep (ArrayRep _) = O
--toArgRep (VecRep len elem) = error $ "Unsupported PrimRep: VecRep " ++ show len ++ " " ++ show elem
isNonV :: ArgRep -> Bool
isNonV V = False
isNonV _ = True
idArgRep :: Id -> ArgRep
idArgRep = toArgRep . idPrimRep
typeArgRep :: Type -> ArgRep
typeArgRep = toArgRep . typePrimRep
ftArgRep :: FieldType -> ArgRep
ftArgRep ft
| ft == closureType = P
| otherwise = case ft of
BaseType JDouble -> D
BaseType JFloat -> F
BaseType JLong -> L
ObjectType _ -> O
ArrayType _ -> O
_ -> N
primRepFieldType_maybe :: PrimRep -> Maybe FieldType
primRepFieldType_maybe VoidRep = Nothing
primRepFieldType_maybe rep = Just $
case rep of
PtrRep -> closureType
IntRep -> jint
WordRep -> jint
AddrRep -> byteBufferType
Int64Rep -> jlong
Word64Rep -> jlong
FloatRep -> jfloat
DoubleRep -> jdouble
BoolRep -> jbool
CharRep -> jchar
ByteRep -> jbyte
ShortRep -> jshort
ObjectRep className -> obj $ className
ArrayRep rep -> ArrayType . fromJust $ primRepFieldType_maybe rep
VoidRep -> undefined
primRepFieldType :: PrimRep -> FieldType
primRepFieldType = expectJust "primRepFieldType" . primRepFieldType_maybe
-- NOTE: We assume that unboxed tuples won't occur
repFieldType_maybe :: Type -> Maybe FieldType
repFieldType_maybe = primRepFieldType_maybe . typePrimRep . jrepType
repFieldTypes :: [Type] -> [FieldType]
repFieldTypes = mapMaybe repFieldType_maybe
-- NOTE: Assumes StgContext is in local variable slot 1
contextLoad :: FieldType -> ArgRep -> Int -> Code
contextLoad _ argRep n =
loadContext
<> iconst jint (fromIntegral n)
<> loadMethod
where loadMethod = case argRep of
P -> loadR
N -> loadI
L -> loadL
F -> loadF
D -> loadD
O -> loadO
_ -> error "contextLoad: V"
contextStore :: FieldType -> ArgRep -> Code -> Int -> Code
contextStore _ argRep storeCode n =
loadContext
<> iconst jint (fromIntegral n)
<> storeCode
<> storeMethod
where storeMethod = case argRep of
P -> storeR
N -> storeI
L -> storeL
F -> storeF
D -> storeD
O -> storeO
_ -> error "contextStore: V"
slowCallPattern :: [ArgRep] -> (Text, RepArity, [FieldType])
slowCallPattern (P: P: P: P: P: P: _) =
("ap_pppppp", 6, replicate 6 closureType)
slowCallPattern (P: P: P: P: P: _) =
("ap_ppppp", 5, replicate 5 closureType)
slowCallPattern (P: P: P: V: O: _) =
("ap_pppvo", 5, replicate 3 closureType ++ [jobject])
slowCallPattern (P: P: P: P: _) = ("ap_pppp", 4, replicate 4 closureType)
slowCallPattern (P: P: P: V: _) = ("ap_pppv", 4, replicate 3 closureType)
slowCallPattern (P: P: V: O: _) =
("ap_ppvo", 4, replicate 2 closureType ++ [jobject])
slowCallPattern (P: P: P: _) = ("ap_ppp", 3, replicate 3 closureType)
slowCallPattern (P: P: V: _) = ("ap_ppv", 3, replicate 2 closureType)
slowCallPattern (P: V: O: _) = ("ap_pvo", 3, [closureType, jobject])
slowCallPattern (P: P: _) = ("ap_pp", 2, replicate 2 closureType)
slowCallPattern (P: V: _) = ("ap_pv", 2, [closureType])
slowCallPattern (P: _) = ("ap_p", 1, [closureType])
slowCallPattern (O: _) = ("ap_o", 1, [jobject])
slowCallPattern (N: _) = ("ap_n", 1, [jint])
slowCallPattern (L: _) = ("ap_l", 1, [jlong])
slowCallPattern (F: _) = ("ap_f", 1, [jfloat])
slowCallPattern (D: _) = ("ap_d", 1, [jdouble])
slowCallPattern (V: _) = ("ap_v", 1, [])
slowCallPattern [] = ("ap_0", 0, [])
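-- Note (illustrative): the patterns above match only a prefix of the
-- argument representations, so e.g. [P, P, N] falls through to the
-- (P: P: _) case and yields ("ap_pp", 2, [closureType, closureType]);
-- the remaining N argument is presumably consumed by a further application.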
idPrimRep :: Id -> PrimRep
idPrimRep = typePrimRep . idType
jrepType :: Type -> UnaryType
jrepType = head . flattenRepType . repType
argRepFt :: ArgRep -> FieldType
argRepFt P = closureType
argRepFt O = jobject
argRepFt N = jint
argRepFt F = jfloat
argRepFt L = jlong
argRepFt D = jdouble
argRepFt _ = panic "argRepFt: V argrep!"
| AlexeyRaga/eta | compiler/ETA/CodeGen/ArgRep.hs | bsd-3-clause | 5,728 | 0 | 12 | 1,795 | 1,732 | 943 | 789 | 160 | 15 |
module Physics where
import Math
import Prelude()
limitPower :: Scalar DPower -> Vector DForce -> Vector DVelocity -> Vector DForce
limitPower max force speed = mulSV coeff force where
coeff | power > max = max / power
| otherwise = 1 *~ one
power = dotV force speed
rotationVelocity :: Scalar DLength ->          -- ^ Radius
                 Scalar DAngularVelocity -> -- ^ Counter-clockwise rotation speed, in radians per second
                 Scalar DPlaneAngle -> -- ^ Angle to the point of interest
                 Vector DVelocity -- ^ The velocity of the point of interest
rotationVelocity r v a = v * r `mulSV` rotate90ccw (directionV a)
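-- A worked instance of the relation above (the numbers are hypothetical):
-- a point at radius r = 2 m on a body spinning at v = 3 rad/s moves with
-- speed |v * r| = 6 m/s, in the direction obtained by rotating the radial
-- direction (given by the angle a) 90 degrees counter-clockwise.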
| Rotsor/wheeled-vehicle | Physics.hs | bsd-3-clause | 661 | 0 | 10 | 178 | 167 | 84 | 83 | 13 | 1 |
module Categorizer.Util.List
( safeHead ) where
safeHead :: [a] -> Maybe a
safeHead (x:_) = Just x
safeHead _ = Nothing
| ameingast/categorizer | src/Categorizer/Util/List.hs | bsd-3-clause | 126 | 0 | 7 | 27 | 53 | 29 | 24 | 5 | 1 |
module Network.OpenFlow.Message.OfpType where
data OfpType =
-- Immutable messages
OfptHello
| OfptError
| OfptEchoRequest
| OfptEchoReply
| OfptExperimenter
-- Switch configuration messages
| OfptFeaturesRequest
| OfptFeaturesReply
| OfptGetConfigRequest
| OfptGetConfigReply
| OfptSetConfig
-- Asynchronous messages
| OfptPacketIn
| OfptFlowRemoved
| OfptPortStatus
-- Controller command messages
| OfptPacketOut
| OfptFlowMod
| OfptGroupMod
| OfptPortMod
| OfptTableMod
-- Multipart messages
| OfptMultipartRequest
| OfptMultipartReply
-- Barrier messages
| OfptBarrierRequest
| OfptBarrierReply
-- Queue configuraiton messages
| OfptQueueGetConfigRequest
| OfptQueueGetConfigReply
-- Controller role change request messages
| OfptRoleRequest
| OfptRoleReply
-- Asynchronous message configuration
| OfptGetAsyncRequest
| OfptGetAsyncReply
| OfptSetAsync
-- Metrics and rate limiters configuration messages
| OfptMeterMod deriving (Eq, Ord, Show)
ofpTypeCode :: (Num a) => OfpType -> a
ofpTypeCode t =
case t of
OfptHello -> 0
OfptError -> 1
OfptEchoRequest -> 2
OfptEchoReply -> 3
OfptExperimenter -> 4
OfptFeaturesRequest -> 5
OfptFeaturesReply -> 6
OfptGetConfigRequest -> 7
OfptGetConfigReply -> 8
OfptSetConfig -> 9
OfptPacketIn -> 10
OfptFlowRemoved -> 11
OfptPortStatus -> 12
OfptPacketOut -> 13
OfptFlowMod -> 14
OfptGroupMod -> 15
OfptPortMod -> 16
OfptTableMod -> 17
OfptMultipartRequest -> 18
OfptMultipartReply -> 19
OfptBarrierRequest -> 20
OfptBarrierReply -> 21
OfptQueueGetConfigRequest -> 22
OfptQueueGetConfigReply -> 23
OfptRoleRequest -> 24
OfptRoleReply -> 25
OfptGetAsyncRequest -> 26
OfptGetAsyncReply -> 27
OfptSetAsync -> 28
OfptMeterMod -> 29
ofpType :: Int -> Maybe OfpType
ofpType n =
case n of
0 -> Just OfptHello
1 -> Just OfptError
2 -> Just OfptEchoRequest
3 -> Just OfptEchoReply
4 -> Just OfptExperimenter
5 -> Just OfptFeaturesRequest
6 -> Just OfptFeaturesReply
7 -> Just OfptGetConfigRequest
8 -> Just OfptGetConfigReply
9 -> Just OfptSetConfig
10 -> Just OfptPacketIn
11 -> Just OfptFlowRemoved
12 -> Just OfptPortStatus
13 -> Just OfptPacketOut
14 -> Just OfptFlowMod
15 -> Just OfptGroupMod
16 -> Just OfptPortMod
17 -> Just OfptTableMod
18 -> Just OfptMultipartRequest
19 -> Just OfptMultipartReply
20 -> Just OfptBarrierRequest
21 -> Just OfptBarrierReply
22 -> Just OfptQueueGetConfigRequest
23 -> Just OfptQueueGetConfigReply
24 -> Just OfptRoleRequest
25 -> Just OfptRoleReply
26 -> Just OfptGetAsyncRequest
27 -> Just OfptGetAsyncReply
28 -> Just OfptSetAsync
29 -> Just OfptMeterMod
_ -> Nothing
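-- A small sanity check, not part of the original module: every code
-- produced by 'ofpTypeCode' should map back to its constructor through
-- 'ofpType'.  Only a handful of constructors are sampled here.
_ofpTypeRoundTrips :: Bool
_ofpTypeRoundTrips =
  all (\t -> ofpType (ofpTypeCode t) == Just t)
      [OfptHello, OfptEchoRequest, OfptPacketIn, OfptMeterMod]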
| utky/openflow | src/Network/OpenFlow/Message/OfpType.hs | bsd-3-clause | 3,263 | 0 | 8 | 1,067 | 637 | 327 | 310 | 99 | 31 |
module IptAdmin.EditIpForwPage where
import Control.Monad.Error
import Happstack.Server.SimpleHTTP
import IptAdmin.EditIpForwForm.Render
import IptAdmin.Render
import IptAdmin.System
import IptAdmin.Types
import IptAdmin.Utils
import Iptables
import Iptables.Types
import Text.Blaze.Renderer.Pretty (renderHtml)
pageHandlers :: IptAdmin Response
pageHandlers = msum [ methodSP GET pageHandlerGet
, methodSP POST pageHandlerPost
]
pageHandlerGet :: IptAdmin Response
pageHandlerGet = do
    -- 1. Read the current value from /proc/...
forwState <- getForwardingState
    -- 2. Render the form
return $ buildResponse $ renderHtml $ do
editIpForwForm forwState
pageHandlerPost :: IptAdmin Response
pageHandlerPost = do
forwStateStr <- getInputString "ipForwState"
    -- 1. Validate the form parameter.
    newValue <- case forwStateStr of
                     "off" -> return False
                     "on" -> return True
                     a -> throwError $ "Invalid parameter 'ipForwState': " ++ a
    -- 2. Apply the change to /proc/sys/net/ipv4/ip_forward and sysctl.conf
{-
if newPolicy == policy
then -- redir $ "/show?table=" ++ tableName ++ bookmarkForJump chainName Nothing
return $ buildResponse $ "ok:" ++ show newPolicy
else do
tryChange (setPolicy tableName chainName newPolicy)
-- redir $ "/show?table=" ++ tableName ++ bookmarkForJump chainName Nothing
return $ buildResponse $ "ok:" ++ show newPolicy
-}
setForwardingState newValue
return $ buildResponse $ "ok:" ++ forwStateStr
| etarasov/iptadmin | src/IptAdmin/EditIpForwPage.hs | bsd-3-clause | 1,701 | 0 | 12 | 382 | 229 | 121 | 108 | 28 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -w #-}
module Dipper (
HadoopEnv(..)
, dipperMain
, clouderaEnv
) where
import Control.Applicative ((<|>))
import Control.Concurrent.Async (Concurrently(..))
import Control.Monad (void, zipWithM_, foldM)
import Data.Conduit ((=$=), runConduit, sequenceConduits)
import Data.Conduit (Source, Sink, Consumer, Producer, Conduit)
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Text as CT
import Data.List (sort, isPrefixOf)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Tuple.Strict (Pair(..))
import System.Environment (getEnvironment)
import System.Environment (getExecutablePath, getArgs, lookupEnv)
import System.Exit (ExitCode(..), exitWith)
import System.FilePath (takeFileName)
import System.FilePath.Posix (takeDirectory)
import System.IO (Handle, stdin, stdout, stderr)
import System.Process (StdStream(..), CreateProcess(..))
import System.Process (proc, createProcess, waitForProcess)
import Dipper.Core.Types
import Dipper.Hadoop.Environment
import Dipper.Hadoop.Encoding
import Dipper.Jar (withDipperJar)
import Dipper.Pipeline
------------------------------------------------------------------------
dipperMain :: (Ord n, Show n) => HadoopEnv -> FilePath -> Term n () -> IO ()
dipperMain henv jobDir term = do
args <- getArgs
case args of
[] -> withDipperJar (\jar -> runJob henv jar jobDir pipeline)
["mapper"] -> runMapper pipeline
["reducer"] -> runReducer pipeline
_ -> putStrLn "error: Run with no arguments to execute Hadoop job"
where
pipeline = mkPipeline jobDir term
------------------------------------------------------------------------
runJob :: HadoopEnv -> FilePath -> FilePath -> Pipeline -> IO ()
runJob henv jar jobDir pipeline = do
print stages
zipWithM_ go [1..] stages
where
stages = stagesOfPipeline pipeline
go ix stage = runStage henv jar (jobDir ++ "/stage." ++ show ix) pipeline stage
------------------------------------------------------------------------
runStage :: HadoopEnv -> FilePath -> FilePath -> Pipeline -> Stage -> IO ()
runStage henv jar stageDir pipeline stage = do
self <- getExecutablePath
putStrLn "=== Arguments ==="
mapM_ printArg (mkArgs self)
(Nothing, Just out, Just err, h) <-
createProcess (program self) { std_out = CreatePipe
, std_err = CreatePipe }
runConcurrently
$ Concurrently (output out "stdout")
*> Concurrently (output err "stderr")
code <- waitForProcess h
case code of
ExitSuccess -> return ()
ExitFailure _ -> exitWith code
where
output :: Handle -> T.Text -> IO ()
output src name = do
runConduit $ CB.sourceHandle src
=$= CT.decodeUtf8
=$= CT.lines
=$= CL.mapM_ (\xs -> T.putStrLn (name <> "> " <> xs))
printArg "-D" = return ()
printArg arg = putStrLn arg
program self = proc (hadoopExec henv) (mkArgs self)
mkArgs self =
[ "jar", hadoopStreamingJar henv
, "-files", self
, "-libjars", jar
-- , "-D", "mapred.reduce.tasks=300"
, "-D", "mapred.output.key.comparator.class=org.dipper.TagKeyComparator"
, "-D", "stream.io.identifier.resolver.class=org.dipper.DipperResolver"
] ++ stageArgs pipeline stage ++
[ "-outputformat", "org.dipper.DipperOutputFormat"
, "-output", stageDir
, "-mapper", takeFileName self <> " mapper"
, "-reducer", takeFileName self <> " reducer"
]
stageArgs :: Pipeline -> Stage -> [String]
stageArgs p Stage{..} =
shuffleArgs
++ outputArgs
++ ["-inputformat", "org.apache.hadoop.streaming.AutoInputFormat"]
++ inputArgs
where
inputArgs = concatMap (\path -> ["-input", path]) (M.keys stageInputs)
shuffleArgs = concatMap (uncurry kvArgs) (M.toList stageShuffle)
outputArgs = concatMap (\(p,(t,f)) -> sequenceFileArgs p t f)
. M.toList
$ M.intersectionWith (,) (pathTags p) stageOutputs
kvArgs :: Tag -> KVFormat -> [String]
kvArgs tag (kFmt, vFmt) =
[ "-D", "dipper.tag." ++ show tag ++ ".key=" ++ T.unpack (fmtType kFmt)
, "-D", "dipper.tag." ++ show tag ++ ".value=" ++ T.unpack (fmtType vFmt) ]
textFileArgs :: FilePath -> Tag -> KVFormat -> [String]
textFileArgs = outputFileArgs "org.apache.hadoop.mapred.TextOutputFormat"
sequenceFileArgs :: FilePath -> Tag -> KVFormat -> [String]
sequenceFileArgs = outputFileArgs "org.apache.hadoop.mapred.SequenceFileOutputFormat"
outputFileArgs :: String -> FilePath -> Tag -> KVFormat -> [String]
outputFileArgs fileFormat path tag kvFormat =
kvArgs tag kvFormat ++
[ "-D", "dipper.tag." ++ show tag ++ ".format=" ++ fileFormat
, "-D", "dipper.tag." ++ show tag ++ ".path=" ++ path ]
------------------------------------------------------------------------
runMapper :: Pipeline -> IO ()
runMapper Pipeline{..} = do
inputFile <- stripHdfs . fromMaybe errorNoInput <$> foldM lookupEnv' Nothing inputFileVars
let step = lookupStep inputFile
inSchema = fFormat (stepInput step)
runConduit $ CB.sourceHandle stdin
=$= decodeUnitRows inSchema
=$= stepExec step
=$= encodeTagRows outSchema
=$= CB.sinkHandle stdout
where
lookupEnv' Nothing k = lookupEnv k
lookupEnv' v _ = return v
inputFileVars = ["map_input_file", "mapreduce_map_input_file"]
lookupStep :: FilePath -> Step FilePath Tag
lookupStep input = fromMaybe (errorNoMapper input)
$ M.lookup (input) pMappers
<|> M.lookup (takeDirectory input) pMappers
outSchema :: Map Tag KVFormat
outSchema = M.map (fFormat . stepInput) pReducers
errorNoInput = error ("runMapper: could not detect input file, " ++ show inputFileVars ++ " not set.")
errorNoMapper input = error ("runMapper: could not find mapper for input file: " ++ input)
stripHdfs :: String -> FilePath
stripHdfs uri
| hdfs `isPrefixOf` uri = dropWhile (/= '/') . drop (length hdfs) $ uri
| otherwise = uri
where
hdfs = "hdfs://"
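-- For example (the host name and paths below are hypothetical):
--
-- > stripHdfs "hdfs://namenode:8020/user/dipper/input" == "/user/dipper/input"
-- > stripHdfs "/already/local/path" == "/already/local/path"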
------------------------------------------------------------------------
runReducer :: Pipeline -> IO ()
runReducer p@Pipeline{..} =
runConduit $ CB.sourceHandle stdin
=$= decodeTagRows inSchema
=$= reduceAll
=$= CL.map (mapTag fromPath)
=$= encodeTagRows outSchema
=$= CB.sinkHandle stdout
where
reduceAll :: Conduit (Row Tag) IO (Row FilePath)
reduceAll = void
. sequenceConduits
. M.elems
. M.mapWithKey (\tag r -> CL.filter (hasTag tag)
=$= CL.map (withTag ())
=$= stepExec r)
$ pReducers
fromPath :: FilePath -> Tag
fromPath path = fromMaybe (pathError path) (M.lookup path tags)
inSchema :: Map Tag KVFormat
inSchema = M.map (fFormat . stepInput) pReducers
outSchema :: Map Tag KVFormat
outSchema = M.fromList
. map (\x -> (fromPath (fName x), fFormat x))
. concatMap stepOutputs
. M.elems
$ pReducers
tags = pathTags p
pathError path = error ("runReducer: unexpected output path: " ++ path)
------------------------------------------------------------------------
pathTags :: Pipeline -> Map FilePath Tag
pathTags Pipeline{..} = M.fromList
. flip zip [startTag..]
. map fName
. concatMap stepOutputs
. M.elems
$ pReducers
where
startTag :: Tag
startTag = fromMaybe 0
. fmap ((+1) . fst . fst)
. M.maxViewWithKey
$ pReducers
| jystic/dipper | src/Dipper.hs | bsd-3-clause | 8,432 | 0 | 17 | 2,341 | 2,274 | 1,203 | 1,071 | 176 | 4 |
{-
Joseph Eremondi
UU# 4229924
APA Project 2
April 17, 2015
-}
{-# LANGUAGE FlexibleInstances #-}
module Type.Effect.Pattern where
import Control.Arrow (second)
import Control.Applicative ((<$>))
import qualified Control.Monad as Monad
import Control.Monad.Error
import qualified Data.Map as Map
import qualified Text.PrettyPrint as PP
import qualified Reporting.Annotation as A
import qualified Reporting.Region as R
import qualified AST.Pattern as P
import qualified AST.Variable as V
import Reporting.PrettyPrint (pretty)
import qualified Type.Type as TT
--import Type.Fragment
import qualified Type.Environment as Env
import qualified AST.Literal as Literal
import Type.Effect.Common
import qualified Data.List as List
import qualified Data.UnionFind.IO as UF
import qualified Reporting.Error.Type as RErr
import qualified Type.PrettyPrint as TP
import System.IO.Unsafe (unsafePerformIO)
import Type.Effect.Env
--import Debug.Trace (trace)
--trace _ x = x
--Find the annotations that a variable matching a pattern must have
--And return those constraints, along with the "fragment"
--In the Elm type system, fragments contain new variables defined
--By patterns, as well as constraints on them
constrain
:: PatAnnEnv
-> P.CanonicalPattern
-> PatAnn
-> IO PatFragment
constrain env (A.A _ pattern) tipe =
  --TODO what is a sensible default here?
let region = R.Region (R.Position 0 0 ) (R.Position 0 0 ) --_ --A.None (pretty pattern)
exists = existsWith env
--newVar = varN `fmap` (liftIO $ variable Flexible)
--t1 === t2 = CEqual RErr.None region t1 t2
--genSubTypeConstr :: Type -> [P.CanonicalPattern] -> Int -> TypeConstraint -> TypeConstraint
--Helper function: given the sub-patterns of a pattern match
--Generate the fragment with constraints for annotating
--With their precise values
--Nothing fancy, really just looping over the patterns
--And joining their fragments, recusively calling constrain on them
{-
genSubTypeConstr :: PatAnn -> [P.CanonicalPattern] -> Int -> PatFragment -> IO PatFragment
genSubTypeConstr ty patList num frag = do
let thePatList :: [P.CanonicalPattern]
thePatList = patList
case patList of
[] -> return frag
(currentPat:rest) -> do
fieldAnnot <- VarAnnot <$> newVar env
let
n :: Int
n = num
--TODO make this safe?
ourFieldFrag <- constrain env currentPat fieldAnnot
newConstr <- do
--exists $ \restOfRec -> do
--exists $ \fieldAnnot -> do
let constr = typeConstraint frag
--TODO do we need this, now that we have list?
let ourFieldConstr = _ -- ty === (directRecord [("_sub" ++ show n, fieldAnnot)] restOfRec)
return $ constr /\ ourFieldConstr
let newFrag = joinFragments env [frag, ourFieldFrag {typeConstraint = newConstr}]
genSubTypeConstr ty rest (n+1) newFrag
-}
in
case pattern of
--No constraints when we match anything, no variables either
P.Anything -> return $ emptyFragment
--We know the exact value of a literal
P.Literal lit -> do
c <- constrainLiteral env lit tipe
return $ (emptyFragment) { typeConstraint = c }
--Variable: could have any annotations, so use a fresh typeVar
P.Var name -> do
v <- newVar env
return $ AnnFragment {
typeEnv = Map.singleton name (VarAnnot v) ,
vars = [v],
typeConstraint = VarAnnot v === tipe
}
--Alias: just add the name of the pattern to a fragment, then constrain the pattern
--This is used for things like sort ((x,y) as pair) = if x < y then pair else (y,x)
P.Alias name p -> do
v <- newVar env
let varType = VarAnnot v
fragment <- constrain env p tipe
--TODO this case? Constrain alias?
return $ fragment
{ typeEnv = Map.insert name (varType) (typeEnv fragment)
, vars = v : (vars fragment)
, typeConstraint = (VarAnnot v === tipe) /\ typeConstraint fragment
}
--Data: go into sub-patterns to extract their fragments
--And constrain that the final result has the given constructor
P.Data name patterns -> do
--TODO is this the right args?
(kind, cvars, args, result) <- liftIO $ Env.freshDataScheme (importedInfo env) (V.toString name)
argTypes <- mapM (\_ -> VarAnnot <$> newVar env) args
fragment <- (joinFragments ) <$> Monad.zipWithM (constrain env) patterns argTypes
--return fragment --TODO right?
--We don't constrain at all here, since we already did the pattern match check
--TODO let-expression for special cases?
recordStructureConstr <- do
argAnnotVars <- mapM (\_ -> VarAnnot <$> newVar env) patterns
--exists $ \recordSubType -> do
--exists $ \restOfRec -> do
let ctorFieldConstr =
tipe `Contains` ( PatData ("_" ++ V.toString name) argAnnotVars )
argTypesFrags <- mapM (\(pat, t) -> constrain env pat t) $ zip patterns argAnnotVars
let argTypesFrag = joinFragments argTypesFrags
let argTypesConstr = typeConstraint argTypesFrag
return $ ctorFieldConstr /\ argTypesConstr
--genSubTypeConstr tipe args 1 $ A.A region CTrue
--return $ tipe === mkRecord [("_" ++ V.toString name, args )] restOfRec
return $ fragment {
typeConstraint = typeConstraint fragment /\ recordStructureConstr,
vars = vars fragment --TODO where get constructor vars?
}
--Record : just map each sub-pattern into fields of a record
P.Record fields -> do
pairs <- mapM (\name -> do (,) name <$> newVar env) fields
let tenv = Map.map VarAnnot $ Map.fromList pairs
let c = (tipe === (BaseAnnot $ PatRecord tenv emptyAnnot )) --record (Map.map (:[]) tenv) t
return $ AnnFragment {
typeEnv = tenv ,
vars = map snd pairs,
typeConstraint = c
}
{-
instance Error (R.Region -> PP.Doc) where
noMsg _ = PP.empty
strMsg str span =
PP.vcat [ PP.text $ "Type error " ++ show span
, PP.text str ]
-}
--Given a pattern, return name of the top constructor in the pattern
ctorName :: P.CanonicalPattern -> String
ctorName (A.A _ pat) = case pat of
(P.Data name p2) -> "_" ++ V.toString name
(P.Record p) -> ""
(P.Alias p1 p2) -> ctorName p2
(P.Var p) -> "_"
P.Anything -> "_"
(P.Literal l) -> "_" ++ showLit l
showLit :: Literal.Literal -> String
showLit lit = case lit of
(Literal.IntNum i) -> show i
(Literal.FloatNum f) -> show f
(Literal.Chr c) -> show c
(Literal.Str s) -> show s
(Literal.Boolean b) -> show b
trace _ x = x
--Group patterns by their constructors, since we might match on more/less specific versions
sortByCtor :: [P.CanonicalPattern] -> [(String, [[P.CanonicalPattern]])]
sortByCtor patList =
let
--TODO sort other than by CTOR?
allNames = (List.nub $ map (ctorName) patList)
maybeAddName name pa@(A.A _ pat) subPatList = case pat of
P.Data name2 pats -> if (name == ctorName pa) then (pats : subPatList) else subPatList
(P.Record e) -> subPatList
(P.Alias e1 e2) -> maybeAddName name e2 subPatList
      (P.Var e) -> subPatList --Ignore these, we should catch this earlier
P.Anything -> subPatList --Ignore these, we should catch this earlier
(P.Literal e) -> subPatList
sortedPats = [ (ctor, List.transpose $ foldr (maybeAddName ctor) [] patList) | ctor <- allNames]
in trace ("ALL NAMES " ++ show allNames ) $ sortedPats
--Check if a pattern can match any expression
--Basically check for a variable or underscore
containsWildcard :: P.CanonicalPattern -> Bool
containsWildcard (A.A _ pat) =
case pat of
(P.Alias p1 p2) -> containsWildcard p2
(P.Var p) -> True
P.Anything -> True
_ -> False
--Given an environment, a type of a value to be matched
--Error information, and a list of patterns to match against
--Return the constraint that every possible constructor the value can take
--Must be able to be matched by the patterns
allMatchConstraints env argType region patList = do
typeCanMatch <- typeForPatList env region patList
return $ argType `OnlyContains` typeCanMatch
fieldSubset :: (Map.Map String [TT.Type]) -> (Map.Map String [TT.Type]) -> Bool
fieldSubset f1 f2 =
let
names1 = Map.keys f1
f2Values = map (\n -> (n, Map.lookup n f2)) names1
valueGood v = case v of
(_, Nothing) -> False
(n, Just t2) ->
let
t1 = case Map.lookup n f1 of
Nothing -> error $ "Key " ++ show n ++ " not in map " ++ show (Map.keys f1)
Just x -> x
pairWise = List.all (uncurry typeNEqual) $ zip t1 t2
in (length t1 == length t2) && pairWise
in List.all valueGood f2Values
--Generate the annotation of all patterns which can be matched
--By the given list of patterns
typeForPatList
:: PatAnnEnv -> R.Region -> [P.CanonicalPattern]
-> IO PatAnn
typeForPatList env region patList = do
isTotal <- checkIfTotal env patList
if isTotal
then trace ("IS TOTAL " ++ show patList) $ VarAnnot <$> newVar env
else trace ("NOT TOTAL") $ eachCtorHelper (sortByCtor patList)
where
--indexFields = map (\i -> "_sub" ++ show i) ([1..] :: [Int])
eachCtorHelper [] = return emptyAnnot
eachCtorHelper ( (ctor, subPats ) : otherPats) =
do
subTypes <- mapM (typeForPatList env region) subPats
otherFields <- eachCtorHelper otherPats
let ourRec = PatData ctor subTypes --TODO need otherFields?
return $ BaseAnnot ourRec
--Equality check for types, used for sorting through environments and getting constructor types
type1Equal :: TT.Term1 TT.Type -> TT.Term1 TT.Type -> Bool
type1Equal t1 t2 = case (t1, t2) of
(TT.App1 t1a t1b, TT.App1 t2a t2b) -> (typeNEqual t1a t2a) && (typeNEqual t1b t2b)
(TT.Fun1 t1a t1b, TT.App1 t2a t2b) -> (typeNEqual t1a t2a) && (typeNEqual t1b t2b)
(TT.Var1 t1a, TT.Var1 t2a) -> typeNEqual t1a t2a
(TT.EmptyRecord1, TT.EmptyRecord1) -> True
(TT.Record1 fields1 t1b, TT.Record1 fields2 t2b) ->
(fieldSubset fields1 fields2) && (fieldSubset fields2 fields1) && (typeNEqual t1b t2b)
_ -> False
--Check if two types are literally identical
--Equality check for types, used for sorting through environments and getting constructor types
typeNEqual :: TT.Type -> TT.Type -> Bool
typeNEqual t1 t2 = trace ("Comparing " ++ (show $ TP.pretty TP.Never t1 ) ++ " and " ++ ((show $ TP.pretty TP.Never t2 ) ) ) $ case (t1, t2) of
(TT.VarN (Just n1) _, TT.VarN (Just n2) _) -> (fst n1) == (fst n2) --trace "VAR JUST BASE CASE" $ n1 == n2
(TT.VarN Nothing t1a, TT.VarN Nothing t2a) -> let
desc1 = unsafePerformIO $ UF.descriptor t1a
desc2 = unsafePerformIO $ UF.descriptor t2a
in trace ("DESCRIPTOR CASE " ++ (show $ TT.name desc1) ++ " " ++ show (TT.name desc2 )) $ (TT.name desc1 == TT.name desc2)
(TT.TermN (Just n1) t1, TT.TermN (Just n2) t2) -> trace "TERM JUST BASE CASE" $ (fst n1 == fst n2) && (type1Equal t1 t2)
(TT.TermN Nothing t1, TT.TermN Nothing t2) -> trace "TERM NOTHING BASE CASE" $ (type1Equal t1 t2)
_ -> False
isInfiniteLit :: P.CanonicalPattern -> Bool
isInfiniteLit (A.A _ p) = case p of
P.Literal (Literal.IntNum _) -> True
P.Literal (Literal.Str _) -> True
P.Literal (Literal.Chr _) -> True --We assume chars may be infinite, in the case of Unicode
_ -> False
removeUnderscore :: String -> String
removeUnderscore s = case s of
[] -> []
('_' : s2) -> s2
_ -> s
--Given a list of patterns, determine if the pattern can match
--any possible value of its type
--This is used to ensure that complete pattern matches can match against Top,
--Even in the case where no wildcard is present
--Since integers have no constructors (only literals), this will never succeed for integers
checkIfTotal
:: PatAnnEnv
-> [P.CanonicalPattern]
-> IO Bool
--Special case: only ever 1 option for pattern matching on a record
--So it doesn't play into our totality calculations
checkIfTotal _ [A.A _ (P.Record _)] = return True
checkIfTotal env rawPatList = trace ("\n\n\n\n\nCHECK IF TOTAL!!!\n" ++ show rawPatList) $ do
--An integer or string match will never be total
--TODO bools and such?
let patList = filter (not . isInfiniteLit) rawPatList
let hasWildcard = (any containsWildcard patList)
let sortedPats = trace ("PAT LIST LENGTH " ++ show (length patList) ) $ sortByCtor patList
let mapGet d k = case Map.lookup k d of
Nothing -> error $ "Key " ++ show k ++ " not in " ++ show (Map.keys d)
Just x -> x
case (patList,hasWildcard) of
(_, True) -> trace ("HAS WILDCARD " ++ show patList) $ return True
([], _) -> return False
(_,False) -> do
--TODO pattern match on Bool?
let allCtors = constructor env --TODO need real env?
let ctorNames = Map.keys allCtors
ctorValues <- mapM liftIO $ Map.elems allCtors
ourTypeInfo <- liftIO $ mapGet allCtors (removeUnderscore $ trace ("PATLIST SHOW " ++ show patList) $ fst $ head sortedPats) --remove underscore
let (_,_,_,ourType) = ourTypeInfo
let
ctorsForOurType =
filter (/= "_Tuple1") $
map fst $
filter (\(_, (_,_,_,tp)) -> typeNEqual tp ourType) $ zip ctorNames ctorValues
let tupleNames = filter (List.isPrefixOf "__Tuple") $ map fst sortedPats
case (trace ("TUPLE NAMES: " ++ show tupleNames) $ tupleNames) of
(_:_) -> trace ("TUPLE NAMES: " ++ show tupleNames) $ return True
_ -> do
let
--ctorCovered :: Map.Map String [P.CanonicalPattern] -> String -> Bool
ctorCovered dict ctor = trace ("CTORS FOR OUR TYPE: " ++ show ctorsForOurType ) $
case (Map.lookup ("_" ++ ctor) dict) of
Nothing -> return False
Just subPats -> List.and `fmap` mapM (checkIfTotal env) subPats
coveredList <- mapM (ctorCovered $ Map.fromList sortedPats) ctorsForOurType
return $ trace ("Ctors for our type: " ++ show ctorsForOurType ++ "\nCovered List " ++ show coveredList ) $ List.and coveredList
--Very Boring, constraint rules for literal patterns
--Constrain just like expression literals, but we don't leave the possible set of values open
--This is for cases where we match against a literal and know its exact value
constrainLiteral
:: PatAnnEnv
-> Literal.Literal
-> PatAnn
-> IO (AnnConstraint PatInfo)
constrainLiteral env lit tipe = case lit of
(Literal.IntNum n) ->
return $ tipe `Contains` PatData ("_" ++ show n) []
(Literal.FloatNum f) ->
return $ tipe `Contains` PatData ("_" ++ show f) []
(Literal.Chr u) ->
return $ tipe `Contains` PatData ("_" ++ show u) []
(Literal.Str s) ->
return $ tipe `Contains` PatData ("_" ++ show s) []
(Literal.Boolean b) ->
return $ tipe `Contains` PatData ("_" ++ show b) []
| JoeyEremondi/elm-type-effect | src/Type/Effect/Pattern.hs | bsd-3-clause | 15,680 | 0 | 28 | 4,274 | 3,863 | 1,988 | 1,875 | 225 | 7 |
module Module4.Task16 where
import Data.Char(isDigit)
findDigit :: [Char] -> Maybe Char
findDigit [] = Nothing
findDigit (x:xs) | isDigit x = Just x
| otherwise = findDigit xs
| dstarcev/stepic-haskell | src/Module4/Task16.hs | bsd-3-clause | 203 | 0 | 8 | 55 | 80 | 40 | 40 | 6 | 1 |
module Sexy.Data.Maybe (
Maybe(..)
, maybe'
, fromJust'
) where
data Maybe a = Just a | Nothing
maybe' :: (a -> b) -> b -> Maybe a -> b
maybe' f _ (Just x) = f x
maybe' _ x Nothing = x
-- fromJust' = maybe' id
fromJust' :: a -> Maybe a -> a
fromJust' _ (Just x) = x
fromJust' x Nothing = x
| DanBurton/sexy | src/Sexy/Data/Maybe.hs | bsd-3-clause | 304 | 0 | 8 | 84 | 138 | 75 | 63 | 11 | 1 |
-- | Combinators for use with the Web type. This module allows one to easily opt into a small subset of Quiz.Web.Prelude.
-- NOTE: These combinators are defined in Quiz.Web.Prelude and not here to avoid a circular dependency.
module Quiz.Web.Prelude.Helpers () where
import Quiz.Web.Prelude
| michael-swan/quick-quiz | src/Quiz/Web/Prelude/Combinators.hs | bsd-3-clause | 294 | 0 | 4 | 47 | 19 | 14 | 5 | 2 | 0 |
{-# LANGUAGE FlexibleInstances, FlexibleContexts, TypeSynonymInstances
, UndecidableInstances, OverlappingInstances, MultiParamTypeClasses #-}
{- |
Module : ./CSL/ReduceInterpreter.hs
Description : Reduce instance for the AssignmentStore class
Copyright : (c) Ewaryst Schulz, DFKI Bremen 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (various glasgow extensions)
Reduce as AssignmentStore
-}
module CSL.ReduceInterpreter where
import Common.ProverTools (missingExecutableInPath)
import Common.Utils (getEnvDef, trimLeft)
import Common.IOS
import Common.ResultT
import CSL.Reduce_Interface ( evalString, exportExp, connectCAS, disconnectCAS
, lookupRedShellCmd, Session (..), cslReduceDefaultMapping)
import CSL.AS_BASIC_CSL
import CSL.Parse_AS_Basic (parseExpression)
import CSL.Transformation
import CSL.Interpreter
-- the process communication interface
import qualified Interfaces.Process as PC
import Control.Monad.Trans (MonadTrans (..), MonadIO (..))
import Control.Monad.State (MonadState (..))
import Data.Maybe
import System.IO (Handle)
import System.Process (ProcessHandle)
import System.Exit (ExitCode)
import Prelude hiding (lookup)
{- ----------------------------------------------------------------------
Reduce Calculator Instances
---------------------------------------------------------------------- -}
data ReduceInterpreter = ReduceInterpreter { inh :: Handle
, outh :: Handle
, ph :: ProcessHandle
, varcounter :: Int }
-- | ReduceInterpreter with Translator based on the CommandState
data RITrans = RITrans { getBMap :: BMap
, getRI :: PC.CommandState }
-- Types for two alternative reduce interpreter
-- Reds as (Red)uce (s)tandard interface
type RedsIO = ResultT (IOS ReduceInterpreter)
-- Redc as (Red)uce (c)ommand interface (it is built on CommandState)
type RedcIO = ResultT (IOS RITrans)
instance AssignmentStore RedsIO where
assign = redAssign evalRedsString redsTransS return
lookup = redLookup evalRedsString redsTransS
eval = redEval evalRedsString return
check = redCheck evalRedsString return
names = error "ReduceInterpreter as CS: names are unsupported"
instance VarGen RedsIO where
genVar = do
s <- get
let i = varcounter s
put $ s { varcounter = i + 1 }
return $ '?' : show i
instance AssignmentStore RedcIO where
assign = redAssign evalRedcString redcTransS redcTransE
lookup = redLookup evalRedcString redcTransS
eval = redEval evalRedcString redcTransE
check = redCheck evalRedcString redcTransE
names = liftM (SMem . getBMap) get
instance VarGen RedcIO where
genVar = do
s <- get
let i = newkey $ getBMap s
put $ s { getBMap = (getBMap s) { newkey = i + 1 } }
return $ '?' : show i
{- ----------------------------------------------------------------------
Reduce syntax functions
---------------------------------------------------------------------- -}
printAssignment :: String -> EXPRESSION -> String
printAssignment n e = concat [n, ":=", exportExp e, ";"]
printEvaluation :: EXPRESSION -> String
printEvaluation e = exportExp e ++ ";"
printLookup :: String -> String
printLookup n = n ++ ";"
{- As Reduce does not support boolean expressions as first-class citizens,
   we encode them in an if-statement and transform the numeric response back. -}
printBooleanExpr :: EXPRESSION -> String
printBooleanExpr e = concat [ "on rounded;"
, " if "
, exportExp e, " then 1 else 0;"
, " off rounded;"
]
getBooleanFromExpr :: EXPRESSION -> Bool
getBooleanFromExpr (Int 1 _) = True
getBooleanFromExpr (Int 0 _) = False
getBooleanFromExpr e =
error $ "getBooleanFromExpr: can't translate expression to boolean: "
++ show e
{- ----------------------------------------------------------------------
Generic Communication Interface
---------------------------------------------------------------------- -}
{- |
The generic interface abstracts over the concrete evaluation function
-}
redAssign :: (AssignmentStore s, MonadResult s) =>
(String -> s [EXPRESSION])
-> (ConstantName -> s String)
-> (EXPRESSION -> s EXPRESSION)
-> ConstantName -> AssDefinition -> s ()
redAssign ef trans transE n def =
let e = getDefiniens def
args = getArguments def
in if null args then
do
e' <- transE e
n' <- trans n
ef $ printAssignment n' e'
return ()
else error $ "redAssign: functional assignments unsupported: " ++ show n
redLookup :: (AssignmentStore s, MonadResult s) =>
(String -> s [EXPRESSION])
-> (ConstantName -> s String)
-> ConstantName -> s (Maybe EXPRESSION)
redLookup ef trans n = do
n' <- trans n
el <- ef $ printLookup n'
return $ listToMaybe el
{- we don't want to return nothing on id-lookup: "x; --> x"
if e == mkOp n [] then return Nothing else return $ Just e -}
redEval :: (AssignmentStore s, MonadResult s) =>
(String -> s [EXPRESSION])
-> (EXPRESSION -> s EXPRESSION)
-> EXPRESSION -> s EXPRESSION
redEval ef trans e = do
e' <- trans e
el <- ef $ printEvaluation e'
if null el
then error $ "redEval: expression " ++ show e' ++ " couldn't be evaluated"
else return $ head el
redCheck :: (AssignmentStore s, MonadResult s) =>
(String -> s [EXPRESSION])
-> (EXPRESSION -> s EXPRESSION)
-> EXPRESSION -> s Bool
redCheck ef trans e = do
e' <- trans e
el <- ef $ printBooleanExpr e'
if null el
then error $ "redCheck: expression " ++ show e' ++ " couldn't be evaluated"
else return $ getBooleanFromExpr $ head el
{- ----------------------------------------------------------------------
The Standard Communication Interface
---------------------------------------------------------------------- -}
instance Session ReduceInterpreter where
inp = inh
outp = outh
proch = Just . ph
redsTransS :: ConstantName -> RedsIO String
redsTransS = return . show
evalRedsString :: String -> RedsIO [EXPRESSION]
evalRedsString s = do
r <- get
liftIO $ evalString r s
redsInit :: IO ReduceInterpreter
redsInit = do
putStr "Connecting CAS.."
reducecmd <- getEnvDef "HETS_REDUCE" "redcsl"
-- check that prog exists
noProg <- missingExecutableInPath reducecmd
if noProg
then error $ "Could not find reduce under " ++ reducecmd
else do
(inpt, out, _, pid) <- connectCAS reducecmd
return ReduceInterpreter
{ inh = inpt, outh = out, ph = pid, varcounter = 1 }
redsExit :: ReduceInterpreter -> IO ()
redsExit = disconnectCAS
{- ----------------------------------------------------------------------
An alternative Communication Interface
---------------------------------------------------------------------- -}
wrapCommand :: IOS PC.CommandState a -> IOS RITrans a
wrapCommand ios = do
r <- get
let map' x = r { getRI = x }
stmap map' getRI ios
-- | A direct way to communicate with Reduce
redcDirect :: RITrans -> String -> IO String
redcDirect rit s = do
(res, _) <- runIOS (getRI rit) (PC.call 0.5 s)
return res
redcTransE :: EXPRESSION -> RedcIO EXPRESSION
redcTransE e = do
r <- get
let bm = getBMap r
(bm', e') = translateExpr bm e
put r { getBMap = bm' }
return e'
redcTransS :: ConstantName -> RedcIO String
redcTransS s = do
r <- get
let bm = getBMap r
(bm', s') = lookupOrInsert bm $ Left s
put r { getBMap = bm' }
return s'
evalRedcString :: String -> RedcIO [EXPRESSION]
evalRedcString s = do
-- 0.09 seconds is a critical value for the accepted response time of Reduce
res <- lift $ wrapCommand $ PC.call 0.5 s
r <- get
let bm = getBMap r
trans = revtranslateExpr bm
  {- we don't need to skip the Reduce line number here, because the command
     interface cleans the output pipe before sending (hence removes it) -}
return $ map trans $ maybeToList $ parseExpression operatorInfoMap
$ trimLeft res
-- | init the reduce communication
redcInit :: Int -- ^ Verbosity level
-> IO RITrans
redcInit v = do
rc <- lookupRedShellCmd
case rc of
Left redcmd -> do
cs <- PC.start redcmd v Nothing
(_, cs') <- runIOS cs $ PC.send $ "off nat; load redlog; "
++ "rlset reals; " -- on rounded; precision 30;"
return RITrans { getBMap = initWithDefault cslReduceDefaultMapping
, getRI = cs' }
_ -> error "Could not find reduce shell command!"
redcExit :: RITrans -> IO (Maybe ExitCode)
redcExit r = do
(ec, _) <- runIOS (getRI r) $ PC.close $ Just "quit;"
return ec
| spechub/Hets | CSL/ReduceInterpreter.hs | gpl-2.0 | 9,005 | 0 | 16 | 2,154 | 2,102 | 1,076 | 1,026 | 183 | 2 |
{-| Generic data loader.
This module holds the common code for parsing the input data after it
has been loaded from external sources.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Loader
( mergeData
, clearDynU
, checkData
, assignIndices
, setMaster
, lookupNode
, lookupInstance
, lookupGroup
, eitherLive
, commonSuffix
, extractExTags
, updateExclTags
, RqType(..)
, Request(..)
, ClusterData(..)
, isAllocationRequest
, emptyCluster
) where
import Control.Monad
import Data.List
import qualified Data.Map as M
import Data.Maybe
import qualified Data.Set as Set
import Text.Printf (printf)
import System.Time (ClockTime(..))
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Cluster as Cluster
import qualified Ganeti.HTools.Cluster.Moves as Moves
import Ganeti.BasicTypes
import qualified Ganeti.HTools.Tags as Tags
import qualified Ganeti.HTools.Tags.Constants as TagsC
import Ganeti.HTools.Types
import Ganeti.Utils
import Ganeti.Types (EvacMode)
-- * Types
{-| The iallocator request type.
This type denotes what request we got from Ganeti and also holds
request-specific fields.
-}
data RqType
= Allocate Instance.Instance Cluster.AllocDetails (Maybe [String])
-- ^ A new instance allocation, maybe with allocation restrictions
| AllocateSecondary Idx -- ^ Find a suitable
-- secondary node for disk
-- conversion
| Relocate Idx Int [Ndx] -- ^ Choose a new
-- secondary node
| NodeEvacuate [Idx] EvacMode -- ^ node-evacuate mode
| ChangeGroup [Gdx] [Idx] -- ^ Multi-relocate mode
| MultiAllocate [(Instance.Instance, Cluster.AllocDetails)]
-- ^ Multi-allocate mode
deriving (Show)
-- | A complete request, as received from Ganeti.
data Request = Request RqType ClusterData
deriving (Show)
-- | Decide whether a request asks to allocate new instances; if so, also
-- return the desired node group, if a unique node group is specified.
-- That is, return `Nothing` if the request is not an allocation request,
-- `Just Nothing`, if it is an Allocation request, but there is no unique
-- group specified, and return `Just (Just g)` if it is an allocation request
-- uniquely requesting Group `g`.
isAllocationRequest :: RqType -> Maybe (Maybe String)
isAllocationRequest (Allocate _ (Cluster.AllocDetails _ grp) _) = Just grp
isAllocationRequest (MultiAllocate reqs) = Just $
case ordNub . catMaybes
$ map (\(_, Cluster.AllocDetails _ grp) -> grp) reqs of
[grp] -> Just grp
_ -> Nothing
isAllocationRequest _ = Nothing
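-- Illustrative cases (@inst@ stands for some instance; the field values of
-- 'Cluster.AllocDetails' below are made up):
--
-- > isAllocationRequest (Relocate 0 1 [2]) == Nothing
-- > isAllocationRequest (Allocate inst (Cluster.AllocDetails 1 Nothing) Nothing)
-- >     == Just Nothing
-- > isAllocationRequest (Allocate inst (Cluster.AllocDetails 1 (Just "grp")) Nothing)
-- >     == Just (Just "grp")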
-- | The cluster state.
data ClusterData = ClusterData
{ cdGroups :: Group.List -- ^ The node group list
, cdNodes :: Node.List -- ^ The node list
, cdInstances :: Instance.List -- ^ The instance list
, cdTags :: [String] -- ^ The cluster tags
, cdIPolicy :: IPolicy -- ^ The cluster instance policy
} deriving (Show, Eq)
-- | An empty cluster.
emptyCluster :: ClusterData
emptyCluster = ClusterData Container.empty Container.empty Container.empty []
defIPolicy
-- * Functions
-- | Looks up a node in an assoc list.
lookupNode :: (Monad m) => NameAssoc -> String -> String -> m Ndx
lookupNode ktn inst node =
maybe (fail $ "Unknown node '" ++ node ++ "' for instance " ++ inst) return $
M.lookup node ktn
-- | Looks up an instance in an assoc list.
lookupInstance :: (Monad m) => NameAssoc -> String -> m Idx
lookupInstance kti inst =
maybe (fail $ "Unknown instance '" ++ inst ++ "'") return $ M.lookup inst kti
-- | Looks up a group in an assoc list.
lookupGroup :: (Monad m) => NameAssoc -> String -> String -> m Gdx
lookupGroup ktg nname gname =
maybe (fail $ "Unknown group '" ++ gname ++ "' for node " ++ nname) return $
M.lookup gname ktg
-- | Given a list of elements (and their names), assign indices to them.
assignIndices :: (Element a) =>
[(String, a)]
-> (NameAssoc, Container.Container a)
assignIndices name_element =
let (name_idx, idx_element) =
unzip . map (\ (idx, (k, v)) -> ((k, idx), (idx, setIdx v idx)))
. zip [0..] $ name_element
in (M.fromList name_idx, Container.fromList idx_element)
-- | Given an indexed node list and the name of the master, mark it as such.
setMaster :: (Monad m) => NameAssoc -> Node.List -> String -> m Node.List
setMaster node_names node_idx master = do
kmaster <- maybe (fail $ "Master node " ++ master ++ " unknown") return $
M.lookup master node_names
let mnode = Container.find kmaster node_idx
return $ Container.add kmaster (Node.setMaster mnode True) node_idx
-- | Given the nodes with the location tags already set correctly, compute
-- the location score for an instance.
setLocationScore :: Node.List -> Instance.Instance -> Instance.Instance
setLocationScore nl inst =
let pnode = Container.find (Instance.pNode inst) nl
snode = Container.find (Instance.sNode inst) nl
in Moves.setInstanceLocationScore inst pnode snode
-- | For each instance, add its index to its primary and secondary nodes.
fixNodes :: Node.List
-> Instance.Instance
-> Node.List
fixNodes accu inst =
let pdx = Instance.pNode inst
sdx = Instance.sNode inst
pold = Container.find pdx accu
pnew = Node.setPri pold inst
ac2 = Container.add pdx pnew accu
in if sdx /= Node.noSecondary
then let sold = Container.find sdx accu
snew = Node.setSec sold inst
in Container.add sdx snew ac2
else ac2
-- | Set the node's policy to its group one. Note that this requires
-- the group to exist (should have been checked before), otherwise it
-- will abort with a runtime error.
setNodePolicy :: Group.List -> Node.Node -> Node.Node
setNodePolicy gl node =
let grp = Container.find (Node.group node) gl
gpol = Group.iPolicy grp
in Node.setPolicy gpol node
-- | Update instance with exclusion tags list.
updateExclTags :: [String] -> Instance.Instance -> Instance.Instance
updateExclTags tl inst =
let allTags = Instance.allTags inst
exclTags = filter (\tag -> any (`isPrefixOf` tag) tl) allTags
in inst { Instance.exclTags = exclTags }
-- | Update instance with desired location tags list.
updateDesiredLocationTags :: [String] -> Instance.Instance -> Instance.Instance
updateDesiredLocationTags tl inst =
let allTags = Instance.allTags inst
dsrdLocTags = filter (\tag -> any (`isPrefixOf` tag) tl) allTags
in inst { Instance.dsrdLocTags = Set.fromList dsrdLocTags }
-- | Update the movable attribute.
updateMovable :: [String] -- ^ Selected instances (if not empty)
-> [String] -- ^ Excluded instances
-> Instance.Instance -- ^ Target Instance
-> Instance.Instance -- ^ Target Instance with updated attribute
updateMovable selinsts exinsts inst =
if Instance.name inst `elem` exinsts ||
not (null selinsts || Instance.name inst `elem` selinsts)
then Instance.setMovable inst False
else inst
-- | Disables moves for instances with a split group.
disableSplitMoves :: Node.List -> Instance.Instance -> Instance.Instance
disableSplitMoves nl inst =
if not . isOk . Cluster.instanceGroup nl $ inst
then Instance.setMovable inst False
else inst
-- | Set the auto-repair policy for an instance.
setArPolicy :: [String] -- ^ Cluster tags
-> Group.List -- ^ List of node groups
-> Node.List -- ^ List of nodes
-> Instance.List -- ^ List of instances
-> ClockTime -- ^ Current timestamp, to evaluate ArSuspended
-> Instance.List -- ^ Updated list of instances
setArPolicy ctags gl nl il time =
let getArPolicy' = flip getArPolicy time
cpol = fromMaybe ArNotEnabled $ getArPolicy' ctags
gpols = Container.map (fromMaybe cpol . getArPolicy' . Group.allTags) gl
ipolfn = getArPolicy' . Instance.allTags
nlookup = flip Container.find nl . Instance.pNode
glookup = flip Container.find gpols . Node.group . nlookup
updateInstance inst = inst {
Instance.arPolicy = fromMaybe (glookup inst) $ ipolfn inst }
in
Container.map updateInstance il
-- | Get the auto-repair policy from a list of tags.
--
-- This examines the ganeti:watcher:autorepair and
-- ganeti:watcher:autorepair:suspend tags to determine the policy. If none of
-- these tags are present, Nothing (and not ArNotEnabled) is returned.
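--
-- Illustratively (the exact tag spellings live in @TagsC@ and are an
-- assumption here, not taken from this module): a
-- @ganeti:watcher:autorepair:TYPE@ tag enables repairs of the given type, a
-- bare @ganeti:watcher:autorepair:suspend@ tag suspends them forever, and
-- @ganeti:watcher:autorepair:suspend:TIMESTAMP@ suspends them until the given
-- time.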
getArPolicy :: [String] -> ClockTime -> Maybe AutoRepairPolicy
getArPolicy tags time =
let enabled = mapMaybe (autoRepairTypeFromRaw <=<
chompPrefix TagsC.autoRepairTagEnabled) tags
suspended = mapMaybe (chompPrefix TagsC.autoRepairTagSuspended) tags
futureTs = filter (> time) . map (flip TOD 0) $
mapMaybe (tryRead "auto-repair suspend time") suspended
in
case () of
-- Note how we must return ArSuspended even if "enabled" is empty, so that
-- node groups or instances can suspend repairs that were enabled at an
-- upper scope (cluster or node group).
_ | "" `elem` suspended -> Just $ ArSuspended Forever
| not $ null futureTs -> Just . ArSuspended . Until . maximum $ futureTs
| not $ null enabled -> Just $ ArEnabled (minimum enabled)
| otherwise -> Nothing
-- | Compute the longest common suffix of a list of strings that
-- starts with a dot.
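--
-- For example (illustrative, added for clarity):
--
-- > longestDomain ["node1.example.com", "node2.example.com"] == ".example.com"
-- > longestDomain ["alpha.org", "beta.net"] == ""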
longestDomain :: [String] -> String
longestDomain [] = ""
longestDomain (x:xs) =
foldr (\ suffix accu -> if all (isSuffixOf suffix) xs
then suffix
else accu)
"" $ filter (isPrefixOf ".") (tails x)
-- | Extracts the exclusion tags from the cluster configuration.
extractExTags :: [String] -> [String]
extractExTags = filter (not . null) . mapMaybe (chompPrefix TagsC.exTagsPrefix)
-- | Extracts the desired locations from the instance tags.
extractDesiredLocations :: [String] -> [String]
extractDesiredLocations =
filter (not . null) . mapMaybe (chompPrefix TagsC.desiredLocationPrefix)
-- | Extracts the common suffix from node\/instance names.
commonSuffix :: Node.List -> Instance.List -> String
commonSuffix nl il =
let node_names = map Node.name $ Container.elems nl
inst_names = map Instance.name $ Container.elems il
in longestDomain (node_names ++ inst_names)
-- | Set the migration-related tags on a node given the cluster tags;
-- this assumes that the node tags are already set on that node.
addMigrationTags :: [String] -- ^ cluster tags
-> Node.Node -> Node.Node
addMigrationTags ctags node =
let ntags = Node.nTags node
migTags = Tags.getMigRestrictions ctags ntags
rmigTags = Tags.getRecvMigRestrictions ctags ntags
in Node.setRecvMigrationTags (Node.setMigrationTags node migTags) rmigTags
-- | Set the location tags on a node given the cluster tags;
-- this assumes that the node tags are already set on that node.
addLocationTags :: [String] -- ^ cluster tags
-> Node.Node -> Node.Node
addLocationTags ctags node =
let ntags = Node.nTags node
in Node.setLocationTags node $ Tags.getLocations ctags ntags
-- | Initializer function that loads the data from a node and instance
-- list and massages it into the correct format.
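--
-- Descriptive summary of the steps below: apply the auto-repair policy, merge
-- the supplied utilisation data, update exclusion\/location tags and the
-- movable flag, compute location scores, link instances to their primary and
-- secondary nodes, set node policies and peer maps, disable moves for split
-- instances, and finally set the migration tags.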
mergeData :: [(String, DynUtil)] -- ^ Instance utilisation data
-> [String] -- ^ Exclusion tags
-> [String] -- ^ Selected instances (if not empty)
-> [String] -- ^ Excluded instances
-> ClockTime -- ^ The current timestamp
-> ClusterData -- ^ Data from backends
-> Result ClusterData -- ^ Fixed cluster data
mergeData um extags selinsts exinsts time cdata@(ClusterData gl nl il ctags _) =
let il2 = setArPolicy ctags gl nl il time
il3 = foldl' (\im (name, n_util) ->
case Container.findByName im name of
Nothing -> im -- skipping unknown instance
Just inst ->
let new_i = inst { Instance.util = n_util }
in Container.add (Instance.idx inst) new_i im
) il2 um
allextags = extags ++ extractExTags ctags
dsrdLocTags = extractDesiredLocations ctags
inst_names = map Instance.name $ Container.elems il3
selinst_lkp = map (lookupName inst_names) selinsts
exinst_lkp = map (lookupName inst_names) exinsts
lkp_unknown = filter (not . goodLookupResult) (selinst_lkp ++ exinst_lkp)
selinst_names = map lrContent selinst_lkp
exinst_names = map lrContent exinst_lkp
node_names = map Node.name (Container.elems nl)
common_suffix = longestDomain (node_names ++ inst_names)
il4 = Container.map (computeAlias common_suffix .
updateExclTags allextags .
updateDesiredLocationTags dsrdLocTags .
updateMovable selinst_names exinst_names) il3
nl2 = Container.map (addLocationTags ctags) nl
il5 = Container.map (setLocationScore nl2) il4
nl3 = foldl' fixNodes nl2 (Container.elems il5)
nl4 = Container.map (setNodePolicy gl .
computeAlias common_suffix .
(`Node.buildPeers` il4)) nl3
il6 = Container.map (disableSplitMoves nl3) il5
nl5 = Container.map (addMigrationTags ctags) nl4
in if' (null lkp_unknown)
(Ok cdata { cdNodes = nl5, cdInstances = il6 })
(Bad $ "Unknown instance(s): " ++ show(map lrContent lkp_unknown))
-- | In a cluster description, clear dynamic utilisation information.
clearDynU :: ClusterData -> Result ClusterData
clearDynU cdata@(ClusterData _ _ il _ _) =
let il2 = Container.map (\ inst -> inst {Instance.util = zeroUtil }) il
in Ok cdata { cdInstances = il2 }
-- | Checks the cluster data for consistency.
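--
-- A warning is emitted for every node on which more than 512 MB of memory or
-- 1 GB of disk cannot be accounted for.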
checkData :: Node.List -> Instance.List
-> ([String], Node.List)
checkData nl il =
Container.mapAccum
(\ msgs node ->
let nname = Node.name node
delta_mem = truncate (Node.tMem node)
- Node.nMem node
- Node.fMem node
- nodeImem node il
delta_dsk = truncate (Node.tDsk node)
- Node.fDsk node
- nodeIdsk node il
newn = node `Node.setXmem` delta_mem
umsg1 =
if delta_mem > 512 || delta_dsk > 1024
then printf "node %s is missing %d MB ram \
\and %d GB disk"
nname delta_mem (delta_dsk `div` 1024):msgs
else msgs
in (umsg1, newn)
) [] nl
-- | Compute the amount of memory used by primary instances on a node.
nodeImem :: Node.Node -> Instance.List -> Int
nodeImem node il =
let rfind = flip Container.find il
il' = map rfind $ Node.pList node
oil' = filter Instance.usesMemory il'
in sum . map Instance.mem $ oil'
-- | Compute the amount of disk used by instances on a node (either primary
-- or secondary).
nodeIdsk :: Node.Node -> Instance.List -> Int
nodeIdsk node il =
let rfind = flip Container.find il
in sum . map (Instance.dsk . rfind)
$ Node.pList node ++ Node.sList node
-- | Get live information or a default value
eitherLive :: (Monad m) => Bool -> a -> m a -> m a
eitherLive True _ live_data = live_data
eitherLive False def_data _ = return def_data
| bitemyapp/ganeti | src/Ganeti/HTools/Loader.hs | bsd-2-clause | 17,434 | 0 | 20 | 4,539 | 3,696 | 1,972 | 1,724 | 275 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
module Network.IRC.Bot.Log where
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as C
import Data.Data
data LogLevel
= Debug
| Normal
| Important
deriving (Eq, Ord, Read, Show, Data, Typeable)
type Logger = LogLevel -> ByteString -> IO ()
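-- | A logger that writes to stdout every message whose level is at least the
-- given minimum level; e.g. @stdoutLogger Normal@ keeps 'Normal' and
-- 'Important' messages but drops 'Debug' ones.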
stdoutLogger :: LogLevel -> Logger
stdoutLogger minLvl msgLvl msg
| msgLvl >= minLvl = C.putStrLn msg -- assumes ascii, which is wrong(?)
| otherwise = return ()
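-- | A logger that discards all messages.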
nullLogger :: Logger
nullLogger _ _ = return () | eigengrau/haskell-ircbot | Network/IRC/Bot/Log.hs | bsd-3-clause | 549 | 0 | 8 | 114 | 163 | 89 | 74 | 17 | 1 |
{- |
Module : $Header$
Description : Overload resolution
Copyright : (c) Martin Kuehl, T. Mossakowski, C. Maeder, 2004-2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Overload resolution (injections are inserted separately)
Follows Sect. III:3.3 of the CASL Reference Manual.
The algorithm is from:
Till Mossakowski, Kolyang, Bernd Krieg-Brueckner:
Static semantic analysis and theorem proving for CASL.
12th Workshop on Algebraic Development Techniques, Tarquinia 1997,
LNCS 1376, p. 333-348
-}
module CASL.Overload
( minExpFORMULA
, minExpFORMULAeq
, minExpTerm
, isUnambiguous
, oneExpTerm
, mkSorted
, Min
, leqF
, leqP
, leqSort
, minimalSupers
, maximalSubs
, haveCommonSupersorts
, haveCommonSubsorts
, keepMinimals1
, keepMinimals
) where
import CASL.ToDoc (FormExtension)
import CASL.Sign
import CASL.AS_Basic_CASL
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Rel as Rel
import Common.Lib.State
import Common.Id
import Common.GlobalAnnotations
import Common.DocUtils
import Common.Result
import Common.Partial
import Common.Utils
import Data.List
import Data.Maybe
import qualified Data.Map as Map
import qualified Data.Set as Set
import Control.Monad
-- | the type of the type checking function of extensions
type Min f e = Sign f e -> f -> Result f
mkSorted :: TermExtension f => Sign f e -> TERM f -> SORT -> Range -> TERM f
mkSorted sign t s r = let nt = Sorted_term t s r in case optTermSort t of
Nothing -> nt
Just srt -> if leqSort sign s srt then t else nt
{- ----------------------------------------------------------
- Minimal expansion of a formula -
Expand a given formula by typing information.
* For non-atomic formulae, recurse through subsentences.
    * For trivial atomic formulae, no expansion is necessary.
* For atomic formulae, the following cases are implemented:
+ Predication is handled by the dedicated expansion function
'minExpFORMULApred'.
+ Existl_equation and Strong_equation are handled by the dedicated
expansion function 'minExpFORMULAeq'.
+ Definedness is handled by expanding the subterm.
+ Membership is handled like Cast
---------------------------------------------------------- -}
minExpFORMULA
:: (FormExtension f, TermExtension f)
=> Min f e -> Sign f e -> FORMULA f -> Result (FORMULA f)
minExpFORMULA mef sign formula = let sign0 = sign { envDiags = [] } in
case formula of
Quantification q vars f pos -> do
-- add 'vars' to signature
let sign' = execState (mapM_ addVars vars) sign0
Result (envDiags sign') $ Just ()
f' <- minExpFORMULA mef sign' f
return (Quantification q vars f' pos)
Junction j fs pos -> do
fs' <- mapR (minExpFORMULA mef sign) fs
return (Junction j fs' pos)
Relation f1 c f2 pos ->
joinResultWith (\ f1' f2' -> Relation f1' c f2' pos)
(minExpFORMULA mef sign f1) $ minExpFORMULA mef sign f2
Negation f pos -> do
f' <- minExpFORMULA mef sign f
return (Negation f' pos)
Predication (Pred_name ide) terms pos
-> minExpFORMULApred mef sign ide Nothing terms pos
Predication (Qual_pred_name ide ty pos1) terms pos2
-> minExpFORMULApred mef sign ide (Just $ toPredType ty)
terms (pos1 `appRange` pos2)
Equation term1 e term2 pos
-> minExpFORMULAeq mef sign (`Equation` e) term1 term2 pos
Definedness term pos -> do
t <- oneExpTerm mef sign term
return (Definedness t pos)
Membership term srt pos -> do
ts <- minExpTerm mef sign term
let fs = map (concatMap ( \ t ->
map ( \ c ->
Membership (mkSorted sign t c pos) srt pos)
$ maybe [srt] (minimalSupers sign srt)
$ optTermSort t)) ts
msg = superSortError True sign srt ts
isUnambiguous msg (globAnnos sign) formula fs pos
ExtFORMULA f -> fmap ExtFORMULA $ mef sign f
QuantOp o ty f -> do
let sign' = sign0 { opMap = addOpTo o (toOpType ty) $ opMap sign0 }
Result (envDiags sign') $ Just ()
f' <- minExpFORMULA mef sign' f
return $ QuantOp o ty f'
QuantPred p ty f -> do
let pm = predMap sign0
sign' = sign0
{ predMap = MapSet.insert p (toPredType ty) pm }
Result (envDiags sign') $ Just ()
f' <- minExpFORMULA mef sign' f
return $ QuantPred p ty f'
Mixfix_formula term -> do
t <- oneExpTerm mef sign term
let Result ds mt = adjustPos (getRangeSpan term) $ termToFormula t
appendDiags ds
case mt of
Nothing -> mkError "not a formula" term
Just f -> return f
_ -> return formula -- do not fail even for unresolved cases
superSortError :: TermExtension f
=> Bool -> Sign f e -> SORT -> [[TERM f]] -> String
superSortError super sign srt ts = let
ds = keepMinimals sign id . map sortOfTerm $ concat ts
in "\n" ++ showSort ds ++ "found but\na "
++ (if super then "super" else "sub") ++ "sort of '"
++ shows srt "' was expected."
-- | test if a term can be uniquely resolved
oneExpTerm :: (FormExtension f, TermExtension f)
=> Min f e -> Sign f e -> TERM f -> Result (TERM f)
oneExpTerm minF sign term = do
ts <- minExpTerm minF sign term
isUnambiguous "" (globAnnos sign) term ts nullRange
{- ----------------------------------------------------------
- Minimal expansion of an equation formula -
see minExpTermCond
---------------------------------------------------------- -}
minExpFORMULAeq :: (FormExtension f, TermExtension f)
=> Min f e -> Sign f e -> (TERM f -> TERM f -> Range -> FORMULA f)
-> TERM f -> TERM f -> Range -> Result (FORMULA f)
minExpFORMULAeq mef sign eq term1 term2 pos = do
(ps, msg) <- minExpTermCond mef sign ( \ t1 t2 -> eq t1 t2 pos)
term1 term2 pos
isUnambiguous msg (globAnnos sign) (eq term1 term2 pos) ps pos
-- | check if there is at least one solution
hasSolutions :: Pretty f => String -> GlobalAnnos -> f -> [[f]] -> Range
-> Result [[f]]
hasSolutions msg ga topterm ts pos = let terms = filter (not . null) ts in
if null terms then Result
[Diag Error ("no typing for: " ++ showGlobalDoc ga topterm "" ++ msg)
pos] Nothing
else return terms
-- | check if there is a unique equivalence class
isUnambiguous :: Pretty f => String -> GlobalAnnos -> f -> [[f]] -> Range
-> Result f
isUnambiguous msg ga topterm ts pos = do
terms <- hasSolutions msg ga topterm ts pos
case terms of
[ term : _ ] -> return term
_ -> Result [Diag Error ("ambiguous term\n " ++
showSepList (showString "\n ") (showGlobalDoc ga)
(take 5 $ map head terms) "") pos] Nothing
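-- | For mixfix identifiers, check that the number of arguments matches the
-- number of places; return the number of arguments.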
checkIdAndArgs :: Id -> [a] -> Range -> Result Int
checkIdAndArgs ide args poss =
let nargs = length args
pargs = placeCount ide
in if isMixfix ide && pargs /= nargs then
Result [Diag Error
("expected " ++ shows pargs " argument(s) of mixfix identifier '"
++ showDoc ide "' but found " ++ shows nargs " argument(s)")
poss] Nothing
else return nargs
noOpOrPredDiag :: Pretty t => [a] -> DiagKind -> String -> Maybe t -> Id
-> Range -> Int -> [Diagnosis]
noOpOrPredDiag ops k str mty ide pos nargs = case ops of
[] -> let
hd = "no " ++ str ++ " with "
ft = " found for '" ++ showDoc ide "'"
in [Diag k (case mty of
Nothing -> hd ++ shows nargs " argument"
++ (if nargs == 1 then "" else "s") ++ ft
Just ty -> hd ++ "profile '" ++ showDoc ty "'" ++ ft) pos]
_ -> []
noOpOrPred :: Pretty t => [a] -> String -> Maybe t -> Id -> Range -> Int
-> Result ()
noOpOrPred ops str mty ide pos nargs = when (null ops) $ Result
(noOpOrPredDiag ops Error str mty ide pos nargs) Nothing
{- ----------------------------------------------------------
- Minimal expansion of a predication formula -
see minExpTermAppl
---------------------------------------------------------- -}
minExpFORMULApred :: (FormExtension f, TermExtension f)
=> Min f e -> Sign f e -> Id -> Maybe PredType -> [TERM f] -> Range
-> Result (FORMULA f)
minExpFORMULApred mef sign ide mty args pos = do
nargs <- checkIdAndArgs ide args pos
let -- predicates matching that name in the current environment
preds' = Set.filter ((nargs ==) . length . predArgs) $
MapSet.lookup ide $ predMap sign
preds = case mty of
Nothing -> map (pSortBy predArgs sign)
$ Rel.leqClasses (leqP' sign) preds'
Just ty -> [[ty] | Set.member ty preds']
boolAna l cmd = case mty of
Nothing | null l -> do
appendDiags $ noOpOrPredDiag preds Hint
"matching predicate" mty ide pos nargs
minExpFORMULA mef sign $ Mixfix_formula
$ Application (Op_name ide) args pos
_ -> cmd
boolAna preds $ do
noOpOrPred preds "predicate" mty ide pos nargs
tts <- mapM (minExpTerm mef sign) args
let (goodCombs, msg) = getAllCombs sign nargs ide predArgs preds tts
qualForms = qualifyGFs qualifyPred ide pos goodCombs
boolAna qualForms
$ isUnambiguous msg (globAnnos sign)
(Predication (Pred_name ide) args pos) qualForms pos
showSort :: [SORT] -> String
showSort s = case s of
[ft] -> "a term of sort '" ++ shows ft "' was "
_ -> "terms of sorts " ++ showDoc s " were "
missMsg :: Bool -> Int -> Id -> [[SORT]] -> [SORT] -> [SORT] -> String
missMsg singleArg maxArg ide args foundTs expectedTs =
"\nin the "
++ (if singleArg then "" else show (maxArg + 1) ++ ". ")
++ "argument of '" ++ show ide
++ (if singleArg then "'\n" else case args of
[arg] -> " : " ++ showDoc (PredType arg) "'\n"
_ -> "'\n with argument sorts " ++ showDoc (map PredType args) "\n")
++ showSort foundTs ++ "found but\n"
++ showSort expectedTs ++ "expected."
getAllCombs :: TermExtension f => Sign f e -> Int -> Id -> (a -> [SORT])
-> [[a]] -> [[[TERM f]]] -> ([[(a, [TERM f], Maybe Int)]], String)
getAllCombs sign nargs ide getArgs fs expansions =
let formCombs = concatMap (getCombs sign getArgs fs . combine)
$ combine expansions
partCombs = map (partition $ \ (_, _, m) -> isNothing m) formCombs
(goodCombs, badCombs) = unzip partCombs
badCs = concat badCombs
in if null badCs then (goodCombs, "") else let
maxArg = maximum $ map (\ (_, _, Just c) -> c) badCs
badCs2 = filter (\ (_, _, Just c) -> maxArg == c) badCs
args = Set.toList . Set.fromList
$ map (\ (a, _, _) -> getArgs a) badCs2
foundTs = keepMinimals sign id
$ map (\ (_, ts, _) -> sortOfTerm $ ts !! maxArg) badCs2
expectedTs = keepMinimals1 False sign id $ map (!! maxArg) args
in (goodCombs, missMsg (nargs == 1) maxArg ide args foundTs expectedTs)
getCombs :: TermExtension f => Sign f e -> (a -> [SORT]) -> [[a]] -> [[TERM f]]
-> [[(a, [TERM f], Maybe Int)]]
getCombs sign getArgs = flip $ map . \ cs fs ->
[ (f, ts, elemIndex False $ zipWith (leqSort sign) (map sortOfTerm ts)
$ getArgs f) | f <- fs, ts <- cs ]
qualifyGFs :: (Id -> Range -> (a, [TERM f]) -> b) -> Id -> Range
-> [[(a, [TERM f], Maybe Int)]] -> [[b]]
qualifyGFs f ide pos = map $ map (f ide pos . \ (a, b, _) -> (a, b))
-- | qualify a single pred, given by its signature and its arguments
qualifyPred :: Id -> Range -> (PredType, [TERM f]) -> FORMULA f
qualifyPred ide pos (pred', terms') =
Predication (Qual_pred_name ide (toPRED_TYPE pred') pos) terms' pos
-- | expansions of an equation formula or a conditional
minExpTermEq :: (FormExtension f, TermExtension f)
=> Min f e -> Sign f e -> TERM f -> TERM f -> Result [[(TERM f, TERM f)]]
minExpTermEq mef sign term1 term2 = do
exps1 <- minExpTerm mef sign term1
exps2 <- minExpTerm mef sign term2
return $ map (minimizeEq sign . getPairs) $ combine [exps1, exps2]
getPairs :: [[TERM f]] -> [(TERM f, TERM f)]
getPairs cs = [ (t1, t2) | [t1, t2] <- combine cs ]
minimizeEq :: TermExtension f => Sign f e -> [(TERM f, TERM f)]
-> [(TERM f, TERM f)]
minimizeEq s = keepMinimals s (sortOfTerm . snd)
. keepMinimals s (sortOfTerm . fst)
{- ----------------------------------------------------------
- Minimal expansion of a term -
Expand a given term by typing information.
    * 'Qual_var' terms are handled by 'minExpTermVar'
* 'Application' terms are handled by 'minExpTermOp'.
* 'Conditional' terms are handled by 'minExpTermCond'.
---------------------------------------------------------- -}
minExpTerm :: (FormExtension f, TermExtension f)
=> Min f e -> Sign f e -> TERM f -> Result [[TERM f]]
minExpTerm mef sign top = let ga = globAnnos sign in case top of
Qual_var var srt _ -> let ts = minExpTermVar sign var (Just srt) in
if null ts then mkError "no matching qualified variable found" var
else return ts
Application op terms pos -> minExpTermOp mef sign op terms pos
Sorted_term term srt pos -> do
expandedTerm <- minExpTerm mef sign term
-- choose expansions that fit the given signature, then qualify
let validExps =
map (filter (maybe True (flip (leqSort sign) srt)
. optTermSort))
expandedTerm
msg = superSortError False sign srt expandedTerm
hasSolutions msg ga top (map (map (\ t ->
Sorted_term t srt pos)) validExps) pos
Cast term srt pos -> do
expandedTerm <- minExpTerm mef sign term
-- find a unique minimal common supersort
let ts = map (concatMap (\ t ->
map ( \ c ->
Cast (mkSorted sign t c pos) srt pos)
$ maybe [srt] (minimalSupers sign srt)
$ optTermSort t)) expandedTerm
msg = superSortError True sign srt expandedTerm
hasSolutions msg ga top ts pos
Conditional term1 formula term2 pos -> do
f <- minExpFORMULA mef sign formula
(ts, msg) <- minExpTermCond mef sign ( \ t1 t2 -> Conditional t1 f t2 pos)
term1 term2 pos
hasSolutions msg ga (Conditional term1 formula term2 pos) ts pos
ExtTERM t -> do
nt <- mef sign t
return [[ExtTERM nt]]
_ -> mkError "unexpected kind of term" top
-- | Minimal expansion of a possibly qualified variable identifier
minExpTermVar :: Sign f e -> Token -> Maybe SORT -> [[TERM f]]
minExpTermVar sign tok ms = case Map.lookup tok $ varMap sign of
Nothing -> []
Just s -> let qv = [[Qual_var tok s nullRange]] in
case ms of
Nothing -> qv
Just s2 -> if s == s2 then qv else []
-- | minimal expansion of an (possibly qualified) operator application
minExpTermAppl :: (FormExtension f, TermExtension f)
=> Min f e -> Sign f e -> Id -> Maybe OpType -> [TERM f] -> Range
-> Result [[TERM f]]
minExpTermAppl mef sign ide mty args pos = do
nargs <- checkIdAndArgs ide args pos
let -- functions matching that name in the current environment
ops' = Set.filter ( \ o -> length (opArgs o) == nargs) $
MapSet.lookup ide $ opMap sign
ops = case mty of
Nothing -> map (pSortBy opArgs sign)
$ Rel.leqClasses (leqF' sign) ops'
Just ty -> [[ty] | Set.member ty ops' ||
-- might be known to be total
Set.member (mkTotal ty) ops' ]
noOpOrPred ops "operation" mty ide pos nargs
expansions <- mapM (minExpTerm mef sign) args
let -- generate profiles as descr. on p. 339 (Step 3)
(goodCombs, msg) = getAllCombs sign nargs ide opArgs ops expansions
qualTerms = qualifyGFs qualifyOp ide pos
$ map (minimizeOp sign) goodCombs
hasSolutions msg (globAnnos sign)
(Application (Op_name ide) args pos) qualTerms pos
-- qualify a single op, given by its signature and its arguments
qualifyOp :: Id -> Range -> (OpType, [TERM f]) -> TERM f
qualifyOp ide pos (op', terms') =
Application (Qual_op_name ide (toOP_TYPE op') pos) terms' pos
{- ----------------------------------------------------------
- Minimal expansion of a function application or a variable -
Expand a function application by typing information.
1. First expand all argument subterms.
2. Combine these expansions so we compute the set of tuples
{ (C_1, ..., C_n) | (C_1, ..., C_n) \in
minExpTerm(t_1) x ... x minExpTerm(t_n) }
where t_1, ..., t_n are the given argument terms.
3. For each element of this set compute the set of possible profiles
(as described on p. 339).
4. Define an equivalence relation ~ on these profiles
(as described on p. 339).
5. Separate each profile into equivalence classes by the relation ~
and take the unification of these sets.
6. Minimize each element of this unified set (as described on p. 339).
7. Transform each term in the minimized set into a qualified function
application term.
---------------------------------------------------------- -}
minExpTermOp :: (FormExtension f, TermExtension f)
=> Min f e -> Sign f e -> OP_SYMB -> [TERM f] -> Range -> Result [[TERM f]]
minExpTermOp mef sign osym args pos = case osym of
Op_name ide@(Id ts _ _) ->
let res = minExpTermAppl mef sign ide Nothing args pos in
if null args && isSimpleId ide then
let vars = minExpTermVar sign (head ts) Nothing
in if null vars then res else
case maybeResult res of
Nothing -> return vars
Just ops -> return $ ops ++ vars
else res
Qual_op_name ide ty pos1 ->
if length args /= length (args_OP_TYPE ty) then
mkError "type qualification does not match number of arguments" ide
else minExpTermAppl mef sign ide (Just $ toOpType ty) args
(pos1 `appRange` pos)
{- ----------------------------------------------------------
- Minimal expansion of a conditional -
Expand a conditional by typing information (see minExpTermEq)
First expand the subterms and subformula. Then calculate a profile
P(C_1, C_2) for each (C_1, C_2) \in minExpTerm(t1) x minExpTerm(t_2).
Separate these profiles into equivalence classes and take the
unification of all these classes. Minimize each equivalence class.
Finally transform the eq. classes into lists of
conditionals with equally sorted terms.
---------------------------------------------------------- -}
minExpTermCond :: (FormExtension f, TermExtension f)
=> Min f e -> Sign f e -> (TERM f -> TERM f -> a) -> TERM f -> TERM f
-> Range -> Result ([[a]], String)
minExpTermCond mef sign f term1 term2 pos = do
pairs <- minExpTermEq mef sign term1 term2
let (lhs, rhs) = unzip $ concat pairs
mins = keepMinimals sign id . map sortOfTerm
ds = "\n" ++ showSort (mins lhs) ++ "on the lhs but\n"
++ showSort (mins rhs) ++ "on the rhs."
return (map (concatMap ( \ (t1, t2) ->
let s1 = sortOfTerm t1
s2 = sortOfTerm t2
in map ( \ s -> f (mkSorted sign t1 s pos)
(mkSorted sign t2 s pos))
$ minimalSupers sign s1 s2)) pairs, ds)
{- ----------------------------------------------------------
Let P be a set of equivalence classes of qualified terms.
For each C \in P, let C' choose _one_
t:s \in C for each s minimal such that t:s \in C.
That is, discard all terms whose sort is a supersort of
any other term in the same equivalence class.
---------------------------------------------------------- -}
minimizeOp :: Sign f e -> [(OpType, [TERM f], a)] -> [(OpType, [TERM f], a)]
minimizeOp sign = keepMinimals sign (opRes . \ (a, _, _) -> a)
-- | the (possibly incomplete) list of supersorts common to both sorts
commonSupersorts :: Bool -> Sign f e -> SORT -> SORT -> [SORT]
commonSupersorts b sign s1 s2 =
if s1 == s2 then [s1] else
let l1 = supersortsOf s1 sign
l2 = supersortsOf s2 sign in
if Set.member s2 l1 then if b then [s2] else [s1] else
if Set.member s1 l2 then if b then [s1] else [s2] else
Set.toList $ if b then Set.intersection l1 l2
else Set.intersection (subsortsOf s1 sign)
$ subsortsOf s2 sign
-- | True if both sorts have a common supersort
haveCommonSupersorts :: Bool -> Sign f e -> SORT -> SORT -> Bool
haveCommonSupersorts b s s1 = not . null . commonSupersorts b s s1
-- | True if both sorts have a common subsort
haveCommonSubsorts :: Sign f e -> SORT -> SORT -> Bool
haveCommonSubsorts = haveCommonSupersorts False
-- | if True test if s1 > s2
geqSort :: Bool -> Sign f e -> SORT -> SORT -> Bool
geqSort b sign s1 s2 = s1 == s2 || let rel = sortRel sign in
if b then Rel.member s2 s1 rel else Rel.member s1 s2 rel
-- | test if s1 < s2
leqSort :: Sign f e -> SORT -> SORT -> Bool
leqSort = geqSort False
-- | minimal common supersorts of the two input sorts
minimalSupers :: Sign f e -> SORT -> SORT -> [SORT]
minimalSupers = minimalSupers1 True
minimalSupers1 :: Bool -> Sign f e -> SORT -> SORT -> [SORT]
minimalSupers1 b s s1 = keepMinimals1 b s id . commonSupersorts b s s1
-- | maximal common subsorts of the two input sorts
maximalSubs :: Sign f e -> SORT -> SORT -> [SORT]
maximalSubs = minimalSupers1 False
-- | only keep elements with minimal (and different) sorts
keepMinimals :: Sign f e -> (a -> SORT) -> [a] -> [a]
keepMinimals = keepMinimals1 True
keepMinimals1 :: Bool -> Sign f e -> (a -> SORT) -> [a] -> [a]
keepMinimals1 b s f = let lt x y = geqSort b s (f y) (f x) in keepMins lt
-- | True if both ops are in the overloading relation
leqF :: Sign f e -> OpType -> OpType -> Bool
leqF sign o1 o2 = length (opArgs o1) == length (opArgs o2) && leqF' sign o1 o2
leqF' :: Sign f e -> OpType -> OpType -> Bool
leqF' sign o1 o2 = haveCommonSupersorts True sign (opRes o1) (opRes o2) &&
and (zipWith (haveCommonSubsorts sign) (opArgs o1) (opArgs o2))
-- | True if both preds are in the overloading relation
leqP :: Sign f e -> PredType -> PredType -> Bool
leqP sign p1 p2 = length (predArgs p1) == length (predArgs p2)
&& leqP' sign p1 p2
leqP' :: Sign f e -> PredType -> PredType -> Bool
leqP' sign p1 =
and . zipWith (haveCommonSubsorts sign) (predArgs p1) . predArgs
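-- | Compare two sorts in the subsort preorder: 'EQ' for equal (or mutually
-- related) sorts, 'LT' if the first is a subsort of the second, 'GT' for the
-- converse, and 'Nothing' if they are unrelated.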
cmpSubsort :: Sign f e -> POrder SORT
cmpSubsort sign s1 s2 =
if s1 == s2 then Just EQ else
let l1 = supersortsOf s1 sign
l2 = supersortsOf s2 sign
b = Set.member s1 l2 in
if Set.member s2 l1 then
Just $ if b then EQ else LT
else if b then Just GT else Nothing
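-- | Lift 'cmpSubsort' pointwise to sort lists: 'EQ' positions are neutral;
-- the result is the common ordering of the remaining positions, or 'Nothing'
-- if they disagree or any position is incomparable.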
cmpSubsorts :: Sign f e -> POrder [SORT]
cmpSubsorts sign l1 l2 =
let l = zipWith (cmpSubsort sign) l1 l2
in if null l then Just EQ else foldr1
( \ c1 c2 -> if c1 == c2 then c1 else case (c1, c2) of
(Just EQ, _) -> c2
(_, Just EQ) -> c1
_ -> Nothing) l
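-- | Arrange a list into ranks according to the partial order induced by
-- 'cmpSubsorts' on the given key, and concatenate the ranks.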
pSortBy :: (a -> [SORT]) -> Sign f e -> [a] -> [a]
pSortBy f sign = let pOrd a b = cmpSubsorts sign (f a) (f b)
in concat . rankBy pOrd
| keithodulaigh/Hets | CASL/Overload.hs | gpl-2.0 | 23,545 | 0 | 27 | 6,358 | 7,348 | 3,675 | 3,673 | 398 | 15 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Route53.DeleteHealthCheck
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This action deletes a health check. To delete a health check, send a 'DELETE'
-- request to the @2013-04-01\/healthcheck\/{health check ID}@ resource.
--
-- You can delete a health check only if there are no resource record sets
-- associated with this health check. If resource record sets are associated
-- with this health check, you must disassociate them before you can delete your
-- health check. If you try to delete a health check that is associated with
-- resource record sets, Route 53 will deny your request with a 'HealthCheckInUse'
-- error. For information about disassociating the records from your health
-- check, see 'ChangeResourceRecordSets'.
--
-- <http://docs.aws.amazon.com/Route53/latest/APIReference/API_DeleteHealthCheck.html>
module Network.AWS.Route53.DeleteHealthCheck
(
-- * Request
DeleteHealthCheck
-- ** Request constructor
, deleteHealthCheck
-- ** Request lenses
, dhcHealthCheckId
-- * Response
, DeleteHealthCheckResponse
-- ** Response constructor
, deleteHealthCheckResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.RestXML
import Network.AWS.Route53.Types
import qualified GHC.Exts
newtype DeleteHealthCheck = DeleteHealthCheck
{ _dhcHealthCheckId :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DeleteHealthCheck' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dhcHealthCheckId' @::@ 'Text'
--
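-- With @OverloadedStrings@ a request value might be built as, for example
-- (illustrative health-check id):
--
-- > deleteHealthCheck "abcdef01-2345-6789-abcd-ef0123456789"
--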
deleteHealthCheck :: Text -- ^ 'dhcHealthCheckId'
-> DeleteHealthCheck
deleteHealthCheck p1 = DeleteHealthCheck
{ _dhcHealthCheckId = p1
}
-- | The ID of the health check to delete.
dhcHealthCheckId :: Lens' DeleteHealthCheck Text
dhcHealthCheckId = lens _dhcHealthCheckId (\s a -> s { _dhcHealthCheckId = a })
data DeleteHealthCheckResponse = DeleteHealthCheckResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeleteHealthCheckResponse' constructor.
deleteHealthCheckResponse :: DeleteHealthCheckResponse
deleteHealthCheckResponse = DeleteHealthCheckResponse
instance ToPath DeleteHealthCheck where
toPath DeleteHealthCheck{..} = mconcat
[ "/2013-04-01/healthcheck/"
, toText _dhcHealthCheckId
]
instance ToQuery DeleteHealthCheck where
toQuery = const mempty
instance ToHeaders DeleteHealthCheck
instance ToXMLRoot DeleteHealthCheck where
toXMLRoot = const (namespaced ns "DeleteHealthCheck" [])
instance ToXML DeleteHealthCheck
instance AWSRequest DeleteHealthCheck where
type Sv DeleteHealthCheck = Route53
type Rs DeleteHealthCheck = DeleteHealthCheckResponse
request = delete
response = nullResponse DeleteHealthCheckResponse
| romanb/amazonka | amazonka-route53/gen/Network/AWS/Route53/DeleteHealthCheck.hs | mpl-2.0 | 3,744 | 0 | 9 | 755 | 372 | 228 | 144 | 49 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Dealing with the 00-index file and all its cabal files.
module Stack.PackageIndex
( updateAllIndices
, getPackageCaches
) where
import qualified Codec.Archive.Tar as Tar
import Control.Exception (Exception)
import Control.Exception.Enclosed (tryIO)
import Control.Monad (unless, when, liftM)
import Control.Monad.Catch (MonadThrow, throwM, MonadCatch)
import qualified Control.Monad.Catch as C
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Logger (MonadLogger, logDebug,
logInfo, logWarn)
import Control.Monad.Reader (asks)
import Control.Monad.Trans.Control
import Data.Aeson.Extended
import Data.Binary.VersionTagged
import qualified Data.ByteString.Lazy as L
import Data.Conduit (($$), (=$))
import Data.Conduit.Binary (sinkHandle,
sourceHandle)
import Data.Conduit.Zlib (ungzip)
import Data.Foldable (forM_)
import Data.Int (Int64)
import Data.Map (Map)
import qualified Data.Map.Strict as Map
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Unsafe (unsafeTail)
import Data.Traversable (forM)
import Data.Typeable (Typeable)
import Network.HTTP.Download
import Path (mkRelDir, parent,
parseRelDir, toFilePath,
parseAbsFile, (</>))
import Path.IO
import Prelude -- Fix AMP warning
import Stack.Types
import Stack.Types.StackT
import System.FilePath (takeBaseName, (<.>))
import System.IO (IOMode (ReadMode, WriteMode),
withBinaryFile)
import System.Process.Read (readInNull, readProcessNull, ReadProcessException(..),
EnvOverride, doesExecutableExist)
-- | Populate the package index caches and return them.
populateCache
:: (MonadIO m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
=> EnvOverride
-> PackageIndex
-> m (Map PackageIdentifier PackageCache)
populateCache menv index = do
requireIndex menv index
-- This uses full on lazy I/O instead of ResourceT to provide some
-- protections. Caveat emptor
path <- configPackageIndex (indexName index)
let loadPIS = do
$logSticky "Populating index cache ..."
lbs <- liftIO $ L.readFile $ Path.toFilePath path
loop 0 Map.empty (Tar.read lbs)
pis <- loadPIS `C.catch` \e -> do
$logWarn $ "Exception encountered when parsing index tarball: "
<> T.pack (show (e :: Tar.FormatError))
$logWarn "Automatically updating index and trying again"
updateIndex menv index
loadPIS
when (indexRequireHashes index) $ forM_ (Map.toList pis) $ \(ident, pc) ->
case pcDownload pc of
Just _ -> return ()
Nothing -> throwM $ MissingRequiredHashes (indexName index) ident
$logStickyDone "Populated index cache."
return pis
where
loop !blockNo !m (Tar.Next e es) =
loop (blockNo + entrySizeInBlocks e) (goE blockNo m e) es
loop _ m Tar.Done = return m
loop _ _ (Tar.Fail e) = throwM e
goE blockNo m e =
case Tar.entryContent e of
Tar.NormalFile lbs size ->
case parseNameVersion $ Tar.entryPath e of
Just (ident, ".cabal") -> addCabal ident size
Just (ident, ".json") -> addJSON ident lbs
_ -> m
_ -> m
where
addCabal ident size = Map.insertWith
(\_ pcOld -> pcNew { pcDownload = pcDownload pcOld })
ident
pcNew
m
where
pcNew = PackageCache
{ pcOffset = (blockNo + 1) * 512
, pcSize = size
, pcDownload = Nothing
}
addJSON ident lbs =
case decode lbs of
Nothing -> m
Just !pd -> Map.insertWith
(\_ pc -> pc { pcDownload = Just pd })
ident
PackageCache
{ pcOffset = 0
, pcSize = 0
, pcDownload = Just pd
}
m
breakSlash x
| T.null z = Nothing
| otherwise = Just (y, unsafeTail z)
where
(y, z) = T.break (== '/') x
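    -- Index entry paths look like @pkg-name/1.2.3/pkg-name.cabal@ or
    -- @pkg-name/1.2.3/pkg-name.json@ (illustrative); split out the package
    -- identifier and return it together with the file suffix.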
parseNameVersion t1 = do
(p', t3) <- breakSlash
$ T.map (\c -> if c == '\\' then '/' else c)
$ T.pack t1
p <- parsePackageName p'
(v', t5) <- breakSlash t3
v <- parseVersion v'
let (t6, suffix) = T.break (== '.') t5
if t6 == p'
then return (PackageIdentifier p v, suffix)
else Nothing
data PackageIndexException
= GitNotAvailable IndexName
| MissingRequiredHashes IndexName PackageIdentifier
deriving Typeable
instance Exception PackageIndexException
instance Show PackageIndexException where
show (GitNotAvailable name) = concat
[ "Package index "
, T.unpack $ indexNameText name
, " only provides Git access, and you do not have"
, " the git executable on your PATH"
]
show (MissingRequiredHashes name ident) = concat
[ "Package index "
, T.unpack $ indexNameText name
, " is configured to require package hashes, but no"
, " hash is available for "
, packageIdentifierString ident
]
-- | Require that an index be present, updating if it isn't.
requireIndex :: (MonadIO m,MonadLogger m
,MonadReader env m,HasHttpManager env
,HasConfig env,MonadBaseControl IO m,MonadCatch m)
=> EnvOverride
-> PackageIndex
-> m ()
requireIndex menv index = do
tarFile <- configPackageIndex $ indexName index
exists <- doesFileExist tarFile
unless exists $ updateIndex menv index
-- | Update all of the package indices
updateAllIndices
:: (MonadIO m,MonadLogger m
,MonadReader env m,HasHttpManager env
,HasConfig env,MonadBaseControl IO m, MonadCatch m)
=> EnvOverride
-> m ()
updateAllIndices menv =
asks (configPackageIndices . getConfig) >>= mapM_ (updateIndex menv)
-- | Update the index tarball
updateIndex :: (MonadIO m,MonadLogger m
,MonadReader env m,HasHttpManager env
,HasConfig env,MonadBaseControl IO m, MonadCatch m)
=> EnvOverride
-> PackageIndex
-> m ()
updateIndex menv index =
do let name = indexName index
logUpdate mirror = $logSticky $ "Updating package index " <> indexNameText (indexName index) <> " (mirrored at " <> mirror <> ") ..."
git <- isGitInstalled menv
case (git, indexLocation index) of
(True, ILGit url) -> logUpdate url >> updateIndexGit menv name index url
(True, ILGitHttp url _) -> logUpdate url >> updateIndexGit menv name index url
(_, ILHttp url) -> logUpdate url >> updateIndexHTTP name index url
(False, ILGitHttp _ url) -> logUpdate url >> updateIndexHTTP name index url
(False, ILGit url) -> logUpdate url >> throwM (GitNotAvailable name)
-- | Update the index Git repo and the index tarball
updateIndexGit :: (MonadIO m,MonadLogger m,MonadReader env m,HasConfig env,MonadBaseControl IO m, MonadCatch m)
=> EnvOverride
-> IndexName
-> PackageIndex
-> Text -- ^ Git URL
-> m ()
updateIndexGit menv indexName' index gitUrl = do
tarFile <- configPackageIndex indexName'
let idxPath = parent tarFile
ensureDir idxPath
do
repoName <- parseRelDir $ takeBaseName $ T.unpack gitUrl
let cloneArgs =
["clone"
,T.unpack gitUrl
,toFilePath repoName
,"--depth"
,"1"
,"-b" --
,"display"]
sDir <- configPackageIndexRoot indexName'
let suDir =
sDir </>
$(mkRelDir "git-update")
acfDir = suDir </> repoName
repoExists <- doesDirExist acfDir
unless repoExists
(readInNull suDir "git" menv cloneArgs Nothing)
$logSticky "Fetching package index ..."
readProcessNull (Just acfDir) menv "git" ["fetch","--tags","--depth=1"] `C.catch` \(ex :: ReadProcessException) -> do
-- we failed, so wipe the directory and try again, see #1418
$logWarn (T.pack (show ex))
$logStickyDone "Failed to fetch package index, retrying."
removeDirRecur acfDir
readInNull suDir "git" menv cloneArgs Nothing
$logSticky "Fetching package index ..."
readInNull acfDir "git" menv ["fetch","--tags","--depth=1"] Nothing
$logStickyDone "Fetched package index."
ignoringAbsence (removeFile tarFile)
when (indexGpgVerify index)
(readInNull acfDir
"git"
menv
["tag","-v","current-hackage"]
(Just (T.unlines ["Signature verification failed. "
,"Please ensure you've set up your"
,"GPG keychain to accept the D6CF60FD signing key."
,"For more information, see:"
,"https://github.com/fpco/stackage-update#readme"])))
$logDebug ("Exporting a tarball to " <>
(T.pack . toFilePath) tarFile)
deleteCache indexName'
let tarFileTmp = toFilePath tarFile ++ ".tmp"
readInNull acfDir
"git"
menv
["archive"
,"--format=tar"
,"-o"
,tarFileTmp
,"current-hackage"]
Nothing
tarFileTmpPath <- parseAbsFile tarFileTmp
renameFile tarFileTmpPath tarFile
-- | Update the index tarball via HTTP
updateIndexHTTP :: (MonadIO m,MonadLogger m
,MonadThrow m,MonadReader env m,HasHttpManager env,HasConfig env)
=> IndexName
-> PackageIndex
-> Text -- ^ url
-> m ()
updateIndexHTTP indexName' index url = do
req <- parseUrl $ T.unpack url
$logInfo ("Downloading package index from " <> url)
gz <- configPackageIndexGz indexName'
tar <- configPackageIndex indexName'
wasDownloaded <- redownload req gz
toUnpack <-
if wasDownloaded
then return True
else not `liftM` doesFileExist tar
when toUnpack $ do
let tmp = toFilePath tar <.> "tmp"
tmpPath <- parseAbsFile tmp
deleteCache indexName'
liftIO $ do
withBinaryFile (toFilePath gz) ReadMode $ \input ->
withBinaryFile tmp WriteMode $ \output ->
sourceHandle input
$$ ungzip
=$ sinkHandle output
renameFile tmpPath tar
when (indexGpgVerify index)
$ $logWarn
$ "You have enabled GPG verification of the package index, " <>
"but GPG verification only works with Git downloading"
-- | Is the git executable installed?
isGitInstalled :: MonadIO m
=> EnvOverride
-> m Bool
isGitInstalled = flip doesExecutableExist "git"
-- | Delete the package index cache
deleteCache :: (MonadIO m, MonadReader env m, HasConfig env, MonadLogger m, MonadThrow m) => IndexName -> m ()
deleteCache indexName' = do
fp <- configPackageIndexCache indexName'
eres <- liftIO $ tryIO $ removeFile fp
case eres of
Left e -> $logDebug $ "Could not delete cache: " <> T.pack (show e)
Right () -> $logDebug $ "Deleted index cache at " <> T.pack (toFilePath fp)
-- | Load the cached package URLs, or create the cache if necessary.
getPackageCaches :: (MonadIO m, MonadLogger m, MonadReader env m, HasConfig env, MonadThrow m, HasHttpManager env, MonadBaseControl IO m, MonadCatch m)
=> EnvOverride
-> m (Map PackageIdentifier (PackageIndex, PackageCache))
getPackageCaches menv = do
config <- askConfig
liftM mconcat $ forM (configPackageIndices config) $ \index -> do
fp <- configPackageIndexCache (indexName index)
PackageCacheMap pis' <- taggedDecodeOrLoad fp $ liftM PackageCacheMap $ populateCache menv index
return (fmap (index,) pis')
--------------- Lifted from cabal-install, Distribution.Client.Tar:
-- | Return the number of blocks in an entry.
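--
-- For example, a 600-byte 'Tar.NormalFile' occupies one header block plus two
-- 512-byte data blocks, i.e. 3 blocks in total.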
entrySizeInBlocks :: Tar.Entry -> Int64
entrySizeInBlocks entry = 1 + case Tar.entryContent entry of
Tar.NormalFile _ size -> bytesToBlocks size
Tar.OtherEntryType _ _ size -> bytesToBlocks size
_ -> 0
where
bytesToBlocks s = 1 + ((fromIntegral s - 1) `div` 512)
| harendra-kumar/stack | src/Stack/PackageIndex.hs | bsd-3-clause | 14,468 | 0 | 19 | 5,224 | 3,311 | 1,688 | 1,623 | 302 | 10 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.Init
( findCabalFiles
, initProject
, InitOpts (..)
, SnapPref (..)
, Method (..)
, makeConcreteResolver
) where
import Control.Exception (assert)
import Control.Exception.Enclosed (handleIO, catchAny)
import Control.Monad (liftM, when)
import Control.Monad.Catch (MonadMask, throwM, MonadThrow)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.IntMap as IntMap
import Data.List (isSuffixOf,sort)
import Data.List.Extra (nubOrd)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (mapMaybe)
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import qualified Distribution.PackageDescription as C
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.Find
import Path.IO
import Stack.BuildPlan
import Stack.Constants
import Stack.Package
import Stack.Solver
import Stack.Types
import System.Directory (getDirectoryContents)
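-- | Find all .cabal files under the given directory, recursing into
-- subdirectories when the flag is set but never into the special directories
-- listed in 'ignoredDirs'.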
findCabalFiles :: MonadIO m => Bool -> Path Abs Dir -> m [Path Abs File]
findCabalFiles recurse dir =
liftIO $ findFiles dir isCabal (\subdir -> recurse && not (isIgnored subdir))
where
isCabal path = ".cabal" `isSuffixOf` toFilePath path
isIgnored path = toFilePath (dirname path) `Set.member` ignoredDirs
-- | Special directories that we don't want to traverse for .cabal files
ignoredDirs :: Set FilePath
ignoredDirs = Set.fromList
[ ".git"
, "dist"
, ".stack-work"
]
-- | Generate stack.yaml
initProject :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m, MonadBaseControl IO m)
=> Path Abs Dir
-> InitOpts
-> m ()
initProject currDir initOpts = do
let dest = currDir </> stackDotYaml
dest' = toFilePath dest
exists <- fileExists dest
when (not (forceOverwrite initOpts) && exists) $
error ("Refusing to overwrite existing stack.yaml, " <>
"please delete before running stack init " <>
"or if you are sure use \"--force\"")
cabalfps <- findCabalFiles (includeSubDirs initOpts) currDir
$logInfo $ "Writing default config file to: " <> T.pack dest'
$logInfo $ "Basing on cabal files:"
mapM_ (\path -> $logInfo $ "- " <> T.pack (toFilePath path)) cabalfps
$logInfo ""
when (null cabalfps) $ error "In order to init, you should have an existing .cabal file. Please try \"stack new\" instead"
(warnings,gpds) <- fmap unzip (mapM readPackageUnresolved cabalfps)
sequence_ (zipWith (mapM_ . printCabalFileWarning) cabalfps warnings)
(r, flags, extraDeps) <- getDefaultResolver cabalfps gpds initOpts
let p = Project
{ projectPackages = pkgs
, projectExtraDeps = extraDeps
, projectFlags = flags
, projectResolver = r
}
pkgs = map toPkg cabalfps
toPkg fp = PackageEntry
{ peValidWanted = Nothing
, peExtraDepMaybe = Nothing
, peLocation = PLFilePath $
case stripDir currDir $ parent fp of
Nothing
| currDir == parent fp -> "."
| otherwise -> assert False $ toFilePath $ parent fp
Just rel -> toFilePath rel
, peSubdirs = []
}
$logInfo $ "Selected resolver: " <> resolverName r
liftIO $ Yaml.encodeFile dest' p
$logInfo $ "Wrote project config to: " <> T.pack dest'
getSnapshots' :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m, MonadBaseControl IO m)
=> m (Maybe Snapshots)
getSnapshots' =
liftM Just getSnapshots `catchAny` \e -> do
$logError $
"Unable to download snapshot list, and therefore could " <>
"not generate a stack.yaml file automatically"
$logError $
"This sometimes happens due to missing Certificate Authorities " <>
"on your system. For more information, see:"
$logError ""
$logError " https://github.com/commercialhaskell/stack/issues/234"
$logError ""
$logError "You can try again, or create your stack.yaml file by hand. See:"
$logError ""
$logError " https://github.com/commercialhaskell/stack/wiki/stack.yaml"
$logError ""
$logError $ "Exception was: " <> T.pack (show e)
return Nothing
-- | Get the default resolver value
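--
-- Descriptive summary of the cases below: pick a recommended snapshot that
-- can accommodate the given packages, use an explicitly requested resolver,
-- or fall back to running the cabal solver.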
getDefaultResolver :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m, MonadBaseControl IO m)
=> [Path Abs File] -- ^ cabal files
-> [C.GenericPackageDescription] -- ^ cabal descriptions
-> InitOpts
-> m (Resolver, Map PackageName (Map FlagName Bool), Map PackageName Version)
getDefaultResolver cabalfps gpds initOpts =
case ioMethod initOpts of
MethodSnapshot snapPref -> do
msnapshots <- getSnapshots'
names <-
case msnapshots of
Nothing -> return []
Just snapshots -> getRecommendedSnapshots snapshots snapPref
mpair <- findBuildPlan gpds names
case mpair of
Just (snap, flags) ->
return (ResolverSnapshot snap, flags, Map.empty)
Nothing -> throwM $ NoMatchingSnapshot names
MethodResolver aresolver -> do
resolver <- makeConcreteResolver aresolver
mpair <-
case resolver of
ResolverSnapshot name -> findBuildPlan gpds [name]
ResolverCompiler _ -> return Nothing
ResolverCustom _ _ -> return Nothing
case mpair of
Just (snap, flags) ->
return (ResolverSnapshot snap, flags, Map.empty)
Nothing -> return (resolver, Map.empty, Map.empty)
MethodSolver -> do
(compilerVersion, extraDeps) <- cabalSolver Ghc (map parent cabalfps) Map.empty []
return
( ResolverCompiler compilerVersion
, Map.filter (not . Map.null) $ fmap snd extraDeps
, fmap fst extraDeps
)
getRecommendedSnapshots :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m, MonadBaseControl IO m)
=> Snapshots
-> SnapPref
-> m [SnapName]
getRecommendedSnapshots snapshots pref = do
-- Get the most recent LTS and Nightly in the snapshots directory and
-- prefer them over anything else, since odds are high that something
-- already exists for them.
existing <-
liftM (reverse . sort . mapMaybe (parseSnapName . T.pack)) $
snapshotsDir >>=
liftIO . handleIO (const $ return [])
. getDirectoryContents . toFilePath
let isLTS LTS{} = True
isLTS Nightly{} = False
isNightly Nightly{} = True
isNightly LTS{} = False
names = nubOrd $ concat
[ take 2 $ filter isLTS existing
, take 2 $ filter isNightly existing
, map (uncurry LTS)
(take 2 $ reverse $ IntMap.toList $ snapshotsLts snapshots)
, [Nightly $ snapshotsNightly snapshots]
]
namesLTS = filter isLTS names
namesNightly = filter isNightly names
case pref of
PrefNone -> return names
PrefLTS -> return $ namesLTS ++ namesNightly
PrefNightly -> return $ namesNightly ++ namesLTS
data InitOpts = InitOpts
{ ioMethod :: !Method
-- ^ Preferred snapshots
, forceOverwrite :: Bool
-- ^ Overwrite existing files
, includeSubDirs :: Bool
-- ^ If True, include all .cabal files found in any sub directories
}
data SnapPref = PrefNone | PrefLTS | PrefNightly
-- | Method of initializing
data Method = MethodSnapshot SnapPref | MethodResolver AbstractResolver | MethodSolver
-- | Turn an 'AbstractResolver' into a 'Resolver'.
makeConcreteResolver :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m, HasHttpManager env, MonadLogger m)
=> AbstractResolver
-> m Resolver
makeConcreteResolver (ARResolver r) = return r
makeConcreteResolver ar = do
snapshots <- getSnapshots
r <-
case ar of
ARResolver r -> assert False $ return r
ARGlobal -> do
stackRoot <- asks $ configStackRoot . getConfig
let fp = implicitGlobalDir stackRoot </> stackDotYaml
(ProjectAndConfigMonoid project _, _warnings) <-
liftIO (Yaml.decodeFileEither $ toFilePath fp)
>>= either throwM return
return $ projectResolver project
ARLatestNightly -> return $ ResolverSnapshot $ Nightly $ snapshotsNightly snapshots
ARLatestLTSMajor x ->
case IntMap.lookup x $ snapshotsLts snapshots of
Nothing -> error $ "No LTS release found with major version " ++ show x
Just y -> return $ ResolverSnapshot $ LTS x y
ARLatestLTS
| IntMap.null $ snapshotsLts snapshots -> error $ "No LTS releases found"
| otherwise ->
let (x, y) = IntMap.findMax $ snapshotsLts snapshots
in return $ ResolverSnapshot $ LTS x y
$logInfo $ "Selected resolver: " <> resolverName r
return r
| adinapoli/stack | src/Stack/Init.hs | bsd-3-clause | 10,453 | 0 | 20 | 3,517 | 2,439 | 1,232 | 1,207 | 207 | 8 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import qualified CV.Matrix as M
import CV.Transforms
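-- Estimate homographies from a handful of 2D points to translated (+100,+100)
-- and scaled (*100) copies of them using Ransac, plus the identity case using
-- LMeds, all with a method parameter of 0.1.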
main = do
let mat = M.fromList (5,2) . concatMap (\(x,y) -> [x,y])
source = [(1,1)
,(1,2)
,(2,1)
,(5,5)
,(2,4)]
tr = map (\(x,y) -> (x+100,y+100)) source
sc = map (\(x,y) -> (x*100,y*100)) source
print (getHomography' (mat source) (mat $ tr ) Ransac 0.1)
print (getHomography' (mat source) (mat $ sc ) Ransac 0.1)
print (getHomography' (mat source) (mat source) LMeds 0.1)
| TomMD/CV | examples/homography.hs | bsd-3-clause | 569 | 0 | 14 | 164 | 288 | 163 | 125 | 16 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
#if __GLASGOW_HASKELL__ >= 800
{-# OPTIONS_GHC -Wno-redundant-constraints #-}
#endif
-- | Execute commands within the properly configured Stack
-- environment.
module Stack.Exec where
import Control.Monad.Reader
import Control.Monad.Logger
import Control.Monad.Trans.Control (MonadBaseControl)
import Stack.Types.Config
import System.Process.Log
import Control.Exception.Lifted
import Data.Streaming.Process (ProcessExitedUnsuccessfully(..))
import System.Exit
import System.IO (stderr, stdin, stdout, hSetBuffering, BufferMode(..))
import System.Process.Run (callProcess, callProcessObserveStdout, Cmd(..))
#ifdef WINDOWS
import System.Process.Read (EnvOverride)
#else
import qualified System.Process.PID1 as PID1
import System.Process.Read (EnvOverride, envHelper, preProcess)
#endif
-- | Default @EnvSettings@ which includes locals and GHC_PACKAGE_PATH
defaultEnvSettings :: EnvSettings
defaultEnvSettings = EnvSettings
{ esIncludeLocals = True
, esIncludeGhcPackagePath = True
, esStackExe = True
, esLocaleUtf8 = False
}
-- | Environment settings which do not embellish the environment
plainEnvSettings :: EnvSettings
plainEnvSettings = EnvSettings
{ esIncludeLocals = False
, esIncludeGhcPackagePath = False
, esStackExe = False
, esLocaleUtf8 = False
}
-- | Execute a process within the Stack configured environment.
--
-- Execution will not return, because either:
--
-- 1) On non-windows, execution is taken over by execv of the
-- sub-process. This allows signals to be propagated (#527)
--
-- 2) On windows, an 'ExitCode' exception will be thrown.
exec :: (MonadIO m, MonadLogger m, MonadBaseControl IO m)
=> EnvOverride -> String -> [String] -> m b
#ifdef WINDOWS
exec = execSpawn
#else
exec menv cmd0 args = do
setNoBuffering
cmd <- preProcess Nothing menv cmd0
$withProcessTimeLog cmd args $
liftIO $ PID1.run cmd args (envHelper menv)
#endif
-- | Like 'exec', but does not use 'execv' on non-windows. This way, there
-- is a sub-process, which is helpful in some cases (#1306)
--
-- This function only exits by throwing 'ExitCode'.
execSpawn :: (MonadIO m, MonadLogger m, MonadBaseControl IO m)
=> EnvOverride -> String -> [String] -> m b
execSpawn menv cmd0 args = do
setNoBuffering
e <- $withProcessTimeLog cmd0 args $
try (callProcess (Cmd Nothing cmd0 menv args))
liftIO $ case e of
Left (ProcessExitedUnsuccessfully _ ec) -> exitWith ec
Right () -> exitSuccess
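-- | Like 'execSpawn', but capture and return the process's standard output;
-- a non-zero exit code still terminates the program with that code.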
execObserve :: (MonadIO m, MonadLogger m, MonadBaseControl IO m)
=> EnvOverride -> String -> [String] -> m String
execObserve menv cmd0 args = do
e <- $withProcessTimeLog cmd0 args $
try (callProcessObserveStdout (Cmd Nothing cmd0 menv args))
case e of
Left (ProcessExitedUnsuccessfully _ ec) -> liftIO $ exitWith ec
Right s -> return s
setNoBuffering :: MonadIO m => m ()
setNoBuffering = liftIO $ do
hSetBuffering stdout NoBuffering
hSetBuffering stdin NoBuffering
hSetBuffering stderr NoBuffering
| deech/stack | src/Stack/Exec.hs | bsd-3-clause | 3,290 | 0 | 13 | 700 | 608 | 340 | 268 | 58 | 2 |
module MediaWiki.API.Query.Blocks.Import where
import MediaWiki.API.Utils
import MediaWiki.API.Query.Blocks
import Text.XML.Light.Types
import Control.Monad
import Data.Maybe
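-- | Parse the XML of a blocks query response into a 'BlocksResponse' (or an
-- error).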
stringXml :: String -> Either (String,[{-Error msg-}String]) BlocksResponse
stringXml s = parseDoc xml s
xml :: Element -> Maybe BlocksResponse
xml e = do
guard (elName e == nsName "api")
let es1 = children e
p <- pNode "query" es1
let es = children p
ps <- fmap (mapMaybe xmlB) (fmap children $ pNode "blocks" es)
let cont = pNode "query-continue" es1 >>= xmlContinue "blocks" "blfrom"
return emptyBlocksResponse{bkBlocks=ps,bkContinue=cont}
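-- | Parse a single @block@ element into a 'BlockInfo'.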
xmlB :: Element -> Maybe BlockInfo
xmlB e = do
guard (elName e == nsName "block")
let i = pAttr "id" e
let usr = pAttr "user" e
let by = pAttr "by" e
let ts = pAttr "timestamp" e
let ex = pAttr "expiry" e
let re = pAttr "reason" e
let ras = pAttr "rangestart" e
let rae = pAttr "rangeend" e
let isa = isJust $ pAttr "automatic" e
let isan = isJust $ pAttr "anononly" e
let isnc = isJust $ pAttr "nocreate" e
let isab = isJust $ pAttr "autoblock" e
let isne = isJust $ pAttr "noemail" e
let ishi = isJust $ pAttr "hidden" e
return emptyBlockInfo
{ bkId = i
, bkUser = usr
, bkBy = by
, bkTimestamp = ts
, bkExpiry = ex
, bkReason = re
, bkRangeStart = ras
, bkRangeEnd = rae
, bkIsAuto = isa
, bkIsAnonOnly = isan
, bkIsNoCreate = isnc
, bkIsAutoBlock = isab
, bkIsNoEmail = isne
, bkIsHidden = ishi
}
| HyperGainZ/neobot | mediawiki/MediaWiki/API/Query/Blocks/Import.hs | bsd-3-clause | 1,598 | 1 | 11 | 431 | 563 | 290 | 273 | 49 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Auto.G.BigSum where
import Control.DeepSeq
import Data.Aeson
import GHC.Generics (Generic)
import Options
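-- | A sum type with 25 nullary constructors, used to benchmark generically
-- derived JSON instances.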
data BigSum = F01 | F02 | F03 | F04 | F05
| F06 | F07 | F08 | F09 | F10
| F11 | F12 | F13 | F14 | F15
| F16 | F17 | F18 | F19 | F20
| F21 | F22 | F23 | F24 | F25
deriving (Show, Eq, Generic)
instance NFData BigSum where
rnf a = a `seq` ()
instance ToJSON BigSum where
toJSON = genericToJSON opts
toEncoding = genericToEncoding opts
instance FromJSON BigSum where
parseJSON = genericParseJSON opts
bigSum :: BigSum
bigSum = F25
| dmjio/aeson | benchmarks/bench/Auto/G/BigSum.hs | bsd-3-clause | 638 | 0 | 7 | 175 | 199 | 119 | 80 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -Wno-orphans #-}
module Database.Persist.Redis.Config
( RedisAuth (..)
, RedisConf (..)
, R.RedisCtx
, R.Redis
, R.Connection
, R.PortID (..)
, RedisT
, runRedisPool
, withRedisConn
, thisConnection
, module Database.Persist
) where
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Reader(ReaderT(..))
import Control.Monad.Reader.Class
import Data.Aeson (Value (Object, Number, String), (.:?), (.!=), FromJSON(..))
import qualified Data.ByteString.Char8 as B
import Control.Monad (mzero, MonadPlus(..))
import Data.Scientific() -- we require only RealFrac instance of Scientific
import Data.Text (Text, unpack, pack)
import qualified Database.Redis as R
import Database.Persist
newtype RedisAuth = RedisAuth Text deriving (Eq, Show)
-- | Information required to connect to a Redis server
data RedisConf = RedisConf {
rdHost :: Text, -- ^ Host
rdPort :: R.PortID, -- ^ Port
rdAuth :: Maybe RedisAuth, -- ^ Auth info
rdMaxConn :: Int -- ^ Maximum number of connections
} deriving (Show)
instance FromJSON R.PortID where
parseJSON (Number x) = (return . R.PortNumber . fromInteger . truncate) x
parseJSON _ = fail "persistent Redis: couldn't parse port number"
instance FromJSON RedisAuth where
    parseJSON (String t) = (return . RedisAuth) t
    parseJSON _ = fail "persistent RedisAuth: couldn't parse auth"
-- | Monad reader transformer keeping the Redis connection throughout the work
type RedisT = ReaderT R.Connection
-- | Extracts connection from RedisT monad transformer
thisConnection :: Monad m => RedisT m R.Connection
thisConnection = ask
-- | Run a connection reader function against a Redis configuration
withRedisConn :: (MonadIO m) => RedisConf -> (R.Connection -> m a) -> m a
withRedisConn conf connectionReader = do
conn <- liftIO $ createPoolConfig conf
connectionReader conn
runRedisPool :: RedisT m a -> R.Connection -> m a
runRedisPool r = runReaderT r
instance PersistConfig RedisConf where
type PersistConfigBackend RedisConf = RedisT
type PersistConfigPool RedisConf = R.Connection
loadConfig (Object o) = do
host <- o .:? "host" .!= R.connectHost R.defaultConnectInfo
port <- o .:? "port" .!= R.connectPort R.defaultConnectInfo
mPass <- o .:? "password"
maxConn <- o .:? "maxConn" .!= R.connectMaxConnections R.defaultConnectInfo
return RedisConf {
rdHost = pack host,
rdPort = port,
rdAuth = mPass,
rdMaxConn = maxConn
}
loadConfig _ = mzero
createPoolConfig (RedisConf h p Nothing m) =
R.connect $
R.defaultConnectInfo {
R.connectHost = unpack h,
R.connectPort = p,
R.connectMaxConnections = m
}
createPoolConfig (RedisConf h p (Just (RedisAuth pwd)) m) =
R.connect $
R.defaultConnectInfo {
R.connectHost = unpack h,
R.connectPort = p,
R.connectAuth = Just $ B.pack $ unpack pwd,
R.connectMaxConnections = m
}
runPool _ = runRedisPool
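
-- A minimal usage sketch (illustrative only; the host, port and connection
-- limit below are assumed values, not taken from this module):
--
-- > example :: IO ()
-- > example = do
-- >   let conf = RedisConf "localhost" (R.PortNumber 6379) Nothing 10
-- >   withRedisConn conf $ \conn -> do
-- >     _sameConn <- runRedisPool thisConnection conn
-- >     return ()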
| paul-rouse/persistent | persistent-redis/Database/Persist/Redis/Config.hs | mit | 3,273 | 0 | 12 | 832 | 829 | 466 | 363 | 75 | 1 |
{-# LANGUAGE NumericUnderscores #-}
{-# LANGUAGE BinaryLiterals #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE NegativeLiterals #-}
-- Test for NumericUnderscores extension.
-- See Trac #14473
-- This is a testcase for unboxed literals.
import GHC.Types
main :: IO ()
main = do
-- Each case corresponds to the definition of Lexer.x
--
-- Unboxed ints and words
-- decimal int
print [ (I# 1_000_000#) == 1000000,
(I# 299_792_458#) == 299792458
]
-- binary int
print [ (I# 0b01_0000_0000#) == 0b0100000000,
(I# 0b1_11_01_0000_0_111#) == 0b1110100000111
]
-- octal int
print [ (I# 0o1_000_000#) == 0o1000000,
(I# 0O1__0#) == 0O10
]
-- hexadecimal int
print [ (I# 0x1_000_000#) == 0x1000000,
(I# 0X3fff_ffff#) == 0x3fffffff
]
-- negative decimal int
print [ (I# -1_000_000#) == -1000000
]
-- negative binary int
print [ (I# -0b01_0000_0000#) == -0b0100000000
]
-- negative octal int
print [ (I# -0o1_000_000#) == -0o1000000
]
-- negative hexadecimal int
print [ (I# -0x1_000_000#) == -0x1000000
]
-- decimal word
print [ (W# 1_000_000##) == 1000000,
(W# 299_792_458##) == 299792458
]
-- binary word
print [ (W# 0b1_0##) == 0b10
]
-- octal word
print [ (W# 0o1_0##) == 0o10
]
-- hexadecimal word
print [ (W# 0x1_0##) == 0x10
]
-- Unboxed floats and doubles
-- float
print [ (F# 3.141_592_653_589_793#) == 3.141592653589793,
(F# 3_14e-2#) == 314e-2,
(F# 96_485.332_89#) == 96485.33289,
(F# 6.022_140_857e+23#) == 6.022140857e+23,
(F# -3.141_592#) == -3.141592,
(F# -3_14e-2#) == -314e-2,
(F# -6.022_140e+23#) == -6.022140e+23
]
-- double
print [ (D# 3_14e-2##) == 314e-2,
(D# 96_485.332_89##) == 96485.33289,
(D# 6.022_140_857e+23##) == 6.022140857e+23,
(D# -3.141_592##) == -3.141592,
(D# -3_14e-2##) == -314e-2,
(D# -6.022_140e+23##) == -6.022140e+23
]
| shlevy/ghc | testsuite/tests/parser/should_run/NumericUnderscores1.hs | bsd-3-clause | 2,222 | 0 | 12 | 744 | 560 | 299 | 261 | 37 | 1 |
{-# LANGUAGE CPP #-}
#if !defined(TESTING) && __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Safe #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Map.Lazy
-- Copyright : (c) Daan Leijen 2002
-- (c) Andriy Palamarchuk 2008
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of ordered maps from keys to values
-- (dictionaries).
--
-- API of this module is strict in the keys, but lazy in the values.
-- If you need value-strict maps, use 'Data.Map.Strict' instead.
-- The 'Map' type itself is shared between the lazy and strict modules,
-- meaning that the same 'Map' value can be passed to functions in
-- both modules (although that is rarely needed).
--
-- These modules are intended to be imported qualified, to avoid name
-- clashes with Prelude functions, e.g.
--
-- > import qualified Data.Map.Lazy as Map
--
-- The implementation of 'Map' is based on /size balanced/ binary trees (or
-- trees of /bounded balance/) as described by:
--
-- * Stephen Adams, \"/Efficient sets: a balancing act/\",
-- Journal of Functional Programming 3(4):553-562, October 1993,
-- <http://www.swiss.ai.mit.edu/~adams/BB/>.
--
-- * J. Nievergelt and E.M. Reingold,
-- \"/Binary search trees of bounded balance/\",
-- SIAM journal of computing 2(1), March 1973.
--
-- Note that the implementation is /left-biased/ -- the elements of a
-- first argument are always preferred to the second, for example in
-- 'union' or 'insert'.
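--
-- For example, when both maps contain the same key, the value from the
-- first (left) argument is kept:
--
-- > union (fromList [(5, "a")]) (fromList [(5, "B")]) == fromList [(5, "a")]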
--
-- Operation comments contain the operation time complexity in
-- the Big-O notation (<http://en.wikipedia.org/wiki/Big_O_notation>).
-----------------------------------------------------------------------------
module Data.Map.Lazy (
-- * Strictness properties
-- $strictness
-- * Map type
#if !defined(TESTING)
Map -- instance Eq,Show,Read
#else
Map(..) -- instance Eq,Show,Read
#endif
-- * Operators
, (!), (\\)
-- * Query
, M.null
, size
, member
, notMember
, M.lookup
, findWithDefault
, lookupLT
, lookupGT
, lookupLE
, lookupGE
-- * Construction
, empty
, singleton
-- ** Insertion
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
-- ** Delete\/Update
, delete
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
-- * Combine
-- ** Union
, union
, unionWith
, unionWithKey
, unions
, unionsWith
-- ** Difference
, difference
, differenceWith
, differenceWithKey
-- ** Intersection
, intersection
, intersectionWith
, intersectionWithKey
-- ** Universal combining function
, mergeWithKey
-- * Traversal
-- ** Map
, M.map
, mapWithKey
, traverseWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeys
, mapKeysWith
, mapKeysMonotonic
-- * Folds
, M.foldr
, M.foldl
, foldrWithKey
, foldlWithKey
-- ** Strict folds
, foldr'
, foldl'
, foldrWithKey'
, foldlWithKey'
-- * Conversion
, elems
, keys
, assocs
, keysSet
, fromSet
-- ** Lists
, toList
, fromList
, fromListWith
, fromListWithKey
-- ** Ordered lists
, toAscList
, toDescList
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
-- * Filter
, M.filter
, filterWithKey
, partition
, partitionWithKey
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, split
, splitLookup
-- * Submap
, isSubmapOf, isSubmapOfBy
, isProperSubmapOf, isProperSubmapOfBy
-- * Indexed
, lookupIndex
, findIndex
, elemAt
, updateAt
, deleteAt
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, updateMin
, updateMax
, updateMinWithKey
, updateMaxWithKey
, minView
, maxView
, minViewWithKey
, maxViewWithKey
-- * Debugging
, showTree
, showTreeWith
, valid
#if defined(TESTING)
-- * Internals
, bin
, balanced
, join
, merge
#endif
) where
import Data.Map.Base as M
-- $strictness
--
-- This module satisfies the following strictness property:
--
-- * Key arguments are evaluated to WHNF
--
-- Here are some examples that illustrate the property:
--
-- > insertWith (\ new old -> old) undefined v m == undefined
-- > insertWith (\ new old -> old) k undefined m == OK
-- > delete undefined m == undefined
| technogeeky/d-A | include/containers-0.5.0.0/Data/Map/Lazy.hs | gpl-3.0 | 5,877 | 0 | 5 | 2,386 | 452 | 331 | 121 | 106 | 0 |
import Control.Monad.Fix
import Data.IORef
-- A node of a circular doubly-linked list: a "visited" flag, the previous
-- node, the payload, and the next node.
data N a = N (IORef Bool, N a, a, N a)
newNode :: N a -> a -> N a -> IO (N a)
newNode b c f = do v <- newIORef False
return (N (v, b, c, f))
-- A four-node circular list, tied together with recursive do notation.
ll = mdo n0 <- newNode n3 0 n1
n1 <- newNode n0 1 n2
n2 <- newNode n1 2 n3
n3 <- newNode n2 3 n0
return n0
data Dir = F | B deriving Eq
-- Walk the list in the given direction, marking nodes as visited and
-- collecting payloads until an already-visited node is reached.
traverse :: Dir -> N a -> IO [a]
traverse d (N (v, b, i, f)) =
do visited <- readIORef v
if visited
then return []
else do writeIORef v True
let next = if d == F then f else b
is <- traverse d next
return (i:is)
l2dll :: [a] -> IO (N a)
l2dll (x:xs) = mdo c <- newNode l x f
(f, l) <- l2dll' c xs
return c
l2dll' :: N a -> [a] -> IO (N a, N a)
l2dll' p [] = return (p, p)
l2dll' p (x:xs) = mdo c <- newNode p x f
(f, l) <- l2dll' c xs
return (c, l) | frantisekfarka/ghc-dsi | testsuite/tests/ghci.debugger/mdo.hs | bsd-3-clause | 1,012 | 1 | 14 | 439 | 525 | 257 | 268 | -1 | -1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
-- Utility functions for the test suite.
module Tests.Helpers ( test
, (=?>)
, property
, ToString(..)
, ToPandoc(..)
)
where
import Text.Pandoc.Definition
import Text.Pandoc.Builder (Inlines, Blocks, doc, plain)
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit (assertBool)
import Text.Pandoc.Shared (normalize, trimr)
import Text.Pandoc.Options
import Text.Pandoc.Writers.Native (writeNative)
import qualified Test.QuickCheck.Property as QP
import Data.Algorithm.Diff
import qualified Data.Map as M
test :: (ToString a, ToString b, ToString c)
=> (a -> b) -- ^ function to test
-> String -- ^ name of test case
-> (a, c) -- ^ (input, expected value)
-> Test
test fn name (input, expected) =
testCase name $ assertBool msg (actual' == expected')
where msg = nl ++ dashes "input" ++ nl ++ input' ++ nl ++
dashes "result" ++ nl ++
unlines (map vividize diff) ++
dashes ""
nl = "\n"
input' = toString input
actual' = lines $ toString $ fn input
expected' = lines $ toString expected
diff = getDiff expected' actual'
dashes "" = replicate 72 '-'
dashes x = replicate (72 - length x - 5) '-' ++ " " ++ x ++ " ---"
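
-- An illustrative call site (hypothetical; it relies only on the 'ToString'
-- instance for 'String' defined below):
--
-- > test id "identity" ("hello" =?> "hello")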
vividize :: Diff String -> String
vividize (Both s _) = " " ++ s
vividize (First s) = "- " ++ s
vividize (Second s) = "+ " ++ s
property :: QP.Testable a => TestName -> a -> Test
property = testProperty
infix 5 =?>
(=?>) :: a -> b -> (a,b)
x =?> y = (x, y)
class ToString a where
toString :: a -> String
instance ToString Pandoc where
toString d = writeNative def{ writerStandalone = s } $ toPandoc d
where s = case d of
(Pandoc (Meta m) _)
| M.null m -> False
| otherwise -> True
instance ToString Blocks where
toString = writeNative def . toPandoc
instance ToString Inlines where
toString = trimr . writeNative def . toPandoc
instance ToString String where
toString = id
class ToPandoc a where
toPandoc :: a -> Pandoc
instance ToPandoc Pandoc where
toPandoc = normalize
instance ToPandoc Blocks where
toPandoc = normalize . doc
instance ToPandoc Inlines where
toPandoc = normalize . doc . plain
| gbataille/pandoc | tests/Tests/Helpers.hs | gpl-2.0 | 2,524 | 0 | 15 | 746 | 765 | 416 | 349 | 67 | 2 |
{-# LANGUAGE GADTs, TypeFamilies #-}
module T10562 where
type family Flip a
data QueryRep qtyp a where
QAtom :: a -> QueryRep () a
QOp :: QueryRep (Flip qtyp) a -> QueryRep qtyp a
instance Eq (QueryRep qtyp a) where
(==) = error "urk"
instance (Ord a) => Ord (QueryRep qtyp a) where
compare (QOp a) (QOp b) = a `compare` b
| urbanslug/ghc | testsuite/tests/typecheck/should_compile/T10562.hs | bsd-3-clause | 341 | 0 | 9 | 80 | 137 | 75 | 62 | 10 | 0 |
-- @Author: Zeyuan Shang
-- @Date: 2016-06-01 19:22:05
-- @Last Modified by: Zeyuan Shang
-- @Last Modified time: 2016-06-01 19:25:14
data Tree a = Node a [Tree a]
deriving (Eq, Show)
-- Internal path length of a multiway tree: the sum of the depths of all nodes.
ipl :: Tree a -> Int
ipl = ipl' 0
where ipl' x (Node _ ts) = x + (sum $ map (ipl' (x + 1)) ts)
tree1 = Node 'a' []
tree2 = Node 'a' [Node 'b' []]
tree3 = Node 'a' [Node 'b' [Node 'c' []]]
tree4 = Node 'b' [Node 'd' [], Node 'e' []]
tree5 = Node 'a' [
Node 'f' [Node 'g' []],
Node 'c' [],
Node 'b' [Node 'd' [], Node 'e' []]
]
main = do
let value = ipl tree5
print value | zeyuanxy/haskell-playground | ninety-nine-haskell-problems/vol8/71.hs | mit | 640 | 19 | 12 | 207 | 282 | 146 | 136 | 16 | 1 |
module Ives.ExampleGen.Conc (concretify, send) where
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Control.Monad
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
send :: Name -> Q Exp
send nm = do
return $ VarE nm
preferredTys :: Q [Type]
preferredTys = do
Just intTy <- lookupTypeName "Int"
Just boolTy <- lookupTypeName "Bool"
Just charTy <- lookupTypeName "Char"
return $ map ConT [intTy, boolTy, charTy]
preferredTyCons :: Q [Type]
preferredTyCons = do
Just maybeTy <- lookupTypeName "Maybe"
Just eitherTy <- lookupTypeName "Either"
return $ ListT:(map ConT [maybeTy, eitherTy])
concretify :: Name -> Q Type
concretify nm = do
info <- reify nm
let ty = case info of
VarI _ ty _ _ -> ty
ClassOpI _ ty _ _ -> ty
prefTys <- preferredTys
prefTyCons <- preferredTyCons
let getTy = getType prefTys Map.empty
let getTyCon = getType prefTyCons Map.empty
conc getTy getTyCon ty
-- concrete type generator -> concrete type constructor generator -> function type -> concrete function type
conc :: (Name -> Type) -> (Name -> Type) -> Type -> Q Type
-- replace type variable with a concrete one
conc getTy getTyCon (VarT nm) = conc getTy getTyCon ty
where ty = getTy nm
conc getTy getTyCon (AppT ArrowT ty) = do
newTy <- conc getTy getTyCon ty
return $ AppT ArrowT newTy
conc getTy getTyCon (AppT tyCon tyVar) = do
-- if the type constructor is variable, get a type constructor instead of a type variable
newTyCon <- case tyCon of
VarT nm -> return $ getTyCon nm
otherwise -> conc getTy getTyCon tyCon
newTyVar <- conc getTy getTyCon tyVar
return $ AppT newTyCon newTyVar
conc _ _ (ForallT _ cxt ty) = do
constraints <- processConstraints cxt Map.empty
-- curry getType with preferred types and constraints
prefTys <- preferredTys
prefTyCons <- preferredTyCons
let getTy = getType prefTys constraints
let getTyCon = getType prefTyCons constraints
conc getTy getTyCon ty
conc _ _ ty = return ty
-- Gets type from a map given a name and a list of preferred types
getType :: [Type] -> Map.Map Name (Set.Set Type) -> Name -> Type
getType [] m nm = case Map.lookup nm m of
Just instances -> Set.elemAt (Set.size instances - 1) instances
Nothing -> ListT -- shouldn't ever happen
getType (ty:tys) m nm = case Map.lookup nm m of
Just instances -> if Set.member ty instances then ty else getType tys m nm
Nothing -> ty
-- Cxt = [Pred] = [Type]
processConstraints :: Cxt -> Map.Map Name (Set.Set Type) -> Q (Map.Map Name (Set.Set Type))
processConstraints [] m = return m
processConstraints (AppT (ConT cls) (VarT var):xs) m = do
tys <- getInstances cls
let newM = Map.insertWith Set.intersection var (Set.fromList tys) m
processConstraints xs newM
getInstances :: Name -> Q [Type]
getInstances nm = do
ClassI _ ins <- reify nm
let nms = map (\(InstanceD _ (AppT _ ty) _) -> ty) ins
return nms
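
-- An illustrative example of what 'concretify' yields (hypothetical usage;
-- the result follows from 'preferredTys' and 'preferredTyCons' above): for
-- @map :: (a -> b) -> [a] -> [b]@ every type variable is replaced by the
-- first preferred type, @Int@, giving @(Int -> Int) -> [Int] -> [Int]@.
--
-- > mapTy :: Q Type
-- > mapTy = concretify 'map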
| santolucito/ives | Ives/ExampleGen/Conc.hs | mit | 2,964 | 5 | 16 | 623 | 1,051 | 510 | 541 | 69 | 4 |
module Recursion where
fib :: Int -> Int
fib 0 = 0
fib 1 = 1
fib x = fib (x-1) + fib (x-2)
altMaximum' :: (Ord a) => [a] -> a
altMaximum' [] = error "maximum of empty list"
altMaximum' [x] = x
altMaximum' (x:xs)
| x > maxTail = x
| otherwise = maxTail
where maxTail = altMaximum' xs
maximum' :: (Ord a) => [a] -> a
maximum' [] = error "maximum of empty list"
maximum' [x] = x
maximum' (x:xs) = max x (maximum' xs)
replicate' :: (Num i, Ord i) => i -> a -> [a]
replicate' n element
| n <= 0 = []
| otherwise = element:replicate' (n-1) element
take' :: (Num i, Ord i) => i -> [a] -> [a]
take' n _
| n <= 0 = []
take' _ [] = []
take' n (x:xs) = x : take' (n-1) xs
reverse' :: [a] -> [a]
reverse' [] = []
reverse' (x:xs) = reverse' xs ++ [x]
repeat' :: a -> [a]
repeat' x = x:repeat' x
zip' :: [a] -> [b] -> [(a,b)]
zip' _ [] = []
zip' [] _ = []
zip' (x:xs) (y:ys) = (x,y):zip' xs ys
elem' :: (Eq a) => a -> [a] -> Bool
_ `elem'` [] = False
a `elem'` (x:xs)
| a == x = True
| otherwise = a `elem'` xs
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (x:xs) =
let smallerSorted = quicksort [a | a <- xs, a <= x]
biggerSorted = quicksort [a | a <- xs, a > x]
in smallerSorted ++ [x] ++ biggerSorted
longList :: [Int]
longList = [10,2,5,3,1,6,7,4,2,3,4,8,9]
| bionikspoon/playing-with-haskell---learnyouahaskell | recursion.hs | mit | 1,326 | 0 | 12 | 341 | 835 | 443 | 392 | 47 | 1 |
-- |
-- Module: Math.NumberTheory.Powers.Integer
-- Copyright: (c) 2011-2014 Daniel Fischer
-- Licence: MIT
-- Maintainer: Daniel Fischer <[email protected]>
-- Stability: Provisional
-- Portability: Non-portable (GHC extensions)
--
-- Potentially faster power function for 'Integer' base and 'Int'
-- or 'Word' exponent.
--
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 702
#if __GLASGOW_HASKELL__ >= 704
{-# LANGUAGE Safe #-}
#else
{-# LANGUAGE Trustworthy #-}
#endif
#endif
module Math.NumberTheory.Powers.Integer
{-# DEPRECATED "It is no faster than (^)" #-}
( integerPower
, integerWordPower
) where
#if !MIN_VERSION_base(4,8,0)
import Data.Word
#endif
-- | Power of an 'Integer' by the left-to-right repeated squaring algorithm.
-- This needs two multiplications in each step while the right-to-left
-- algorithm needs only one multiplication for 0-bits, but here the
-- two factors always have approximately the same size, which on average
-- gains a bit when the result is large.
--
-- For small results, it is unlikely to be any faster than '(^)', quite
-- possibly slower (though the difference shouldn't be large), and for
-- exponents with few bits set, the same holds. But for exponents with
-- many bits set, the speedup can be significant.
--
-- /Warning:/ No check for the negativity of the exponent is performed,
-- a negative exponent is interpreted as a large positive exponent.
integerPower :: Integer -> Int -> Integer
integerPower = (^)
{-# DEPRECATED integerPower "Use (^) instead" #-}
-- | Same as 'integerPower', but for exponents of type 'Word'.
integerWordPower :: Integer -> Word -> Integer
integerWordPower = (^)
{-# DEPRECATED integerWordPower "Use (^) instead" #-}
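
-- A sketch of the left-to-right repeated squaring scheme described above,
-- for illustration only: the exported functions simply delegate to '(^)',
-- and no special treatment of negative exponents is attempted here.
_integerPowerSketch :: Integer -> Int -> Integer
_integerPowerSketch b e = foldl step 1 msbFirst
  where
    -- exponent bits, most significant bit first
    msbFirst = reverse (lsbFirst e)
    lsbFirst 0 = []
    lsbFirst n = odd n : lsbFirst (n `quot` 2)
    -- square the accumulator, and multiply in the base when the bit is set
    step acc bit = let sq = acc * acc in if bit then sq * b else sq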
| Bodigrim/integer-logarithms | src/Math/NumberTheory/Powers/Integer.hs | mit | 1,775 | 0 | 6 | 322 | 98 | 75 | 23 | 12 | 1 |
------------------------------------------------------------------------------
-- | This module is where all the routes and handlers are defined for the
-- site. The 'haskitterInit' function is the initializer that combines everything
-- together and is exported by this module.
module Site
( haskitterInit
) where
------------------------------------------------------------------------------
import Control.Applicative
import qualified Data.ByteString as BS
import Snap.Core
import Control.Lens
import Snap
import Snap.Snaplet
import Snap.Snaplet.PostgresqlSimple
import Data.Aeson
------------------------------------------------------------------------------
import Application
import Users
import Posts
import Helpers
import Feed
import Errors
------------------------------------------------------------------------------
-- | The application's routes.
routes :: [(BS.ByteString, AppHandler ())]
routes = [
( "/posts" , method GET $ headersHandler $ runHandler $ genericHandler $ postsIndexHandler )
, ( "/postsWithUser" , method GET $ headersHandler $ runHandler $ genericHandler $ postsWitUserIndexHandler )
, ( "/users" , method GET $ headersHandler $ runHandler $ genericHandler $ usersIndexHandler )
, ( "/user/:id" , method GET $ headersHandler $ runHandler $ genericHandler $ catchHandler $ userIdHandler $ userHandler )
, ( "/feed/:id" , method GET $ headersHandler $ runHandler $ genericHandler $ catchHandler $ userIdHandler $ feedHandler )
, ( "/post" , method POST $ headersHandler $ runHandler $ genericHandler $ catchHandler $ loginHandler $ postHandler )
, ( "/follow" , method POST $ headersHandler $ runHandler $ genericHandler $ catchHandler $ loginHandler $ followHandler )
, ( "/signup" , method POST $ headersHandler $ runHandler $ genericHandler $ catchHandler $ signUpHandler )
, ( "/user/:id" , method DELETE $ headersHandler $ runHandler $ genericHandler $ catchHandler $ loginHandler $ deleteHandler )
]
------------------------------------------------------------------------------
-- | Build a new Haskitter snaplet.
haskitterInit :: SnapletInit Haskitter Haskitter
haskitterInit = makeSnaplet "hashkitterInit" "A simple twitter written in Haskell" Nothing $ do
p <- nestSnaplet "pg" pg pgsInit
addRoutes routes
return $ Haskitter { _pg = p}
headersHandler :: AppHandler () -> AppHandler ()
headersHandler appHandler = do
modifyResponse $ setHeader "Content-Type" "application/json"
appHandler
runHandler :: ExceptT Error AppHandler () -> AppHandler ()
runHandler handler = do
runExceptT handler
return ()
genericHandler :: ToJSON a => ExceptT Error AppHandler a -> ExceptT Error AppHandler ()
genericHandler handler = do
obj <- handler
lift $ writeLBS . encode $ obj
catchHandler :: ExceptT Error AppHandler a -> ExceptT Error AppHandler a
catchHandler handler = handler `catchE` printError
postsIndexHandler :: ExceptT Error AppHandler [Post]
postsIndexHandler = getPosts
postsWitUserIndexHandler :: ExceptT Error AppHandler [PostWithUser]
postsWitUserIndexHandler = getPostsWithUser
usersIndexHandler :: ExceptT Error AppHandler [User]
usersIndexHandler = getUsers
printError :: Error -> ExceptT Error AppHandler a
printError err = do
lift . writeBS . getJSONError $ case err of
    NullId -> "User id is null"
    NoSuchUser -> "User does not exist"
    EmailAlreadyTaken -> "Email already taken"
    NullEmail -> "User email is null"
    NullName -> "User name is null"
    NullPassword -> "User password is null"
    NullPasswordConfirmation -> "User password confirmation is null"
    PasswordConfirmationMissmatch -> "There was a mismatch between user password and user password confirmation"
    NullMessage -> "User message is null"
    NullFollowerId -> "Follower id is null"
    InvalidDelete -> "Invalid delete"
    InvalidFollow -> "Invalid follow"
throwE err
getJSONError :: BS.ByteString -> BS.ByteString
getJSONError error = "{\"error\": \"" `BS.append` error `BS.append` "\"}"
userIdHandler :: (User -> ExceptT Error AppHandler a) -> ExceptT Error AppHandler a
userIdHandler handler = do
user_id <- lift $ getParam "id"
user <- maybe (throwE NullId) (\user_id -> getUserById $ (byteStringToString user_id)) user_id
handler user
userHandler :: User -> ExceptT Error AppHandler User
userHandler user = lift $ return user
-- The parameter mapping decoded from the POST body. Note that Snap only
-- auto-decodes POST request bodies when the request's Content-Type is
-- application/x-www-form-urlencoded. For multipart/form-data use
-- handleFileUploads to decode the POST request and fill this mapping.
-- https://hackage.haskell.org/package/snap-core-0.9.8.0/docs/Snap-Core.html#v:rqPostParams
loginHandler :: (User -> ExceptT Error AppHandler a) -> ExceptT Error AppHandler a
loginHandler handler = do
user_email <- nullCheck NullEmail (lift . return) "user_email"
user_password <- nullCheck NullPassword (lift . return) "user_password"
user <- getUserByEmail $ (byteStringToString user_email)
user <- checkPassword user (byteStringToString user_password)
handler user
feedHandler :: User -> ExceptT Error AppHandler [Post]
feedHandler user = getFollowedPostsByUserId user
postHandler :: User -> ExceptT Error AppHandler Post
postHandler user = do
user_message <- nullCheck NullMessage (lift . return) "user_message"
createPost (byteStringToString user_message) user
followHandler :: User -> ExceptT Error AppHandler Follow
followHandler follower = do
followed_id <- nullCheck NullFollowerId (lift . return) "followed_id"
followed <- getUserById $ (byteStringToString followed_id)
if uid follower == uid followed then throwE InvalidFollow else subscribe follower followed
nullCheck :: Error -> (BS.ByteString -> ExceptT Error AppHandler BS.ByteString) -> BS.ByteString -> ExceptT Error AppHandler BS.ByteString
nullCheck error f object_id = do
maybe_object <- lift $ getParam object_id
maybe (throwE error) f maybe_object
signUpHandler :: ExceptT Error AppHandler User
signUpHandler = do
user_email <- nullCheck NullEmail (lift . return) "user_email"
user_name <- nullCheck NullName (lift . return) "user_name"
user_password <- nullCheck NullPassword (lift . return) "user_password"
user_password_confirmation <- nullCheck NullPasswordConfirmation (lift . return) "user_password_confirmation"
if user_password /= user_password_confirmation
then throwE PasswordConfirmationMissmatch
else (do getUserByEmail (byteStringToString user_email); throwE EmailAlreadyTaken) `catchE` (signUpNoSuchUserHandler (byteStringToString user_email) (byteStringToString user_name) (byteStringToString user_password))
signUpNoSuchUserHandler :: String -> String -> String -> Error -> ExceptT Error AppHandler User
signUpNoSuchUserHandler user_email user_name user_password err =
case err of
NoSuchUser -> signUp user_email user_name user_password
_ -> throwE err
deleteHandler :: User -> ExceptT Error AppHandler User
deleteHandler user = do
user_id <- nullCheck NullId (lift . return) "user_id"
if (byteStringToString user_id) /= (show $ uid user) then throwE InvalidDelete else delete user
| lkania/Haskitter | src/Site.hs | mit | 7,596 | 0 | 14 | 1,500 | 1,713 | 867 | 846 | 117 | 12 |
module HsPredictor.ANN where
import HFANN
-- | Layer sizes of the network: 6 inputs, 3 hidden neurons, 1 output.
fannDef :: [Int]
fannDef = [6, 3, 1]
{-| Train an artificial neural network and save the trained network to a file. -}
trainAndSaveANN :: String -- ^ path to exported file
                -> String -- ^ where to save the trained network
                -> Int    -- ^ number of epochs
                -> Int    -- ^ reports frequency (0 - no reports)
                -> Double -- ^ desired error (stops training when achieved)
                -> IO ()
trainAndSaveANN expPath savePath epochs reports desiredError =
withStandardFann fannDef $ \fann -> do
setActivationFunctionHidden fann activationSigmoidSymmetric
setActivationFunctionOutput fann activationSigmoidSymmetric
trainOnFile fann expPath epochs reports desiredError
saveFann fann savePath
useANN :: String -- ^ path to saved ann
-> [Double] -- ^ input data
-> IO [Double]
useANN annPath inp = withSavedFann annPath $ \fann -> runFann fann inp
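
-- A minimal usage sketch (illustrative only; the file names and training
-- parameters are assumed values):
--
-- > example :: IO [Double]
-- > example = do
-- >   trainAndSaveANN "matches.train" "model.ann" 1000 100 0.001
-- >   useANN "model.ann" [1, 0, 0, 1, 0, 0]  -- six inputs, matching 'fannDef'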
| Taketrung/HsPredictor | library/HsPredictor/ANN.hs | mit | 963 | 0 | 11 | 255 | 184 | 97 | 87 | 20 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-binds -fno-warn-orphans #-}
{-# LANGUAGE UndecidableInstances #-} -- FIXME
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
module PersistentTest where
import Test.HUnit hiding (Test)
import Control.Monad.Trans.Resource (runResourceT)
import Test.Hspec.Expectations ()
import Test.Hspec.QuickCheck(prop)
import Database.Persist
#ifdef WITH_NOSQL
#ifdef WITH_MONGODB
import qualified Database.MongoDB as MongoDB
import Database.Persist.MongoDB (toInsertDoc, docToEntityThrow, collectionName, recordToDocument)
#endif
#else
import Database.Persist.TH (mkDeleteCascade, mkSave)
import Control.Exception (SomeException)
import qualified Data.Text as T
import qualified Control.Exception as E
# ifdef WITH_POSTGRESQL
import Data.List (sort)
# endif
# if WITH_MYSQL
import Database.Persist.MySQL()
# endif
#endif
import qualified Control.Monad.Trans.Control
import Control.Exception.Lifted (catch)
import Control.Monad.IO.Class
import Web.PathPieces (PathPiece (..))
import Data.Maybe (fromJust)
import qualified Data.HashMap.Lazy as M
import Init
import Data.Aeson
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Functor.Identity
import Data.Functor.Constant
import PersistTestPetType
import PersistTestPetCollarType
#ifdef WITH_NOSQL
mkPersist persistSettings [persistUpperCase|
#else
share [mkPersist persistSettings, mkMigrate "testMigrate", mkDeleteCascade persistSettings, mkSave "_ignoredSave"] [persistUpperCase|
#endif
-- Dedented comment
-- Header-level comment
-- Indented comment
Person json
name Text
age Int "some ignored -- \" attribute"
color Text Maybe -- this is a comment sql=foobarbaz
PersonNameKey name -- this is a comment sql=foobarbaz
deriving Show Eq
Person1
-- Dedented comment
-- Header-level comment
-- Indented comment
name Text
age Int
PersonMaybeAge
name Text
age Int Maybe
PersonMay json
name Text Maybe
color Text Maybe
deriving Show Eq
Pet
ownerId PersonId
name Text
-- deriving Show Eq
-- Dedented comment
-- Header-level comment
-- Indented comment
type PetType
MaybeOwnedPet
ownerId PersonId Maybe
name Text
type PetType
-- Dedented comment
-- Header-level comment
-- Indented comment
NeedsPet
petKey PetId
OutdoorPet
ownerId PersonId
collar PetCollar
type PetType
-- From the scaffold
UserPT
ident Text
password Text Maybe
UniqueUserPT ident
EmailPT
email Text
user UserPTId Maybe
verkey Text Maybe
UniqueEmailPT email
Upsert
email Text
counter Int
UniqueUpsert email
deriving Show
UpsertBy
email Text
city Text
state Text
counter Int
UniqueUpsertBy email
UniqueUpsertByCityState city state
deriving Show
Strict
!yes Int
~no Int
def Int
|]
deriving instance Show (BackendKey backend) => Show (PetGeneric backend)
deriving instance Eq (BackendKey backend) => Eq (PetGeneric backend)
share [mkPersist persistSettings { mpsPrefixFields = False, mpsGeneric = False }
#ifdef WITH_NOSQL
] [persistUpperCase|
#else
, mkMigrate "noPrefixMigrate"
] [persistLowerCase|
#endif
NoPrefix1
someFieldName Int
deriving Show Eq
NoPrefix2
someOtherFieldName Int
unprefixedRef NoPrefix1Id
deriving Show Eq
+NoPrefixSum
unprefixedLeft Int
unprefixedRight String
deriving Show Eq
|]
cleanDB :: (MonadIO m, PersistQuery backend, PersistEntityBackend EmailPT ~ backend) => ReaderT backend m ()
cleanDB = do
deleteWhere ([] :: [Filter Person])
deleteWhere ([] :: [Filter Person1])
deleteWhere ([] :: [Filter Pet])
deleteWhere ([] :: [Filter MaybeOwnedPet])
deleteWhere ([] :: [Filter NeedsPet])
deleteWhere ([] :: [Filter OutdoorPet])
deleteWhere ([] :: [Filter UserPT])
deleteWhere ([] :: [Filter EmailPT])
#ifdef WITH_NOSQL
db :: Action IO () -> Assertion
db = db' cleanDB
#endif
catchPersistException :: Control.Monad.Trans.Control.MonadBaseControl IO m => m a -> b -> m b
catchPersistException action errValue = do
Left res <-
(Right `fmap` action) `catch`
(\(_::PersistException) -> return $ Left errValue)
return res
specs :: Spec
specs = describe "persistent" $ do
it "fieldLens" $ do
let michael = Entity undefined $ Person "Michael" 28 Nothing :: Entity Person
michaelP1 = Person "Michael" 29 Nothing :: Person
view michael (fieldLens PersonAge) @?= 28
entityVal (set (fieldLens PersonAge) 29 michael) @?= michaelP1
it "FilterOr []" $ db $ do
let p = Person "z" 1 Nothing
_ <- insert p
#ifdef WITH_MONGODB
ps <- catchPersistException (selectList [FilterOr []] [Desc PersonAge]) []
#else
ps <- (selectList [FilterOr []] [Desc PersonAge])
#endif
assertEmpty ps
it "||. []" $ db $ do
let p = Person "z" 1 Nothing
_ <- insert p
#ifdef WITH_MONGODB
c <- catchPersistException (count $ [PersonName ==. "a"] ||. []) 1
#else
c <- (count $ [PersonName ==. "a"] ||. [])
#endif
c @== (1::Int)
it "FilterAnd []" $ db $ do
let p = Person "z" 1 Nothing
_ <- insert p
ps <- selectList [FilterAnd []] [Desc PersonAge]
assertNotEmpty ps
it "order of opts is irrelevant" $ db $ do
let eq (a, b, _) (c, d) = (a, b) @== (c, d)
limitOffsetOrder' :: [SelectOpt Person] -> (Int, Int, [SelectOpt Person])
limitOffsetOrder' = limitOffsetOrder
limitOffsetOrder' [Desc PersonAge] `eq` (0, 0)
limitOffsetOrder' [LimitTo 2, Desc PersonAge] `eq` (2, 0)
limitOffsetOrder' [Desc PersonAge, LimitTo 2] `eq` (2, 0)
limitOffsetOrder' [LimitTo 2, Desc PersonAge, OffsetBy 3] `eq` (2, 3)
insertMany_ [ Person "z" 1 Nothing
, Person "y" 2 Nothing
, Person "x" 1 Nothing
, Person "w" 2 Nothing
, Person "v" 1 Nothing
, Person "u" 2 Nothing
]
a <- fmap (map $ personName . entityVal) $ selectList [] [Desc PersonAge, Asc PersonName, OffsetBy 2, LimitTo 3]
a @== ["y", "v", "x"]
b <- fmap (map $ personName . entityVal) $ selectList [] [OffsetBy 2, Desc PersonAge, LimitTo 3, Asc PersonName]
b @== a
c <- fmap (map $ personName . entityVal) $ selectList [] [OffsetBy 2, Desc PersonAge, LimitTo 3, Asc PersonName, LimitTo 1, OffsetBy 1]
c @== a
it "passes the general tests" $ db $ do
let mic26 = Person "Michael" 26 Nothing
micK <- insert mic26
results <- selectList [PersonName ==. "Michael"] []
results @== [Entity micK mic26]
results' <- selectList [PersonAge <. 28] []
results' @== [Entity micK mic26]
p28 <- updateGet micK [PersonAge =. 28]
personAge p28 @== 28
#ifdef WITH_NOSQL
updateWhere [PersonName ==. "Michael"] [PersonAge =. 29]
#else
uc <- updateWhereCount [PersonName ==. "Michael"] [PersonAge =. 29]
uc @== 1
#endif
Just mic29 <- get micK
personAge mic29 @== 29
let eli = Person "Eliezer" 2 $ Just "blue"
_ <- insert eli
pasc <- selectList [] [Asc PersonAge]
map entityVal pasc @== [eli, mic29]
let abe30 = Person "Abe" 30 $ Just "black"
_ <- insert abe30
-- pdesc <- selectList [PersonAge <. 30] [Desc PersonName]
map entityVal pasc @== [eli, mic29]
abes <- selectList [PersonName ==. "Abe"] []
map entityVal abes @== [abe30]
Just (Entity _ p3) <- getBy $ PersonNameKey "Michael"
p3 @== mic29
ps <- selectList [PersonColor ==. Just "blue"] []
map entityVal ps @== [eli]
ps2 <- selectList [PersonColor ==. Nothing] []
map entityVal ps2 @== [mic29]
delete micK
Nothing <- get micK
return ()
#ifdef WITH_ZOOKEEPER
  -- The ZooKeeper backend does not support the id field:
  -- ZooKeeper's key is the node name, and when a unique key exists,
  -- the key becomes the encoded unique key.
#else
it "persistIdField" $ db $ do
let p = Person "foo" 100 (Just "blue")
q = Person "bar" 101 Nothing
pk <- insert p
qk <- insert q
mp <- selectFirst [persistIdField ==. pk] []
fmap entityVal mp @== Just p
mq <- selectFirst [persistIdField ==. qk] []
fmap entityVal mq @== Just q
#endif
it "!=." $ db $ do
deleteWhere ([] :: [Filter Person])
let mic = Person "Michael" 25 Nothing
_ <- insert mic
let eli = Person "Eliezer" 25 (Just "Red")
_ <- insert eli
pne <- selectList [PersonName !=. "Michael"] []
map entityVal pne @== [eli]
ps <- selectList [PersonColor !=. Nothing] []
map entityVal ps @== [eli]
pnm <- selectList [PersonName !=. "Eliezer"] []
map entityVal pnm @== [mic]
it "Double Maybe" $ db $ do
deleteWhere ([] :: [Filter PersonMay])
let mic = PersonMay (Just "Michael") Nothing
_ <- insert mic
let eli = PersonMay (Just "Eliezer") (Just "Red")
_ <- insert eli
pe <- selectList [PersonMayName ==. Nothing, PersonMayColor ==. Nothing] []
map entityVal pe @== []
pne <- selectList [PersonMayName !=. Nothing, PersonMayColor !=. Nothing] []
map entityVal pne @== [eli]
it "and/or" $ db $ do
deleteWhere ([] :: [Filter Person1])
insertMany_ [ Person1 "Michael" 25
, Person1 "Miriam" 25
, Person1 "Michael" 30
, Person1 "Michael" 35
]
c10 <- count $ [Person1Name ==. "Michael"] ||. [Person1Name ==. "Miriam", Person1Age ==. 25]
c10 @== 4
c12 <- count [FilterOr [FilterAnd [Person1Name ==. "Michael"], FilterAnd [Person1Name ==. "Miriam"]]]
c12 @== 4
c14 <- count [FilterOr [FilterAnd [Person1Name ==. "Michael"], FilterAnd [Person1Name ==. "Miriam"],
FilterAnd [Person1Age >. 29, Person1Age <=. 30]]]
c14 @== 4
c20 <- count $ [Person1Name ==. "Miriam"] ||. [Person1Age >. 29, Person1Age <=. 30]
c20 @== 2
c22 <- count $ [Person1Age <=. 30] ++ [Person1Age >. 29]
c22 @== 1
c24 <- count $ [FilterAnd [Person1Age <=. 30, Person1Age >. 29]]
c24 @== 1
c26 <- count $ [Person1Age <=. 30] ++ [Person1Age >. 29]
c26 @== 1
c34 <- count $ [Person1Name ==. "Michael"] ||. [Person1Name ==. "Mirieam"] ++ [Person1Age <.35]
c34 @== 3
c30 <- count $ ([Person1Name ==. "Michael"] ||. [Person1Name ==. "Miriam"]) ++ [Person1Age <.35]
c30 @== 3
c36 <- count $ [Person1Name ==. "Michael"] ||. ([Person1Name ==. "Miriam"] ++ [Person1Age <.35])
c36 @== 4
c40 <- count $ ([Person1Name ==. "Michael"] ||. [Person1Name ==. "Miriam"] ||. [Person1Age <.35])
c40 @== 4
it "deleteWhere" $ db $ do
key2 <- insert $ Person "Michael2" 90 Nothing
_ <- insert $ Person "Michael3" 90 Nothing
let p91 = Person "Michael4" 91 Nothing
key91 <- insert $ p91
ps90 <- selectList [PersonAge ==. 90] []
assertNotEmpty ps90
deleteWhere [PersonAge ==. 90]
ps90' <- selectList [PersonAge ==. 90] []
assertEmpty ps90'
Nothing <- get key2
Just p2_91 <- get key91
p91 @== p2_91
it "deleteBy" $ db $ do
_ <- insert $ Person "Michael2" 27 Nothing
let p3 = Person "Michael3" 27 Nothing
key3 <- insert $ p3
ps2 <- selectList [PersonName ==. "Michael2"] []
assertNotEmpty ps2
deleteBy $ PersonNameKey "Michael2"
ps2' <- selectList [PersonName ==. "Michael2"] []
assertEmpty ps2'
Just p32 <- get key3
p3 @== p32
it "delete" $ db $ do
key2 <- insert $ Person "Michael2" 27 Nothing
let p3 = Person "Michael3" 27 Nothing
key3 <- insert $ p3
pm2 <- selectList [PersonName ==. "Michael2"] []
assertNotEmpty pm2
delete key2
pm2' <- selectList [PersonName ==. "Michael2"] []
assertEmpty pm2'
Just p <- get key3
p3 @== p
#ifdef WITH_ZOOKEEPER
it "toPathPiece . fromPathPiece" $ do
    -- The QuickCheck property below fails with "Cannot convert PersistObjectId to Text.",
    -- since ZooKey does not currently support PersistObjectId.
let key1 = ZooKey "hogehogekey" :: (BackendKey BackendMonad)
key2 = fromJust $ fromPathPiece $ toPathPiece key1 :: (BackendKey BackendMonad)
toPathPiece key1 `shouldBe` toPathPiece key2
#else
prop "toPathPiece . fromPathPiece" $ \piece ->
let key1 = piece :: (BackendKey BackendMonad)
key2 = fromJust $ fromPathPiece $ toPathPiece key1 :: (BackendKey BackendMonad)
in toPathPiece key1 == toPathPiece key2
#endif
it "replace" $ db $ do
key2 <- insert $ Person "Michael2" 27 Nothing
let p3 = Person "Michael3" 27 Nothing
replace key2 p3
Just p <- get key2
p @== p3
-- test replace an empty key
delete key2
Nothing <- get key2
_ <- replace key2 p3
Nothing <- get key2
return ()
let mic = Person "Michael" 25 Nothing
micK <- insert mic
Just p1 <- get micK
p1 @== mic
replace micK $ Person "Michael" 25 Nothing
Just p2 <- get micK
p2 @== mic
replace micK $ Person "Michael" 26 Nothing
Just mic26 <- get micK
mic26 @/= mic
personAge mic26 @== personAge mic + 1
it "getBy" $ db $ do
let p2 = Person "Michael2" 27 Nothing
key2 <- insert p2
Just (Entity k p) <- getBy $ PersonNameKey "Michael2"
p @== p2
k @== key2
Nothing <- getBy $ PersonNameKey "Michael9"
Just (Entity k' p') <- getByValue p2
k' @== k
p' @== p
return ()
it "updateGet" $ db $ do
let p25 = Person "Michael" 25 Nothing
key25 <- insert p25
pBlue28 <- updateGet key25 [PersonAge =. 28, PersonName =. "Updated"]
pBlue28 @== Person "Updated" 28 Nothing
pBlue30 <- updateGet key25 [PersonAge +=. 2]
pBlue30 @== Person "Updated" 30 Nothing
it "upsert without updates" $ db $ do
deleteWhere ([] :: [Filter Upsert])
let email = "[email protected]"
Nothing :: Maybe (Entity Upsert) <- getBy $ UniqueUpsert email
let counter1 = 0
Entity k1 u1 <- upsert (Upsert email counter1) []
upsertCounter u1 @== counter1
let counter2 = 1
Entity k2 u2 <- upsert (Upsert email counter2) []
upsertCounter u2 @== counter2
k1 @== k2
it "upsert with updates" $ db $ do
deleteWhere ([] :: [Filter Upsert])
let email = "[email protected]"
Nothing :: Maybe (Entity Upsert) <- getBy $ UniqueUpsert email
let up0 = Upsert email 0
Entity _ up1 <- upsert up0 [UpsertCounter +=. 1]
upsertCounter up1 @== 1
Entity _ up2 <- upsert up1 [UpsertCounter +=. 1]
upsertCounter up2 @== 2
it "upsertBy without updates" $ db $ do
deleteWhere ([] :: [Filter UpsertBy])
let email = "[email protected]"
city = "Boston"
state = "Massachussets"
Nothing :: Maybe (Entity UpsertBy) <- getBy $ UniqueUpsertBy email
let counter1 = 0
unique = UniqueUpsertBy email
Entity k1 u1 <- upsertBy unique (UpsertBy email city state counter1) []
upsertByCounter u1 @== counter1
let counter2 = 1
Entity k2 u2 <- upsertBy unique (UpsertBy email city state counter2) []
upsertByCounter u2 @== counter2
k1 @== k2
it "upsertBy with updates" $ db $ do
deleteWhere ([] :: [Filter UpsertBy])
let email = "[email protected]"
city = "Boston"
state = "Massachussets"
Nothing :: Maybe (Entity UpsertBy) <- getBy $ UniqueUpsertBy email
let up0 = UpsertBy email city state 0
Entity _ up1 <- upsertBy (UniqueUpsertBy email) up0 [UpsertByCounter +=. 1]
upsertByCounter up1 @== 1
Entity _ up2 <- upsertBy (UniqueUpsertBy email) up1 [UpsertByCounter +=. 1]
upsertByCounter up2 @== 2
it "maybe update" $ db $ do
let noAge = PersonMaybeAge "Michael" Nothing
keyNoAge <- insert noAge
noAge2 <- updateGet keyNoAge [PersonMaybeAgeAge +=. Just 2]
-- the correct answer is very debatable
#ifdef WITH_NOSQL
personMaybeAgeAge noAge2 @== Just 2
#else
personMaybeAgeAge noAge2 @== Nothing
#endif
it "updateWhere" $ db $ do
let p1 = Person "Michael" 25 Nothing
let p2 = Person "Michael2" 25 Nothing
key1 <- insert p1
key2 <- insert p2
updateWhere [PersonName ==. "Michael2"]
[PersonAge +=. 3, PersonName =. "Updated"]
Just pBlue28 <- get key2
pBlue28 @== Person "Updated" 28 Nothing
Just p <- get key1
p @== p1
it "selectList" $ db $ do
let p25 = Person "Michael" 25 Nothing
let p26 = Person "Michael2" 26 Nothing
[key25, key26] <- insertMany [p25, p26]
ps1 <- selectList [] [Asc PersonAge]
ps1 @== [(Entity key25 p25), (Entity key26 p26)]
-- limit
ps2 <- selectList [] [Asc PersonAge, LimitTo 1]
ps2 @== [(Entity key25 p25)]
-- offset
ps3 <- selectList [] [Asc PersonAge, OffsetBy 1]
ps3 @== [(Entity key26 p26)]
-- limit & offset
ps4 <- selectList [] [Asc PersonAge, LimitTo 1, OffsetBy 1]
ps4 @== [(Entity key26 p26)]
ps5 <- selectList [] [Desc PersonAge]
ps5 @== [(Entity key26 p26), (Entity key25 p25)]
ps6 <- selectList [PersonAge ==. 26] []
ps6 @== [(Entity key26 p26)]
it "selectSource" $ db $ do
let p1 = Person "selectSource1" 1 Nothing
p2 = Person "selectSource2" 2 Nothing
p3 = Person "selectSource3" 3 Nothing
[k1,k2,k3] <- insertMany [p1, p2, p3]
ps1 <- runResourceT $ selectSource [] [Desc PersonAge] $$ await
ps1 @== Just (Entity k3 p3)
ps2 <- runResourceT $ selectSource [PersonAge <. 3] [Asc PersonAge] $$ CL.consume
ps2 @== [Entity k1 p1, Entity k2 p2]
runResourceT $ selectSource [] [Desc PersonAge] $$ do
e1 <- await
e1 @== Just (Entity k3 p3)
e2 <- await
e2 @== Just (Entity k2 p2)
e3 <- await
e3 @== Just (Entity k1 p1)
e4 <- await
e4 @== Nothing
it "selectFirst" $ db $ do
_ <- insert $ Person "Michael" 26 Nothing
let pOld = Person "Oldie" 75 Nothing
kOld <- insert pOld
x <- selectFirst [] [Desc PersonAge]
x @== Just (Entity kOld pOld)
it "selectKeys" $ db $ do
let p1 = Person "selectKeys1" 1 Nothing
p2 = Person "selectKeys2" 2 Nothing
p3 = Person "selectKeys3" 3 Nothing
[k1,k2,k3] <- insertMany [p1, p2, p3]
ps1 <- runResourceT $ selectKeys [] [Desc PersonAge] $$ await
ps1 @== Just k3
ps2 <- runResourceT $ selectKeys [PersonAge <. 3] [Asc PersonAge] $$ CL.consume
ps2 @== [k1, k2]
runResourceT $ selectKeys [] [Desc PersonAge] $$ do
e1 <- await
e1 @== Just k3
e2 <- await
e2 @== Just k2
e3 <- await
e3 @== Just k1
e4 <- await
e4 @== Nothing
it "insertMany_ with no arguments" $ db $ do
_ <- insertMany_ ([] :: [Person])
rows <- count ([] :: [Filter Person])
rows @== 0
_ <- insertMany ([] :: [Person])
rows2 <- count ([] :: [Filter Person])
rows2 @== 0
_ <- insertEntityMany ([] :: [Entity Person])
rows3 <- count ([] :: [Filter Person])
rows3 @== 0
it "insertEntityMany" $ db $ do
id1:id2:id3:id4:id5:[] <- liftIO $ replicateM 5 (PersonKey `fmap` generateKey)
let p1 = Entity id1 $ Person "insertEntityMany1" 1 Nothing
p2 = Entity id2 $ Person "insertEntityMany2" 2 Nothing
p3 = Entity id3 $ Person "insertEntityMany3" 3 Nothing
p4 = Entity id4 $ Person "insertEntityMany4" 3 Nothing
p5 = Entity id5 $ Person "insertEntityMany5" 3 Nothing
insertEntityMany [p1,p2,p3,p4,p5]
rows <- count ([] :: [Filter Person])
rows @== 5
it "insertBy" $ db $ do
Right _ <- insertBy $ Person "name" 1 Nothing
Left _ <- insertBy $ Person "name" 1 Nothing
Right _ <- insertBy $ Person "name2" 1 Nothing
return ()
it "insertKey" $ db $ do
k <- liftIO (PersonKey `fmap` generateKey)
insertKey k $ Person "Key" 26 Nothing
Just (Entity k2 _) <- selectFirst [PersonName ==. "Key"] []
k2 @== k
it "insertEntity" $ db $ do
Entity k p <- insertEntity $ Person "name" 1 Nothing
Just p2 <- get k
p2 @== p
it "repsert" $ db $ do
k <- liftIO (PersonKey `fmap` generateKey)
Nothing <- selectFirst [PersonName ==. "Repsert"] []
repsert k $ Person "Repsert" 26 Nothing
Just (Entity k2 _) <- selectFirst [PersonName ==. "Repsert"] []
k2 @== k
repsert k $ Person "Repsert" 27 Nothing
Just (Entity k3 p) <- selectFirst [PersonName ==. "Repsert"] []
k3 @== k
27 @== personAge p
it "retrieves a belongsToJust association" $ db $ do
let p = Person "pet owner" 30 Nothing
person <- insert $ p
let cat = Pet person "Mittens" Cat
p2 <- getJust $ petOwnerId cat
p @== p2
p3 <- belongsToJust petOwnerId $ cat
p @== p3
it "retrieves a belongsTo association" $ db $ do
let p = Person "pet owner" 30 Nothing
person <- insert p
let cat = MaybeOwnedPet (Just person) "Mittens" Cat
p2 <- getJust $ fromJust $ maybeOwnedPetOwnerId cat
p @== p2
Just p4 <- belongsTo maybeOwnedPetOwnerId $ cat
p @== p4
it "derivePersistField" $ db $ do
person <- insert $ Person "pet owner" 30 Nothing
catKey <- insert $ Pet person "Mittens" Cat
Just cat' <- get catKey
liftIO $ petType cat' @?= Cat
dog <- insert $ Pet person "Spike" Dog
Just dog' <- get dog
liftIO $ petType dog' @?= Dog
it "derivePersistFieldJSON" $ db $ do
let mittensCollar = PetCollar "Mittens\n1-714-668-9672" True
pkey <- insert $ Person "pet owner" 30 Nothing
catKey <- insert $ OutdoorPet pkey mittensCollar Cat
Just (OutdoorPet _ collar' _) <- get catKey
liftIO $ collar' @?= mittensCollar
#ifdef WITH_ZOOKEEPER
  -- The ZooKeeper backend does not support the id field:
  -- ZooKeeper's key is the node name, and when a unique key exists,
  -- the key becomes the encoded unique key.
#else
it "idIn" $ db $ do
let p1 = Person "D" 0 Nothing
p2 = Person "E" 1 Nothing
p3 = Person "F" 2 Nothing
pid1 <- insert p1
_ <- insert p2
pid3 <- insert p3
x <- selectList [PersonId <-. [pid1, pid3]] []
liftIO $ x @?= [Entity pid1 p1, Entity pid3 p3]
#endif
describe "toJSON" $ do
it "serializes" $ db $ do
let p = Person "D" 0 Nothing
k <- insert p
liftIO $ toJSON (Entity k p) @?=
Object (M.fromList [("id", toJSON k), ("color",Null),("name",String "D"),("age",Number 0)])
{- FIXME
prop "fromJSON . toJSON $ key" $ \(person :: Key Person) ->
case (fromJSON . toJSON) person of
Success p -> p == person
_ -> error "fromJSON"
-}
#ifdef WITH_NOSQL
#ifdef WITH_MONGODB
describe "raw MongoDB helpers" $ do
it "collectionName" $ do
collectionName (Person "Duder" 0 Nothing) @?= "Person"
it "toInsertFields, entityFields, & docToEntityThrow" $ db $ do
let p1 = Person "Duder" 0 Nothing
let doc = toInsertDoc p1
MongoDB.ObjId _id <- MongoDB.insert "Person" $ doc
let idSelector = "_id" MongoDB.=: _id
Entity _ ent1 <- docToEntityThrow $ idSelector:doc
liftIO $ p1 @?= ent1
let p2 = p1 {personColor = Just "blue"}
let doc2 = idSelector:recordToDocument p2
MongoDB.save "Person" doc2
Entity _ ent2 <- docToEntityThrow doc2
liftIO $ p2 @?= ent2
#endif
#else
it "rawSql/2+2" $ db $ do
ret <- rawSql "SELECT 2+2" []
liftIO $ ret @?= [Single (4::Int)]
it "rawSql/?-?" $ db $ do
ret <- rawSql "SELECT ?-?" [PersistInt64 5, PersistInt64 3]
liftIO $ ret @?= [Single (2::Int)]
it "rawSql/NULL" $ db $ do
ret <- rawSql "SELECT NULL" []
liftIO $ ret @?= [Nothing :: Maybe (Single Int)]
it "rawSql/entity" $ db $ do
let insert' :: (PersistStore backend, PersistEntity val, PersistEntityBackend val ~ BaseBackend backend, MonadIO m)
=> val -> ReaderT backend m (Key val, val)
insert' v = insert v >>= \k -> return (k, v)
(p1k, p1) <- insert' $ Person "Mathias" 23 Nothing
(p2k, p2) <- insert' $ Person "Norbert" 44 Nothing
(p3k, _ ) <- insert' $ Person "Cassandra" 19 Nothing
(_ , _ ) <- insert' $ Person "Thiago" 19 Nothing
(a1k, a1) <- insert' $ Pet p1k "Rodolfo" Cat
(a2k, a2) <- insert' $ Pet p1k "Zeno" Cat
(a3k, a3) <- insert' $ Pet p2k "Lhama" Dog
(_ , _ ) <- insert' $ Pet p3k "Abacate" Cat
escape <- ((. DBName) . connEscapeName) `fmap` ask
person <- getTableName (error "rawSql Person" :: Person)
name <- getFieldName PersonName
let query = T.concat [ "SELECT ??, ?? "
, "FROM ", person
, ", ", escape "Pet"
, " WHERE ", person, ".", escape "age", " >= ? "
, "AND ", escape "Pet", ".", escape "ownerId", " = "
, person, ".", escape "id"
, " ORDER BY ", person, ".", name
]
ret <- rawSql query [PersistInt64 20]
liftIO $ ret @?= [ (Entity p1k p1, Entity a1k a1)
, (Entity p1k p1, Entity a2k a2)
, (Entity p2k p2, Entity a3k a3) ]
ret2 <- rawSql query [PersistInt64 20]
liftIO $ ret2 @?= [ (Just (Entity p1k p1), Just (Entity a1k a1))
, (Just (Entity p1k p1), Just (Entity a2k a2))
, (Just (Entity p2k p2), Just (Entity a3k a3)) ]
ret3 <- rawSql query [PersistInt64 20]
liftIO $ ret3 @?= [ Just (Entity p1k p1, Entity a1k a1)
, Just (Entity p1k p1, Entity a2k a2)
, Just (Entity p2k p2, Entity a3k a3) ]
it "rawSql/order-proof" $ db $ do
let p1 = Person "Zacarias" 93 Nothing
p1k <- insert p1
escape <- ((. DBName) . connEscapeName) `fmap` ask
let query = T.concat [ "SELECT ?? "
, "FROM ", escape "Person"
]
ret1 <- rawSql query []
ret2 <- rawSql query [] :: MonadIO m => SqlPersistT m [Entity (ReverseFieldOrder Person)]
liftIO $ ret1 @?= [Entity p1k p1]
liftIO $ ret2 @?= [Entity (RFOKey $ unPersonKey $ p1k) (RFO p1)]
it "rawSql/OUTER JOIN" $ db $ do
let insert' :: (PersistStore backend, PersistEntity val, PersistEntityBackend val ~ BaseBackend backend, MonadIO m)
=> val -> ReaderT backend m (Key val, val)
insert' v = insert v >>= \k -> return (k, v)
(p1k, p1) <- insert' $ Person "Mathias" 23 Nothing
(p2k, p2) <- insert' $ Person "Norbert" 44 Nothing
(a1k, a1) <- insert' $ Pet p1k "Rodolfo" Cat
(a2k, a2) <- insert' $ Pet p1k "Zeno" Cat
escape <- ((. DBName) . connEscapeName) `fmap` ask
let query = T.concat [ "SELECT ??, ?? "
, "FROM ", person
, "LEFT OUTER JOIN ", pet
, " ON ", person, ".", escape "id"
, " = ", pet, ".", escape "ownerId"
, " ORDER BY ", person, ".", escape "name"]
person = escape "Person"
pet = escape "Pet"
ret <- rawSql query []
liftIO $ ret @?= [ (Entity p1k p1, Just (Entity a1k a1))
, (Entity p1k p1, Just (Entity a2k a2))
, (Entity p2k p2, Nothing) ]
it "commit/rollback" (caseCommitRollback >> runResourceT (runConn cleanDB))
#ifndef WITH_MYSQL
# ifndef WITH_POSTGRESQL
# ifndef WITH_NOSQL
it "afterException" $ db $ do
let catcher :: Monad m => SomeException -> m ()
catcher _ = return ()
_ <- insert $ Person "A" 0 Nothing
_ <- (insert (Person "A" 1 Nothing) >> return ()) `catch` catcher
_ <- insert $ Person "B" 0 Nothing
return ()
# endif
# endif
#endif
#ifndef WITH_NOSQL
it "mpsNoPrefix" $ db $ do
deleteWhere ([] :: [Filter NoPrefix2])
deleteWhere ([] :: [Filter NoPrefix1])
np1a <- insert $ NoPrefix1 1
update np1a [SomeFieldName =. 2]
np1b <- insert $ NoPrefix1 3
np2 <- insert $ NoPrefix2 4 np1a
update np2 [UnprefixedRef =. np1b, SomeOtherFieldName =. 5]
mnp1a <- get np1a
liftIO $ mnp1a @?= Just (NoPrefix1 2)
liftIO $ fmap someFieldName mnp1a @?= Just 2
mnp2 <- get np2
liftIO $ fmap unprefixedRef mnp2 @?= Just np1b
liftIO $ fmap someOtherFieldName mnp2 @?= Just 5
insert_ $ UnprefixedLeftSum 5
insert_ $ UnprefixedRightSum "Hello"
it "IsSqlKey instance" $ db $ do
let p = Person "Alice" 30 Nothing
key@(PersonKey (SqlBackendKey i)) <- insert p
liftIO $ fromSqlKey key `shouldBe` (i :: Int64)
mp <- get $ toSqlKey i
liftIO $ mp `shouldBe` Just p
#endif
describe "strictness" $ do
it "bang" $ (return $! Strict (error "foo") 5 5) `shouldThrow` anyErrorCall
it "tilde" $ void (return $! Strict 5 (error "foo") 5 :: IO Strict)
it "blank" $ (return $! Strict 5 5 (error "foo")) `shouldThrow` anyErrorCall
#ifdef WITH_POSTGRESQL
describe "rawSql/array_agg" $ do
let runArrayAggTest dbField expected = db $ do
void $ insertMany
[ UserPT "a" $ Just "b"
, UserPT "c" $ Just "d"
, UserPT "e" Nothing
, UserPT "g" $ Just "h" ]
escape <- ((. DBName) . connEscapeName) `fmap` ask
let query = T.concat [ "SELECT array_agg(", escape dbField, ") "
, "FROM ", escape "UserPT"
]
[Single xs] <- rawSql query []
liftIO $ sort xs @?= expected
it "works for [Text]" $ runArrayAggTest "ident" ["a", "c", "e", "g" :: Text]
it "works for [Maybe Text]" $ runArrayAggTest "password" [Nothing, Just "b", Just "d", Just "h" :: Maybe Text]
#endif
-- | Reverses the order of the fields of an entity. Used to test
-- @??@ placeholders of 'rawSql'.
newtype ReverseFieldOrder a = RFO {unRFO :: a} deriving (Eq, Show)
instance ToJSON (Key (ReverseFieldOrder a)) where toJSON = error "ReverseFieldOrder"
instance FromJSON (Key (ReverseFieldOrder a)) where parseJSON = error "ReverseFieldOrder"
instance (PersistEntity a) => PersistEntity (ReverseFieldOrder a) where
type PersistEntityBackend (ReverseFieldOrder a) = PersistEntityBackend a
newtype Key (ReverseFieldOrder a) = RFOKey { unRFOKey :: BackendKey SqlBackend } deriving (Show, Read, Eq, Ord, PersistField, PersistFieldSql)
keyFromValues = fmap RFOKey . fromPersistValue . head
keyToValues = (:[]) . toPersistValue . unRFOKey
entityDef = revFields . entityDef . liftM unRFO
where
revFields ed = ed { entityFields = reverse (entityFields ed) }
toPersistFields = reverse . toPersistFields . unRFO
newtype EntityField (ReverseFieldOrder a) b = EFRFO {unEFRFO :: EntityField a b}
persistFieldDef = persistFieldDef . unEFRFO
fromPersistValues = fmap RFO . fromPersistValues . reverse
newtype Unique (ReverseFieldOrder a) = URFO {unURFO :: Unique a }
persistUniqueToFieldNames = reverse . persistUniqueToFieldNames . unURFO
persistUniqueToValues = reverse . persistUniqueToValues . unURFO
persistUniqueKeys = map URFO . reverse . persistUniqueKeys . unRFO
persistIdField = error "ReverseFieldOrder.persistIdField"
fieldLens = error "ReverseFieldOrder.fieldLens"
caseCommitRollback :: Assertion
caseCommitRollback = db $ do
let filt :: [Filter Person1]
filt = []
let p = Person1 "foo" 0
_ <- insert p
_ <- insert p
_ <- insert p
c1 <- count filt
c1 @== 3
transactionSave
c2 <- count filt
c2 @== 3
_ <- insert p
transactionUndo
c3 <- count filt
c3 @== 3
_ <- insert p
transactionSave
_ <- insert p
_ <- insert p
transactionUndo
c4 <- count filt
c4 @== 4
catch' :: (Control.Monad.Trans.Control.MonadBaseControl IO m, E.Exception e)
=> m a -- ^ The computation to run
-> (e -> m a) -- ^ Handler to invoke if an exception is raised
-> m a
catch' a handler = Control.Monad.Trans.Control.control $ \runInIO ->
E.catch (runInIO a)
(\e -> runInIO $ handler e)
#endif
-- Test proper polymorphism
_polymorphic :: (MonadIO m, PersistQuery backend, BaseBackend backend ~ PersistEntityBackend Pet) => ReaderT backend m ()
_polymorphic = do
((Entity id' _):_) <- selectList [] [LimitTo 1]
_ <- selectList [PetOwnerId ==. id'] []
_ <- insert $ Pet id' "foo" Cat
return ()
-- Some lens stuff
type ASetter s t a b = (a -> Identity b) -> s -> Identity t
set :: ASetter s t a b -> b -> s -> t
set l b = runIdentity . (l (\_ -> Identity b))
type Getting r s t a b = (a -> Constant r b) -> s -> Constant r t
view :: s -> Getting a s t a b -> a
view s l = getConstant (l Constant s)
| pseudonom/persistent | persistent-test/src/PersistentTest.hs | mit | 33,819 | 14 | 25 | 9,756 | 9,216 | 4,401 | 4,815 | 654 | 1 |
module Handler.CommandSpec
( main
, spec
) where
import SpecHelper
import Data.UUID (fromText)
data Response = Response Token
instance FromJSON Response where
parseJSON = withObject "Response" $ \o -> Response
<$> (parseToken =<< o .: "token")
where
parseToken = maybe mzero (return . Token) . fromText
main :: IO ()
main = hspec spec
spec :: Spec
spec = withApp $ do
describe "POST /commands" $ do
it "creates a new command" $ do
postJSON CommandsR $ object []
withJSONResponse $ \(Response token) -> do
Entity _ command <- runDB $ getBy404 $ UniqueCommand token
commandDescription command `shouldBe` Nothing
it "creates a command with a description" $ do
postJSON CommandsR $ object ["description" .= ("test command" :: Text)]
withJSONResponse $ \(Response token) -> do
Entity _ command <- runDB $ getBy404 $ UniqueCommand token
commandDescription command `shouldBe` Just "test command"
describe "DELETE /commands/token" $
it "deletes the command's data" $ do
now <- liftIO getCurrentTime
token <- newToken
void $ runDB $ insert Command
{ commandToken = token
, commandDescription = Just "a description"
, commandCreatedAt = now
}
delete $ CommandR token
statusIs 200
results <- runDB $ selectList [CommandToken ==. token] []
results `shouldBe` []
| pbrisbin/tee-io | test/Handler/CommandSpec.hs | mit | 1,592 | 0 | 20 | 539 | 434 | 212 | 222 | 37 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
module PostgREST.Error (pgErrResponse, errResponse) where
import Data.Aeson ((.=))
import qualified Data.Aeson as JSON
import Data.Monoid ((<>))
import Data.String.Conversions (cs)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Hasql.Session as H
import Network.HTTP.Types.Header
import qualified Network.HTTP.Types.Status as HT
import Network.Wai (Response, responseLBS)
errResponse :: HT.Status -> Text -> Response
errResponse status message = responseLBS status [(hContentType, "application/json")] (cs $ T.concat ["{\"message\":\"",message,"\"}"])
pgErrResponse :: H.Error -> Response
pgErrResponse e = responseLBS (httpStatus e)
[(hContentType, "application/json")] (JSON.encode e)
instance JSON.ToJSON H.Error where
toJSON (H.ResultError (H.ServerError c m d h)) = JSON.object [
"code" .= (cs c::T.Text),
"message" .= (cs m::T.Text),
"details" .= (fmap cs d::Maybe T.Text),
"hint" .= (fmap cs h::Maybe T.Text)]
toJSON (H.ResultError (H.UnexpectedResult m)) = JSON.object [
"message" .= (cs m::T.Text)]
  -- JSON.object keeps only one value per key, so the row number is folded
  -- into the single "details" entry rather than given as a duplicate key.
  toJSON (H.ResultError (H.RowError i H.EndOfInput)) = JSON.object [
    "message" .= ("Row error: end of input"::String),
    "details" .=
      ("Attempt to parse more columns than there are in the result. Row number " <> show i)]
  toJSON (H.ResultError (H.RowError i H.UnexpectedNull)) = JSON.object [
    "message" .= ("Row error: unexpected null"::String),
    "details" .= ("Attempt to parse a NULL as some value. Row number " <> show i)]
  toJSON (H.ResultError (H.RowError i (H.ValueError d))) = JSON.object [
    "message" .= ("Row error: Wrong value parser used"::String),
    "details" .= (cs d <> ". Row number " <> show i::String)]
toJSON (H.ResultError (H.UnexpectedAmountOfRows i)) = JSON.object [
"message" .= ("Unexpected amount of rows"::String),
"details" .= i]
toJSON (H.ClientError d) = JSON.object [
"message" .= ("Database client error"::String),
"details" .= (fmap cs d::Maybe T.Text)]
httpStatus :: H.Error -> HT.Status
httpStatus (H.ResultError (H.ServerError c _ _ _)) =
case cs c of
'0':'8':_ -> HT.status503 -- pg connection err
'0':'9':_ -> HT.status500 -- triggered action exception
'0':'L':_ -> HT.status403 -- invalid grantor
'0':'P':_ -> HT.status403 -- invalid role specification
"23503" -> HT.status409 -- foreign_key_violation
"23505" -> HT.status409 -- unique_violation
'2':'5':_ -> HT.status500 -- invalid tx state
'2':'8':_ -> HT.status403 -- invalid auth specification
'2':'D':_ -> HT.status500 -- invalid tx termination
'3':'8':_ -> HT.status500 -- external routine exception
'3':'9':_ -> HT.status500 -- external routine invocation
'3':'B':_ -> HT.status500 -- savepoint exception
'4':'0':_ -> HT.status500 -- tx rollback
'5':'3':_ -> HT.status503 -- insufficient resources
'5':'4':_ -> HT.status413 -- too complex
'5':'5':_ -> HT.status500 -- obj not on prereq state
'5':'7':_ -> HT.status500 -- operator intervention
'5':'8':_ -> HT.status500 -- system error
'F':'0':_ -> HT.status500 -- conf file error
'H':'V':_ -> HT.status500 -- foreign data wrapper error
'P':'0':_ -> HT.status500 -- PL/pgSQL Error
'X':'X':_ -> HT.status500 -- internal Error
"42P01" -> HT.status404 -- undefined table
"42501" -> HT.status404 -- insufficient privilege
_ -> HT.status400
httpStatus (H.ResultError _) = HT.status500
httpStatus (H.ClientError _) = HT.status503
| motiz88/postgrest | src/PostgREST/Error.hs | mit | 3,821 | 0 | 13 | 819 | 1,207 | 656 | 551 | 76 | 25 |
{- |
Module representing a JSON-API resource object.
Specification: <http://jsonapi.org/format/#document-resource-objects>
-}
module Network.JSONApi.Resource
( Resource (..)
, Relationships
, ResourcefulEntity (..)
, Relationship
, mkRelationship
, mkRelationships
) where
import Control.Lens.TH
import Data.Aeson (ToJSON, FromJSON, (.=), (.:), (.:?))
import qualified Data.Aeson as AE
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid
import Data.Text (Text)
import GHC.Generics hiding (Meta)
import Network.JSONApi.Identifier (HasIdentifier (..), Identifier (..))
import Network.JSONApi.Link (Links)
import Network.JSONApi.Meta (Meta)
import Prelude hiding (id)
import Control.DeepSeq (NFData)
{- |
Type representing a JSON-API resource object.
A Resource supplies standardized data and metadata about a resource.
Specification: <http://jsonapi.org/format/#document-resource-objects>
-}
data Resource a = Resource
{ getIdentifier :: Identifier
, getResource :: a
, getLinks :: Maybe Links
, getRelationships :: Maybe Relationships
} deriving (Show, Eq, Generic)
instance NFData a => NFData (Resource a)
instance (ToJSON a) => ToJSON (Resource a) where
toJSON (Resource (Identifier resId resType metaObj) resObj linksObj rels) =
AE.object [ "id" .= resId
, "type" .= resType
, "attributes" .= resObj
, "links" .= linksObj
, "meta" .= metaObj
, "relationships" .= rels
]
instance (FromJSON a) => FromJSON (Resource a) where
parseJSON = AE.withObject "resourceObject" $ \v -> do
id <- v .: "id"
typ <- v .: "type"
attrs <- v .: "attributes"
links <- v .:? "links"
meta <- v .:? "meta"
rels <- v .:? "relationships"
return $ Resource (Identifier id typ meta) attrs links rels
instance HasIdentifier (Resource a) where
identifier = getIdentifier
{- |
A typeclass for decorating an entity with JSON API properties
-}
class ResourcefulEntity a where
resourceIdentifier :: a -> Text
resourceType :: a -> Text
resourceLinks :: a -> Maybe Links
resourceMetaData :: a -> Maybe Meta
resourceRelationships :: a -> Maybe Relationships
fromResource :: Resource a -> a
fromResource = getResource
toResource :: a -> Resource a
toResource a =
Resource
(Identifier (resourceIdentifier a) (resourceType a) (resourceMetaData a))
a
(resourceLinks a)
(resourceRelationships a)
{- |
A type representing the Relationship between 2 entities
A Relationship provides basic information for fetching further information
about a related resource.
Specification: <http://jsonapi.org/format/#document-resource-object-relationships>
-}
data Relationship = Relationship
{ _data :: Maybe Identifier
, _links :: Maybe Links
} deriving (Show, Eq, Generic)
instance NFData Relationship
instance ToJSON Relationship where
toJSON = AE.genericToJSON
AE.defaultOptions { AE.fieldLabelModifier = drop 1 }
instance FromJSON Relationship where
parseJSON = AE.genericParseJSON
AE.defaultOptions { AE.fieldLabelModifier = drop 1 }
data Relationships = Relationships (Map Text Relationship)
deriving (Show, Eq, Generic)
instance NFData Relationships
instance ToJSON Relationships
instance FromJSON Relationships
instance Semigroup Relationships where
(<>) (Relationships a) (Relationships b) = Relationships (a <> b)
instance Monoid Relationships where
mempty = Relationships Map.empty
mkRelationships :: Relationship -> Relationships
mkRelationships rel =
Relationships $ Map.singleton (relationshipType rel) rel
relationshipType :: Relationship -> Text
relationshipType relationship = case _data relationship of
Nothing -> "unidentified"
(Just (Identifier _ typ _)) -> typ
{- |
Constructor function for creating a Relationship record
A relationship must contain either an Identifier or a Links record
-}
mkRelationship :: Maybe Identifier -> Maybe Links -> Maybe Relationship
mkRelationship Nothing Nothing = Nothing
mkRelationship resId links = Just $ Relationship resId links
makeLenses ''Resource
| toddmohney/json-api | src/Network/JSONApi/Resource.hs | mit | 4,164 | 0 | 13 | 805 | 1,039 | 558 | 481 | -1 | -1 |
module Problem0030 where
import Data.Char
import Data.List(sort)
type Composite = (OriginalNumber, SumOfPowers, DigitsLeft, SumIsLessThanOrEqualToNumber, SumIsEqualToOriginal)
type OriginalNumber = Int
type SumOfPowers = Int
type DigitsLeft = [Int]
type SumIsLessThanOrEqualToNumber = Bool
type SumIsEqualToOriginal = Bool
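-- Digits of a number in descending order with any zeros dropped,
-- e.g. valuesOfDigits 1024 == [4,2,1]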
valuesOfDigits :: Int -> [Int]
valuesOfDigits number = reverse
$ sort
$ filter (>0)
$ map digitToInt
$ show number
getOriginalNumber :: Composite -> Int
getOriginalNumber (original, _, _, _, _) = original
getSumOfPowers :: Composite -> Int
getSumOfPowers (_, sumOfPowers, _, _, _) = sumOfPowers
getDigitsLeft :: Composite -> [Int]
getDigitsLeft (_, _, digits, _, _) = digits
getLessThanNumber :: Composite -> Bool
getLessThanNumber (_, _, _, isLessThan, _) = isLessThan
getIsSumEqualToOriginal :: Composite -> Bool
getIsSumEqualToOriginal (_,_,_,_,isEqual) = isEqual
createComposite :: Int -> Composite
createComposite number = (number, 0, valuesOfDigits number, True, False)
updateComposite :: Composite -> Int -> Composite
updateComposite oldComposite numberToAdd = (newOriginal, newSum, newDigitsLeft, isLessThanOrEqual, isEqual)
where
newOriginal = getOriginalNumber oldComposite
newDigitsLeft = tail $ getDigitsLeft oldComposite
newSum = (getSumOfPowers oldComposite) + numberToAdd
isLessThanOrEqual = newOriginal >= newSum
isEqual = (length newDigitsLeft == 0) && newOriginal == newSum
findPowerOfDigitsEqualsNumber :: Int -> [Int]
findPowerOfDigitsEqualsNumber power = filter (>1) (calcPower initialList 9)
where
maximumInt = (power + 1) * (9 ^ power)
initialList = map createComposite [1..maximumInt]
calcPower list 0 = map getOriginalNumber $ filter getIsSumEqualToOriginal list
calcPower list int = calcPower newList (int - 1)
where
currentNumberToAdd = int ^ power
newList = filter getLessThanNumber $ map updateCompositeIfNeedBe list
where
updateCompositeIfNeedBe :: Composite -> Composite
updateCompositeIfNeedBe composite
| (length $ getDigitsLeft composite) == 0 = composite
| (head $ getDigitsLeft composite) == int = updateCompositeIfNeedBe $ updateComposite composite currentNumberToAdd
| otherwise = composite
| Sobieck00/practice | pe/nonvisualstudio/haskell/problem0030/Solution.hs | mit | 2,497 | 0 | 17 | 618 | 656 | 363 | 293 | 47 | 2 |
{-# LANGUAGE TypeFamilies #-}
module Drifter.Types where
import Data.Text
type Name = Text
type Description = Text
data Change a = Change
{ changeName :: Name
, changeDescription :: Maybe Description
, changeDependencies :: [Name]
, changeMethod :: Method a
}
data family Method a
data family DBConnection a
class Drifter a where
migrate :: DBConnection a -> [Change a] -> IO (Either String ())
| MichaelXavier/drifter | src/Drifter/Types.hs | mit | 478 | 0 | 12 | 148 | 121 | 71 | 50 | 14 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE PostfixOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- | This module exports a bunch of utilities for working inside the CReal
-- datatype. One should be careful to maintain the CReal invariant when using
-- these functions
----------------------------------------------------------------------------
module Data.CReal.Internal
(
-- * The CReal type
CReal(..)
-- ** Memoization
, Cache(..)
-- ** Simple utilities
, atPrecision
, crealPrecision
-- * More efficient variants of common functions
-- Note that the preconditions to these functions are not checked
-- ** Additive
, plusInteger
-- ** Multiplicative
, mulBounded
, (.*.)
, mulBoundedL
, (.*)
, (*.)
, recipBounded
, shiftL
, shiftR
, square
, squareBounded
-- ** Exponential
, expBounded
, expPosNeg
, logBounded
-- ** Trigonometric
, atanBounded
, sinBounded
, cosBounded
-- * Utilities for operating inside CReals
, crMemoize
, powerSeries
, alternateSign
-- ** Integer operations
, (/.)
, (/^)
, log2
, log10
, isqrt
-- * Utilities for converting CReals to Strings
, showAtPrecision
, decimalDigitsAtPrecision
, rationalToDecimal
) where
import Data.List (scanl')
import qualified Data.Bits as B
import Data.Bits hiding (shiftL, shiftR)
import GHC.Base (Int(..))
import GHC.Integer.Logarithms (integerLog2#, integerLogBase#)
import GHC.Real (Ratio(..), (%))
import GHC.TypeLits
import Text.Read
import qualified Text.Read.Lex as L
import System.Random (Random(..))
import Control.Concurrent.MVar
import Control.Exception
import System.IO.Unsafe (unsafePerformIO)
{-# ANN module ("HLint: ignore Reduce duplication" :: String) #-}
-- $setup
-- >>> :set -XDataKinds
-- >>> :set -XPostfixOperators
default ()
-- | The Cache type represents a way to memoize a `CReal`. It holds the largest
-- precision the number has been evaluated at, as well as the value. Rounding
-- it down gives the value for lower numbers.
data Cache
= Never
| Current {-# UNPACK #-} !Int !Integer
deriving (Show)
-- | The type CReal represents a fast binary Cauchy sequence. This is a Cauchy
-- sequence with the invariant that the pth element divided by 2^p will be
-- within 2^-p of the true value. Internally this sequence is represented as a
-- function from Ints to Integers, as well as an `MVar` to hold the highest
-- precision cached value.
data CReal (n :: Nat) = CR {-# UNPACK #-} !(MVar Cache) (Int -> Integer)
-- | 'crMemoize' takes a fast binary Cauchy sequence and returns a CReal
-- represented by that sequence which will memoize the values at each
-- precision. This is essential for getting good performance.
crMemoize :: (Int -> Integer) -> CReal n
crMemoize fn = unsafePerformIO $ do
mvc <- newMVar Never
return $ CR mvc fn
-- | crealPrecision x returns the type level parameter representing x's default
-- precision.
--
-- >>> crealPrecision (1 :: CReal 10)
-- 10
crealPrecision :: KnownNat n => CReal n -> Int
crealPrecision = fromInteger . natVal
-- | @x \`atPrecision\` p@ returns the numerator of the pth element in the
-- Cauchy sequence represented by x. The denominator is 2^p.
--
-- >>> 10 `atPrecision` 10
-- 10240
atPrecision :: CReal n -> Int -> Integer
(CR mvc f) `atPrecision` (!p) = unsafePerformIO $ modifyMVar mvc $ \vc -> do
vc' <- evaluate vc
case vc' of
Current j v | j >= p ->
pure (vc', v /^ (j - p))
_ -> do
v <- evaluate $ f p
let !vcn = Current p v
pure (vcn, v)
{-# INLINABLE atPrecision #-}
-- | A CReal with precision p is shown as a decimal number d such that d is
-- within 2^-p of the true value.
--
-- >>> show (47176870 :: CReal 0)
-- "47176870"
--
-- >>> show (pi :: CReal 230)
-- "3.1415926535897932384626433832795028841971693993751058209749445923078164"
instance KnownNat n => Show (CReal n) where
show x = showAtPrecision (crealPrecision x) x
-- | The instance of Read will read an optionally signed number expressed in
-- decimal scientific notation
instance Read (CReal n) where
readPrec = parens $ do
lit <- lexP
case lit of
Number n -> return $ fromRational $ L.numberToRational n
Symbol "-" -> prec 6 $ do
lit' <- lexP
case lit' of
Number n -> return $ fromRational $ negate $ L.numberToRational n
_ -> pfail
_ -> pfail
{-# INLINE readPrec #-}
readListPrec = readListPrecDefault
{-# INLINE readListPrec #-}
readsPrec = readPrec_to_S readPrec
{-# INLINE readsPrec #-}
readList = readPrec_to_S readListPrec 0
{-# INLINE readList #-}
-- | @signum (x :: CReal p)@ returns the sign of @x@ at precision @p@. It's
-- important to remember that this /may not/ represent the actual sign of @x@ if
-- the distance between @x@ and zero is less than 2^-@p@.
--
-- This is a little bit of a fudge, but it's probably better than failing to
-- terminate when trying to find the sign of zero. The class still respects the
-- abs-signum law though.
--
-- >>> signum (0.1 :: CReal 2)
-- 0.0
--
-- >>> signum (0.1 :: CReal 3)
-- 1.0
instance Num (CReal n) where
{-# INLINE fromInteger #-}
fromInteger i = let
!vc = Current 0 i
in unsafePerformIO $ do
mvc <- newMVar vc
return $ CR mvc (B.shiftL i)
-- @negate@ and @abs@ try to give initial guesses, but don't wait if the
-- @\'MVar\'@ is being used elsewhere.
{-# INLINE negate #-}
negate (CR mvc fn) = unsafePerformIO $ do
vcc <- tryReadMVar mvc
let
!vcn = case vcc of
Nothing -> Never
Just Never -> Never
Just (Current p v) -> Current p (negate v)
mvn <- newMVar vcn
return $ CR mvn (negate . fn)
{-# INLINE abs #-}
abs (CR mvc fn) = unsafePerformIO $ do
vcc <- tryReadMVar mvc
let
!vcn = case vcc of
Nothing -> Never
Just Never -> Never
Just (Current p v) -> Current p (abs v)
mvn <- newMVar vcn
return $ CR mvn (abs . fn)
{-# INLINE (+) #-}
x1 + x2 = crMemoize (\p -> let n1 = atPrecision x1 (p + 2)
n2 = atPrecision x2 (p + 2)
in (n1 + n2) /^ 2)
{-# INLINE (-) #-}
x1 - x2 = crMemoize (\p -> let n1 = atPrecision x1 (p + 2)
n2 = atPrecision x2 (p + 2)
in (n1 - n2) /^ 2)
{-# INLINE (*) #-}
x1 * x2 = let
s1 = log2 (abs (atPrecision x1 0) + 2) + 3
s2 = log2 (abs (atPrecision x2 0) + 2) + 3
in crMemoize (\p -> let n1 = atPrecision x1 (p + s2)
n2 = atPrecision x2 (p + s1)
in (n1 * n2) /^ (p + s1 + s2))
signum x = crMemoize (\p -> B.shiftL (signum (x `atPrecision` p)) p)
-- | Taking the reciprocal of zero will not terminate
instance Fractional (CReal n) where
{-# INLINE fromRational #-}
-- Use @roundD@ instead of @/.@ because we know @d > 0@ for a valid Rational.
fromRational (n :% d) = crMemoize (\p -> roundD (B.shiftL n p) d)
{-# INLINE recip #-}
-- TODO: Make recip 0 throw an error (if, for example, it would take more
-- than 4GB of memory to represent the result)
recip x = let
s = findFirstMonotonic ((3 <=) . abs . atPrecision x)
in crMemoize (\p -> let n = atPrecision x (p + 2 * s + 2)
in bit (2 * p + 2 * s + 2) /. n)
instance Floating (CReal n) where
-- TODO: Could we use something faster such as Ramanujan's formula
pi = piBy4 `shiftL` 2
exp x = let o = shiftL (x *. recipBounded (shiftL ln2 1)) 1
l = atPrecision o 0
y = x - fromInteger l *. ln2
in if l == 0
then expBounded x
else expBounded y `shiftL` fromInteger l
-- | Range reduction on the principle that ln (a * b) = ln a + ln b
log x = let l = log2 (atPrecision x 2) - 2
in if -- x <= 0.75
| l < 0 -> - log (recip x)
-- 0.75 <= x <= 2
| l == 0 -> logBounded x
-- x >= 2
| otherwise -> let a = x `shiftR` l
in logBounded a + fromIntegral l *. ln2
sqrt x = crMemoize (\p -> let n = atPrecision x (2 * p)
in isqrt n)
-- | This will diverge when the base is not positive
x ** y = exp (log x * y)
logBase x y = log y / log x
sin x = cos (x - piBy2)
cos x = let o = shiftL (x *. recipBounded pi) 2
s = atPrecision o 1 /^ 1
octant = fromInteger $ s .&. 7
offset = x - (fromIntegral s *. piBy4)
fs = [ cosBounded
, negate . sinBounded . subtract piBy4
, negate . sinBounded
, negate . cosBounded . (piBy4-)
, negate . cosBounded
, sinBounded . subtract piBy4
, sinBounded
, cosBounded . (piBy4-)]
in (fs !! octant) offset
tan x = sin x .* recip (cos x)
asin x = atan (x .*. recipBounded (1 + sqrt (1 - squareBounded x))) `shiftL` 1
acos x = piBy2 - asin x
atan x = let -- q is 4 times x to within 1/4
q = x `atPrecision` 2
in if -- x <= -1
| q < -4 -> atanBounded (negate (recipBounded x)) - piBy2
-- -1.25 <= x <= -0.75
| q == -4 -> -(piBy4 + atanBounded ((x + 1) .*. recipBounded (x - 1)))
-- 0.75 <= x <= 1.25
| q == 4 -> piBy4 + atanBounded ((x - 1) .*. recipBounded (x + 1))
-- x >= 1
| q > 4 -> piBy2 - atanBounded (recipBounded x)
-- -0.75 <= x <= 0.75
| otherwise -> atanBounded x
-- TODO: benchmark replacing these with their series expansion
sinh x = let (expX, expNegX) = expPosNeg x
in (expX - expNegX) `shiftR` 1
cosh x = let (expX, expNegX) = expPosNeg x
in (expX + expNegX) `shiftR` 1
tanh x = let e2x = exp (x `shiftL` 1)
in (e2x - 1) *. recipBounded (e2x + 1)
asinh x = log (x + sqrt (square x + 1))
acosh x = log (x + sqrt (x + 1) * sqrt (x - 1))
atanh x = (log (1 + x) - log (1 - x)) `shiftR` 1
-- | 'toRational' returns the CReal n evaluated at a precision of 2^-n
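--
-- For example:
--
-- >>> toRational (0.5 :: CReal 4)
-- 1 % 2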
instance KnownNat n => Real (CReal n) where
toRational x = let p = crealPrecision x
in x `atPrecision` p % bit p
instance KnownNat n => RealFrac (CReal n) where
properFraction x = let p = crealPrecision x
v = x `atPrecision` p
r = v .&. (bit p - 1)
c = unsafeShiftR (v - r) p
n = if c < 0 && r /= 0 then c + 1 else c
f = plusInteger x (negate n)
in (fromInteger n, f)
truncate x = let p = crealPrecision x
v = x `atPrecision` p
r = v .&. (bit p - 1)
c = unsafeShiftR (v - r) p
n = if c < 0 && r /= 0 then c + 1 else c
in fromInteger n
round x = let p = crealPrecision x
n = (x `atPrecision` p) /^ p
in fromInteger n
ceiling x = let p = crealPrecision x
v = x `atPrecision` p
r = v .&. (bit p - 1)
n = unsafeShiftR (v - r) p
in if r /= 0 then fromInteger $ n + 1 else fromInteger n
floor x = let p = crealPrecision x
v = x `atPrecision` p
r = v .&. (bit p - 1)
n = unsafeShiftR (v - r) p
in fromInteger n
-- | Several of the functions in this class ('floatDigits', 'floatRange',
-- 'exponent', 'significand') only make sense for floats represented by a
-- mantissa and exponent. These are bound to error.
--
-- @atan2 y x `atPrecision` p@ performs the comparison to determine the
-- quadrant at precision p. This can cause atan2 to be slightly slower than atan
instance KnownNat n => RealFloat (CReal n) where
floatRadix _ = 2
floatDigits _ = error "Data.CReal.Internal floatDigits"
floatRange _ = error "Data.CReal.Internal floatRange"
decodeFloat x = let p = crealPrecision x
in (x `atPrecision` p, -p)
encodeFloat m n = if n <= 0
then fromRational (m % bit (negate n))
else fromRational (unsafeShiftL m n :% 1)
exponent = error "Data.CReal.Internal exponent"
significand = error "Data.CReal.Internal significand"
scaleFloat = flip shiftL
isNaN _ = False
isInfinite _ = False
isDenormalized _ = False
isNegativeZero _ = False
isIEEE _ = False
atan2 y x = crMemoize (\p ->
let y' = y `atPrecision` p
x' = x `atPrecision` p
θ = if | x' > 0 -> atan (y/x)
| x' == 0 && y' > 0 -> piBy2
| x' < 0 && y' > 0 -> pi + atan (y/x)
| x' <= 0 && y' < 0 -> -atan2 (-y) x
| y' == 0 && x' < 0 -> pi -- must be after the previous test on zero y
| x'==0 && y'==0 -> 0 -- must be after the other double zero tests
| otherwise -> error "Data.CReal.Internal atan2"
in θ `atPrecision` p)
-- | Values of type @CReal p@ are compared for equality at precision @p@. This
-- may cause values which differ by less than 2^-p to compare as equal.
--
-- >>> 0 == (0.1 :: CReal 1)
-- True
instance KnownNat n => Eq (CReal n) where
-- TODO, should this try smaller values first?
CR mvx _ == CR mvy _ | mvx == mvy = True
x == y = let p = crealPrecision x
in ((x - y) `atPrecision` p) == 0
-- | Like equality, values of type @CReal p@ are compared at precision @p@.
instance KnownNat n => Ord (CReal n) where
compare (CR mvx _) (CR mvy _) | mvx == mvy = EQ
compare x y =
let p = crealPrecision x
in compare ((x - y) `atPrecision` p) 0
max x y = crMemoize (\p -> max (atPrecision x p) (atPrecision y p))
min x y = crMemoize (\p -> min (atPrecision x p) (atPrecision y p))
-- | The 'Random' instance for @\'CReal\' p@ will return a random number with at
-- least @p@ digits of precision, every digit after that is zero.
instance KnownNat n => Random (CReal n) where
randomR (lo, hi) g = let d = hi - lo
l = 1 + log2 (abs d `atPrecision` 0)
p = l + crealPrecision lo
(n, g') = randomR (0, 2^p) g
r = fromRational (n % 2^p)
in (r * d + lo, g')
random g = let p = 1 + crealPrecision (undefined :: CReal n)
(n, g') = randomR (0, max 0 (2^p - 2)) g
r = fromRational (n % 2^p)
in (r, g')
--------------------------------------------------------------------------------
-- Some utility functions
--------------------------------------------------------------------------------
--
-- Constants
--
piBy4 :: CReal n
piBy4 = atanBounded (recipBounded 5) `shiftL` 2 - atanBounded (recipBounded 239) -- Machin Formula
piBy2 :: CReal n
piBy2 = piBy4 `shiftL` 1
ln2 :: CReal n
ln2 = logBounded 2
--
-- Bounded multiplication
--
infixl 7 `mulBounded`, `mulBoundedL`, .*, *., .*.
-- | Alias for @'mulBoundedL'@
(.*) :: CReal n -> CReal n -> CReal n
(.*) = mulBoundedL
-- | Alias for @flip 'mulBoundedL'@
(*.) :: CReal n -> CReal n -> CReal n
(*.) = flip mulBoundedL
-- | Alias for @'mulBounded'@
(.*.) :: CReal n -> CReal n -> CReal n
(.*.) = mulBounded
-- | A more efficient multiply with the restriction that the first argument
-- must be in the closed range [-1..1]
mulBoundedL :: CReal n -> CReal n -> CReal n
mulBoundedL x1 x2 = let
s1 = 4
s2 = log2 (abs (atPrecision x2 0) + 2) + 3
in crMemoize (\p -> let n1 = atPrecision x1 (p + s2)
n2 = atPrecision x2 (p + s1)
in (n1 * n2) /^ (p + s1 + s2))
-- | A more efficient multiply with the restriction that both values must be
-- in the closed range [-1..1]
mulBounded :: CReal n -> CReal n -> CReal n
mulBounded x1 x2 = let
s1 = 4
s2 = 4
in crMemoize (\p -> let n1 = atPrecision x1 (p + s2)
n2 = atPrecision x2 (p + s1)
in (n1 * n2) /^ (p + s1 + s2))
-- | A more efficient 'recip' with the restriction that the input must have
-- absolute value greater than or equal to 1
recipBounded :: CReal n -> CReal n
recipBounded x = crMemoize (\p -> let s = 2
n = atPrecision x (p + 2 * s + 2)
in bit (2 * p + 2 * s + 2) /. n)
-- | Return the square of the input, more efficient than @('*')@
{-# INLINABLE square #-}
square :: CReal n -> CReal n
square x = let
s = log2 (abs (atPrecision x 0) + 2) + 3
in crMemoize (\p -> let n = atPrecision x (p + s)
in (n * n) /^ (p + 2 * s))
-- | A more efficient 'square' with the restriction that the value must be in
-- the closed range [-1..1]
{-# INLINABLE squareBounded #-}
squareBounded :: CReal n -> CReal n
squareBounded x@(CR mvc _) = unsafePerformIO $ do
vcc <- tryReadMVar mvc
let
!s = 4
!vcn = case vcc of
Nothing -> Never
Just Never -> Never
Just (Current j n) -> case j - s of
p | p < 0 -> Never
p -> Current p ((n * n) /^ (p + 2 * s))
fn' !p = let n = atPrecision x (p + s)
in (n * n) /^ (p + 2 * s)
mvn <- newMVar vcn
return $ CR mvn fn'
--
-- Bounded exponential functions and expPosNeg
--
-- | A more efficient 'exp' with the restriction that the input must be in the
-- closed range [-1..1]
expBounded :: CReal n -> CReal n
expBounded x = let q = (1%) <$> scanl' (*) 1 [1..]
in powerSeries q (max 5) x
-- | A more efficient 'log' with the restriction that the input must be in the
-- closed range [2/3..2]
logBounded :: CReal n -> CReal n
logBounded x = let q = [1 % n | n <- [1..]]
y = (x - 1) .* recip x
in y .* powerSeries q id y
-- | @expPosNeg x@ returns @(exp x, exp (-x))@
expPosNeg :: CReal n -> (CReal n, CReal n)
expPosNeg x = let o = x / ln2
l = atPrecision o 0
y = x - fromInteger l * ln2
in if l == 0
then (expBounded x, expBounded (-x))
else (expBounded y `shiftL` fromInteger l,
expBounded (negate y) `shiftR` fromInteger l)
--
-- Bounded trigonometric functions
--
-- | A more efficient 'sin' with the restriction that the input must be in the
-- closed range [-1..1]
sinBounded :: CReal n -> CReal n
sinBounded x = let q = alternateSign (scanl' (*) 1 [ 1 % (n*(n+1)) | n <- [2,4..]])
in x .* powerSeries q (max 1) (squareBounded x)
-- | A more efficient 'cos' with the restriction that the input must be in the
-- closed range [-1..1]
cosBounded :: CReal n -> CReal n
cosBounded x = let q = alternateSign (scanl' (*) 1 [1 % (n*(n+1)) | n <- [1,3..]])
in powerSeries q (max 1) (squareBounded x)
-- | A more efficient 'atan' with the restriction that the input must be in the
-- closed range [-1..1]
atanBounded :: CReal n -> CReal n
atanBounded x = let q = scanl' (*) 1 [n % (n + 1) | n <- [2,4..]]
s = squareBounded x
rd = recipBounded (plusInteger s 1)
in (x .*. rd) .* powerSeries q (+1) (s .*. rd)
--
-- Integer addition
--
infixl 6 `plusInteger`
-- | @x \`plusInteger\` n@ is equal to @x + fromInteger n@, but more efficient
{-# INLINE plusInteger #-}
plusInteger :: CReal n -> Integer -> CReal n
plusInteger x 0 = x
plusInteger (CR mvc fn) n = unsafePerformIO $ do
vcc <- tryReadMVar mvc
let
!vcn = case vcc of
Nothing -> Never
Just Never -> Never
Just (Current j v) -> Current j (v + unsafeShiftL n j)
fn' !p = fn p + B.shiftL n p
mvc' <- newMVar vcn
return $ CR mvc' fn'
--
-- Multiplication with powers of two
--
infixl 8 `shiftL`, `shiftR`
-- | @x \`shiftR\` n@ is equal to @x@ divided by 2^@n@
--
-- @n@ can be negative or zero
--
-- This can be faster than doing the division
shiftR :: CReal n -> Int -> CReal n
shiftR x n = crMemoize (\p -> let p' = p - n
in if p' >= 0
then atPrecision x p'
else atPrecision x 0 /^ negate p')
-- | @x \`shiftL\` n@ is equal to @x@ multiplied by 2^@n@
--
-- @n@ can be negative or zero
--
-- This can be faster than doing the multiplication
shiftL :: CReal n -> Int -> CReal n
shiftL x = shiftR x . negate
--
-- Showing CReals
--
-- | Return a string representing a decimal number within 2^-p of the value
-- represented by the given @CReal p@.
showAtPrecision :: Int -> CReal n -> String
showAtPrecision p x = let places = decimalDigitsAtPrecision p
r = atPrecision x p % bit p
in rationalToDecimal places r
-- | How many decimal digits are required to represent a number to within 2^-p
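--
-- For example, 10 bits of precision call for 4 decimal digits:
--
-- >>> decimalDigitsAtPrecision 10
-- 4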
decimalDigitsAtPrecision :: Int -> Int
decimalDigitsAtPrecision 0 = 0
decimalDigitsAtPrecision p = log10 (bit p) + 1
-- | @rationalToDecimal p x@ returns a string representing @x@ at @p@ decimal
-- places.
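--
-- >>> rationalToDecimal 3 (1 % 8)
-- "0.125"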
rationalToDecimal :: Int -> Rational -> String
rationalToDecimal places (n :% d) = p ++ is ++ if places > 0 then "." ++ fs else ""
where p = case signum n of
-1 -> "-"
_ -> ""
ds = show (roundD (abs n * 10^places) d)
l = length ds
(is, fs) = if l <= places then ("0", replicate (places - l) '0' ++ ds) else splitAt (l - places) ds
--
-- Integer operations
--
divZeroErr :: a
divZeroErr = error "Division by zero"
{-# NOINLINE divZeroErr #-}
roundD :: Integer -> Integer -> Integer
roundD n d = case divMod n d of
(q, r) -> case compare (unsafeShiftL r 1) d of
LT -> q
EQ -> if testBit q 0 then q + 1 else q
GT -> q + 1
{-# INLINE roundD #-}
infixl 7 /.
-- | Division rounding to the nearest integer and rounding half integers to the
-- nearest even integer.
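--
-- For example, half-integers round to the nearest even integer:
--
-- >>> 7 /. 2
-- 4
-- >>> 5 /. 2
-- 2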
(/.) :: Integer -> Integer -> Integer
(!n) /. (!d) = case compare d 0 of
LT -> roundD (negate n) (negate d)
EQ -> divZeroErr
GT -> roundD n d
{-# INLINABLE (/.) #-}
infixl 7 /^
-- | @n /^ p@ is equivalent to @n \'/.\' (2^p)@, but faster, and it works for
-- negative values of p.
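--
-- >>> 7 /^ 1
-- 4
-- >>> 5 /^ 1
-- 2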
(/^) :: Integer -> Int -> Integer
(!n) /^ (!p) = case compare p 0 of
LT -> unsafeShiftL n (negate p)
EQ -> n
GT -> let
!bp = bit p
!r = n .&. (bp - 1)
!q = unsafeShiftR (n - r) p
in case compare (unsafeShiftL r 1) bp of
LT -> q
EQ -> if testBit q 0 then q + 1 else q
GT -> q + 1
-- | @log2 x@ returns the base 2 logarithm of @x@ rounded towards zero.
--
-- The input must be positive
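--
-- >>> log2 10
-- 3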
{-# INLINE log2 #-}
log2 :: Integer -> Int
log2 x = I# (integerLog2# x)
-- | @log10 x@ returns the base 10 logarithm of @x@ rounded towards zero.
--
-- The input must be positive
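--
-- >>> log10 1000
-- 3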
{-# INLINE log10 #-}
log10 :: Integer -> Int
log10 x = I# (integerLogBase# 10 x)
-- | @isqrt x@ returns the square root of @x@ rounded towards zero.
--
-- The input must not be negative
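--
-- >>> isqrt 17
-- 4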
{-# INLINABLE isqrt #-}
isqrt :: Integer -> Integer
isqrt x | x < 0 = error "Sqrt applied to negative Integer"
| x == 0 = 0
| otherwise = until satisfied improve initialGuess
where improve r = unsafeShiftR (r + x `div` r) 1
satisfied r = let r2 = r * r in r2 <= x && r2 + unsafeShiftL r 1 >= x
initialGuess = bit (unsafeShiftR (log2 x) 1)
--
-- Searching
--
-- | Given a monotonic predicate, search for the threshold at which it starts to hold
{-# INLINABLE findFirstMonotonic #-}
findFirstMonotonic :: (Int -> Bool) -> Int
findFirstMonotonic p = findBounds 0 1
where findBounds !l !u = if p u then binarySearch l u
else findBounds u (u * 2)
binarySearch !l !u = let !m = l + (u - l) `div` 2
in if | l+1 == u -> l
| p m -> binarySearch l m
| otherwise -> binarySearch m u
--
-- Power series
--
-- | Apply 'negate' to every other element, starting with the second
--
-- >>> alternateSign [1..5]
-- [1,-2,3,-4,5]
{-# INLINABLE alternateSign #-}
alternateSign :: Num a => [a] -> [a]
alternateSign ls = foldr
(\a r b -> if b then negate a : r False else a : r True)
(const [])
ls
False
-- | @powerSeries q f x `atPrecision` p@ will evaluate the power series with
-- coefficients @q@ up to the coefficient at index @f p@ at value @x@
--
-- @f@ should be a function such that the CReal invariant is maintained. This
-- means that if the power series @y = a[0] + a[1] + a[2] + ...@ is evaluated
-- at precision @p@ then the sum of every @a[n]@ for @n > f p@ must be less than
-- 2^-p.
--
-- This is used by all the bounded transcendental functions.
--
-- >>> let (!) x = product [2..x]
-- >>> powerSeries [1 % (n!) | n <- [0..]] (max 5) 1 :: CReal 218
-- 2.718281828459045235360287471352662497757247093699959574966967627724
powerSeries :: [Rational] -> (Int -> Int) -> CReal n -> CReal n
powerSeries q termsAtPrecision x = crMemoize
(\p -> let t = termsAtPrecision p
d = log2 (toInteger t) + 2
p' = p + d
p'' = p' + d
m = atPrecision x p''
xs = (% 1) <$> iterate (\e -> m * e /^ p'') (bit p')
r = sum . take (t + 1) . fmap (round . (* fromInteger (bit d))) $ zipWith (*) q xs
in r /^ (2 * d))
| expipiplus1/exact-real | src/Data/CReal/Internal.hs | mit | 25,738 | 2 | 22 | 7,974 | 7,483 | 3,908 | 3,575 | 463 | 6 |
module Handler.Purge where
import Import
import Handler.Common
purge = runDB $ do
deleteWhere ([] :: [Filter Poll])
deleteWhere ([] :: [Filter Votes])
compareIp :: Text -> HandlerT App IO Bool
compareIp myIp = do
ip <- getIp
return $ ip == myIp
getPurgeR :: Handler Html
getPurgeR = do
approval <- compareIp $ pack "10.1.2.237"
case approval of
True -> do
purge
defaultLayout $ do
[whamlet|<div .jumbotron .text-center><h1>DB purged!|]
False -> do
defaultLayout $ do
[whamlet|<div .jumbotron .text-center><h1>Nice try!|]
| sramekj/lunchvote | Handler/Purge.hs | mit | 663 | 0 | 14 | 221 | 182 | 93 | 89 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Presenter.FileAuto (
-- *Functions
fileAuto
) where
import Control.Auto(arrM)
import Control.Exception(SomeException(..), try)
import Control.Monad(void, when)
import Control.Monad.Trans(liftIO)
import qualified Data.Text as T
import System.Directory(removeFile)
import GUI.Command
import HRowsException
import Model
import Model.DefaultFileNames
import Model.RowStore.RowStoreConf
import Presenter.Input
import Presenter.Auto
fileAuto :: PresenterAuto (FileCommand, Model, SourceInfo) ()
fileAuto = arrM (uncurry3 applyCommand)
uncurry3 :: (a -> b -> c -> d) -> ((a, b, c) -> d)
uncurry3 f (a, b, c) = f a b c
message :: Message -> PresenterM ()
message = sendGUIM . ShowIteration . DisplayMessage
applyCommand :: FileCommand -> Model -> SourceInfo -> PresenterM ()
applyCommand LoadFile _ si = do
r <- liftIO $ try $ readRowStore si
case r of
Right (rst, mconf) -> do
let model = case mconf of
Nothing -> fromRowStore rst
Just cnf -> foldr addSourceInfo (fromRowStore rst) $ sourceInfos cnf
sendInputM $ ChangeModel model
sendInputM $ SetMainSource si
Left (HRowsException mess) -> message $ ErrorMessage mess
applyCommand (LoadFileFromName pc) model info = do
let info' = changePathAndConf pc info
applyCommand LoadFile model info'
applyCommand WriteFile model si = doWrite model si False
applyCommand (WriteFileFromName pc) model si = let
si' = changePathAndConf pc si
in doWrite model si' True
applyCommand (ImportFromFile t si) _ _ = do
r <- liftIO $ try $ readRowStore si
case r of
Right (rst, _) -> sendInputM $ ChooseImportDialog t rst
Left (HRowsException m) -> message $ ErrorMessage m
applyCommand (AddSourceFromSourceInfo name si) _ _ = do
r <- liftIO $ try $ readRowStore si
case r of
Right (rst, _) -> sendInputM . AddNewSource si $ setName name rst
Left (HRowsException m) -> message $ ErrorMessage m
applyCommand WriteBackup model si = do
when (changed `from` model) $ do
let conf = defaultBackupFileName <$> confPath (siPathAndConf si)
fp = defaultBackupFileName $ path (siPathAndConf si)
si' = changePathAndConf (PathAndConf fp conf) si
r <- liftIO $ try $ writeRowStore si' (getSourceInfos model) <@ model
case r of
Right _ -> return ()
Left (HRowsException m) -> message $ ErrorMessage ("Error al hacer la copia de seguridad: " `T.append` m)
applyCommand BackupOnExit model si
| changed `from` model = applyCommand WriteBackup model si
| otherwise = do
let conf = defaultBackupFileName <$> confPath (siPathAndConf si)
fp = defaultBackupFileName $ path (siPathAndConf si)
void . liftIO $ ((try $ do
removeFile fp
maybe (return ()) removeFile conf) :: IO (Either SomeException ()))
doWrite :: Model -> SourceInfo -> Bool -> PresenterM ()
doWrite model si changedSource = do
r <- liftIO $ try $ writeRowStore si (getSourceInfos model) <@ model
case r of
Right _ -> do
      message $ InformationMessage "File written correctly."
when changedSource $ sendInputM $ SetMainSource si
sendInputM SetUnchanged
Left (HRowsException m) -> message $ ErrorMessage m
| jvilar/hrows | lib/Presenter/FileAuto.hs | gpl-2.0 | 3,546 | 0 | 21 | 988 | 1,147 | 567 | 580 | 76 | 6 |
Config {
font = "xft:Dejavu Sans Mono:size=10:regular:antialias=true"
-- bgColor = "#362b12",
fgColor = "#fff897",
bgColor = "#000000",
-- fgColor = "#ffffff",
position = Static { xpos = 0, ypos = 0, width = 1920, height = 25 },
lowerOnStart = True,
commands = [
Run Weather "UUDD" ["-t","<tempC>°C","-L","18","-H","25","--normal","green","--high","red","--low","lightblue"] 36000 --moscow
-- Run Weather "UWKD" ["-t","<station>: <tempC>°C","-L","18","-H","25","--normal","green","--high","red","--low","lightblue"] 36000 -- kazan
,Run Memory ["-t","<used>/<total>M (<cache>M)","-H","8192","-L","4096","-h","#FFB6B0","-l","#CEFFAC","-n","#FFFFCC"] 10
,Run Network "wlp2s0" [
"-t" ,"rx:<rx>, tx:<tx>"
,"-H" ,"200"
,"-L" ,"10"
,"-h" ,"#FFB6B0"
,"-l" ,"#CEFFAC"
,"-n" ,"#FFFFCC"
, "-c" , " "
, "-w" , "2"
] 10
,Run Date "%d.%m.%Y %H:%M:%S" "date" 10
,Run MultiCpu [ "--template" , "<autototal>"
, "--Low" , "50" -- units: %
, "--High" , "85" -- units: %
, "--low" , "gray"
, "--normal" , "darkorange"
, "--high" , "darkred"
, "-c" , " "
, "-w" , "3"
] 10
,Run CoreTemp [ "--template" , "<core0> <core1> <core2>°C"
, "--Low" , "70" -- units: °C
, "--High" , "80" -- units: °C
, "--low" , "darkgreen"
, "--normal" , "darkorange"
, "--high" , "darkred"
] 50
,Run StdinReader
,Run PipeReader "/tmp/.volume-pipe" "vol"
],
sepChar = "%",
alignSep = "}{",
template = "%StdinReader% }{ %coretemp% | %multicpu% | %memory% | %wlp2s0% | %UUDD% | <fc=#FFFFCC>%date%</fc> "
}
| ulyanin/xmonad-config | xmobar.hs | gpl-3.0 | 1,961 | 2 | 9 | 703 | 382 | 239 | 143 | -1 | -1 |
{-# Language TemplateHaskell #-}
module Infsabot.Parameters(
LinearF(LinearF), apply,
Parameters(Parameters),
paramBoardSize,
paramNoopCost,
paramMoveCost,
paramDigCost,
paramNewRobotCost,
paramFireCost,
paramInitialHP,
paramInitialMaterial,
paramLineOfSight,
paramLineOfFire,
paramLineOfMessageSending,
paramHPRemoved,
defaultParameters
) where
import Data.DeriveTH(derive, makeArbitrary)
import Test.QuickCheck.Arbitrary
import Infsabot.Tools.Interface
data LinearF a = LinearF a a deriving (Eq, Show)
apply :: (Num a) => LinearF a -> a -> a
apply (LinearF m b) x = m * x + b
data Parameters = Parameters {
paramBoardSize :: Natural,
paramNoopCost :: Natural,
paramMoveCost :: Natural,
paramDigCost :: Natural,
paramNewRobotCost :: Natural,
paramFireCost :: Natural,
paramInitialHP :: Natural,
paramInitialMaterial :: Natural,
paramLineOfSight :: Natural,
paramLineOfFire :: Natural,
paramLineOfMessageSending :: Natural,
paramHPRemoved :: LinearF Natural
} deriving (Show)
defaultParameters :: Parameters
defaultParameters = Parameters {
paramBoardSize = 75,
paramNoopCost = 1,
paramMoveCost = 5,
paramDigCost = 10,
paramNewRobotCost = 20,
paramFireCost = 5,
paramInitialHP = 100,
paramInitialMaterial = 50,
paramLineOfSight = 5,
paramLineOfFire = 3,
paramLineOfMessageSending = 4,
paramHPRemoved = LinearF 1 2
}
$( derive makeArbitrary ''LinearF )
$( derive makeArbitrary ''Parameters )
| kavigupta/Infsabot | Infsabot/Parameters.hs | gpl-3.0 | 1,665 | 0 | 9 | 438 | 372 | 226 | 146 | 58 | 1 |
{-# OPTIONS -cpp #-}
{-# LANGUAGE ScopedTypeVariables #-}
import System.Environment (getArgs)
import System.Directory
import System.Plugins
import Data.List
import PluginAPI
import Control.Applicative
import Pipes.Safe (Base, MonadSafe, bracket)
import Control.Monad (unless, when)
import Pipes (lift, every, runEffect, for)
import Pipes.Safe (bracket, runSafeP)
import Strings (b2s)
import System.Posix.FilePath ((</>))
import Sensing
import Numeric
debug_fp = False
internalObjects = ["PluginAPI.o", "Main.o", "Sensing.o", "Strings.o"]
loadPlugin :: FilePath -> IO PluginI
loadPlugin file = do res <- ((load_ file ["./"] "plugin") :: IO (LoadStatus PluginI))
case res of
LoadFailure msg -> error ("Failed to load " ++ file)
LoadSuccess _ v -> return v
loadMagicPlugins :: IO [PluginI]
loadMagicPlugins = do fileList <- getDirectoryContents "./"
let pluginFileList = filter (\x -> isSuffixOf ".o" x && notElem x internalObjects) fileList
mapM loadPlugin pluginFileList
fromLoadSuc (LoadFailure _) = error "load failed"
fromLoadSuc (LoadSuccess _ v) = v
{--
(test :: FilePathO2) <- sense "."
prRep = print . senseReport
runEffect $ runSafeP $ for (every (senseRecursive5 test)) (lift . (lift . prRep))
--}
printFileRepresentations :: [(FilePath -> IO String)] -> FilePathO2 -> IO ()
printFileRepresentations cnv (FilePathO2 path FileDataObject _ _ _ _) = do results <- sequence [(c (b2s path)) | c <- cnv]
mapM_ putStrLn results
printFileRepresentations _ _ = do return ()
main = do
plugins <- loadMagicPlugins
args <- getArgs
let path = head args
let comparators = map (getFileRepresentation) plugins
(firstElem :: FilePathO2) <- sense path
runEffect $ runSafeP $
for (every (senseRecursive5 firstElem)) (lift . (lift . printFileRepresentations comparators))
| glueckself/mhaskell-ss14 | Main.hs | gpl-3.0 | 2,024 | 0 | 14 | 511 | 569 | 295 | 274 | 41 | 2 |
module Main where
import qualified Jinzamomi.Driver as D
import System.Log.Logger
import System.Log.Handler.Syslog
import System.Log.Handler.Simple
import System.Log.Handler (setFormatter)
import System.Log.Formatter
import qualified GHC.IO.Handle.FD as FD
import System.Environment (getArgs)
import Options.Applicative
data Opt = Opt {
loglevel :: String,
driverOpt :: D.DriverOpt
}
commonOpt :: Parser (D.DriverOpt -> Opt)
commonOpt = Opt
<$> strOption
( long "log"
<> short 'l'
<> value "debug"
<> showDefault
<> metavar "LOGLEVEL"
<> help "Log Level(debug/info/notice/warning/error/critical/alert/emergency)." )
versionOpt :: Parser (a -> a)
versionOpt = infoOption "Jinzamomi 0.0.0"
( long "version"
<> short 'v'
<> help "Print version information" )
loglevelOf :: String -> Priority
loglevelOf "debug" = DEBUG
loglevelOf "info" = INFO
loglevelOf "notice" = NOTICE
loglevelOf "warning" = WARNING
loglevelOf "error" = ERROR
loglevelOf "critical" = CRITICAL
loglevelOf "alert" = ALERT
loglevelOf "emergency" = EMERGENCY
loglevelOf _ = DEBUG
setupLogger :: Priority -> IO ()
setupLogger level = do
logger <- getRootLogger
handler <- verboseStreamHandler FD.stdout DEBUG
  let handler' = setFormatter handler (tfLogFormatter "%y/%m/%d %H:%M:%S" "[$time $loggername $prio] $msg")
saveGlobalLogger $ setLevel level $ addHandler handler' $ removeHandler logger
tag :: String
tag = "Main"
main :: IO ()
main = do
opt <- execParser opts
let loglev = loglevelOf (loglevel opt)
setupLogger loglev
noticeM tag " *** Jinzamomi *** "
noticeM tag ("Log Level: " ++ show loglev)
D.execute (driverOpt opt)
--drv:args <- getArgs
--let executor = D.executorOf drv
--executor args
--Main.compile "ext/kag3/data/startup.tjs" "runtime/test/proj/startup.tjs.js"
--Main.compile "ext/kag3/data/system/Initialize.tjs" "runtime/test/proj/system/Initialize.tjs.js"
where
cmds = D.driverOpt
opts = info (versionOpt <*> helper <*> commonOpt <*> cmds)
( fullDesc
<> progDesc "Ahead of Time transpiler from Bishoujo-games to HTML5 "
<> header "Jinzamomi" )
| ledyba/Jinzamomi | app/Main.hs | gpl-3.0 | 2,194 | 0 | 13 | 431 | 534 | 276 | 258 | 58 | 1 |
module Model.PerpetualGrant where
import Data.Aeson
import Import
instance ToJSON (Entity PerpetualGrant) where
toJSON (Entity pgid pg) = object
[ "id" .= pgid
, "recordingUID" .= perpetualGrantRecordingUID pg
, "recipientKeyFingerprint" .= perpetualGrantRecipientKeyFingerprint pg
, "expires" .= perpetualGrantExpires pg
, "created" .= perpetualGrantCreated pg
]
| rumuki/rumuki-server | src/Model/PerpetualGrant.hs | gpl-3.0 | 476 | 0 | 9 | 155 | 94 | 49 | 45 | 10 | 0 |
module Dragonfly.Application (
showRootPage,
handleRoot,
handleSignOut
) where
import Control.Monad
import Data.ByteString.Lazy (unpack)
import Data.Char (chr)
import Text.XHtml
import Text.XHtml.Strict
import Happstack.Server
import Happstack.Helpers
import Dragonfly.ApplicationState
import Dragonfly.URISpace
import Dragonfly.ImageGallery.ImageGallery (divImageGallery)
import Dragonfly.ImageGallery.Upload (divImageUpload)
handleRoot :: MyServerPartT Response
handleRoot = do
rq <- askRq
let paths = rqPaths rq
if null paths then do
let cookies = rqCookies rq
let sc = lookup sessionCookie cookies
let msg = case lookup "_message" (rqInputs rq) of
Just (Input msg' _ _) -> thediv ! [theclass "message"] << thespan << map (chr . fromIntegral) (unpack msg')
Nothing -> thediv << noHtml
rootPage sc msg
else mzero
handleSignOut :: MyServerPartT Response
handleSignOut = exactdir signOutURL $ do
addCookie 0 (mkCookie sessionCookie "0")
rootPage Nothing (thediv << noHtml)
titleText :: String
titleText = "Colin's dragonflies"
showRootPage :: Bool -> Html -> MyServerPartT Response
showRootPage loggedIn msg = do
let divCont = if loggedIn then signOutDiv else loginRegisterDiv
return $ toResponse $ (header << (thetitle << titleText) +++ stylesheet) +++
(body << ((h1 << titleText) +++ msg +++ divCont +++
divImageGallery +++ divImageUpload))
rootPage :: Maybe Cookie -> Html -> MyServerPartT Response
rootPage sc msg = do
-- Actually we should check the session and expire the cookie if not present
let loggedIn = case sc of
Nothing -> False
Just c -> True
showRootPage loggedIn msg
loginRegisterDiv :: Html
loginRegisterDiv = thediv << ((anchor ! [href $ loginURL ++ "?_cont=/"] << "login")
+++ " | "
+++ (anchor ! [href $ registerURL ++ "?_cont=/"] << "register"))
signOutDiv :: Html
signOutDiv = thediv << (anchor ! [href signOutURL] << "sign out")
stylesheet :: Html
stylesheet = style << "div.message { color: red; }"
| colin-adams/dragonfly-website | Dragonfly/Application.hs | gpl-3.0 | 2,336 | 0 | 20 | 689 | 615 | 320 | 295 | 53 | 3 |
module Synacor.Debugger where
import Control.Monad
import Control.Monad.Fix (fix)
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Exception
import qualified Data.Map as M
import Data.Word
import Network.Socket
import System.Exit (die)
import System.IO
import Synacor.Machine
import Synacor.Interpreter (writeTo)
data Vm = Vm { semaphor :: MVar CurrentState, machine :: CurrentState }
type Msg = String
data Cmd =
Pause
| Go
| Quit
| Break Word16
| Step
| DumpReg
| MemDump String
| LoadSave String
| SetR Word16 Word16
deriving (Show, Eq)
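-- parseCmd turns one line of debugger input into a Cmd, for example
-- parseCmd "break 1531" == Just (Break 1531) and
-- parseCmd "memdump dump.txt" == Just (MemDump "dump.txt");
-- unrecognised input gives Nothing.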
parseCmd :: String -> Maybe Cmd
parseCmd "quit" = Just Quit
parseCmd "pause" = Just Pause
parseCmd "go" = Just Go
parseCmd "step" = Just Step
parseCmd "dumpreg" = Just DumpReg
parseCmd xs = let
ws = words xs
in f ws where
f ["memdump", f] = Just $ MemDump f
f ["load", f] = Just $ LoadSave f
f ["break", n] = Just $ Break (read n ::Word16)
f ["set", r, v] = Just $ SetR (read r :: Word16) (read v :: Word16)
f _ = Nothing
startDebugger :: MVar Cmd -> IO ()
startDebugger mvr = do
sock <- socket AF_INET Stream 0
setSocketOption sock ReuseAddr 1
bindSocket sock (SockAddrInet 8888 iNADDR_ANY)
listen sock 1
dbgLoop sock mvr
dbgLoop :: Socket -> MVar Cmd -> IO ()
dbgLoop sock mvr = do
conn <- accept sock
forkIO (runDebugger conn mvr)
return ()
runDebugger :: (Socket, SockAddr) -> MVar Cmd -> IO ()
runDebugger (sock, _) mvr = do
hdl <- socketToHandle sock ReadWriteMode
hSetBuffering hdl NoBuffering
hPutStrLn hdl "Synacor Debugger Online"
handle (\(SomeException _) -> return ()) $ fix $ \loop -> do
line <- liftM init (hGetLine hdl)
case parseCmd line of
Nothing -> do
hPutStrLn hdl "Invalid Command!"
loop
Just Quit -> do
_ <- takeMVar mvr
putMVar mvr Quit
hPutStrLn hdl "Received Quit Command"
Just c -> do
_ <- takeMVar mvr
putMVar mvr c
loop
handleDebug :: MVar Cmd -> (CurrentState, CurrentState) -> IO CurrentState
handleDebug mvar (before, after) = do
c <- takeMVar mvar
case c of Go -> putMVar mvar c >> return after
Pause -> do
putMVar mvar c
print . map (\i-> (memory before) M.! i) $ registers
threadDelay (10^6 * 3)
return before
Step -> do
putMVar mvar Step
print "Inst"
print . inst $ after
print "Registers"
print . map (\i-> (memory after) M.! i) $ registers
print "Stack"
print . stack $ after
--threadDelay (10^6 * 5)
return after
(SetR r v) -> do
putMVar mvar Pause
let tweaked = CurrentState {
inst = inst before,
stack = stack before,
memory = writeTo r v (memory before)
}
return tweaked
(Break i') -> if i' == (inst before)
then putMVar mvar Pause >> return before
else putMVar mvar (Break i') >> return after
(MemDump f) -> do
putMVar mvar Go
writeFile f (show after)
return after
(LoadSave f) -> do
putMVar mvar Go
a <- readFile f
let s = read a :: CurrentState
return s
Quit -> do { die "Received quit command"}
| ChrisCoffey/synacor_challenge | Synacor/src/Synacor/Debugger.hs | gpl-3.0 | 4,220 | 0 | 19 | 1,912 | 1,232 | 597 | 635 | 109 | 9 |
-- (C) Copyright Chris Banks 2011
-- This file is part of The Continuous Pi-calculus Workbench (CPiWB).
-- CPiWB is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
-- CPiWB is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
-- You should have received a copy of the GNU General Public License
-- along with CPiWB. If not, see <http://www.gnu.org/licenses/>.
{-# OPTIONS_GHC -XPatternGuards #-}
module CPi.Semantics
(-- * Important functions:
processMTS,
-- * Datatypes:
Concretion(..),
MTS(..),
Trans(..),
TTau(..),
TTauAff(..),
-- * Functions:
lookupTrans,
pseudoapp,
potentials,
initconc,
primes,
wholeProc
)where
import qualified Data.List as L
import qualified Control.Exception as X
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Maybe
import CPi.Lib
--------------------
-- Species semantics
--------------------
-- Semantic data structures:
data MTS = MTS [Trans]
deriving (Show)
data Trans = TransSC Species Name Concretion -- A ----a-----> (x;y)B
| TransT Species TTau Species -- A ---t@k----> B
| TransTA Species TTauAff Species -- A -t<a,b>@k-> B
deriving (Show,Eq,Ord)
data Concretion = ConcBase Species OutNames InNames
| ConcPar Concretion [Species]
| ConcNew AffNet Concretion
deriving (Show,Eq,Ord)
data TTau = TTau Rate
deriving (Show,Eq,Ord)
data TTauAff = TTauAff (Name,Name)
deriving (Show,Eq,Ord)
-- Pretty printing:
instance Pretty MTS where
pretty (MTS (t:ts)) = ((pretty t)++"\n")++(pretty (MTS ts))
pretty (MTS []) = ""
instance Pretty Trans where
pretty (TransSC s n c)
= (pretty s)++" ---"++n++"--> "++(pretty c)
pretty (TransT s t s')
= prettyTauTrans s t s'
pretty (TransTA s t s')
= prettyTauTrans s t s'
prettyTauTrans s t s' = (pretty s)++" ---"++(pretty t)++"--> "++(pretty s')
instance Pretty Concretion where
pretty (ConcBase s o i)
= "("++(prettyNames o)++";"++(prettyNames i)++")"++(pretty s)
pretty (ConcPar c ss)
= (pretty c)++" | "++concat(L.intersperse " | " (map pretty ss))
-- TODO: parens?
pretty (ConcNew net c)
= (pretty net)++" "++(pretty c)
instance Pretty TTau where
pretty (TTau r) = "tau@<"++(show r)++">"
instance Pretty TTauAff where
pretty (TTauAff (n1,n2)) = "tau@<"++n1++","++n2++">"
-- Free/bound names of concretions:
fnc :: Concretion -> [Name]
fnc (ConcBase s o i) = o \/ ((fn s) \\ i)
fnc (ConcPar c ss) = (fnc c) \/ (fn (Par ss))
fnc (ConcNew n c) = (fnc c) \\ (sites n)
bnc :: Concretion -> [Name]
bnc (ConcBase s o i) = (bn s) \/ ((fn s) /\ i)
bnc (ConcPar c ss) = (bnc c) \/ (bn (Par ss))
bnc (ConcNew n c) = (bnc c) \/ ((fnc c) /\ (sites n))
-- Fresh-for test for restricted concretions
(#<) :: AffNet -> Concretion -> Bool
net#<c = ((sites net) /\ (fnc c)) == []
-- Renaming function for concretions:
concRename :: (Name,Name) -> Concretion -> Concretion
concRename r (ConcBase s ons ins)
= ConcBase (rename r s) (vecRename r ons) (vecRename r ins)
concRename r (ConcPar c ss)
= ConcPar (concRename r c) (map (rename r) ss)
concRename r (ConcNew net c)
= ConcNew (netRename r net) (concRename r c)
-- Alpha-conversion of concretions
concAconv :: (Name,Name) -> Concretion -> Concretion
concAconv (old,new) c
| (not(old `elem` (fnc c))) && (not(new `elem` (fnc c)))
= concRename (old,new) c
| (new `elem` (fnc c))
= X.throw $ CpiException $
"CPi.Semantics.concAconv: "
++"Tried to alpha-convert to an existing free name."
| otherwise
= X.throw $ CpiException
"CPi.Semantics.concAconv: Tried to alpha-convert a non-bound name."
-- Normal form for concretions
-- NOTE: see note on normal form in CPi.Lib
instance Nf Concretion where
nf s
| result==s = result
| otherwise = nf result
where
result = nf' s
-- (b;y)(A|B)=A|(b;y)B when y#A
nf' (ConcBase (Par ss) o i) = liftfps (o,i) ss [] []
where
liftfps (o,i) [] [] ins
= ConcBase (nf (Par ins)) o i
liftfps (o,i) [] outs ins
= ConcPar (ConcBase (nf (Par ins)) o i) (map nf outs)
liftfps (o,i) (s:ss) outs ins
| (i/\(fn s))==[]
= liftfps (o,i) ss (s:outs) ins
| otherwise
= liftfps (o,i) ss outs (s:ins)
-- (b;y)A=(b;y)B when A=B
nf' (ConcBase s o i) = ConcBase (nf s) o i
-- Commu. and assoc. of ConcPar and F|0 = F
nf' (ConcPar c []) = nf c
nf' (ConcPar c ss)
= ConcPar (nf c) (L.sort (dropNils (flatten (map nf ss))))
where
dropNils = filter (\x->x/=Nil)
flatten [] = []
flatten (x:xs) = (f x)++(flatten xs)
where
f (Par ss) = ss
f s = [s]
nf' (ConcNew net@(AffNet ns) (ConcNew net'@(AffNet ns') c))
| net##net' && not(net#<c || net'#<c)
= nf (ConcNew (net `netUnion` net') c)
| net#<c
= nf (ConcNew net' c)
| net'#<c
= nf (ConcNew net c)
nf' (ConcNew net@(AffNet ns) cp@(ConcPar c ss))
| net#<c = ConcPar (nf c) [(nf(New net (Par ss)))]
| net#(Par ss) = ConcPar (nf (ConcNew net c)) [(nf (Par ss))]
| otherwise = ConcNew (AffNet (L.sort ns)) (nf cp)
nf' (ConcNew net@(AffNet ns) c)
| net#<c = nf c
| otherwise = ConcNew (AffNet (L.sort ns)) (nf c)
-- | Get the Multi-Transition System for a Process.
processMTS :: Env -> Process -> MTS
processMTS env (Process scs net) = buildMTS env net (MTS []) (map fst scs)
buildMTS :: Env -> AffNet -> MTS -> [Species] -> MTS
buildMTS env net mts ss = ifnotnil
(newAppls env net precompx)
(buildMTS env net precompx)
precompx
where
precompx = derivMTS env (transs env mts ss)
-- Given initial species transitions, calculate all transition derivatives:
derivMTS :: Env -> MTS -> MTS
derivMTS env mts = ifnotnil (newPrimes env mts) (\x->(derivMTS env (transs env mts x))) mts
-- Find any pseudoapplications in an MTS and calculate their transitions:
complexMTS :: Env -> AffNet -> MTS -> MTS
complexMTS env net mts = derivMTS env (transs env mts (appls net mts))
-- Takes an MTS and returns all the prime species on the RHS of a transition
-- which don't appear on the LHS of some transition.
newPrimes :: Env -> MTS -> [Species]
newPrimes env mts = newPrimes' (openMTS mts)
where
newPrimes' trs = [s | s<-(L.nub (concatMap (primes env) (mapMaybe transDest trs))),
not(inMTS mts s),
revLookupDef env s == Nothing]
-- Find any pseudoapplications in an MTS whose resultant species is a new prime
newAppls :: Env -> AffNet -> MTS -> [Species]
newAppls env net mts = newAppls' net (openMTS mts)
where
newAppls' net trs = [s | s<-(L.nub(concatMap (primes env) (appls net mts))), not(inMTS mts s)]
-- 2016-10-19 Ian Added in that concatMap (primes env) to break down things that look like complexes but are just parallel composition of species. Not sure that solves all problems, but it may do some.
-- Is the species in the MTS?
inMTS :: MTS -> Species -> Bool
inMTS mts s = lookupTrans mts s /= []
-- Cardinality of an MTS:
mtsCard :: MTS -> Int
mtsCard x = length $ openMTS x
-- Add the immediate transitions for a species to the MTS:
trans :: Env -> MTS -> Species -> MTS
trans env mts s = trans' env mts s
where
trans' env mts s'
= ifnotnil (lookupTrans mts s') (\x -> mts) (trans'' env mts s')
where
trans'' :: Env -> MTS -> Species -> MTS
-- Nil
trans'' env mts Nil = mts
-- Def
trans'' env mts (Def _ _)
= maybe ex (trans' env mts) (lookupDef env s')
where ex = X.throw (CpiException
("Species "++(pretty s)++" not in the Environment."))
-- Sum
trans'' _ mts (Sum []) = mts
-- Sum(Tau + ...)
trans'' env mts (Sum (((Tau r),dst):pss))
= MTS ((TransT s (TTau r) dst):
(openMTS(trans' env mts (Sum pss))))
-- Sum(Comm + ...)
trans'' env mts (Sum (((Comm n o i),dst):pss))
= MTS ((TransSC s n (nf(ConcBase dst o i))):
(openMTS(trans' env mts (Sum pss))))
-- Par
trans'' _ mts (Par []) = mts
trans'' env mts (Par (ss))
= MTS (transPar ss [] []) ->++ mts
where
transPar (x:xs) alphas taus
= transPar xs (alphas'++alphas) ((taus' s alphas' alphas)++taus)
where
alphas' = openMTS(trans env (MTS []) x)
taus' src (tr:trs) trs'
| (TransSC s n c) <- tr
= (taus'' tr trs')++(taus' src trs trs')
| otherwise
= taus' src trs trs'
where
taus'' (TransSC src n c) ((TransSC src' n' c'):trs')
| Just dst <- pseudoapp c c'
= (TransTA s (TTauAff (n,n'))
(Par (remove src (replace src' dst ss))))
:(taus'' tr trs')
| otherwise
= taus'' tr trs'
taus'' tr (_:trs') = taus'' tr trs'
taus'' _ [] = []
taus' _ [] _ = []
transPar [] alphas taus
= ((evs alphas)++taus)
where
evs ((TransT src tau dst):ts)
= (TransT s tau (nf(Par (replace src dst ss)))):(evs ts)
evs ((TransSC src n dst):ts)
= (TransSC s n (nf(ConcPar dst (remove src ss)))):(evs ts)
evs (_:ts) = evs ts
evs [] = []
-- New
trans'' env mts (New net c)
= MTS ((restrict(openMTS(trans' env (MTS []) c)))
++(openMTS mts))
where
restrict [] = []
-- restrict (new x...) x.A ----x---> A
restrict ((TransSC _ n dst):trs)
| n `elem` (sites net)
= restrict trs
| otherwise
= (TransSC s n (nf(ConcNew net dst))):(restrict trs)
-- allow taus
restrict ((TransT _ t dst):trs)
= (TransT s t (nf(New net dst))):(restrict trs)
restrict ((TransTA _ t@(TTauAff (n,n')) dst):trs)
-- allow tau<n,m>
| (r /= Nothing)
= (TransT s (TTau ((\(Just x)->x)r))
(nf(New net dst)))
:(restrict trs)
-- restrict tau<n,m> where n or m not in net
| (r == Nothing) && (not(null((sites net) /\ [n,n'])))
= restrict trs
-- allow tau<n,m> otherwise.
| otherwise
= (TransTA s t (nf(New net dst))):(restrict trs)
where r = (aff net (n,n'))
-- Concatenate MTSs
(->++) :: MTS -> MTS -> MTS
x ->++ y = MTS ((openMTS x)++(openMTS y))
-- Add multiple species to the MTS:
transs :: Env -> MTS -> [Species] -> MTS
transs _ mts [] = mts
transs defs mts (spec:specs)
= (transs defs mts' specs)
where mts' = (trans defs mts spec)
-- Get the transition list of an MTS:
openMTS = \(MTS x) -> x
-- | Lookup the transitions (in an MTS) from a species.
lookupTrans :: MTS -> Species -> [Trans]
lookupTrans (MTS []) _ = []
lookupTrans (MTS (tran:trans)) s
| (nf s) == (nf(transSrc tran)) = tran:(lookupTrans (MTS trans) s)
| otherwise = lookupTrans (MTS trans) s
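-- Editorial sketch (not part of the original source): the out-degree of a
-- species in an MTS, i.e. how many transitions it is the source of, using
-- lookupTrans above. The name outDegree is illustrative only.
outDegree :: MTS -> Species -> Int
outDegree mts s = length (lookupTrans mts s)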
-- The source Species of a transition:
transSrc :: Trans -> Species
transSrc (TransSC s _ _) = s
transSrc (TransT s _ _) = s
transSrc (TransTA s _ _) = s
-- The destination species of a transition;
-- Nothing if the RHS is a concretion.
transDest :: Trans -> Maybe Species
transDest (TransSC _ _ _) = Nothing
transDest (TransT _ _ s) = Just s
transDest (TransTA _ _ s) = Just s
-- | Pseudo-application of concretions. Takes two concretions
-- and, if they are compatible, gives the species they combine to form.
pseudoapp :: Concretion -> Concretion -> Maybe Species
pseudoapp (ConcBase s1 a x) (ConcBase s2 b y)
| (length a == length y)&&(length x == length b)
= Just $ nf(Par [(sub (zip x b) s1),(sub (zip y a) s2)])
| otherwise
= Nothing
pseudoapp c1 (ConcPar c2 s2)
= maybe Nothing (Just.(\x->Par (x:s2))) (pseudoapp c1 c2)
pseudoapp c1 (ConcNew net c2)
| net#<c1
= maybe Nothing (Just.(\x->New net x)) (pseudoapp c1 c2)
| otherwise
= pseudoapp c1 (makeFresh (ConcNew net c2))
pseudoapp (ConcPar c1 ss) c2
= maybe Nothing (Just.(\x->Par (x:ss))) (pseudoapp c1 c2)
pseudoapp (ConcNew net c1) c2
| net#<c2
= maybe Nothing (Just.(\x->New net x)) (pseudoapp c1 c2)
| otherwise
= pseudoapp (makeFresh (ConcNew net c1)) c2
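-- Editorial sketch (not part of the original source): pseudo-application is
-- partial, so a caller that only needs a species can default to Nil for
-- incompatible concretions, mirroring the pattern used in appls below.
-- The name applyOrNil is illustrative only.
applyOrNil :: Concretion -> Concretion -> Species
applyOrNil c c' = maybe Nil id (pseudoapp c c')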
-- give a restricted concretion fresh names for its AffNet
makeFresh :: Concretion -> Concretion
makeFresh c@(ConcNew net c') = freshen (sites net) c
where
freshen [] c = c
freshen (n:ns) c = freshen ns (concAconv (n,(concRenaming n c)) c)
makeFresh x = x
-- a fresh renaming of a name in a concretion
concRenaming :: Name -> Concretion -> Name
concRenaming n c = renaming' (renames n) c
where
renaming' (n:ns) c
| not(n `elem` (fnc c)) = n
| otherwise = renaming' ns c
renaming' [] _
= X.throw $ CpiException
"CPi.Semantics.concRenaming: Renaming stream has been exhausted."
-- a stream of possible renamings for a given name
renames x = [x++p | p <- iterate (++"'") "'"]
-- get the resultants (complexes) of pseudoapplications in an MTS
appls :: AffNet -> MTS -> [Species]
appls net (MTS []) = []
appls net (MTS (tr:trs)) = appls' $ concs (tr:trs)
where concs :: [Trans] -> [(Concretion,Name)]
concs [] = []
concs ((TransSC s n c):trs)
= (c,n):(concs trs)
concs (_:trs)
= concs trs
appls' :: [(Concretion,Name)] -> [Species]
appls' cns = [maybe Nil id (pseudoapp c c')
| (c,n) <- cns,
(c',n') <- cns,
aff net (n,n') /= Nothing ]
-- List the distinct prime species in an MTS:
allPrimes :: Env -> MTS -> [Species]
allPrimes env (MTS ts)
= nice . L.nub . concat . map (primes env) $ map transSrc ts
where
nice [] = []
nice (s:ss) = maybe s id (revLookupDef env s) : nice ss
-- | Initial concentration of a species in a process.
initconc :: Process -> Species -> Double
initconc (Process scs _) s = initconc' scs s
where
initconc' ((s',c):scs) s
| s == s' = c
| otherwise = initconc' scs s
initconc' [] _ = 0
-- | Gives the complete syntactic process, including all potentially generated primes.
wholeProc :: Env -> Process -> MTS -> Process
wholeProc env p@(Process scs net) mts = Process scs' net
where
scs' = map (\s->(s,initconc p s)) (allPrimes env mts)
--------------------
-- Process semantics
--------------------
-- | Prime components of a species, i.e. species that are not compositions.
primes :: Env -> Species -> [Species]
primes env spec = primes' env (nf spec)
where
primes' env Nil = []
primes' env s@(Def _ _) = maybe ex (primes env) (lookupDef env s)
where ex = X.throw (CpiException
("Species "++(pretty s)++" not in the Environment."))
primes' env s@(Sum _) = [s]
primes' env s@(New _ _) = [s]
primes' env (Par []) = []
primes' env (Par ss) = concatMap (primes env) ss
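-- Editorial sketch (not part of the original source): the multiplicity of each
-- distinct prime component of a species, combining primes above with cardP
-- defined further below. The name primeMultiplicities is illustrative only.
primeMultiplicities :: Env -> Species -> [(Species,Integer)]
primeMultiplicities env s = [(p, cardP env p s) | p <- L.nub (primes env s)]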
-- Support of a process - prime species in a process:
-- NOTE: do we want to check conc.>0 ??
supp :: Env -> Process -> [Species]
supp env (Process [] _) = []
supp env (Process [(s,c)] _) = primes env s
supp env (Process ((s,c):ps) aff) = (primes env s)++(supp env (Process ps aff))
-- | Gives the Class 1 (Species-->Concretion) transitions of an MTS.
potentials :: MTS -> [Trans]
potentials (MTS []) = []
potentials (MTS (t:ts))
| (TransSC _ _ _) <- t
= t:(potentials (MTS (ts)))
| otherwise
= potentials (MTS (ts))
-- cardinality of a transition in an MTS
cardT :: Trans -> MTS -> Integer
cardT t (MTS ts) = card t ts
-- cardinality of a species (s) in the prime decomposition of a species (s')
cardP :: Env -> Species -> Species -> Integer
cardP env d@(Def _ _) s' = case lookupDef env d of
Just s -> card (nf s) (primes env s')
Nothing -> X.throw (CpiException
("Species "++(pretty d)++" not in the Environment."))
cardP env s s' = card (nf s) (primes env s')
-----------------------------------------
-- Process space P and potential space D:
type P = Map Species Double
type D = Map (Species,Name,Concretion) Double
prettyP x = concat $ map (\(k,v)->((pretty k)++" |-> "++(show v)++"\n")) (Map.toList x)
-- Zero vectors:
p0 :: P
p0 = Map.empty
d0 :: D
d0 = Map.empty
-- Basis vectors:
p1 :: Species -> P
p1 x = Map.singleton x 1
d1 :: (Species,Name,Concretion) -> D
d1 x = Map.singleton x 1
-- Scaled basis vectors:
pVec :: Species -> Double -> P
pVec s x = Map.singleton s x
dVec :: (Species,Name,Concretion) -> Double -> D
dVec t x = Map.singleton t x
-- Vector addition in P and D:
pplus :: P -> P -> P
pplus x y = Map.unionWith (+) x y
dplus :: D -> D -> D
dplus x y = Map.unionWith (+) x y
-- Scalar multiplication in P and D:
ptimes :: P -> Double -> P
ptimes p v = Map.map (v *) p
dtimes :: D -> Double -> D
dtimes d v = Map.map (v *) d
-- Vector subtraction in P and D:
pminus :: P -> P -> P
pminus x y = x `pplus` (y `ptimes` (-1))
dminus :: D -> D -> D
dminus x y = x `dplus` (y `dtimes` (-1))
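-- Editorial sketch (not part of the original source): the operations above
-- give P the structure of a real vector space over the species basis, so a
-- weighted mixture of two species can be written directly. The name
-- exampleMixture is illustrative only.
exampleMixture :: Species -> Species -> P
exampleMixture s s' = (p1 s `ptimes` 2.0) `pplus` (pVec s' 0.5)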
-- Interaction potential
partial :: Env -> Process -> D
partial env (Process [] _) = Map.empty
partial env proc@(Process ps _)
= foldr dplus d0 (map partial' ps)
where
partial' (s,c) = foldr dplus d0
(map (\tr->
dVec (triple tr)
(c * (fromInteger(cardP env (transSrc tr) s))))
pots)
pots = potentials mts
mts = processMTS env proc
triple (TransSC s n c) = (s,n,c)
triple _ = X.throw $ CpiException ("Bug: CPi.Semantics.partial.triple passed something other than a TransSC")
-- Species embedding
embed :: Env -> Species -> P
embed env Nil = Map.empty
embed env d@(Def _ _) = maybe ex (\s->embed env s) (lookupDef env d)
where
ex = X.throw $ CpiException
("Error: Tried to embed unknown definition "++(pretty d)++".")
-- NOTE: if the Def is in S# maybe we want to embed the Def itself
-- rather than its expression? (for UI reasons)
embed env (Par ss) = foldr pplus p0 (map (embed env) ss)
embed env s = p1(s)
-- Interaction tensor
tensor :: Env -> AffNet -> D -> D -> P
tensor env net ds1 ds2 = foldr pplus p0 (map f ds)
where
ds = [(x,y,a,p)
|x<-Map.toList ds1, y<-Map.toList ds2,
a<-[maybe 0.0 id (aff net ((tri2(fst(x))),(tri2(fst(y)))))],
a/=0.0,
p<-[maybe Nil id (pseudoapp (tri3(fst(x))) (tri3(fst(y))))],
p/=Nil
]
        -- x,y are ((Spec,Name,Conc),Concentration) pairs;
        -- a is the rate; p is the Species resulting from pseudo-application
f (((s,n,c),v),((s',n',c'),v'),a,p)
= ((((embed env p) `pminus` (p1 s)) `pminus` (p1 s'))
`ptimes` a) `ptimes` (v*v')
-- Immediate behaviour
dPdt :: Env -> Process -> P
dPdt _ (Process [] _) = p0
dPdt env p@(Process [(s,c)] net)
= (foldr pplus p0 (map tauexpr taus))
`pplus` ((tensor env net part1 part1) `ptimes` 0.5)
where
tauexpr (TransT src (TTau r) dst)
= (((embed env src) `pminus` (embed env dst))
`ptimes` r) `ptimes` c
tauexpr _ = X.throw $ CpiException
("Bug: CPi.Semantics.dPdt.tauexpr passed something other than a TransT")
taus = [x|x<-openMTS(processMTS env p), tau x]
tau (TransT _ _ _) = True
tau _ = False
part1 = partial env (Process [(s,c)] net)
dPdt env (Process (p:ps) net)
= (tensor env net partT partH) `pplus` (dPdt env procT) `pplus` (dPdt env procH)
where
partH = partial env procH
partT = partial env procT
procH = Process [p] net
procT = Process ps net
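-- Editorial sketch (not part of the original source): the immediate behaviour
-- of a process can be inspected by pretty-printing its dP/dt vector with
-- prettyP above. The name showRates is illustrative only.
showRates :: Env -> Process -> String
showRates env p = prettyP (dPdt env p)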
{-
---------------------------
Here lies an attempted implementation of process semantics
in a nice, lazy way...
---------------------------
type P = Species -> Double
type D = Trans -> Double
-- the interaction potential
partial :: Env -> MTS -> Process -> D
partial env mts (Process ss net) = partial' env mts ss
where
partial' env mts ss
= \t@(TransSC s n c) -> (expr t ss)
expr _ [] = 0
expr t@(TransSC s n c) ((spec,conc):ss) =
((s2d conc) * (fromInteger(cardT t mts)) * (fromInteger(cardP env s spec))) + (expr t ss)
-- NOTE: (\x.f(x))+(\x.f(x)) == \x.f(x)+f(x)
expr t _ = X.throw (CPi.Exception
("This is a bug! Partial behavior depends only on Class 1 trans (SC). Incorrectly given: "++(pretty t)))
-- the species embedding
embed :: Env -> Species -> P
embed env s = embed' $ primes env s
where
embed' ss = (\a->(if (expr a ss) then 1 else 0))
expr _ [] = False
expr a (x:xs) = (a==x)||(expr a xs)
-- the interaction tensor
tensor :: AffNet -> D -> D -> P
tensor = undefined -- TODO:
-}
| continuouspi/cpiwb | CPi/Semantics.hs | gpl-3.0 | 23,339 | 7 | 24 | 7,909 | 8,141 | 4,242 | 3,899 | 404 | 18 |