_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
57075942974da65528996bdce40403c8ddf28a73b852e87680a548f376420496 | quchen/stgi | Parser.hs | module Test.Parser (tests) where
import Test.Tasty
import qualified Test.Parser.Parser as Parser
import qualified Test.Parser.QuasiQuoter as QuasiQuoter
-- | Top-level parser test tree, aggregating the parser and quasiquoter suites.
tests :: TestTree
tests = testGroup "Parser" [Parser.tests, QuasiQuoter.tests]
| null | https://raw.githubusercontent.com/quchen/stgi/dacd45cb0247f73889713dc92a911aaa835afd19/test/Testsuite/Test/Parser.hs | haskell | module Test.Parser (tests) where
import Test.Tasty
import qualified Test.Parser.Parser as Parser
import qualified Test.Parser.QuasiQuoter as QuasiQuoter
tests :: TestTree
tests = testGroup "Parser" [Parser.tests, QuasiQuoter.tests]
|
|
ee37b04a1df0b45e9a8cadae6fb798f4a529bb4b83f517bc94720bca429753fa | qingliangcn/mgee | mgee_chat.erl | %%%----------------------------------------------------------------------
%%%
%%% 2010 mgee (Ming Game Engine Erlang)
%%%
%%% @author odinxu, 2010-01-13
%%% @doc the mgee chat module
%%% @end
%%%
%%%----------------------------------------------------------------------
-module(mgee_chat).
-behaviour(gen_server).
%% --------------------------------------------------------------------
%% Include files
%% --------------------------------------------------------------------
-include("mgee.hrl").
-include("game_pb.hrl").
-include("global_lang.hrl").
%% --------------------------------------------------------------------
%% External exports
-export([handle/1]).
-export([start_link/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
%% Minimum gap between two chats from the same role, in microseconds
%% (3000000 us = 3 s); compared against timer:now_diff/2 in handle/1.
-define(CHAT_INTERVAL, 3000000).
%% Worker state: this server keeps no per-process state.
-record(state, {}).
%% ====================================================================
%% External functions
%% ====================================================================
%% ====================================================================
%% Server functions
%% ====================================================================
%% Start a chat worker registered locally under Name
%% (one worker per scheduler -- see cast/1).
start_link(Name) ->
    gen_server:start_link({local, Name}, ?MODULE, [], []).
%% Entry point for a chat request. Rate-limits chats using the calling
%% process's dictionary key 'previous_chat_time'
%% (NOTE(review): assumes handle/1 always runs inside the same per-role
%% process, otherwise the limit is per-caller -- confirm with callers).
%% If the previous chat was less than ?CHAT_INTERVAL microseconds ago,
%% a "too fast" toc record is returned for world/bubble chat and other
%% methods are silently ignored; otherwise dispatches to handle2/1.
handle({ClientSock, Module, Method, Data, AccountName, Roleid, RoleName}) ->
    case get(previous_chat_time) of
        undefined ->
            ?TEST_MSG("role ~p first chat", [RoleName]),
            %% {0,0,0} is the epoch in erlang-now format, so the first
            %% chat always passes the interval check below.
            PreviousChatTime = {0,0,0};
        Val ->
            PreviousChatTime = Val
    end,
    Now = mgee_timer:now(),
    ?TEST_MSG("last chat time ~p, this chat time ~p", [PreviousChatTime, Now]),
    %% timer:now_diff/2 returns the difference in microseconds.
    TimeDiff = timer:now_diff(Now, PreviousChatTime),
    ?TEST_MSG("timer dif ~p", [TimeDiff]),
    if
        TimeDiff >= ?CHAT_INTERVAL ->
            put(previous_chat_time, Now),
            handle2({ClientSock, Module, Method, Data, AccountName, Roleid, RoleName});
        true ->
            case Method of
                <<"world">> ->
                    #m_chat_world_toc{succ=false, reason = ?_LANG_CHAT_TOO_FAST};
                <<"bubble">> ->
                    #m_chat_bubble_toc{succ=false, reason = ?_LANG_CHAT_TOO_FAST};
                _Other ->
                    ignore
            end
    end.
%% Dispatch a rate-limit-approved chat request to the handler for its
%% channel (Method is a binary tag from the wire protocol).
%% Unknown methods are logged and dropped. Always returns ok.
handle2({ClientSock, Module, Method, Data, _AccountName, Roleid, RoleName}) ->
    case Method of
        <<"world">> ->
            world(ClientSock, Module, Method, Data, Roleid, RoleName);
        <<"private">> ->
            private(ClientSock, Module, Method, Data, Roleid, RoleName);
        <<"vw">> ->
            vw(ClientSock, Module, Method, Data, Roleid, RoleName);
        <<"family">> ->
            family(ClientSock, Module, Method, Data, Roleid, RoleName);
        <<"team">> ->
            team(ClientSock, Module, Method, Data, Roleid, RoleName);
        <<"bubble">> ->
            bubble(ClientSock, Module, Method, Data, Roleid, RoleName);
        Other ->
            ?DEBUG("undefined method ~p", [Other])
    end,
    ok.
%% World chat: encode the outgoing toc record once and hand the packet
%% to a chat worker, which broadcasts it to every online role.
world(_ClientSock, Module, Method, Data, Roleid, RoleName) ->
    Toc = #m_chat_world_toc{
        body = Data#m_chat_world_tos.body,
        return_self = false,
        from_roleid = Roleid,
        from_rolename = RoleName},
    Packet = mgee_packet:packet_encode(Module, Method, Toc),
    cast({world, Packet}),
    ok.
%% Private (whisper) chat: look up the target role's socket and forward
%% the encoded message directly. If the target cannot be resolved, reply
%% to the sender with a failure toc instead. Always returns ok.
private(ClientSock, Module, Method, Data, Roleid, RoleName) ->
    case mgee_misc:get_socket_by_roleid(Data#m_chat_private_tos.to_roleid) of
        {ok, ToClientSock} ->
            DataRecord = #m_chat_private_toc{
                body = Data#m_chat_private_tos.body,
                from_roleid = Roleid,
                from_rolename = RoleName,
                return_self = false
            },
            mgee_packet:packet_encode_send(ToClientSock, Module, Method, DataRecord);
        Wrong ->
            DataRecord = #m_chat_private_toc{succ=false, reason= <<"用户不在线!">>},
            mgee_packet:packet_encode_send(ClientSock, Module, Method, DataRecord),
            %% BUG FIX: format arguments must be a list ([Wrong], not Wrong),
            %% matching the equivalent ?DEBUG call in handle_cast/2; a bare
            %% term makes the io format call fail at runtime.
            ?DEBUG("find pid ~p socket failed", [Wrong])
    end,
    ok.
%% Bubble chat: broadcast within the sender's current virtual-world scene.
%% NOTE(review): [Roleid] is passed as the role list of
%% broad_in_sence_include/3; include/exclude semantics are defined in
%% mgee_virtual_world and not visible here -- confirm before relying on it.
bubble(_ClientSock, Module, Method, Data, Roleid, RoleName) ->
    WorldId = mgee_virtual_world_router:get_vwid_by_roleid(Roleid),
    WorldName = mgee_virtual_world_router:get_virtual_world_name(WorldId),
    Toc = #m_chat_bubble_toc{
        body = Data#m_chat_bubble_tos.body,
        return_self = false,
        from_roleid = Roleid,
        from_rolename = RoleName},
    Packet = mgee_packet:packet_encode(Module, Method, Toc),
    mgee_virtual_world:broad_in_sence_include(WorldName, [Roleid], Packet).
%% Send msg to virtual world (map)
%% Virtual-world (map) chat: not implemented yet; accepted and ignored.
vw(_ClientSock, _Module, _Method, _Data, _Roleid, _RoleName) ->
    ok.
%% Team chat: not implemented yet; accepted and ignored.
team(_ClientSock, _Module, _Method, _Data, _Roleid, _RoleName) ->
    ok.
%% Family chat: not implemented yet; accepted and ignored.
family(_ClientSock, _Module, _Method, _Data, _Roleid, _RoleName) ->
    ok.
%% Fire-and-forget Msg to one of the chat workers; the worker name is
%% keyed by the current scheduler id (via mgee_chat_sup).
cast(Msg) ->
    gen_server:cast(mgee_chat_sup:get_chat_name(erlang:system_info(scheduler_id)), Msg).
%% --------------------------------------------------------------------
%% Function: init/1
%% Description: Initiates the server
%% Returns: {ok, State} |
%%          {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%% --------------------------------------------------------------------
%% gen_server callback: log startup; no worker state is needed beyond
%% the empty #state{} record.
init([]) ->
    ?INFO_MSG("~p init: ~p",[?MODULE, [] ]),
    {ok, #state{}}.
%% --------------------------------------------------------------------
%% Function: handle_call/3
%% Description: Handling call messages
%% Returns: {reply, Reply, State} |
%%          {reply, Reply, State, Timeout} |
%%          {noreply, State} |
%%          {noreply, State, Timeout} |
%% {stop, Reason, Reply, State} | (terminate/2 is called)
%% {stop, Reason, State} (terminate/2 is called)
%% --------------------------------------------------------------------
%% gen_server callback: no synchronous requests are supported; every
%% call is answered with a plain ok.
handle_call(_Request, _From, State) ->
    {reply, ok, State}.
%% --------------------------------------------------------------------
%% Function: handle_cast/2
%% Description: Handling cast messages
%% Returns: {noreply, State} |
%%          {noreply, State, Timeout} |
%% {stop, Reason, State} (terminate/2 is called)
%% --------------------------------------------------------------------
%% gen_server callback: {world, Packet} sends the pre-encoded packet to
%% every member of the pg2_all_role process group; any other cast is
%% ignored.
handle_cast({world, BinBroadcast}, State) ->
    Send =
        fun(RolePid) ->
                case mgee_misc:get_socket_by_rolepid(RolePid) of
                    {ok, Sock} ->
                        mgee_packet:send(Sock, BinBroadcast);
                    Wrong ->
                        ?DEBUG("find pid ~p socket failed", [Wrong])
                end
        end,
    lists:foreach(Send, pg2:get_members(pg2_all_role)),
    {noreply, State};
handle_cast(_Msg, State) ->
    {noreply, State}.
%% --------------------------------------------------------------------
%% Function: handle_info/2
%% Description: Handling all non call/cast messages
%% Returns: {noreply, State} |
%%          {noreply, State, Timeout} |
%% {stop, Reason, State} (terminate/2 is called)
%% --------------------------------------------------------------------
%% gen_server callback: no out-of-band messages are expected; drop them.
handle_info(_Info, State) ->
    {noreply, State}.
%% --------------------------------------------------------------------
%% Function: terminate/2
%% Description: Shutdown the server
%% Returns: any (ignored by gen_server)
%% --------------------------------------------------------------------
%% gen_server callback: nothing to clean up on shutdown.
terminate(_Reason, _State) ->
    ok.
%% --------------------------------------------------------------------
%% Func: code_change/3
%% Purpose: Convert process state when code is changed
%% Returns: {ok, NewState}
%% --------------------------------------------------------------------
%% gen_server callback: state layout is unchanged across code upgrades.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% --------------------------------------------------------------------
%% Internal functions
%% --------------------------------------------------------------------
| null | https://raw.githubusercontent.com/qingliangcn/mgee/b65babc3a34ef678ae2b25ce1a8fdd06b2707bb8/src/mgee_chat.erl | erlang | ----------------------------------------------------------------------
@doc the mgee chat module
@end
----------------------------------------------------------------------
--------------------------------------------------------------------
Include files
--------------------------------------------------------------------
--------------------------------------------------------------------
External exports
gen_server callbacks
microseconds
====================================================================
External functions
====================================================================
====================================================================
Server functions
====================================================================
--------------------------------------------------------------------
Function: init/1
Description: Initiates the server
Returns: {ok, State} |
ignore |
{stop, Reason}
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Handling call messages
Returns: {reply, Reply, State} |
{stop, Reason, Reply, State} | (terminate/2 is called)
{stop, Reason, State} (terminate/2 is called)
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: handle_cast/2
Description: Handling cast messages
{stop, Reason, State} (terminate/2 is called)
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: handle_info/2
Description: Handling all non call/cast messages
{stop, Reason, State} (terminate/2 is called)
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: terminate/2
Description: Shutdown the server
Returns: any (ignored by gen_server)
--------------------------------------------------------------------
--------------------------------------------------------------------
Func: code_change/3
Purpose: Convert process state when code is changed
Returns: {ok, NewState}
--------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | 2010 mgee ( Ming Game Engine Erlang )
@author odinxu , 2010 - 01 - 13
-module(mgee_chat).
-behaviour(gen_server).
-include("mgee.hrl").
-include("game_pb.hrl").
-include("global_lang.hrl").
-export([handle/1]).
-export([start_link/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-define(CHAT_INTERVAL, 3000000).
-record(state, {}).
start_link(Name) ->
gen_server:start_link({local, Name}, ?MODULE, [], []).
handle({ClientSock, Module, Method, Data, AccountName, Roleid, RoleName}) ->
case get(previous_chat_time) of
undefined ->
?TEST_MSG("role ~p first chat", [RoleName]),
PreviousChatTime = {0,0,0};
Val ->
PreviousChatTime = Val
end,
Now = mgee_timer:now(),
?TEST_MSG("last chat time ~p, this chat time ~p", [PreviousChatTime, Now]),
TimeDiff = timer:now_diff(Now, PreviousChatTime),
?TEST_MSG("timer dif ~p", [TimeDiff]),
if
TimeDiff >= ?CHAT_INTERVAL ->
put(previous_chat_time, Now),
handle2({ClientSock, Module, Method, Data, AccountName, Roleid, RoleName});
true ->
case Method of
<<"world">> ->
#m_chat_world_toc{succ=false, reason = ?_LANG_CHAT_TOO_FAST};
<<"bubble">> ->
#m_chat_bubble_toc{succ=false, reason = ?_LANG_CHAT_TOO_FAST};
_Other ->
ignore
end
end.
handle2({ClientSock, Module, Method, Data, _AccountName, Roleid, RoleName}) ->
case Method of
<<"world">> ->
world(ClientSock, Module, Method, Data, Roleid, RoleName);
<<"private">> ->
private(ClientSock, Module, Method, Data, Roleid, RoleName);
<<"vw">> ->
vw(ClientSock, Module, Method, Data, Roleid, RoleName);
<<"family">> ->
family(ClientSock, Module, Method, Data, Roleid, RoleName);
<<"team">> ->
team(ClientSock, Module, Method, Data, Roleid, RoleName);
<<"bubble">> ->
bubble(ClientSock, Module, Method, Data, Roleid, RoleName);
Other ->
?DEBUG("undefined method ~p", [Other])
end,
ok.
world(_ClientSock, Module, Method, Data, Roleid, RoleName) ->
DataBroadcast = #m_chat_world_toc{
body=Data#m_chat_world_tos.body,
return_self=false,
from_roleid=Roleid,
from_rolename=RoleName
},
BinBroadcast = mgee_packet:packet_encode(Module, Method, DataBroadcast),
cast({world, BinBroadcast}),
ok.
private(ClientSock, Module, Method, Data, Roleid, RoleName) ->
case mgee_misc:get_socket_by_roleid(Data#m_chat_private_tos.to_roleid) of
{ok, ToClientSock} ->
DataRecord = #m_chat_private_toc{
body = Data#m_chat_private_tos.body,
from_roleid = Roleid,
from_rolename = RoleName,
return_self = false
},
mgee_packet:packet_encode_send(ToClientSock, Module, Method, DataRecord);
Wrong ->
DataRecord = #m_chat_private_toc{succ=false, reason= <<"用户不在线!">>},
mgee_packet:packet_encode_send(ClientSock, Module, Method, DataRecord),
?DEBUG("find pid ~p socket failed", Wrong)
end,
ok.
bubble(_ClientSock, Module, Method, Data, Roleid, RoleName) ->
VwId = mgee_virtual_world_router:get_vwid_by_roleid(Roleid),
VwName = mgee_virtual_world_router:get_virtual_world_name(VwId),
DataRecord = #m_chat_bubble_toc{
from_roleid=Roleid,
from_rolename=RoleName,
body=Data#m_chat_bubble_tos.body,
return_self=false
},
DataBin = mgee_packet:packet_encode(Module, Method, DataRecord),
mgee_virtual_world:broad_in_sence_include(VwName, [Roleid], DataBin).
send msg to virual world ( map )
vw(_ClientSock, _Module, _Method, _Data, _Roleid, _RoleName) ->
ok.
team(_ClientSock, _Module, _Method, _Data, _Roleid, _RoleName) ->
ok.
family(_ClientSock, _Module, _Method, _Data, _Roleid, _RoleName) ->
ok.
cast(Msg) ->
gen_server:cast(mgee_chat_sup:get_chat_name(erlang:system_info(scheduler_id)), Msg).
{ ok , State , Timeout } |
init([]) ->
?INFO_MSG("~p init: ~p",[?MODULE, [] ]),
{ok, #state{}}.
Function : handle_call/3
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
Returns : { noreply , State } |
{ noreply , State , Timeout } |
handle_cast({world, BinBroadcast}, State) ->
lists:foreach(
fun(Pid) ->
case mgee_misc:get_socket_by_rolepid(Pid) of
{ok, ToClientSock} ->
mgee_packet:send(ToClientSock, BinBroadcast);
Wrong ->
?DEBUG("find pid ~p socket failed", [Wrong])
end
end,
pg2:get_members(pg2_all_role)),
{noreply, State};
handle_cast(_Msg, State) ->
{noreply, State}.
Returns : { noreply , State } |
{ noreply , State , Timeout } |
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
|
4988401704ce677f5db5323ebc5085f5845d2921d1f3c1ab3c2d6f4dae735150 | darius/cant | ast.scm | #!chezscheme
;; The internal representation of Cant abstract syntax trees. The
;; player sees them as vectors with small-integer tags; the Cant user
;; will see them as wrapper objects, as implemented elsewhere.
(library (player ast)
(export pack<- pack-tag
e-constant
e-variable
e-term
e-list
e-make
e-do
e-let
e-call
p-constant
p-any
p-variable
p-term
p-list
p-and
p-view
none-exp
exp-vars-defined pat-vars-defined
)
(import (chezscheme) (player util) (player macros))
Parse expressions and patterns to ASTs
(define pack<- vector)
(define (pack-tag vec)
(vector-ref vec 0))
(define-enum
e-constant
e-variable
e-term
e-list
e-make
e-do
e-let
e-call)
(define-enum
p-constant
p-any
p-variable
p-term
p-list
p-and
p-view)
(define none-exp (pack<- e-constant '#f))
;; Variables defined
(define (exp-vars-defined e)
((vector-ref methods/exp-vars-defined (pack-tag e))
e))
(define methods/exp-vars-defined
(vector
(lambda (e) '()) ;e-constant
(lambda (e) '()) ;e-variable
(lambda (e) ;e-term
(unpack e (tag args)
(flatmap exp-vars-defined args)))
(lambda (e) ;e-list
(unpack e (args)
(flatmap exp-vars-defined args)))
(lambda (e) ;e-make
'())
(lambda (e) ;e-do
(unpack e (e1 e2)
(append (exp-vars-defined e1)
(exp-vars-defined e2))))
(lambda (e) ;e-let
(unpack e (p1 e1)
(append (pat-vars-defined p1)
(exp-vars-defined e1))))
(lambda (e) ;e-call
(unpack e (e1 e2)
(append (exp-vars-defined e1)
(exp-vars-defined e2))))))
(define (pat-vars-defined p)
((vector-ref methods/pat-vars-defined (pack-tag p))
p))
(define methods/pat-vars-defined
(vector
(lambda (p) '()) ;p-constant
(lambda (p) '()) ;p-any
(lambda (p) ;p-variable
(unpack p (depth offset var)
(list var)))
(lambda (p) ;p-term
(unpack p (tag args)
(flatmap pat-vars-defined args)))
(lambda (p) ;p-list
(unpack p (args)
(flatmap pat-vars-defined args)))
(lambda (p) ;p-and
(unpack p (p1 p2)
(append (pat-vars-defined p1)
(pat-vars-defined p2))))
(lambda (p) ;p-view
(unpack p (e1 p1)
(append (exp-vars-defined e1)
(pat-vars-defined p1))))))
)
| null | https://raw.githubusercontent.com/darius/cant/e80756894905a98eefd434cd61b656c80aa026ab/player/ast.scm | scheme | The internal representation of Cant abstract syntax trees. The
player sees them as vectors with small-integer tags; the Cant user
will see them as wrapper objects, as implemented elsewhere.
Variables defined
e-constant
e-variable
e-term
e-list
e-make
e-do
e-let
e-call
p-constant
p-any
p-variable
p-term
p-list
p-and
p-view | #!chezscheme
(library (player ast)
(export pack<- pack-tag
e-constant
e-variable
e-term
e-list
e-make
e-do
e-let
e-call
p-constant
p-any
p-variable
p-term
p-list
p-and
p-view
none-exp
exp-vars-defined pat-vars-defined
)
(import (chezscheme) (player util) (player macros))
Parse expressions and patterns to ASTs
(define pack<- vector)
(define (pack-tag vec)
(vector-ref vec 0))
(define-enum
e-constant
e-variable
e-term
e-list
e-make
e-do
e-let
e-call)
(define-enum
p-constant
p-any
p-variable
p-term
p-list
p-and
p-view)
(define none-exp (pack<- e-constant '#f))
(define (exp-vars-defined e)
((vector-ref methods/exp-vars-defined (pack-tag e))
e))
(define methods/exp-vars-defined
(vector
(unpack e (tag args)
(flatmap exp-vars-defined args)))
(unpack e (args)
(flatmap exp-vars-defined args)))
'())
(unpack e (e1 e2)
(append (exp-vars-defined e1)
(exp-vars-defined e2))))
(unpack e (p1 e1)
(append (pat-vars-defined p1)
(exp-vars-defined e1))))
(unpack e (e1 e2)
(append (exp-vars-defined e1)
(exp-vars-defined e2))))))
(define (pat-vars-defined p)
((vector-ref methods/pat-vars-defined (pack-tag p))
p))
(define methods/pat-vars-defined
(vector
(unpack p (depth offset var)
(list var)))
(unpack p (tag args)
(flatmap pat-vars-defined args)))
(unpack p (args)
(flatmap pat-vars-defined args)))
(unpack p (p1 p2)
(append (pat-vars-defined p1)
(pat-vars-defined p2))))
(unpack p (e1 p1)
(append (exp-vars-defined e1)
(pat-vars-defined p1))))))
)
|
f2d73869c0f0d3cc5f1ab271d581cc9e6e34c055c31b4578b7c79431bc3301c9 | eashanhatti/peridot | Surface.hs | module Syntax.Surface where
import Data.Text(Text)
import Numeric.Natural
import Syntax.Common hiding(unId, CStatement(..), Declaration(..))
import Syntax.Common qualified as Cm
import Text.Megaparsec(SourcePos)
import Data.Sequence
data Ast a where
TermAst :: Term -> TermAst
NameAst :: Name -> NameAst
DeclAst :: Declaration -> Id -> DeclarationAst
SourcePos :: Ast a -> SourcePos -> Ast a
deriving instance Show (Ast a)
deriving instance Eq (Ast a)
unName :: NameAst -> Name
unName (NameAst name) = name
-- For declarations
data Universe = Obj | Meta | Prop
deriving (Show, Eq)
unDeclName :: DeclarationAst -> Name
unDeclName (DeclAst (MetaTerm (NameAst name) _ _) _) = name
unDeclName (DeclAst (ObjTerm (NameAst name) _ _) _) = name
unDeclName (DeclAst (Axiom (NameAst name) _) _) = name
unDeclName (DeclAst (Prove _) did) = MachineName (fromIntegral did)
unDeclName (DeclAst (Output _ _) did) = MachineName (fromIntegral did)
unDeclName (DeclAst (Fresh (NameAst name) _) _) = name
unDeclName (SourcePos ast _) = unDeclName ast
stripSourcePos :: DeclarationAst -> Declaration
stripSourcePos (SourcePos ast _) = stripSourcePos ast
stripSourcePos (DeclAst decl _) = decl
unId :: DeclarationAst -> Id
unId (DeclAst _ did) = did
unId (SourcePos ast _) = unId ast
type NameAst = Ast Name
type DeclarationAst = Ast Declaration
data Declaration
= MetaTerm NameAst TermAst TermAst
| ObjTerm NameAst TermAst TermAst
| Axiom NameAst TermAst
| Prove TermAst
| Fresh NameAst TermAst
| Output FilePath TermAst
| Import FilePath NameAst
deriving (Show, Eq)
data Quantification = Ex | Im
deriving (Show, Eq)
type TermAst = Ast Term
data Term
= MetaPi PassMethod NameAst TermAst TermAst
| MetaLam (Seq NameAst) TermAst
| ObjPi PassMethod NameAst TermAst TermAst
| ObjLam (Seq (PassMethod, NameAst)) TermAst
| App TermAst (Seq (PassMethod, TermAst))
| Var Quantification Name
| OUniv
| MUniv
| Let (Seq DeclarationAst) TermAst
| LiftObj TermAst
| QuoteObj TermAst
| SpliceObj TermAst
| ImplProp TermAst TermAst
| ConjProp TermAst TermAst
| DisjProp TermAst TermAst
| ForallProp NameAst TermAst TermAst
| ExistsProp NameAst TermAst TermAst
| EqualProp TermAst TermAst
| Bool
| BTrue
| BFalse
| Case TermAst TermAst TermAst
| Equal TermAst TermAst
| Refl
| Sig (Seq (NameAst, TermAst))
| Struct (Seq (NameAst, TermAst))
| Select TermAst NameAst
| Patch TermAst (Seq (NameAst, TermAst))
| Declare TermAst TermAst TermAst
| Define TermAst TermAst TermAst
| NameType Cm.Universe TermAst
| Text
| TextLiteral Text
| TextAppend TermAst TermAst
| Hole
| Iter TermAst TermAst TermAst
deriving (Show, Eq)
| null | https://raw.githubusercontent.com/eashanhatti/peridot/014d6b8e8b792ac80928fc43c8f1d26df8eb7d2d/src/Syntax/Surface.hs | haskell | For declarations | module Syntax.Surface where
import Data.Text(Text)
import Numeric.Natural
import Syntax.Common hiding(unId, CStatement(..), Declaration(..))
import Syntax.Common qualified as Cm
import Text.Megaparsec(SourcePos)
import Data.Sequence
data Ast a where
TermAst :: Term -> TermAst
NameAst :: Name -> NameAst
DeclAst :: Declaration -> Id -> DeclarationAst
SourcePos :: Ast a -> SourcePos -> Ast a
deriving instance Show (Ast a)
deriving instance Eq (Ast a)
unName :: NameAst -> Name
unName (NameAst name) = name
data Universe = Obj | Meta | Prop
deriving (Show, Eq)
unDeclName :: DeclarationAst -> Name
unDeclName (DeclAst (MetaTerm (NameAst name) _ _) _) = name
unDeclName (DeclAst (ObjTerm (NameAst name) _ _) _) = name
unDeclName (DeclAst (Axiom (NameAst name) _) _) = name
unDeclName (DeclAst (Prove _) did) = MachineName (fromIntegral did)
unDeclName (DeclAst (Output _ _) did) = MachineName (fromIntegral did)
unDeclName (DeclAst (Fresh (NameAst name) _) _) = name
unDeclName (SourcePos ast _) = unDeclName ast
stripSourcePos :: DeclarationAst -> Declaration
stripSourcePos (SourcePos ast _) = stripSourcePos ast
stripSourcePos (DeclAst decl _) = decl
unId :: DeclarationAst -> Id
unId (DeclAst _ did) = did
unId (SourcePos ast _) = unId ast
type NameAst = Ast Name
type DeclarationAst = Ast Declaration
data Declaration
= MetaTerm NameAst TermAst TermAst
| ObjTerm NameAst TermAst TermAst
| Axiom NameAst TermAst
| Prove TermAst
| Fresh NameAst TermAst
| Output FilePath TermAst
| Import FilePath NameAst
deriving (Show, Eq)
data Quantification = Ex | Im
deriving (Show, Eq)
type TermAst = Ast Term
data Term
= MetaPi PassMethod NameAst TermAst TermAst
| MetaLam (Seq NameAst) TermAst
| ObjPi PassMethod NameAst TermAst TermAst
| ObjLam (Seq (PassMethod, NameAst)) TermAst
| App TermAst (Seq (PassMethod, TermAst))
| Var Quantification Name
| OUniv
| MUniv
| Let (Seq DeclarationAst) TermAst
| LiftObj TermAst
| QuoteObj TermAst
| SpliceObj TermAst
| ImplProp TermAst TermAst
| ConjProp TermAst TermAst
| DisjProp TermAst TermAst
| ForallProp NameAst TermAst TermAst
| ExistsProp NameAst TermAst TermAst
| EqualProp TermAst TermAst
| Bool
| BTrue
| BFalse
| Case TermAst TermAst TermAst
| Equal TermAst TermAst
| Refl
| Sig (Seq (NameAst, TermAst))
| Struct (Seq (NameAst, TermAst))
| Select TermAst NameAst
| Patch TermAst (Seq (NameAst, TermAst))
| Declare TermAst TermAst TermAst
| Define TermAst TermAst TermAst
| NameType Cm.Universe TermAst
| Text
| TextLiteral Text
| TextAppend TermAst TermAst
| Hole
| Iter TermAst TermAst TermAst
deriving (Show, Eq)
|
6bb594ea42dcd6b5bcb849997c66c9c905f40ecd9cd729878166a983340db43c | zellio/incrementum | tests-1.5-req.scm |
(add-tests-with-string-output "fx+"
[(fx+ 1 2) => "3\n"]
[(fx+ 1 -2) => "-1\n"]
[(fx+ -1 2) => "1\n"]
[(fx+ -1 -2) => "-3\n"]
[(fx+ 536870911 -1) => "536870910\n"]
[(fx+ 536870910 1) => "536870911\n"]
[(fx+ -536870912 1) => "-536870911\n"]
[(fx+ -536870911 -1) => "-536870912\n"]
[(fx+ 536870911 -536870912) => "-1\n"]
[(fx+ 1 (fx+ 2 3)) => "6\n"]
[(fx+ 1 (fx+ 2 -3)) => "0\n"]
[(fx+ 1 (fx+ -2 3)) => "2\n"]
[(fx+ 1 (fx+ -2 -3)) => "-4\n"]
[(fx+ -1 (fx+ 2 3)) => "4\n"]
[(fx+ -1 (fx+ 2 -3)) => "-2\n"]
[(fx+ -1 (fx+ -2 3)) => "0\n"]
[(fx+ -1 (fx+ -2 -3)) => "-6\n"]
[(fx+ (fx+ 1 2) 3) => "6\n"]
[(fx+ (fx+ 1 2) -3) => "0\n"]
[(fx+ (fx+ 1 -2) 3) => "2\n"]
[(fx+ (fx+ 1 -2) -3) => "-4\n"]
[(fx+ (fx+ -1 2) 3) => "4\n"]
[(fx+ (fx+ -1 2) -3) => "-2\n"]
[(fx+ (fx+ -1 -2) 3) => "0\n"]
[(fx+ (fx+ -1 -2) -3) => "-6\n"]
[(fx+ (fx+ -1 -2) (fx+ 3 4)) => "4\n"]
[(fx+ (fx+ -1 -2) (fx+ 3 -4)) => "-4\n"]
[(fx+ (fx+ -1 -2) (fx+ -3 4)) => "-2\n"]
[(fx+ (fx+ -1 -2) (fx+ -3 -4)) => "-10\n"]
[(fx+ (fx+ (fx+ (fx+ (fx+ (fx+ (fx+ (fx+ 1 2) 3) 4) 5) 6) 7) 8) 9) => "45\n"]
[(fx+ (fx+ (fx+ (fx+ (fx+ (fx+ (fx+ (fx+ 1 2) 3) 4) 5) 6) 7) 8) 9) => "45\n"]
[(fx+ 1 (fx+ 2 (fx+ 3 (fx+ 4 (fx+ 5 (fx+ 6 (fx+ 7 (fx+ 8 9)))))))) => "45\n"]
[(fx+ (fx+ (fx+ (fx+ 1 2) (fx+ 3 4)) (fx+ (fx+ 5 6) (fx+ 7 8)))
(fx+ (fx+ (fx+ 9 10) (fx+ 11 12)) (fx+ (fx+ 13 14) (fx+ 15 16))))
=> "136\n"]
)
(add-tests-with-string-output "fx-"
[(fx- 1 2) => "-1\n"]
[(fx- 1 -2) => "3\n"]
[(fx- -1 2) => "-3\n"]
[(fx- -1 -2) => "1\n"]
[(fx- 536870910 -1) => "536870911\n"]
[(fx- 536870911 1) => "536870910\n"]
[(fx- -536870911 1) => "-536870912\n"]
[(fx- -536870912 -1) => "-536870911\n"]
[(fx- 1 536870911) => "-536870910\n"]
[(fx- -1 536870911) => "-536870912\n"]
[(fx- 1 -536870910) => "536870911\n"]
[(fx- -1 -536870912) => "536870911\n"]
[(fx- 536870911 536870911) => "0\n"]
[ ( fx- 536870911 -536870912 ) = > " -1\n " ]
[(fx- -536870911 -536870912) => "1\n"]
[(fx- 1 (fx- 2 3)) => "2\n"]
[(fx- 1 (fx- 2 -3)) => "-4\n"]
[(fx- 1 (fx- -2 3)) => "6\n"]
[(fx- 1 (fx- -2 -3)) => "0\n"]
[(fx- -1 (fx- 2 3)) => "0\n"]
[(fx- -1 (fx- 2 -3)) => "-6\n"]
[(fx- -1 (fx- -2 3)) => "4\n"]
[(fx- -1 (fx- -2 -3)) => "-2\n"]
[(fx- 0 (fx- -2 -3)) => "-1\n"]
[(fx- (fx- 1 2) 3) => "-4\n"]
[(fx- (fx- 1 2) -3) => "2\n"]
[(fx- (fx- 1 -2) 3) => "0\n"]
[(fx- (fx- 1 -2) -3) => "6\n"]
[(fx- (fx- -1 2) 3) => "-6\n"]
[(fx- (fx- -1 2) -3) => "0\n"]
[(fx- (fx- -1 -2) 3) => "-2\n"]
[(fx- (fx- -1 -2) -3) => "4\n"]
[(fx- (fx- (fx- (fx- (fx- (fx- (fx- (fx- 1 2) 3) 4) 5) 6) 7) 8) 9) => "-43\n"]
[(fx- 1 (fx- 2 (fx- 3 (fx- 4 (fx- 5 (fx- 6 (fx- 7 (fx- 8 9)))))))) => "5\n"]
[(fx- (fx- 1 2) (fx- 3 4)) => "0\n"]
[(fx- (fx- 1 2) (fx- 3 -4)) => "-8\n"]
[(fx- (fx- 1 2) (fx- -3 4)) => "6\n"]
[(fx- (fx- 1 2) (fx- -3 -4)) => "-2\n"]
[(fx- (fx- 1 -2) (fx- 3 4)) => "4\n"]
[(fx- (fx- 1 -2) (fx- 3 -4)) => "-4\n"]
[(fx- (fx- 1 -2) (fx- -3 4)) => "10\n"]
[(fx- (fx- 1 -2) (fx- -3 -4)) => "2\n"]
[(fx- (fx- -1 2) (fx- 3 4)) => "-2\n"]
[(fx- (fx- -1 2) (fx- 3 -4)) => "-10\n"]
[(fx- (fx- -1 2) (fx- -3 4)) => "4\n"]
[(fx- (fx- -1 2) (fx- -3 -4)) => "-4\n"]
[(fx- (fx- -1 -2) (fx- 3 4)) => "2\n"]
[(fx- (fx- -1 -2) (fx- 3 -4)) => "-6\n"]
[(fx- (fx- -1 -2) (fx- -3 4)) => "8\n"]
[(fx- (fx- -1 -2) (fx- -3 -4)) => "0\n"]
[(fx- (fx- (fx- (fx- (fx- (fx- (fx- (fx- 1 2) 3) 4) 5) 6) 7) 8) 9) => "-43\n"]
[(fx- 1 (fx- 2 (fx- 3 (fx- 4 (fx- 5 (fx- 6 (fx- 7 (fx- 8 9)))))))) => "5\n"]
[(fx- (fx- (fx- (fx- 1 2) (fx- 3 4)) (fx- (fx- 5 6) (fx- 7 8)))
(fx- (fx- (fx- 9 10) (fx- 11 12)) (fx- (fx- 13 14) (fx- 15 16))))
=> "0\n"]
)
(add-tests-with-string-output "fx*"
[(fx* 2 3) => "6\n"]
[(fx* 2 -3) => "-6\n"]
[(fx* -2 3) => "-6\n"]
[(fx* -2 -3) => "6\n"]
[(fx* 536870911 1) => "536870911\n"]
[(fx* 536870911 -1) => "-536870911\n"]
[(fx* -536870912 1) => "-536870912\n"]
[(fx* -536870911 -1) => "536870911\n"]
[(fx* 2 (fx* 3 4)) => "24\n"]
[(fx* (fx* 2 3) 4) => "24\n"]
[(fx* (fx* (fx* (fx* (fx* 2 3) 4) 5) 6) 7) => "5040\n"]
[(fx* 2 (fx* 3 (fx* 4 (fx* 5 (fx* 6 7))))) => "5040\n"]
[(fx* (fx* (fx* (fx* 2 3) (fx* 4 5)) (fx* (fx* 6 7) (fx* 8 9)))
(fx* (fx* (fx* 2 3) (fx* 2 3)) (fx* (fx* 2 3) (fx* 2 3))))
=> "470292480\n"]
)
(add-tests-with-string-output "fxlognot"
[(fxlognot -7) => "6\n"]
[(fxlognot (fxlogor (fxlognot 7) 1)) => "6\n"]
[(fxlognot (fxlogor (fxlognot 7) (fxlognot 2))) => "2\n"]
[(fxlogand (fxlognot (fxlognot 12)) (fxlognot (fxlognot 12))) => "12\n"]
)
(add-tests-with-string-output "fxlogand and fxlogor"
[(fxlogor 3 16) => "19\n"]
[(fxlogor 3 5) => "7\n"]
[(fxlogor 3 7) => "7\n"]
[(fxlognot (fxlogor (fxlognot 7) 1)) => "6\n"]
[(fxlognot (fxlogor 1 (fxlognot 7))) => "6\n"]
[(fxlogand 3 7) => "3\n"]
[(fxlogand 3 5) => "1\n"]
[(fxlogand 2346 (fxlognot 2346)) => "0\n"]
[(fxlogand (fxlognot 2346) 2346) => "0\n"]
[(fxlogand 2376 2376) => "2376\n"]
[(fxlognot (fxlogor (fxlognot 7) 1)) => "6\n"]
[(fxlognot (fxlogor (fxlognot 7) (fxlognot 2))) => "2\n"]
[(fxlogand (fxlognot (fxlognot 12)) (fxlognot (fxlognot 12))) => "12\n"]
)
(add-tests-with-string-output "fx="
[(fx= 12 13) => "#f\n"]
[(fx= 12 12) => "#t\n"]
[(fx= 16 (fx+ 13 3)) => "#t\n"]
[(fx= 16 (fx+ 13 13)) => "#f\n"]
[(fx= (fx+ 13 3) 16) => "#t\n"]
[(fx= (fx+ 13 13) 16) => "#f\n"]
[(fx= (fx+ 13 3) (fx+ 10 6)) => "#t\n"]
[(fx= (fx+ 13 0) (fx+ 10 6)) => "#f\n"]
[(fx= (fx+ 12 1) (fx+ -12 -1)) => "#f\n"]
)
(add-tests-with-string-output "fx<"
[(fx< 12 13) => "#t\n"]
[(fx< 12 12) => "#f\n"]
[(fx< 13 12) => "#f\n"]
[(fx< 16 (fx+ 13 1)) => "#f\n"]
[(fx< 16 (fx+ 13 3)) => "#f\n"]
[(fx< 16 (fx+ 13 13)) => "#t\n"]
[(fx< (fx+ 13 1) 16) => "#t\n"]
[(fx< (fx+ 13 3) 16) => "#f\n"]
[(fx< (fx+ 13 13) 16) => "#f\n"]
[(fx< (fx+ 10 6) (fx+ 13 1)) => "#f\n"]
[(fx< (fx+ 10 6) (fx+ 13 3)) => "#f\n"]
[(fx< (fx+ 10 6) (fx+ 13 31)) => "#t\n"]
[(fx< (fx+ 12 1) (fx+ -12 -1)) => "#f\n"]
[(fx< (fx+ -12 -1) (fx+ 12 1)) => "#t\n"]
)
(add-tests-with-string-output "fx<="
[(fx<= 12 13) => "#t\n"]
[(fx<= 12 12) => "#t\n"]
[(fx<= 13 12) => "#f\n"]
[(fx<= 16 (fx+ 13 1)) => "#f\n"]
[(fx<= 16 (fx+ 13 3)) => "#t\n"]
[(fx<= 16 (fx+ 13 13)) => "#t\n"]
[(fx<= (fx+ 13 1) 16) => "#t\n"]
[(fx<= (fx+ 13 3) 16) => "#t\n"]
[(fx<= (fx+ 13 13) 16) => "#f\n"]
)
(add-tests-with-string-output "fx>"
[(fx> 12 13) => "#f\n"]
[(fx> 12 12) => "#f\n"]
[(fx> 13 12) => "#t\n"]
[(fx> 16 (fx+ 13 1)) => "#t\n"]
[(fx> 16 (fx+ 13 3)) => "#f\n"]
[(fx> 16 (fx+ 13 13)) => "#f\n"]
[(fx> (fx+ 13 1) 16) => "#f\n"]
[(fx> (fx+ 13 3) 16) => "#f\n"]
[(fx> (fx+ 13 13) 16) => "#t\n"]
[(fx> (fx+ 10 6) (fx+ 13 1)) => "#t\n"]
[(fx> (fx+ 10 6) (fx+ 13 3)) => "#f\n"]
[(fx> (fx+ 10 6) (fx+ 13 31)) => "#f\n"]
[(fx> (fx+ 12 1) (fx+ -12 -1)) => "#t\n"]
[(fx> (fx+ -12 -1) (fx+ 12 1)) => "#f\n"]
)
(add-tests-with-string-output "fx>="
[(fx>= 12 13) => "#f\n"]
[(fx>= 12 12) => "#t\n"]
[(fx>= 13 12) => "#t\n"]
[(fx>= 16 (fx+ 13 1)) => "#t\n"]
[(fx>= 16 (fx+ 13 3)) => "#t\n"]
[(fx>= 16 (fx+ 13 13)) => "#f\n"]
[(fx>= (fx+ 13 1) 16) => "#f\n"]
[(fx>= (fx+ 13 3) 16) => "#t\n"]
[(fx>= (fx+ 13 13) 16) => "#t\n"]
[(fx<= (fx+ 10 6) (fx+ 13 1)) => "#f\n"]
[(fx<= (fx+ 10 6) (fx+ 13 3)) => "#t\n"]
[(fx<= (fx+ 10 6) (fx+ 13 31)) => "#t\n"]
[(fx<= (fx+ 12 1) (fx+ -12 -1)) => "#f\n"]
[(fx<= (fx+ -12 -1) (fx+ 12 1)) => "#t\n"]
[(fx>= (fx+ 10 6) (fx+ 13 1)) => "#t\n"]
[(fx>= (fx+ 10 6) (fx+ 13 3)) => "#t\n"]
[(fx>= (fx+ 10 6) (fx+ 13 31)) => "#f\n"]
[(fx>= (fx+ 12 1) (fx+ -12 -1)) => "#t\n"]
[(fx>= (fx+ -12 -1) (fx+ 12 1)) => "#f\n"]
)
(add-tests-with-string-output "if"
[(if (fx= 12 13) 12 13) => "13\n"]
[(if (fx= 12 12) 13 14) => "13\n"]
[(if (fx< 12 13) 12 13) => "12\n"]
[(if (fx< 12 12) 13 14) => "14\n"]
[(if (fx< 13 12) 13 14) => "14\n"]
[(if (fx<= 12 13) 12 13) => "12\n"]
[(if (fx<= 12 12) 12 13) => "12\n"]
[(if (fx<= 13 12) 13 14) => "14\n"]
[(if (fx> 12 13) 12 13) => "13\n"]
[(if (fx> 12 12) 12 13) => "13\n"]
[(if (fx> 13 12) 13 14) => "13\n"]
[(if (fx>= 12 13) 12 13) => "13\n"]
[(if (fx>= 12 12) 12 13) => "12\n"]
[(if (fx>= 13 12) 13 14) => "13\n"]
)
| null | https://raw.githubusercontent.com/zellio/incrementum/12060fefdcc2883ffda38c9233a37aad6b20625e/test/scheme/tests-1.5-req.scm | scheme |
(add-tests-with-string-output "fx+"
[(fx+ 1 2) => "3\n"]
[(fx+ 1 -2) => "-1\n"]
[(fx+ -1 2) => "1\n"]
[(fx+ -1 -2) => "-3\n"]
[(fx+ 536870911 -1) => "536870910\n"]
[(fx+ 536870910 1) => "536870911\n"]
[(fx+ -536870912 1) => "-536870911\n"]
[(fx+ -536870911 -1) => "-536870912\n"]
[(fx+ 536870911 -536870912) => "-1\n"]
[(fx+ 1 (fx+ 2 3)) => "6\n"]
[(fx+ 1 (fx+ 2 -3)) => "0\n"]
[(fx+ 1 (fx+ -2 3)) => "2\n"]
[(fx+ 1 (fx+ -2 -3)) => "-4\n"]
[(fx+ -1 (fx+ 2 3)) => "4\n"]
[(fx+ -1 (fx+ 2 -3)) => "-2\n"]
[(fx+ -1 (fx+ -2 3)) => "0\n"]
[(fx+ -1 (fx+ -2 -3)) => "-6\n"]
[(fx+ (fx+ 1 2) 3) => "6\n"]
[(fx+ (fx+ 1 2) -3) => "0\n"]
[(fx+ (fx+ 1 -2) 3) => "2\n"]
[(fx+ (fx+ 1 -2) -3) => "-4\n"]
[(fx+ (fx+ -1 2) 3) => "4\n"]
[(fx+ (fx+ -1 2) -3) => "-2\n"]
[(fx+ (fx+ -1 -2) 3) => "0\n"]
[(fx+ (fx+ -1 -2) -3) => "-6\n"]
[(fx+ (fx+ -1 -2) (fx+ 3 4)) => "4\n"]
[(fx+ (fx+ -1 -2) (fx+ 3 -4)) => "-4\n"]
[(fx+ (fx+ -1 -2) (fx+ -3 4)) => "-2\n"]
[(fx+ (fx+ -1 -2) (fx+ -3 -4)) => "-10\n"]
[(fx+ (fx+ (fx+ (fx+ (fx+ (fx+ (fx+ (fx+ 1 2) 3) 4) 5) 6) 7) 8) 9) => "45\n"]
[(fx+ (fx+ (fx+ (fx+ (fx+ (fx+ (fx+ (fx+ 1 2) 3) 4) 5) 6) 7) 8) 9) => "45\n"]
[(fx+ 1 (fx+ 2 (fx+ 3 (fx+ 4 (fx+ 5 (fx+ 6 (fx+ 7 (fx+ 8 9)))))))) => "45\n"]
[(fx+ (fx+ (fx+ (fx+ 1 2) (fx+ 3 4)) (fx+ (fx+ 5 6) (fx+ 7 8)))
(fx+ (fx+ (fx+ 9 10) (fx+ 11 12)) (fx+ (fx+ 13 14) (fx+ 15 16))))
=> "136\n"]
)
(add-tests-with-string-output "fx-"
[(fx- 1 2) => "-1\n"]
[(fx- 1 -2) => "3\n"]
[(fx- -1 2) => "-3\n"]
[(fx- -1 -2) => "1\n"]
[(fx- 536870910 -1) => "536870911\n"]
[(fx- 536870911 1) => "536870910\n"]
[(fx- -536870911 1) => "-536870912\n"]
[(fx- -536870912 -1) => "-536870911\n"]
[(fx- 1 536870911) => "-536870910\n"]
[(fx- -1 536870911) => "-536870912\n"]
[(fx- 1 -536870910) => "536870911\n"]
[(fx- -1 -536870912) => "536870911\n"]
[(fx- 536870911 536870911) => "0\n"]
[ ( fx- 536870911 -536870912 ) = > " -1\n " ]
[(fx- -536870911 -536870912) => "1\n"]
[(fx- 1 (fx- 2 3)) => "2\n"]
[(fx- 1 (fx- 2 -3)) => "-4\n"]
[(fx- 1 (fx- -2 3)) => "6\n"]
[(fx- 1 (fx- -2 -3)) => "0\n"]
[(fx- -1 (fx- 2 3)) => "0\n"]
[(fx- -1 (fx- 2 -3)) => "-6\n"]
[(fx- -1 (fx- -2 3)) => "4\n"]
[(fx- -1 (fx- -2 -3)) => "-2\n"]
[(fx- 0 (fx- -2 -3)) => "-1\n"]
[(fx- (fx- 1 2) 3) => "-4\n"]
[(fx- (fx- 1 2) -3) => "2\n"]
[(fx- (fx- 1 -2) 3) => "0\n"]
[(fx- (fx- 1 -2) -3) => "6\n"]
[(fx- (fx- -1 2) 3) => "-6\n"]
[(fx- (fx- -1 2) -3) => "0\n"]
[(fx- (fx- -1 -2) 3) => "-2\n"]
[(fx- (fx- -1 -2) -3) => "4\n"]
[(fx- (fx- (fx- (fx- (fx- (fx- (fx- (fx- 1 2) 3) 4) 5) 6) 7) 8) 9) => "-43\n"]
[(fx- 1 (fx- 2 (fx- 3 (fx- 4 (fx- 5 (fx- 6 (fx- 7 (fx- 8 9)))))))) => "5\n"]
[(fx- (fx- 1 2) (fx- 3 4)) => "0\n"]
[(fx- (fx- 1 2) (fx- 3 -4)) => "-8\n"]
[(fx- (fx- 1 2) (fx- -3 4)) => "6\n"]
[(fx- (fx- 1 2) (fx- -3 -4)) => "-2\n"]
[(fx- (fx- 1 -2) (fx- 3 4)) => "4\n"]
[(fx- (fx- 1 -2) (fx- 3 -4)) => "-4\n"]
[(fx- (fx- 1 -2) (fx- -3 4)) => "10\n"]
[(fx- (fx- 1 -2) (fx- -3 -4)) => "2\n"]
[(fx- (fx- -1 2) (fx- 3 4)) => "-2\n"]
[(fx- (fx- -1 2) (fx- 3 -4)) => "-10\n"]
[(fx- (fx- -1 2) (fx- -3 4)) => "4\n"]
[(fx- (fx- -1 2) (fx- -3 -4)) => "-4\n"]
[(fx- (fx- -1 -2) (fx- 3 4)) => "2\n"]
[(fx- (fx- -1 -2) (fx- 3 -4)) => "-6\n"]
[(fx- (fx- -1 -2) (fx- -3 4)) => "8\n"]
[(fx- (fx- -1 -2) (fx- -3 -4)) => "0\n"]
[(fx- (fx- (fx- (fx- (fx- (fx- (fx- (fx- 1 2) 3) 4) 5) 6) 7) 8) 9) => "-43\n"]
[(fx- 1 (fx- 2 (fx- 3 (fx- 4 (fx- 5 (fx- 6 (fx- 7 (fx- 8 9)))))))) => "5\n"]
[(fx- (fx- (fx- (fx- 1 2) (fx- 3 4)) (fx- (fx- 5 6) (fx- 7 8)))
(fx- (fx- (fx- 9 10) (fx- 11 12)) (fx- (fx- 13 14) (fx- 15 16))))
=> "0\n"]
)
(add-tests-with-string-output "fx*"
[(fx* 2 3) => "6\n"]
[(fx* 2 -3) => "-6\n"]
[(fx* -2 3) => "-6\n"]
[(fx* -2 -3) => "6\n"]
[(fx* 536870911 1) => "536870911\n"]
[(fx* 536870911 -1) => "-536870911\n"]
[(fx* -536870912 1) => "-536870912\n"]
[(fx* -536870911 -1) => "536870911\n"]
[(fx* 2 (fx* 3 4)) => "24\n"]
[(fx* (fx* 2 3) 4) => "24\n"]
[(fx* (fx* (fx* (fx* (fx* 2 3) 4) 5) 6) 7) => "5040\n"]
[(fx* 2 (fx* 3 (fx* 4 (fx* 5 (fx* 6 7))))) => "5040\n"]
[(fx* (fx* (fx* (fx* 2 3) (fx* 4 5)) (fx* (fx* 6 7) (fx* 8 9)))
(fx* (fx* (fx* 2 3) (fx* 2 3)) (fx* (fx* 2 3) (fx* 2 3))))
=> "470292480\n"]
)
(add-tests-with-string-output "fxlognot"
[(fxlognot -7) => "6\n"]
[(fxlognot (fxlogor (fxlognot 7) 1)) => "6\n"]
[(fxlognot (fxlogor (fxlognot 7) (fxlognot 2))) => "2\n"]
[(fxlogand (fxlognot (fxlognot 12)) (fxlognot (fxlognot 12))) => "12\n"]
)
(add-tests-with-string-output "fxlogand and fxlogor"
[(fxlogor 3 16) => "19\n"]
[(fxlogor 3 5) => "7\n"]
[(fxlogor 3 7) => "7\n"]
[(fxlognot (fxlogor (fxlognot 7) 1)) => "6\n"]
[(fxlognot (fxlogor 1 (fxlognot 7))) => "6\n"]
[(fxlogand 3 7) => "3\n"]
[(fxlogand 3 5) => "1\n"]
[(fxlogand 2346 (fxlognot 2346)) => "0\n"]
[(fxlogand (fxlognot 2346) 2346) => "0\n"]
[(fxlogand 2376 2376) => "2376\n"]
[(fxlognot (fxlogor (fxlognot 7) 1)) => "6\n"]
[(fxlognot (fxlogor (fxlognot 7) (fxlognot 2))) => "2\n"]
[(fxlogand (fxlognot (fxlognot 12)) (fxlognot (fxlognot 12))) => "12\n"]
)
(add-tests-with-string-output "fx="
[(fx= 12 13) => "#f\n"]
[(fx= 12 12) => "#t\n"]
[(fx= 16 (fx+ 13 3)) => "#t\n"]
[(fx= 16 (fx+ 13 13)) => "#f\n"]
[(fx= (fx+ 13 3) 16) => "#t\n"]
[(fx= (fx+ 13 13) 16) => "#f\n"]
[(fx= (fx+ 13 3) (fx+ 10 6)) => "#t\n"]
[(fx= (fx+ 13 0) (fx+ 10 6)) => "#f\n"]
[(fx= (fx+ 12 1) (fx+ -12 -1)) => "#f\n"]
)
(add-tests-with-string-output "fx<"
[(fx< 12 13) => "#t\n"]
[(fx< 12 12) => "#f\n"]
[(fx< 13 12) => "#f\n"]
[(fx< 16 (fx+ 13 1)) => "#f\n"]
[(fx< 16 (fx+ 13 3)) => "#f\n"]
[(fx< 16 (fx+ 13 13)) => "#t\n"]
[(fx< (fx+ 13 1) 16) => "#t\n"]
[(fx< (fx+ 13 3) 16) => "#f\n"]
[(fx< (fx+ 13 13) 16) => "#f\n"]
[(fx< (fx+ 10 6) (fx+ 13 1)) => "#f\n"]
[(fx< (fx+ 10 6) (fx+ 13 3)) => "#f\n"]
[(fx< (fx+ 10 6) (fx+ 13 31)) => "#t\n"]
[(fx< (fx+ 12 1) (fx+ -12 -1)) => "#f\n"]
[(fx< (fx+ -12 -1) (fx+ 12 1)) => "#t\n"]
)
(add-tests-with-string-output "fx<="
[(fx<= 12 13) => "#t\n"]
[(fx<= 12 12) => "#t\n"]
[(fx<= 13 12) => "#f\n"]
[(fx<= 16 (fx+ 13 1)) => "#f\n"]
[(fx<= 16 (fx+ 13 3)) => "#t\n"]
[(fx<= 16 (fx+ 13 13)) => "#t\n"]
[(fx<= (fx+ 13 1) 16) => "#t\n"]
[(fx<= (fx+ 13 3) 16) => "#t\n"]
[(fx<= (fx+ 13 13) 16) => "#f\n"]
)
(add-tests-with-string-output "fx>"
[(fx> 12 13) => "#f\n"]
[(fx> 12 12) => "#f\n"]
[(fx> 13 12) => "#t\n"]
[(fx> 16 (fx+ 13 1)) => "#t\n"]
[(fx> 16 (fx+ 13 3)) => "#f\n"]
[(fx> 16 (fx+ 13 13)) => "#f\n"]
[(fx> (fx+ 13 1) 16) => "#f\n"]
[(fx> (fx+ 13 3) 16) => "#f\n"]
[(fx> (fx+ 13 13) 16) => "#t\n"]
[(fx> (fx+ 10 6) (fx+ 13 1)) => "#t\n"]
[(fx> (fx+ 10 6) (fx+ 13 3)) => "#f\n"]
[(fx> (fx+ 10 6) (fx+ 13 31)) => "#f\n"]
[(fx> (fx+ 12 1) (fx+ -12 -1)) => "#t\n"]
[(fx> (fx+ -12 -1) (fx+ 12 1)) => "#f\n"]
)
(add-tests-with-string-output "fx>="
[(fx>= 12 13) => "#f\n"]
[(fx>= 12 12) => "#t\n"]
[(fx>= 13 12) => "#t\n"]
[(fx>= 16 (fx+ 13 1)) => "#t\n"]
[(fx>= 16 (fx+ 13 3)) => "#t\n"]
[(fx>= 16 (fx+ 13 13)) => "#f\n"]
[(fx>= (fx+ 13 1) 16) => "#f\n"]
[(fx>= (fx+ 13 3) 16) => "#t\n"]
[(fx>= (fx+ 13 13) 16) => "#t\n"]
[(fx<= (fx+ 10 6) (fx+ 13 1)) => "#f\n"]
[(fx<= (fx+ 10 6) (fx+ 13 3)) => "#t\n"]
[(fx<= (fx+ 10 6) (fx+ 13 31)) => "#t\n"]
[(fx<= (fx+ 12 1) (fx+ -12 -1)) => "#f\n"]
[(fx<= (fx+ -12 -1) (fx+ 12 1)) => "#t\n"]
[(fx>= (fx+ 10 6) (fx+ 13 1)) => "#t\n"]
[(fx>= (fx+ 10 6) (fx+ 13 3)) => "#t\n"]
[(fx>= (fx+ 10 6) (fx+ 13 31)) => "#f\n"]
[(fx>= (fx+ 12 1) (fx+ -12 -1)) => "#t\n"]
[(fx>= (fx+ -12 -1) (fx+ 12 1)) => "#f\n"]
)
(add-tests-with-string-output "if"
[(if (fx= 12 13) 12 13) => "13\n"]
[(if (fx= 12 12) 13 14) => "13\n"]
[(if (fx< 12 13) 12 13) => "12\n"]
[(if (fx< 12 12) 13 14) => "14\n"]
[(if (fx< 13 12) 13 14) => "14\n"]
[(if (fx<= 12 13) 12 13) => "12\n"]
[(if (fx<= 12 12) 12 13) => "12\n"]
[(if (fx<= 13 12) 13 14) => "14\n"]
[(if (fx> 12 13) 12 13) => "13\n"]
[(if (fx> 12 12) 12 13) => "13\n"]
[(if (fx> 13 12) 13 14) => "13\n"]
[(if (fx>= 12 13) 12 13) => "13\n"]
[(if (fx>= 12 12) 12 13) => "12\n"]
[(if (fx>= 13 12) 13 14) => "13\n"]
)
|
|
343517231b73bc21842dbff8249d5d00bd6afa13d4fc21af97d4e1799b7105f2 | cljfx/cljfx | line_to.clj | (ns cljfx.fx.line-to
"Part of a public API"
(:require [cljfx.composite :as composite]
[cljfx.lifecycle :as lifecycle]
[cljfx.fx.path-element :as fx.path-element])
(:import [javafx.scene.shape LineTo]))
(set! *warn-on-reflection* true)
(def props
(merge
fx.path-element/props
(composite/props LineTo
:x [:setter lifecycle/scalar :coerce double :default 0]
:y [:setter lifecycle/scalar :coerce double :default 0])))
(def lifecycle
(lifecycle/annotate
(composite/describe LineTo
:ctor []
:props props)
:line-to))
| null | https://raw.githubusercontent.com/cljfx/cljfx/543f7409290051e9444771d2cd86dadeb8cdce33/src/cljfx/fx/line_to.clj | clojure | (ns cljfx.fx.line-to
"Part of a public API"
(:require [cljfx.composite :as composite]
[cljfx.lifecycle :as lifecycle]
[cljfx.fx.path-element :as fx.path-element])
(:import [javafx.scene.shape LineTo]))
(set! *warn-on-reflection* true)
(def props
(merge
fx.path-element/props
(composite/props LineTo
:x [:setter lifecycle/scalar :coerce double :default 0]
:y [:setter lifecycle/scalar :coerce double :default 0])))
(def lifecycle
(lifecycle/annotate
(composite/describe LineTo
:ctor []
:props props)
:line-to))
|
|
288cfbb020f89700369250b5b3894e1b0dc116a4007953cee8f5b6f28b26f1b7 | BinaryAnalysisPlatform/bap | phoenix_dot.mli | open Core_kernel[@@warning "-D"]
open Bap.Std
module Make(Env : sig
val project : project
val options : Phoenix_options.t
module Target : Target
end) : sig
val fprint_graph : Format.formatter -> Symtab.fn -> unit
val output_graph : Out_channel.t -> Symtab.fn -> unit
end
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap/253afc171bbfd0fe1b34f6442795dbf4b1798348/plugins/phoenix/phoenix_dot.mli | ocaml | open Core_kernel[@@warning "-D"]
open Bap.Std
module Make(Env : sig
val project : project
val options : Phoenix_options.t
module Target : Target
end) : sig
val fprint_graph : Format.formatter -> Symtab.fn -> unit
val output_graph : Out_channel.t -> Symtab.fn -> unit
end
|
|
9b28739b9839e46b543262b53aa28b60e222782139b2da95e0ef7bfc0f3cd0fb | bendmorris/scotch | Eval.hs | This file is part of Scotch .
Scotch is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
Scotch is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOnoR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with Scotch . If not , see < / > .
Scotch is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Scotch is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOnoR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Scotch. If not, see </>.
-}
module Scotch.Eval.Eval (ieval, subfile) where
import Data.List
import Numeric
import System.Directory
import Scotch.Types.Types
import Scotch.Types.Exceptions
import Scotch.Types.Bindings
import Scotch.Types.Hash
import Scotch.Types.Interpreter
import Scotch.Eval.Calc
import Scotch.Eval.Substitute
import Scotch.Parse.Parse as Parse
{-
eval: evaluates an expression.
This function evaluates expressions step by step and should not be assumed to result
in full evaluation; rather, eval should be run until the result is the same as the initial input.
-}
eval :: Expr -> VarDict -> InterpreterSettings -> Bool -> Expr
eval exp [] settings rw = eval exp emptyHash settings rw
eval oexp vars settings rw =
if exp /= oexp
then exp
else case exp of
Var id -> if length (qualVarHash id vars) > 0
then Val $ Hash $ makeHash strHash (qualVarHash id vars) emptyHash
else if length (qualVarHash ("local." ++ id) vars) > 0
then Val $ Hash $ makeHash strHash (qualVarHash ("local." ++ id) vars) emptyHash
else Var id
Call x [] -> x
Call (Call id args) args' -> eval' $ Call id (args ++ args')
Call (Var "eval")
[x] -> case eval' x of
Val (Str s) -> case length evaled of
0 -> Skip
1 -> evaled !! 0
otherwise -> Val $ Proc $ evaled
where evaled = [snd i | i <- Parse.read "" s]
otherwise -> Call (Var "eval") [otherwise]
Call (Var "int")
[x] -> case eval' x of
Val (NumInt i) -> Val $ NumInt i
Val (NumFloat f) -> Val $ NumInt (truncate f)
Val (Str s) -> case (Parse.read "" s) !! 0 of
(Just a, Val (NumInt i)) -> Val (NumInt i)
otherwise -> exCantConvert s "integer"
Exception e -> Exception e
otherwise -> Call (Var "int") [otherwise]
Call (Var "float")
[x] -> case eval' x of
Val (NumInt i) -> Val $ NumFloat $ fromIntegral i
Val (NumFloat f) -> Val $ NumFloat f
Val (Str s) -> case (Parse.read "" s) !! 0 of
(Just a, Val (NumFloat f)) -> Val (NumFloat f)
(Just a, Val (NumInt i)) -> Val (NumFloat (fromIntegral i))
otherwise -> exCantConvert s "float"
Exception e -> Exception e
otherwise -> Call (Var "float") [otherwise]
Call (Var "str")
[x] -> case eval' x of
Val (Str s) -> Val $ Str s
Val (NumFloat f) -> Val $ Str $ showFFloat Nothing f ""
Val (Undefined u) -> Exception u
Val v -> Val $ Str (show v)
Exception e -> Exception e
otherwise -> if otherwise == x
then Val $ Str $ show x
else Call (Var "str") [otherwise]
Call (Var "list")
[x] -> case eval' x of
List l -> List l
Val (Str s) -> List [Val (Str [c]) | c <- s]
Val (Hash h) -> List [List [Val (Str (fst l)), snd l] | e <- h, l <- e]
Exception e -> Exception e
Val v -> List [Val v]
otherwise -> Call (Var "list") [otherwise]
Call (Var "bool")
[x] -> case eval' x of
Val Null -> Val (Bit False)
Val (Bit b) -> Val (Bit b)
Val (NumInt n) -> Val (Bit (n /= 0))
Val (NumFloat n) -> Val (Bit (n /= 0))
Val (Str s) -> Val (Bit (s /= ""))
List l -> Val (Bit (l /= []))
otherwise -> Call (Var "bool") [otherwise]
Call (Var id) args -> if fullEval (Var id) eval' == Var id
then Call (Var id) [fullEval arg eval' | arg <- args]
else Call (fullEval (Var id) eval') args
Call (Val (Lambda ids expr)) args ->
if length ids == length args
then substitute expr (zip [Var id | id <- ids] args)
else exp
Call (Val (NumInt i)) args -> Prod (Val (NumInt i)) (totalProd args)
Call (Val (NumFloat i)) args -> Prod (Val (NumFloat i)) (totalProd args)
Call (Val (Str s)) args -> Val $ Str $ s ++ foldl (++) "" [case fullEval i eval' of
Val (Str a) -> a
otherwise -> show otherwise
| i <- args]
Call x args -> Call (fullEval x eval') args
Import s t -> Import s t
Take n x -> case n of
Val (NumInt i) -> case x of
List l -> List (take i' l)
Val (Str s) -> Val $ Str $ take i' s
Exception e -> Exception e
Add (List l) (y) -> if length t == i'
then List t
else Take n (eval' x)
where t = take i' l
otherwise -> Take n (eval' x)
where i' = fromIntegral i
Exception e -> Exception e
otherwise -> Take (eval' otherwise) x
List l -> case (validList l) of
Val _ -> case List [eval' i | i <- l] of
List l -> if all ((==) True)
[case i of
Val (Str s) -> length s == 1
otherwise -> False
| i <- l]
then Val $ Str (foldl (++) [case i of
Val (Str s) -> s !! 0
| i <- l] [])
else List l
otherwise -> otherwise
Exception e -> Exception e
HashExpr l -> Val $ Hash $ makeHash strHash
[(case eval' (fst i) of
Val (Str s) -> s
otherwise -> show otherwise,
snd i)
| i <- l] emptyHash
Val x -> case x of
Undefined s -> Exception s
otherwise -> Val x
Subs n x -> case fullEval x' eval' of
List l -> case n' of
Val (NumInt n) -> case (if n >= 0
then getElemAtN l (fromIntegral n)
else getElemAtN l ((length l) + (fromIntegral n))) of
Just x -> x
Nothing -> exNotInList n
List l' -> List [Subs i (List l) | i <- l']
otherwise -> Subs n' x'
Val (Str s) -> case n' of
Val (NumInt n) -> if n >= 0
then Val (Str ([s !! (fromIntegral n)]))
else Val (Str ([s !! ((length s) + (fromIntegral n))]))
List l' -> List [Subs i (Val (Str s)) | i <- l']
otherwise -> Subs n' x'
Val (Hash l) -> case n' of
Exception e -> Exception e
List l' -> List [Subs i (Val (Hash l)) | i <- l']
otherwise -> case fullEval (Call (Var "str") [otherwise]) eval' of
Val (Str s) -> case hashMember strHash s l of
Just x -> x
Nothing -> exNotInHash s
Exception e -> Exception e
otherwise -> Subs n' x'
Call (Var f) args -> case n' of
Val (NumInt n) -> if n >= 0
then eval' $ Subs (Val (NumInt n)) (eval' (Take (Val (NumInt ((fromIntegral n) + 1))) (Call (Var f) args)))
else Subs (Val (NumInt n)) (eval' x)
List l' -> List [Subs i f' | i <- l']
where f' = (Call (Var f) args)
otherwise -> Subs n' x'
otherwise -> Subs n x'
where n' = eval' n
x' = eval' x
Concat x y -> eval' (Add x y)
Add x y -> case x of
Exception e -> Exception e
List l -> case y of
Exception e -> Exception e
List l' -> List $ l ++ l'
Val v -> vadd (strict settings) x y
Add a (Call id args) -> Add (eval' (Add x a)) (Call id args)
otherwise -> nextOp
Val (Proc p) -> Val $ Proc $ p ++ [y]
Val v -> case y of
Exception e -> Exception e
List l -> vadd (strict settings) x y
Val v -> vadd (strict settings) x y
otherwise -> nextOp
otherwise -> nextOp
where nextOp = if vadd (strict settings) x y == Add x y
then Add (eval' x) (eval' y)
else operation x y vadd Add
Sub x y -> operation x y vsub Sub
if nextOp = = Prod x y
then if eval ' ( Prod y x ) = = Prod y x
then nextOp
else case x of
Prod a b - > if eval ' x = = x
then Prod a ( Prod b y )
else nextOp
otherwise - > case y of
Prod a b - > if eval ' y = = y
then Prod ( Prod x a ) b
else nextOp
otherwise - > nextOp
else
then if eval' (Prod y x) == Prod y x
then nextOp
else case x of
Prod a b -> if eval' x == x
then Prod a (Prod b y)
else nextOp
otherwise -> case y of
Prod a b -> if eval' y == y
then Prod (Prod x a) b
else nextOp
otherwise -> nextOp
else -}nextOp
where nextOp = operation x y vprod Prod
Div x y -> operation x y vdiv Div
Mod x y -> operation x y vmod Mod
Exp x y -> operation x y vexp Exp
Eq x y -> case operation x' y' veq Eq of
Eq (List a) (List b) -> if length a' == length b'
then Val $ Bit $
allTrue [fullEval (Eq (a' !! n)
(b' !! n))
eval'
| n <- [0 .. (length a - 1)]]
else Val (Bit False)
where allTrue [] = True
allTrue (h:t) = case h of
Val (Bit True) -> allTrue t
otherwise -> False
list' l = case fullEval (List l) eval' of
List l -> l
otherwise -> []
a' = list' a
b' = list' b
otherwise -> if x' == y' then Val (Bit True)
else otherwise
where x' = fullEval x eval'
y' = fullEval y eval'
InEq x y -> eval' (Prod (operation x y veq Eq) (Val (NumInt (-1))))
Gt x y -> operation x y vgt Gt
Lt x y -> operation x y vlt Lt
And x y -> case eval' x of
Val (Bit True) -> case eval' y of
Val (Bit True) -> Val (Bit True)
Val (Bit False) -> Val (Bit False)
otherwise -> And (eval' x) (eval' y)
Val (Bit False) -> Val (Bit False)
otherwise -> And (eval' x) (eval' y)
where err = exTypeMismatch (eval' x) (eval' y) "and"
Or x y -> case eval' x of
Val (Bit True) -> Val (Bit True)
Val (Bit False) -> case eval' y of
Val (Bit b) -> Val (Bit b)
otherwise -> Or (eval' x) (eval' y)
otherwise -> Or (eval' x) (eval' y)
where err = exTypeMismatch (eval' x) (eval' y) "or"
Not x -> case eval' x of
Exception s -> Exception s
Val (Bit b) -> Val $ Bit $ not b
otherwise -> Not otherwise
Def f x Skip -> case f of
List l -> Val $ Proc $
[Def (l !! n) (Subs (Val $ NumInt $ fromIntegral n) x) Skip
| n <- [0 .. length(l) - 1]]
Subs a b -> case fullEval b eval' of
Val (Hash h) -> Def b (Val (Hash (makeHash strHash [((case fullEval a eval' of
Val (Str s) -> s
otherwise -> show a), x)
] h))) Skip
HashExpr h -> Def b (HashExpr (h ++ [(a, x)])) Skip
Var id -> Def b (Val (Hash (makeHash strHash [(show a, x)] emptyHash))) Skip
otherwise -> Def f x Skip
otherwise -> Def f x Skip
Def f x y -> evalWithNewDefs y [(f, x)]
EagerDef f x' Skip -> case f of
List l -> Val $ Proc $
[EagerDef (l !! n) (Subs (Val $ NumInt $ fromIntegral n) x) Skip
| n <- [0 .. length(l) - 1]]
Subs a b -> case fullEval b eval' of
Val (Hash h) -> Def b (Val (Hash (makeHash strHash [((case fullEval a eval' of
Val (Str s) -> s
otherwise -> show a), x)
] h))) Skip
HashExpr h -> EagerDef b (HashExpr (h ++ [(a, x)])) Skip
Var id -> EagerDef b (Val (Hash (makeHash strHash [(show a, x)] emptyHash))) Skip
otherwise -> EagerDef f x Skip
otherwise -> EagerDef f x Skip
where x = fullEval x' eval'
EagerDef f x y -> case fullEval x eval' of
Val v -> next
List l -> next
otherwise -> next--EagerDef f otherwise y
where next = evalWithNewDefs y [(f, fullEval x eval')]
If cond x y -> case fullEval cond eval' of
Val (Bit True) -> x
Val (Bit False) -> y
Exception e -> Exception e
otherwise -> If otherwise x y
Case check cases -> caseExpr check (reverse cases)
For id x y conds -> case fullEval x eval' of
List l -> List [substitute y [(Var id, item)] | item <- l,
allTrue [substitute cond [(Var id, item)] | cond <- conds]
]
Exception e -> Exception e
otherwise -> For id otherwise y conds
Range from to step -> case from of
Val (NumInt i) -> case to of
Val (NumInt j) -> case step of
Val (NumInt k) -> List [Val (NumInt x) | x <- [i, i+k .. j]]
Exception e -> Exception e
otherwise -> Range from to (eval' step)
Skip -> case (eval' step) of
Val (NumInt k) -> List [Val (NumInt x) | x <- [i, i+k ..]]
Exception e -> Exception e
otherwise -> Range from Skip otherwise
Exception e -> Exception e
otherwise -> Range from (eval' to) step
Exception e -> Exception e
otherwise -> Range (eval' from) to step
UseRule r x -> case r of
Rule r -> eval' (rule x r)
List l -> if all ((/=) [Skip]) l'
then UseRule (Rule (allRules l' [])) x
else exInvalidRule r
where l' = [case fullEval i eval' of
Rule r' -> r'
otherwise -> [Skip]
| i <- l]
Val (Hash h) -> UseRule (Rule r') x
where r' = [Def (Var (fst i)) (snd i) Skip
| j <- h, i <- j]
Val v -> exInvalidRule r
otherwise -> UseRule (eval' r) x
where rule x (h:t) = case h of
Def a b c -> Def a b (rule x t)
EagerDef a b c -> EagerDef a b (rule x t)
otherwise -> rule x t
rule x [] = x
allRules (h:t) a = allRules t (a ++ h)
allRules [] a = a
otherwise -> otherwise
where operation x y f g = if calc x' y' f (strict settings) == g x' y'
then g x' y'
else calc x' y' f (strict settings)
where x' = fullEval x eval'
y' = fullEval y eval'
allTrue [] = True
allTrue (h:t) = case eval' h of
Val (Bit True) -> allTrue t
Exception e -> False
Val v -> False
otherwise -> if otherwise == h then False else allTrue (otherwise : t)
caseExpr check [] = Call (Var "case") [fullEval check eval']
caseExpr check (h:t) = Def (Call (Var "case") [fst h]) (snd h) (caseExpr check t)
evalArgs x = case x of
Call a b -> Call (evalArgs a) ([fullEval i eval' | i <- b])
otherwise -> otherwise
exp = if rw
then rewrite (evalArgs oexp) (vars !! exprHash oexp) (vars !! exprHash oexp) eval'
else oexp
eval' expr = eval expr vars settings rw
evalWithNewDefs expr defs = eval expr (makeVarDict defs vars) settings rw
getElemAtN [] n = Nothing
getElemAtN (h:t) 0 = Just h
getElemAtN (h:t) n = getElemAtN t (n-1)
totalProd [] = Val (NumInt 1)
totalProd (h:t) = if t == []
then h
else Prod h (totalProd t)
iolist :: [IO Expr] -> IO [Expr]
iolist [] = do return []
iolist (h:t) = do item <- h
rest <- iolist t
return (item:rest)
-- ieval: evaluates an expression completely, replacing I/O operations as necessary
ieval :: InterpreterSettings -> Expr -> VarDict -> [Expr] -> IO Expr
ieval settings expr vars last =
do subbed <- subfile expr
let result = eval subbed vars settings True
if isInfixOf [result] last
then return (last !! 0)
else do if (verbose settings) && length last > 0
then putStrLn (show (last !! 0))
else return ()
result' <- case expr of
Def _ _ Skip -> do return (result, vars)
EagerDef _ _ Skip -> do return (result, vars)
Def id x y -> do return $ (y, makeVarDict [(id, x)] vars)
EagerDef id x y -> do x' <- ieval settings x vars (expr : last')
return $ (y, makeVarDict [(id, x')] vars)
otherwise -> do return (result, vars)
ieval settings (fst result') (snd result') (expr : last')
where last' = take 2 last
oneArg f a = do a' <- subfile a
return $ f a'
twoArgs f a b = do a' <- subfile a
b' <- subfile b
return $ f a' b'
threeArgs f a b c = do a' <- subfile a
b' <- subfile b
c' <- subfile c
return $ f a' b' c'
-- subfile: substitutes values for delayed I/O operations
subfile :: Expr -> IO Expr
subfile exp =
case exp of
Var "input" -> do line <- getLine
return $ Val (Str line)
Call (Var "read") [f] -> do sub <- subfile f
case f of
Val (Str f) -> do exists <- doesFileExist f
case exists of
True -> do contents <- readFile f
return $ Val $ Str contents
False -> return $ exFileDNE
otherwise -> return $ exp
Call f args -> do args' <- iolist [subfile arg | arg <- args]
return $ Call f args'
Take a b -> twoArgs Take a b
List l -> do list <- iolist [subfile e | e <- l]
return $ List list
HashExpr l -> do list1 <- iolist [subfile (fst e) | e <- l]
list2 <- iolist [subfile (snd e) | e <- l]
return $ HashExpr (zip list1 list2)
Subs a b -> twoArgs Subs a b
Add a b -> twoArgs Add a b
Sub a b -> twoArgs Sub a b
Prod a b -> twoArgs Prod a b
Div a b -> twoArgs Div a b
Mod a b -> twoArgs Mod a b
Exp a b -> twoArgs Exp a b
Eq a b -> twoArgs Eq a b
InEq a b -> twoArgs InEq a b
Gt a b -> twoArgs Gt a b
Lt a b -> twoArgs Lt a b
And a b -> twoArgs And a b
Or a b -> twoArgs Or a b
Not a -> oneArg Not a
EagerDef id a b -> twoArgs (EagerDef id) a b
Def id a b -> oneArg (Def id a) b
If a b c -> threeArgs If a b c
For id x y z -> do x' <- subfile x
y' <- subfile y
z' <- iolist [subfile i | i <- z]
return $ For id x' y' z'
otherwise -> do return otherwise
| null | https://raw.githubusercontent.com/bendmorris/scotch/7b4bb4546d0ea3bc6b345eedc65a5d7f82c98d10/Scotch/Eval/Eval.hs | haskell |
eval: evaluates an expression.
This function evaluates expressions step by step and should not be assumed to result
in full evaluation; rather, eval should be run until the result is the same as the initial input.
EagerDef f otherwise y
ieval: evaluates an expression completely, replacing I/O operations as necessary
subfile: substitutes values for delayed I/O operations | This file is part of Scotch .
Scotch is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
Scotch is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOnoR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with Scotch . If not , see < / > .
Scotch is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Scotch is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOnoR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Scotch. If not, see </>.
-}
module Scotch.Eval.Eval (ieval, subfile) where
import Data.List
import Numeric
import System.Directory
import Scotch.Types.Types
import Scotch.Types.Exceptions
import Scotch.Types.Bindings
import Scotch.Types.Hash
import Scotch.Types.Interpreter
import Scotch.Eval.Calc
import Scotch.Eval.Substitute
import Scotch.Parse.Parse as Parse
eval :: Expr -> VarDict -> InterpreterSettings -> Bool -> Expr
eval exp [] settings rw = eval exp emptyHash settings rw
eval oexp vars settings rw =
if exp /= oexp
then exp
else case exp of
Var id -> if length (qualVarHash id vars) > 0
then Val $ Hash $ makeHash strHash (qualVarHash id vars) emptyHash
else if length (qualVarHash ("local." ++ id) vars) > 0
then Val $ Hash $ makeHash strHash (qualVarHash ("local." ++ id) vars) emptyHash
else Var id
Call x [] -> x
Call (Call id args) args' -> eval' $ Call id (args ++ args')
Call (Var "eval")
[x] -> case eval' x of
Val (Str s) -> case length evaled of
0 -> Skip
1 -> evaled !! 0
otherwise -> Val $ Proc $ evaled
where evaled = [snd i | i <- Parse.read "" s]
otherwise -> Call (Var "eval") [otherwise]
Call (Var "int")
[x] -> case eval' x of
Val (NumInt i) -> Val $ NumInt i
Val (NumFloat f) -> Val $ NumInt (truncate f)
Val (Str s) -> case (Parse.read "" s) !! 0 of
(Just a, Val (NumInt i)) -> Val (NumInt i)
otherwise -> exCantConvert s "integer"
Exception e -> Exception e
otherwise -> Call (Var "int") [otherwise]
Call (Var "float")
[x] -> case eval' x of
Val (NumInt i) -> Val $ NumFloat $ fromIntegral i
Val (NumFloat f) -> Val $ NumFloat f
Val (Str s) -> case (Parse.read "" s) !! 0 of
(Just a, Val (NumFloat f)) -> Val (NumFloat f)
(Just a, Val (NumInt i)) -> Val (NumFloat (fromIntegral i))
otherwise -> exCantConvert s "float"
Exception e -> Exception e
otherwise -> Call (Var "float") [otherwise]
Call (Var "str")
[x] -> case eval' x of
Val (Str s) -> Val $ Str s
Val (NumFloat f) -> Val $ Str $ showFFloat Nothing f ""
Val (Undefined u) -> Exception u
Val v -> Val $ Str (show v)
Exception e -> Exception e
otherwise -> if otherwise == x
then Val $ Str $ show x
else Call (Var "str") [otherwise]
Call (Var "list")
[x] -> case eval' x of
List l -> List l
Val (Str s) -> List [Val (Str [c]) | c <- s]
Val (Hash h) -> List [List [Val (Str (fst l)), snd l] | e <- h, l <- e]
Exception e -> Exception e
Val v -> List [Val v]
otherwise -> Call (Var "list") [otherwise]
Call (Var "bool")
[x] -> case eval' x of
Val Null -> Val (Bit False)
Val (Bit b) -> Val (Bit b)
Val (NumInt n) -> Val (Bit (n /= 0))
Val (NumFloat n) -> Val (Bit (n /= 0))
Val (Str s) -> Val (Bit (s /= ""))
List l -> Val (Bit (l /= []))
otherwise -> Call (Var "bool") [otherwise]
Call (Var id) args -> if fullEval (Var id) eval' == Var id
then Call (Var id) [fullEval arg eval' | arg <- args]
else Call (fullEval (Var id) eval') args
Call (Val (Lambda ids expr)) args ->
if length ids == length args
then substitute expr (zip [Var id | id <- ids] args)
else exp
Call (Val (NumInt i)) args -> Prod (Val (NumInt i)) (totalProd args)
Call (Val (NumFloat i)) args -> Prod (Val (NumFloat i)) (totalProd args)
Call (Val (Str s)) args -> Val $ Str $ s ++ foldl (++) "" [case fullEval i eval' of
Val (Str a) -> a
otherwise -> show otherwise
| i <- args]
Call x args -> Call (fullEval x eval') args
Import s t -> Import s t
Take n x -> case n of
Val (NumInt i) -> case x of
List l -> List (take i' l)
Val (Str s) -> Val $ Str $ take i' s
Exception e -> Exception e
Add (List l) (y) -> if length t == i'
then List t
else Take n (eval' x)
where t = take i' l
otherwise -> Take n (eval' x)
where i' = fromIntegral i
Exception e -> Exception e
otherwise -> Take (eval' otherwise) x
List l -> case (validList l) of
Val _ -> case List [eval' i | i <- l] of
List l -> if all ((==) True)
[case i of
Val (Str s) -> length s == 1
otherwise -> False
| i <- l]
then Val $ Str (foldl (++) [case i of
Val (Str s) -> s !! 0
| i <- l] [])
else List l
otherwise -> otherwise
Exception e -> Exception e
HashExpr l -> Val $ Hash $ makeHash strHash
[(case eval' (fst i) of
Val (Str s) -> s
otherwise -> show otherwise,
snd i)
| i <- l] emptyHash
Val x -> case x of
Undefined s -> Exception s
otherwise -> Val x
Subs n x -> case fullEval x' eval' of
List l -> case n' of
Val (NumInt n) -> case (if n >= 0
then getElemAtN l (fromIntegral n)
else getElemAtN l ((length l) + (fromIntegral n))) of
Just x -> x
Nothing -> exNotInList n
List l' -> List [Subs i (List l) | i <- l']
otherwise -> Subs n' x'
Val (Str s) -> case n' of
Val (NumInt n) -> if n >= 0
then Val (Str ([s !! (fromIntegral n)]))
else Val (Str ([s !! ((length s) + (fromIntegral n))]))
List l' -> List [Subs i (Val (Str s)) | i <- l']
otherwise -> Subs n' x'
Val (Hash l) -> case n' of
Exception e -> Exception e
List l' -> List [Subs i (Val (Hash l)) | i <- l']
otherwise -> case fullEval (Call (Var "str") [otherwise]) eval' of
Val (Str s) -> case hashMember strHash s l of
Just x -> x
Nothing -> exNotInHash s
Exception e -> Exception e
otherwise -> Subs n' x'
Call (Var f) args -> case n' of
Val (NumInt n) -> if n >= 0
then eval' $ Subs (Val (NumInt n)) (eval' (Take (Val (NumInt ((fromIntegral n) + 1))) (Call (Var f) args)))
else Subs (Val (NumInt n)) (eval' x)
List l' -> List [Subs i f' | i <- l']
where f' = (Call (Var f) args)
otherwise -> Subs n' x'
otherwise -> Subs n x'
where n' = eval' n
x' = eval' x
Concat x y -> eval' (Add x y)
Add x y -> case x of
Exception e -> Exception e
List l -> case y of
Exception e -> Exception e
List l' -> List $ l ++ l'
Val v -> vadd (strict settings) x y
Add a (Call id args) -> Add (eval' (Add x a)) (Call id args)
otherwise -> nextOp
Val (Proc p) -> Val $ Proc $ p ++ [y]
Val v -> case y of
Exception e -> Exception e
List l -> vadd (strict settings) x y
Val v -> vadd (strict settings) x y
otherwise -> nextOp
otherwise -> nextOp
where nextOp = if vadd (strict settings) x y == Add x y
then Add (eval' x) (eval' y)
else operation x y vadd Add
Sub x y -> operation x y vsub Sub
if nextOp = = Prod x y
then if eval ' ( Prod y x ) = = Prod y x
then nextOp
else case x of
Prod a b - > if eval ' x = = x
then Prod a ( Prod b y )
else nextOp
otherwise - > case y of
Prod a b - > if eval ' y = = y
then Prod ( Prod x a ) b
else nextOp
otherwise - > nextOp
else
then if eval' (Prod y x) == Prod y x
then nextOp
else case x of
Prod a b -> if eval' x == x
then Prod a (Prod b y)
else nextOp
otherwise -> case y of
Prod a b -> if eval' y == y
then Prod (Prod x a) b
else nextOp
otherwise -> nextOp
else -}nextOp
where nextOp = operation x y vprod Prod
Div x y -> operation x y vdiv Div
Mod x y -> operation x y vmod Mod
Exp x y -> operation x y vexp Exp
Eq x y -> case operation x' y' veq Eq of
Eq (List a) (List b) -> if length a' == length b'
then Val $ Bit $
allTrue [fullEval (Eq (a' !! n)
(b' !! n))
eval'
| n <- [0 .. (length a - 1)]]
else Val (Bit False)
where allTrue [] = True
allTrue (h:t) = case h of
Val (Bit True) -> allTrue t
otherwise -> False
list' l = case fullEval (List l) eval' of
List l -> l
otherwise -> []
a' = list' a
b' = list' b
otherwise -> if x' == y' then Val (Bit True)
else otherwise
where x' = fullEval x eval'
y' = fullEval y eval'
InEq x y -> eval' (Prod (operation x y veq Eq) (Val (NumInt (-1))))
Gt x y -> operation x y vgt Gt
Lt x y -> operation x y vlt Lt
And x y -> case eval' x of
Val (Bit True) -> case eval' y of
Val (Bit True) -> Val (Bit True)
Val (Bit False) -> Val (Bit False)
otherwise -> And (eval' x) (eval' y)
Val (Bit False) -> Val (Bit False)
otherwise -> And (eval' x) (eval' y)
where err = exTypeMismatch (eval' x) (eval' y) "and"
Or x y -> case eval' x of
Val (Bit True) -> Val (Bit True)
Val (Bit False) -> case eval' y of
Val (Bit b) -> Val (Bit b)
otherwise -> Or (eval' x) (eval' y)
otherwise -> Or (eval' x) (eval' y)
where err = exTypeMismatch (eval' x) (eval' y) "or"
Not x -> case eval' x of
Exception s -> Exception s
Val (Bit b) -> Val $ Bit $ not b
otherwise -> Not otherwise
Def f x Skip -> case f of
List l -> Val $ Proc $
[Def (l !! n) (Subs (Val $ NumInt $ fromIntegral n) x) Skip
| n <- [0 .. length(l) - 1]]
Subs a b -> case fullEval b eval' of
Val (Hash h) -> Def b (Val (Hash (makeHash strHash [((case fullEval a eval' of
Val (Str s) -> s
otherwise -> show a), x)
] h))) Skip
HashExpr h -> Def b (HashExpr (h ++ [(a, x)])) Skip
Var id -> Def b (Val (Hash (makeHash strHash [(show a, x)] emptyHash))) Skip
otherwise -> Def f x Skip
otherwise -> Def f x Skip
Def f x y -> evalWithNewDefs y [(f, x)]
EagerDef f x' Skip -> case f of
List l -> Val $ Proc $
[EagerDef (l !! n) (Subs (Val $ NumInt $ fromIntegral n) x) Skip
| n <- [0 .. length(l) - 1]]
Subs a b -> case fullEval b eval' of
Val (Hash h) -> Def b (Val (Hash (makeHash strHash [((case fullEval a eval' of
Val (Str s) -> s
otherwise -> show a), x)
] h))) Skip
HashExpr h -> EagerDef b (HashExpr (h ++ [(a, x)])) Skip
Var id -> EagerDef b (Val (Hash (makeHash strHash [(show a, x)] emptyHash))) Skip
otherwise -> EagerDef f x Skip
otherwise -> EagerDef f x Skip
where x = fullEval x' eval'
EagerDef f x y -> case fullEval x eval' of
Val v -> next
List l -> next
where next = evalWithNewDefs y [(f, fullEval x eval')]
If cond x y -> case fullEval cond eval' of
Val (Bit True) -> x
Val (Bit False) -> y
Exception e -> Exception e
otherwise -> If otherwise x y
Case check cases -> caseExpr check (reverse cases)
For id x y conds -> case fullEval x eval' of
List l -> List [substitute y [(Var id, item)] | item <- l,
allTrue [substitute cond [(Var id, item)] | cond <- conds]
]
Exception e -> Exception e
otherwise -> For id otherwise y conds
Range from to step -> case from of
Val (NumInt i) -> case to of
Val (NumInt j) -> case step of
Val (NumInt k) -> List [Val (NumInt x) | x <- [i, i+k .. j]]
Exception e -> Exception e
otherwise -> Range from to (eval' step)
Skip -> case (eval' step) of
Val (NumInt k) -> List [Val (NumInt x) | x <- [i, i+k ..]]
Exception e -> Exception e
otherwise -> Range from Skip otherwise
Exception e -> Exception e
otherwise -> Range from (eval' to) step
Exception e -> Exception e
otherwise -> Range (eval' from) to step
UseRule r x -> case r of
Rule r -> eval' (rule x r)
List l -> if all ((/=) [Skip]) l'
then UseRule (Rule (allRules l' [])) x
else exInvalidRule r
where l' = [case fullEval i eval' of
Rule r' -> r'
otherwise -> [Skip]
| i <- l]
Val (Hash h) -> UseRule (Rule r') x
where r' = [Def (Var (fst i)) (snd i) Skip
| j <- h, i <- j]
Val v -> exInvalidRule r
otherwise -> UseRule (eval' r) x
where rule x (h:t) = case h of
Def a b c -> Def a b (rule x t)
EagerDef a b c -> EagerDef a b (rule x t)
otherwise -> rule x t
rule x [] = x
allRules (h:t) a = allRules t (a ++ h)
allRules [] a = a
otherwise -> otherwise
where operation x y f g = if calc x' y' f (strict settings) == g x' y'
then g x' y'
else calc x' y' f (strict settings)
where x' = fullEval x eval'
y' = fullEval y eval'
allTrue [] = True
allTrue (h:t) = case eval' h of
Val (Bit True) -> allTrue t
Exception e -> False
Val v -> False
otherwise -> if otherwise == h then False else allTrue (otherwise : t)
caseExpr check [] = Call (Var "case") [fullEval check eval']
caseExpr check (h:t) = Def (Call (Var "case") [fst h]) (snd h) (caseExpr check t)
evalArgs x = case x of
Call a b -> Call (evalArgs a) ([fullEval i eval' | i <- b])
otherwise -> otherwise
exp = if rw
then rewrite (evalArgs oexp) (vars !! exprHash oexp) (vars !! exprHash oexp) eval'
else oexp
eval' expr = eval expr vars settings rw
evalWithNewDefs expr defs = eval expr (makeVarDict defs vars) settings rw
getElemAtN [] n = Nothing
getElemAtN (h:t) 0 = Just h
getElemAtN (h:t) n = getElemAtN t (n-1)
totalProd [] = Val (NumInt 1)
totalProd (h:t) = if t == []
then h
else Prod h (totalProd t)
iolist :: [IO Expr] -> IO [Expr]
iolist [] = do return []
iolist (h:t) = do item <- h
rest <- iolist t
return (item:rest)
ieval :: InterpreterSettings -> Expr -> VarDict -> [Expr] -> IO Expr
ieval settings expr vars last =
do subbed <- subfile expr
let result = eval subbed vars settings True
if isInfixOf [result] last
then return (last !! 0)
else do if (verbose settings) && length last > 0
then putStrLn (show (last !! 0))
else return ()
result' <- case expr of
Def _ _ Skip -> do return (result, vars)
EagerDef _ _ Skip -> do return (result, vars)
Def id x y -> do return $ (y, makeVarDict [(id, x)] vars)
EagerDef id x y -> do x' <- ieval settings x vars (expr : last')
return $ (y, makeVarDict [(id, x')] vars)
otherwise -> do return (result, vars)
ieval settings (fst result') (snd result') (expr : last')
where last' = take 2 last
oneArg f a = do a' <- subfile a
return $ f a'
twoArgs f a b = do a' <- subfile a
b' <- subfile b
return $ f a' b'
threeArgs f a b c = do a' <- subfile a
b' <- subfile b
c' <- subfile c
return $ f a' b' c'
subfile :: Expr -> IO Expr
subfile exp =
case exp of
Var "input" -> do line <- getLine
return $ Val (Str line)
Call (Var "read") [f] -> do sub <- subfile f
case f of
Val (Str f) -> do exists <- doesFileExist f
case exists of
True -> do contents <- readFile f
return $ Val $ Str contents
False -> return $ exFileDNE
otherwise -> return $ exp
Call f args -> do args' <- iolist [subfile arg | arg <- args]
return $ Call f args'
Take a b -> twoArgs Take a b
List l -> do list <- iolist [subfile e | e <- l]
return $ List list
HashExpr l -> do list1 <- iolist [subfile (fst e) | e <- l]
list2 <- iolist [subfile (snd e) | e <- l]
return $ HashExpr (zip list1 list2)
Subs a b -> twoArgs Subs a b
Add a b -> twoArgs Add a b
Sub a b -> twoArgs Sub a b
Prod a b -> twoArgs Prod a b
Div a b -> twoArgs Div a b
Mod a b -> twoArgs Mod a b
Exp a b -> twoArgs Exp a b
Eq a b -> twoArgs Eq a b
InEq a b -> twoArgs InEq a b
Gt a b -> twoArgs Gt a b
Lt a b -> twoArgs Lt a b
And a b -> twoArgs And a b
Or a b -> twoArgs Or a b
Not a -> oneArg Not a
EagerDef id a b -> twoArgs (EagerDef id) a b
Def id a b -> oneArg (Def id a) b
If a b c -> threeArgs If a b c
For id x y z -> do x' <- subfile x
y' <- subfile y
z' <- iolist [subfile i | i <- z]
return $ For id x' y' z'
otherwise -> do return otherwise
|
cb281e6f4ffb7fc5c907c5974c7f873bb72c82e4c8f0f6b58bc21fea4fbedfbd | stepcut/plugins | Main.hs |
import System.Plugins
import API
src = "../Plugin.hs"
wrap = "../Wrapper.hs"
apipath = "../api"
main = do status <- make src ["-i"++apipath]
case status of
MakeSuccess _ _ -> f
MakeFailure e -> mapM_ putStrLn e
where f = do v <- pdynload "../Plugin.o" ["../api"] [] "API.Interface" "resource"
case v of
LoadSuccess _ a -> putStrLn "loaded .. yay!"
_ -> putStrLn "wrong types"
| null | https://raw.githubusercontent.com/stepcut/plugins/52c660b5bc71182627d14c1d333d0234050cac01/testsuite/pdynload/typealias/prog/Main.hs | haskell |
import System.Plugins
import API
src = "../Plugin.hs"
wrap = "../Wrapper.hs"
apipath = "../api"
main = do status <- make src ["-i"++apipath]
case status of
MakeSuccess _ _ -> f
MakeFailure e -> mapM_ putStrLn e
where f = do v <- pdynload "../Plugin.o" ["../api"] [] "API.Interface" "resource"
case v of
LoadSuccess _ a -> putStrLn "loaded .. yay!"
_ -> putStrLn "wrong types"
|
|
1ee5519610685c4772306ffef8d3eec4797681a16be5a08ee66644d6f6c6d53b | carotene/carotene | api_channels_handler.erl | -module(api_channels_handler).
-export([init/3]).
-export([terminate/3]).
-export([allowed_methods/2]).
-export([content_types_provided/2]).
-export([resource_exists/2]).
-export([channel_to_json/2]).
init(_Type, Req, _Opts) ->
{{IP, _Port}, _} = cowboy_req:peer(Req),
case carotene_api_authorization:authorize(IP) of
true -> {upgrade, protocol, cowboy_rest};
false ->
{ok, Req2} = cowboy_req:reply(500, [
{<<"content-type">>, <<"text/plain">>}
], "You are not authorized to access this endpoint. Check your configuration.", Req),
{shutdown, Req2, no_state}
end.
allowed_methods(Req, State) ->
{[<<"GET">>], Req, State}.
content_types_provided(Req, State) ->
{[
{{<<"application">>, <<"json">>, []}, channel_to_json}
], Req, State}.
channel_to_json(Req, {index, Channels}) ->
Body = jsx:encode(Channels),
{Body, Req, Channels};
channel_to_json(Req, Channel) ->
Body = jsx:encode([{<<"channel">>, Channel}]),
{Body, Req, Channel}.
resource_exists(Req, _State) ->
{ok, Exs} = gen_server:call(carotene_admin_connection, get_subscribed),
case cowboy_req:binding(channel, Req) of
undefined ->
{true, Req, {index, Exs}};
{Channel, _Bin} ->
case lists:member(Channel, Exs) of
true -> {true, Req, Channel};
false -> {false, Req, Channel}
end
end.
terminate(_Reason, _Req, _State) ->
ok.
| null | https://raw.githubusercontent.com/carotene/carotene/963ecad344ec1c318c173ad828a5af3c000ddbfc/src/api_channels_handler.erl | erlang | -module(api_channels_handler).
-export([init/3]).
-export([terminate/3]).
-export([allowed_methods/2]).
-export([content_types_provided/2]).
-export([resource_exists/2]).
-export([channel_to_json/2]).
init(_Type, Req, _Opts) ->
{{IP, _Port}, _} = cowboy_req:peer(Req),
case carotene_api_authorization:authorize(IP) of
true -> {upgrade, protocol, cowboy_rest};
false ->
{ok, Req2} = cowboy_req:reply(500, [
{<<"content-type">>, <<"text/plain">>}
], "You are not authorized to access this endpoint. Check your configuration.", Req),
{shutdown, Req2, no_state}
end.
allowed_methods(Req, State) ->
{[<<"GET">>], Req, State}.
content_types_provided(Req, State) ->
{[
{{<<"application">>, <<"json">>, []}, channel_to_json}
], Req, State}.
channel_to_json(Req, {index, Channels}) ->
Body = jsx:encode(Channels),
{Body, Req, Channels};
channel_to_json(Req, Channel) ->
Body = jsx:encode([{<<"channel">>, Channel}]),
{Body, Req, Channel}.
resource_exists(Req, _State) ->
{ok, Exs} = gen_server:call(carotene_admin_connection, get_subscribed),
case cowboy_req:binding(channel, Req) of
undefined ->
{true, Req, {index, Exs}};
{Channel, _Bin} ->
case lists:member(Channel, Exs) of
true -> {true, Req, Channel};
false -> {false, Req, Channel}
end
end.
terminate(_Reason, _Req, _State) ->
ok.
|
|
9ae299d41a28e3a6b123a93ae62e73a4ddfa14e00377fdc23aa49c959a49ada9 | nasa/Common-Metadata-Repository | httpd.clj | (ns cmr.graph.components.httpd
(:require
[com.stuartsierra.component :as component]
[cmr.graph.components.config :as config]
[cmr.graph.rest.app :as rest-api]
[org.httpkit.server :as server]
[taoensso.timbre :as log]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; HTTP Server Component API ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TBD
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Component Lifecycle Implementation ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defrecord HTTPD [])
(defn start
[this]
(log/info "Starting httpd component ...")
(let [port (config/http-port this)
server (server/run-server (rest-api/app this) {:port port})]
(log/debugf "HTTPD is listening on port %s" port)
(log/debug "Started httpd component.")
(assoc this :server server)))
(defn stop
[this]
(log/info "Stopping httpd component ...")
(if-let [server (:server this)]
(server))
(assoc this :server nil))
(def lifecycle-behaviour
{:start start
:stop stop})
(extend HTTPD
component/Lifecycle
lifecycle-behaviour)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Component Constructor ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn create-component
""
[]
(map->HTTPD {}))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/63001cf021d32d61030b1dcadd8b253e4a221662/other/cmr-exchange/graph/src/cmr/graph/components/httpd.clj | clojure |
HTTP Server Component API ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
TBD
Component Lifecycle Implementation ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Component Constructor ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
| (ns cmr.graph.components.httpd
(:require
[com.stuartsierra.component :as component]
[cmr.graph.components.config :as config]
[cmr.graph.rest.app :as rest-api]
[org.httpkit.server :as server]
[taoensso.timbre :as log]))
(defrecord HTTPD [])
(defn start
[this]
(log/info "Starting httpd component ...")
(let [port (config/http-port this)
server (server/run-server (rest-api/app this) {:port port})]
(log/debugf "HTTPD is listening on port %s" port)
(log/debug "Started httpd component.")
(assoc this :server server)))
(defn stop
[this]
(log/info "Stopping httpd component ...")
(if-let [server (:server this)]
(server))
(assoc this :server nil))
(def lifecycle-behaviour
{:start start
:stop stop})
(extend HTTPD
component/Lifecycle
lifecycle-behaviour)
(defn create-component
""
[]
(map->HTTPD {}))
|
858e872ab7791bef249dadd3f3bf6cbd0eb80bc501ef316731585dcd63fc521b | grzm/awyeah-api | query.clj | Copyright ( c ) Cognitect , Inc.
;; All rights reserved.
(ns ^:skip-wiki com.grzm.awyeah.protocols.query
"Impl, don't call directly."
(:require
[clojure.string :as str]
[com.grzm.awyeah.protocols :as aws.protocols]
[com.grzm.awyeah.service :as service]
[com.grzm.awyeah.shape :as shape]
[com.grzm.awyeah.util :as util]))
(set! *warn-on-reflection* true)
;; ----------------------------------------------------------------------------------------
;; ----------------------------------------------------------------------------------------
(defn serialized-name
[shape default]
(or (:locationName shape)
default))
(defmulti serialize
(fn [shape _args _serialized _prefix] (:type shape)))
(defn prefix-assoc
[serialized prefix val]
(assoc serialized (str/join "." prefix) val))
(defmethod serialize :default
[_shape args serialized prefix]
(prefix-assoc serialized prefix (str args)))
(defmethod serialize "structure"
[shape args serialized prefix]
(let [args (util/with-defaults shape args)]
(reduce (fn [serialized k]
(let [member-shape (shape/member-shape shape k)
member-name (serialized-name member-shape (name k))]
(if (contains? args k)
(serialize member-shape (k args) serialized (conj prefix member-name))
serialized)))
serialized
(keys (:members shape)))))
(defmethod serialize "list"
[shape args serialized prefix]
(if (empty? args)
(prefix-assoc serialized prefix "")
(let [member-shape (shape/list-member-shape shape)
list-prefix (if (:flattened shape)
(conj (vec (butlast prefix)) (serialized-name member-shape (last prefix)))
(conj prefix (serialized-name member-shape "member")))]
(reduce (fn [serialized [i member]]
(serialize member-shape member serialized (conj list-prefix (str i))))
serialized
(map-indexed (fn [i member] [(inc i) member]) args)))))
(defmethod serialize "map"
[shape args serialized prefix]
(let [map-prefix (if (:flattened shape) prefix (conj prefix "entry"))
key-shape (shape/key-shape shape)
key-suffix (serialized-name key-shape "key")
value-shape (shape/value-shape shape)
value-suffix (serialized-name value-shape "value")]
(reduce (fn [serialized [i k v]]
(as-> serialized $
(serialize key-shape (name k) $ (conj map-prefix (str i) key-suffix))
(serialize value-shape v $ (conj map-prefix (str i) value-suffix))))
serialized
(map-indexed (fn [i [k v]] [(inc i) k v]) args))))
(defmethod serialize "blob"
[_shape args serialized prefix]
(prefix-assoc serialized prefix (util/base64-encode args)))
(defmethod serialize "timestamp" [shape args serialized prefix]
(prefix-assoc serialized prefix (shape/format-date shape
args
(partial util/format-date util/iso8601-date-format))))
(defmethod serialize "boolean"
[_shape args serialized prefix]
(prefix-assoc serialized prefix (if args "true" "false")))
(defn build-query-http-request
[serialize service {:keys [op request]}]
(let [operation (get-in service [:operations op])
input-shape (service/shape service (:input operation))
params {"Action" (name op)
"Version" (get-in service [:metadata :apiVersion])}]
{:request-method :post
:scheme :https
:server-port 443
:uri "/"
:headers (aws.protocols/headers service operation)
:body (util/query-string
(serialize input-shape request params []))}))
(defmethod aws.protocols/build-http-request "query"
[service req-map]
(build-query-http-request serialize service req-map))
(defn build-query-http-response
[service {:keys [op]} {:keys [body]}]
(let [operation (get-in service [:operations op])]
(if-let [output-shape (service/shape service (:output operation))]
(shape/xml-parse output-shape (util/bbuf->str body))
(util/xml->map (util/xml-read (util/bbuf->str body))))))
(defmethod aws.protocols/parse-http-response "query"
[service op-map http-response]
(build-query-http-response service op-map http-response))
| null | https://raw.githubusercontent.com/grzm/awyeah-api/5111c627f73955af8d2529f7ae793ca8203cff15/src/com/grzm/awyeah/protocols/query.clj | clojure | All rights reserved.
----------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------- | Copyright ( c ) Cognitect , Inc.
(ns ^:skip-wiki com.grzm.awyeah.protocols.query
"Impl, don't call directly."
(:require
[clojure.string :as str]
[com.grzm.awyeah.protocols :as aws.protocols]
[com.grzm.awyeah.service :as service]
[com.grzm.awyeah.shape :as shape]
[com.grzm.awyeah.util :as util]))
(set! *warn-on-reflection* true)
(defn serialized-name
[shape default]
(or (:locationName shape)
default))
(defmulti serialize
(fn [shape _args _serialized _prefix] (:type shape)))
(defn prefix-assoc
[serialized prefix val]
(assoc serialized (str/join "." prefix) val))
(defmethod serialize :default
[_shape args serialized prefix]
(prefix-assoc serialized prefix (str args)))
(defmethod serialize "structure"
[shape args serialized prefix]
(let [args (util/with-defaults shape args)]
(reduce (fn [serialized k]
(let [member-shape (shape/member-shape shape k)
member-name (serialized-name member-shape (name k))]
(if (contains? args k)
(serialize member-shape (k args) serialized (conj prefix member-name))
serialized)))
serialized
(keys (:members shape)))))
(defmethod serialize "list"
[shape args serialized prefix]
(if (empty? args)
(prefix-assoc serialized prefix "")
(let [member-shape (shape/list-member-shape shape)
list-prefix (if (:flattened shape)
(conj (vec (butlast prefix)) (serialized-name member-shape (last prefix)))
(conj prefix (serialized-name member-shape "member")))]
(reduce (fn [serialized [i member]]
(serialize member-shape member serialized (conj list-prefix (str i))))
serialized
(map-indexed (fn [i member] [(inc i) member]) args)))))
(defmethod serialize "map"
[shape args serialized prefix]
(let [map-prefix (if (:flattened shape) prefix (conj prefix "entry"))
key-shape (shape/key-shape shape)
key-suffix (serialized-name key-shape "key")
value-shape (shape/value-shape shape)
value-suffix (serialized-name value-shape "value")]
(reduce (fn [serialized [i k v]]
(as-> serialized $
(serialize key-shape (name k) $ (conj map-prefix (str i) key-suffix))
(serialize value-shape v $ (conj map-prefix (str i) value-suffix))))
serialized
(map-indexed (fn [i [k v]] [(inc i) k v]) args))))
(defmethod serialize "blob"
[_shape args serialized prefix]
(prefix-assoc serialized prefix (util/base64-encode args)))
(defmethod serialize "timestamp" [shape args serialized prefix]
(prefix-assoc serialized prefix (shape/format-date shape
args
(partial util/format-date util/iso8601-date-format))))
(defmethod serialize "boolean"
[_shape args serialized prefix]
(prefix-assoc serialized prefix (if args "true" "false")))
(defn build-query-http-request
[serialize service {:keys [op request]}]
(let [operation (get-in service [:operations op])
input-shape (service/shape service (:input operation))
params {"Action" (name op)
"Version" (get-in service [:metadata :apiVersion])}]
{:request-method :post
:scheme :https
:server-port 443
:uri "/"
:headers (aws.protocols/headers service operation)
:body (util/query-string
(serialize input-shape request params []))}))
(defmethod aws.protocols/build-http-request "query"
[service req-map]
(build-query-http-request serialize service req-map))
(defn build-query-http-response
[service {:keys [op]} {:keys [body]}]
(let [operation (get-in service [:operations op])]
(if-let [output-shape (service/shape service (:output operation))]
(shape/xml-parse output-shape (util/bbuf->str body))
(util/xml->map (util/xml-read (util/bbuf->str body))))))
(defmethod aws.protocols/parse-http-response "query"
[service op-map http-response]
(build-query-http-response service op-map http-response))
|
fa414ee5d5cf2b209c6d58e1f1c7b3cde590a6b5e375dfb9a5a6c9b5647fbcf6 | bennn/little-book-of-semaphores | 5-1-savages.rkt | #lang little-book-of-semaphores
;; -----------------------------------------------------------------------------
(define M 5)
(define pot (box 0))
;; Constraints:
;; - Cook may not fill non-empty pot
;; - Savages may not eat from empty pot
(define mutex (make-semaphore 1))
(define pot-full (make-semaphore 0))
(define pot-empty (make-semaphore 0))
(define (get-pot)
(with mutex
(let loop ()
(when (zero? (unbox pot))
(signal mutex)
(wait pot-full)
(wait mutex)
(loop)))
(decr pot)
(if (zero? (unbox pot))
(signal pot-empty)
(signal pot-full))))
(define (fill-pot)
(with mutex
(let loop ()
(unless (zero? (unbox pot))
(signal mutex)
(wait pot-empty)
(wait mutex)
(loop)))
(set-box! pot M)
(signal pot-full))
(printf "ORDER UP\n"))
(define (eat id)
(printf "~a is eating\n" id)
(sleep (random)))
;; -----------------------------------------------------------------------------
(define-for-syntax NUM-SAVAGES (box 0))
(define-syntax make-savage
(syntax-parser
[stx
#:when (set-box! NUM-SAVAGES (add1 (unbox NUM-SAVAGES)))
#:with name (format-id #'stx "Savage-~a" (unbox NUM-SAVAGES))
#`(define-thread name
(forever
(get-pot)
(eat '#,(syntax-e #'name))))]))
(define-thread Cook
(forever
(fill-pot)
(sleep (random))))
;; -----------------------------------------------------------------------------
(module+ test
(make-savage)
(make-savage)
(make-savage)
(make-savage)
(run))
| null | https://raw.githubusercontent.com/bennn/little-book-of-semaphores/13dc1690073ae45f5ac7e4e4137d636aae1ef7c9/problems/5-1-savages.rkt | racket | -----------------------------------------------------------------------------
Constraints:
- Cook may not fill non-empty pot
- Savages may not eat from empty pot
-----------------------------------------------------------------------------
----------------------------------------------------------------------------- | #lang little-book-of-semaphores
(define M 5)
(define pot (box 0))
(define mutex (make-semaphore 1))
(define pot-full (make-semaphore 0))
(define pot-empty (make-semaphore 0))
(define (get-pot)
(with mutex
(let loop ()
(when (zero? (unbox pot))
(signal mutex)
(wait pot-full)
(wait mutex)
(loop)))
(decr pot)
(if (zero? (unbox pot))
(signal pot-empty)
(signal pot-full))))
(define (fill-pot)
(with mutex
(let loop ()
(unless (zero? (unbox pot))
(signal mutex)
(wait pot-empty)
(wait mutex)
(loop)))
(set-box! pot M)
(signal pot-full))
(printf "ORDER UP\n"))
(define (eat id)
(printf "~a is eating\n" id)
(sleep (random)))
(define-for-syntax NUM-SAVAGES (box 0))
(define-syntax make-savage
(syntax-parser
[stx
#:when (set-box! NUM-SAVAGES (add1 (unbox NUM-SAVAGES)))
#:with name (format-id #'stx "Savage-~a" (unbox NUM-SAVAGES))
#`(define-thread name
(forever
(get-pot)
(eat '#,(syntax-e #'name))))]))
(define-thread Cook
(forever
(fill-pot)
(sleep (random))))
(module+ test
(make-savage)
(make-savage)
(make-savage)
(make-savage)
(run))
|
404cb966f0606ba7edf421e04932a22d524599b34a324cb6ab22d4f67b905941 | jordanthayer/ocaml-search | statistics.ml | * Some statistic functions .
@author eaburns
@since 2010 - 04 - 28
@author eaburns
@since 2010-04-28
*)
type float_ref = { mutable v : float }
let float_ref x = { v = x }
let (<--) r v = r.v <- v
let (!!) r = r.v
(** [mean vls] gets the mean of an array of floats. *)
let mean vls =
let n = float (Array.length vls) in
let sum = float_ref 0. in
Array.iter (fun v -> sum <-- !!sum +. v) vls;
!!sum /. n
(** [mean_and_stdev vls] gets the mean and standard deviation of an
array of floats. *)
let mean_and_stdev vls =
let n = float (Array.length vls) in
let sum = float_ref 0. in
Array.iter (fun v -> sum <-- !!sum +. v) vls;
let mean = !!sum /. n in
let diffs = float_ref 0. in
Array.iter (fun v -> diffs <-- !!diffs +. (v -. mean) ** 2.) vls;
mean, sqrt (!!diffs /. n)
(** [mean_and_interval vls] gives the mean and the magnitude of the
    95% confidence interval on the mean. *)
(* Normal-approximation half-width: 1.96 * sigma / sqrt(n).  Note that
   sigma is the population stdev from [mean_and_stdev] (divides by n). *)
let mean_and_interval vls =
  let mu, sigma = mean_and_stdev vls in
  let n = float (Array.length vls) in
  mu, 1.96 *. sigma /. (sqrt n)
(** [percentile p vls] computes the [p]th percentile (0. <= p <= 100.)
    of the values [vls] by ranking them, with linear interpolation
    between adjacent ranks.  According to Wikipedia, this procedure is
    recommended by the National Institute of Standards and Technology
    (NIST).
    @raise Invalid_argument on an empty array or [p] out of bounds. *)
let percentile p vls =
  if Array.length vls = 0 then invalid_arg "percentile: no values";
  if p < 0. || p > 100. then invalid_arg "percentile: out of bounds";
  (* Total order on floats; avoids polymorphic [compare]. *)
  let cmp (a : float) b = if a < b then ~-1 else if a > b then 1 else 0 in
  let ranked = Array.copy vls in
  let num = float (Array.length ranked) in
  (* 1-based fractional rank of the requested percentile. *)
  let n = p *. (num -. 1.) /. 100. +. 1. in
  let k = truncate n in
  let d = n -. (float k) in
  Array.sort cmp ranked;
  match n with
  | n when n <= 1. -> ranked.(0)
  | n when n >= num -> ranked.((truncate num) - 1)
  (* Interpolate between the k-th and (k+1)-th ranked values (1-based). *)
  | n -> ranked.(k - 1) +. d *. (ranked.(k) -. ranked.(k - 1))
(** [min_and_max f vls] returns the minimum and maximum of [f vls.(i)]
    over every index [i] of the array [vls].  Returns
    [(infinity, neg_infinity)] when [vls] is empty. *)
let min_and_max f vls =
  Array.fold_left
    (fun (lo, hi) p ->
       let v = f p in
       (if v < lo then v else lo), (if v > hi then v else hi))
    (infinity, neg_infinity)
    vls
(** {1 Density estimation} *)
(** [gaussian_kernel u] is the standard normal density at [u]:
    exp(-u^2 / 2) / sqrt(2 * pi).  The coefficient is computed once at
    module load.  (Relies on the project-local [Geometry.pi].) *)
let gaussian_kernel =
  let coeff = 1. /. (sqrt (2. *. Geometry.pi)) in
  (fun u -> coeff *. (exp (~-.(u ** 2.) /. 2.)))
(** [make_kernel_density_estimator kernel bandwidth data] returns a
    function estimating the density at a point [x] as the mean over all
    samples [d] of [kernel ((x -. d) /. bandwidth) /. bandwidth]. *)
let make_kernel_density_estimator kernel bandwidth data =
  let nf = float (Array.length data) in
  (fun x ->
     let total =
       Array.fold_left
         (fun acc d -> acc +. (kernel ((x -. d) /. bandwidth)) /. bandwidth)
         0. data
     in
     total /. nf)
EOF
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/spt/src/statistics.ml | ocaml | * [mean vls] gets the mean of an array of floats.
* [mean_and_stdev vls] gets the mean and standard deviation of an
array of floats.
* [mean_and_interval vls] gives the mean and the magnitude of the
95% confidence interval on the mean.
* {1 Density estimation}
* [make_kernel_density_estimator kernel bandwidth data] makes a
density estimation function. | * Some statistic functions .
@author eaburns
@since 2010 - 04 - 28
@author eaburns
@since 2010-04-28
*)
type float_ref = { mutable v : float }
let float_ref x = { v = x }
let (<--) r v = r.v <- v
let (!!) r = r.v
let mean vls =
let n = float (Array.length vls) in
let sum = float_ref 0. in
Array.iter (fun v -> sum <-- !!sum +. v) vls;
!!sum /. n
let mean_and_stdev vls =
let n = float (Array.length vls) in
let sum = float_ref 0. in
Array.iter (fun v -> sum <-- !!sum +. v) vls;
let mean = !!sum /. n in
let diffs = float_ref 0. in
Array.iter (fun v -> diffs <-- !!diffs +. (v -. mean) ** 2.) vls;
mean, sqrt (!!diffs /. n)
let mean_and_interval vls =
let mu, sigma = mean_and_stdev vls in
let n = float (Array.length vls) in
mu, 1.96 *. sigma /. (sqrt n)
* [ percentile ] computes the [ p ] percentile of the values
[ vls ] by ranking them .
According to wikipedia , this procedure is recommended by the
National Institute of Standards and Technology ( NIST ) .
[vls] by ranking them.
According to wikipedia, this procedure is recommended by the
National Institute of Standards and Technology (NIST). *)
let percentile p vls =
if Array.length vls = 0 then invalid_arg "percentile: no values";
if p < 0. || p > 100. then invalid_arg "percentile: out of bounds";
let cmp (a : float) b = if a < b then ~-1 else if a > b then 1 else 0 in
let ranked = Array.copy vls in
let num = float (Array.length ranked) in
let n = p *. (num -. 1.) /. 100. +. 1. in
let k = truncate n in
let d = n -. (float k) in
Array.sort cmp ranked;
match n with
| n when n <= 1. -> ranked.(0)
| n when n >= num -> ranked.((truncate num) - 1)
| n -> ranked.(k - 1) +. d *. (ranked.(k) -. ranked.(k - 1))
* [ min_and_max f vls ] gets the min and of the [ vls ] array of
the value of [ f vls.(i ) ] for all [ i ] in the arary .
the value of [f vls.(i)] for all [i] in the arary. *)
let min_and_max f vls =
let min = float_ref infinity and max = float_ref neg_infinity in
Array.iter (fun p ->
let v = f p in
if v < !!min then min <-- v;
if v > !!max then max <-- v;)
vls;
!!min, !!max
* [ gaussian_kernel ] makes a gaussian kernel function .
let gaussian_kernel =
let coeff = 1. /. (sqrt (2. *. Geometry.pi)) in
(fun u -> coeff *. (exp (~-.(u ** 2.) /. 2.)))
let make_kernel_density_estimator kernel bandwidth data =
let n = Array.length data in
let nf = float n in
(fun x ->
let sum = float_ref 0. in
for i = 0 to n - 1 do
let diff = x -. data.(i) in
sum <-- !!sum +. (kernel (diff /. bandwidth)) /. bandwidth;
done;
!!sum /. nf)
EOF
|
0b10c22cb04a73fea535407e88ab0ec6bf7bde334357cea88f231fa73d8d265a | nasa/Common-Metadata-Repository | auth.clj | (ns cmr.metadata.proxy.components.auth
"This namespace represents the authorization API for CMR OPeNDAP. This is
where the rest of the application goes when it needs to perform checks on
roles or permissions for a given user and/or concept.
Currently, this namespace is only used by the REST middleware that checks
resources for authorization."
(:require
[clojure.set :as set]
[cmr.authz.components.caching :as caching]
[cmr.authz.components.config :as config]
[cmr.authz.errors :as errors]
[cmr.authz.permissions :as permissions]
[cmr.authz.roles :as roles]
[cmr.authz.token :as token]
[cmr.exchange.common.results.errors :as base-errors]
[cmr.http.kit.response :as response]
[com.stuartsierra.component :as component]
[taoensso.timbre :as log]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Support / utility Data & Functions ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn admin-role?
  "Check to see if the roles of a given token+user match the required roles for
  the route. Returns a truthy (non-empty) seq when at least one required role
  is present in the cached role set; nil otherwise."
  [route-roles cache-lookup]
  (log/debug "Roles required-set:" route-roles)
  (log/debug "Roles has-set:" cache-lookup)
  (seq (set/intersection cache-lookup route-roles)))
(defn concept-permission?
  "Check to see if the concept permissions of a given token+user match the
  required permissions for the route. Both sides are keyed by the concept id;
  returns a truthy (non-empty) seq when the required and held sets intersect."
  [route-perms cache-lookup concept-id]
  (let [id (keyword concept-id)
        ;; Normalize the route's CMR-style ACL into reitit form first.
        required (permissions/cmr-acl->reitit-acl route-perms)
        required-set (id required)
        has-set (id cache-lookup)]
    (log/debug "cache-lookup:" cache-lookup)
    (log/debug "Permissions required-set:" required-set)
    (log/debug "Permissions has-set:" has-set)
    (seq (set/intersection required-set has-set))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Caching Component API ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn cached-user
  "Look up the user for a token in the cache; if there is a miss, make the
  actual call for the lookup. On an exception, logs it and returns a map
  with an :errors key instead of throwing."
  [system token]
  (try
    (caching/lookup
     system
     (token/user-id-key token)
     #(token/->user (config/get-echo-rest-url system) token))
    (catch Exception e
      (log/error e)
      {:errors (base-errors/exception-data e)})))
(defn cached-admin-role
  "Look up the roles for token+user in the cache; if there is a miss, make the
  actual call for the lookup. On an exception, logs it and returns a map with
  an :errors key instead of throwing."
  [system token user-id]
  (try
    (caching/lookup system
                    (roles/roles-key token)
                    #(roles/admin (config/get-access-control-url system)
                                  token
                                  user-id))
    (catch Exception e
      (log/error e)
      {:errors (base-errors/exception-data e)})))
(defn cached-concept-permission
  "Look up the permissions for a concept in the cache; if there is a miss,
  make the actual call for the lookup. On an exception, logs it and returns
  a map with an :errors key instead of throwing."
  [system token user-id concept-id]
  (try
    (caching/lookup system
                    (permissions/permissions-key token concept-id)
                    #(permissions/concept
                      (config/get-access-control-url system)
                      token
                      user-id
                      concept-id))
    (catch Exception e
      (log/error e)
      {:errors (base-errors/exception-data e)})))
(defn check-roles
  "A supporting function for `check-roles-permissions` that handles the roles
  side of things: on a successful cache lookup that contains a required role,
  the wrapped handler is invoked; otherwise a not-allowed response is
  returned."
  [system handler request route-roles user-token user-id]
  (log/debug "Checking roles annotated in routes ...")
  (let [lookup (cached-admin-role system user-token user-id)
        errors (:errors lookup)]
    (if errors
      (do
        (log/error errors/no-permissions)
        (response/not-allowed errors/no-permissions errors))
      (if (admin-role? route-roles lookup)
        (handler request)
        (response/not-allowed errors/no-permissions)))))
(defn check-permissions
  "A supporting function for `check-roles-permissions` that handles the
  permissions side of things: extracts the concept id from the route, looks
  up the user's concept permissions (cached), and invokes the wrapped handler
  only when a required permission is held."
  [system handler request route-permissions user-token user-id]
  (let [concept-id (permissions/route-concept-id request)
        lookup (cached-concept-permission
                system user-token user-id concept-id)
        errors (:errors lookup)]
    (log/debug "Checking permissions annotated in routes ...")
    (if errors
      (do
        (log/error errors/no-permissions)
        (response/not-allowed errors/no-permissions errors))
      (if (concept-permission? route-permissions
                               lookup
                               concept-id)
        (handler request)
        (response/not-allowed errors/no-permissions)))))
(defn check-roles-permissions
  "A supporting function for `check-route-access` that handles the actual
  checking. Requires an ECHO token on the request; with a valid token the
  request is dispatched to either the role check or the permission check,
  depending on which annotation the matched route carries."
  [system handler request route-roles route-permissions]
  (if-let [user-token (token/extract request)]
    (let [user-lookup (cached-user system user-token)
          errors (:errors user-lookup)]
      (log/debug "ECHO token provided; proceeding ...")
      (log/trace "user-lookup:" user-lookup)
      (if errors
        (do
          (log/error errors/token-required)
          (response/not-allowed errors/token-required errors))
        (do
          (log/trace "user-token: [REDACTED]")
          (log/trace "user-id:" user-lookup)
          ;; XXX For now, there is only the admin role in the CMR, so
          ;; we'll just keep this specific to that for now. Later, if
          ;; more roles are used, we'll want to make this more
          ;; generic ...
          ;; NOTE(review): the `cond` below had been lost from this
          ;; source (the trailing parens still accounted for it);
          ;; restored so the dispatch actually selects a branch.
          (cond
            route-roles
            (check-roles
             system handler request route-roles user-token user-lookup)

            route-permissions
            (check-permissions system
                               handler
                               request
                               route-permissions
                               user-token
                               user-lookup)))))
    (do
      (log/warn "ECHO token not provided for protected resource")
      (response/not-allowed errors/token-required))))
(defn check-route-access
  "This is the primary function for this namespace, utilized directly by CMR
  OPeNDAP's authorization middleware. Given a request which contains
  route-specific authorization requirements and potentially a user token,
  it checks against these as well as the level of access require for any
  requested concepts. Routes with neither roles nor permissions annotations
  pass straight through to the handler."
  [system handler request]
  ;; Before performing any GETs/POSTs against CMR Access Control or ECHO,
  ;; let's make sure that's actually necessary, only doing it in the cases
  ;; where the route is annotated for roles/permissions.
  (let [route-roles (roles/route-annotation request)
        route-permissions (permissions/route-annotation request)]
    (if (or route-roles route-permissions)
      (do
        (log/debug (str "Either roles or permissions were annotated in "
                        "routes; checking ACLs ..."))
        (log/debug "route-roles:" route-roles)
        (log/debug "route-permissions:" route-permissions)
        (check-roles-permissions
         system handler request route-roles route-permissions))
      (do
        (log/debug (str "Neither roles nor permissions were annotated in "
                        "the routes; skipping ACL check ..."))
        (handler request)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Component Lifecycle Implementation ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Stateless marker record so the authz component can participate in
;; the Component system lifecycle.
(defrecord Authz [])

;; Lifecycle start hook; logs only — there is no state to initialize.
(defn start
  [this]
  (log/info "Starting authorization component ...")
  (log/debug "Started authorization component.")
  this)

;; Lifecycle stop hook; logs only — there is no state to tear down.
(defn stop
  [this]
  (log/info "Stopping authorization component ...")
  (log/debug "Stopped authorization component.")
  this)

(def lifecycle-behaviour
  {:start start
   :stop stop})

(extend Authz
  component/Lifecycle
  lifecycle-behaviour)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Component Constructor ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn create-component
  "Create a new, stateless authorization component."
  []
  (map->Authz {}))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/63001cf021d32d61030b1dcadd8b253e4a221662/other/cmr-exchange/metadata-proxy/src/cmr/metadata/proxy/components/auth.clj | clojure |
; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ;
Caching Component API ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
if there is a miss, make the
if there is a miss, make the
if there is a miss,
we'll just keep this specific to that for now. Later, if
more roles are used, we'll want to make this more
generic ...
let's make sure that's actually necessary, only doing it in the cases
where the route is annotated for roles/permissions.
Component Lifecycle Implementation ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Component Constructor ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
| (ns cmr.metadata.proxy.components.auth
"This namespace represents the authorization API for CMR OPeNDAP. This is
where the rest of the application goes when it needs to perform checks on
roles or permissions for a given user and/or concept.
Currently, this namespace is only used by the REST middleware that checks
resources for authorization."
(:require
[clojure.set :as set]
[cmr.authz.components.caching :as caching]
[cmr.authz.components.config :as config]
[cmr.authz.errors :as errors]
[cmr.authz.permissions :as permissions]
[cmr.authz.roles :as roles]
[cmr.authz.token :as token]
[cmr.exchange.common.results.errors :as base-errors]
[cmr.http.kit.response :as response]
[com.stuartsierra.component :as component]
[taoensso.timbre :as log]))
(defn admin-role?
"Check to see if the roles of a given token+user match the required roles for
the route."
[route-roles cache-lookup]
(log/debug "Roles required-set:" route-roles)
(log/debug "Roles has-set:" cache-lookup)
(seq (set/intersection cache-lookup route-roles)))
(defn concept-permission?
"Check to see if the concept permissions of a given token+user match the
required permissions for the route."
[route-perms cache-lookup concept-id]
(let [id (keyword concept-id)
required (permissions/cmr-acl->reitit-acl route-perms)
required-set (id required)
has-set (id cache-lookup)]
(log/debug "cache-lookup:" cache-lookup)
(log/debug "Permissions required-set:" required-set)
(log/debug "Permissions has-set:" has-set)
(seq (set/intersection required-set has-set))))
(defn cached-user
actual call for the lookup."
[system token]
(try
(caching/lookup
system
(token/user-id-key token)
#(token/->user (config/get-echo-rest-url system) token))
(catch Exception e
(log/error e)
{:errors (base-errors/exception-data e)})))
(defn cached-admin-role
actual call for the lookup."
[system token user-id]
(try
(caching/lookup system
(roles/roles-key token)
#(roles/admin (config/get-access-control-url system)
token
user-id))
(catch Exception e
(log/error e)
{:errors (base-errors/exception-data e)})))
(defn cached-concept-permission
make the actual call for the lookup."
[system token user-id concept-id]
(try
(caching/lookup system
(permissions/permissions-key token concept-id)
#(permissions/concept
(config/get-access-control-url system)
token
user-id
concept-id))
(catch Exception e
(log/error e)
{:errors (base-errors/exception-data e)})))
(defn check-roles
"A supporting function for `check-roles-permissions` that handles the roles
side of things."
[system handler request route-roles user-token user-id]
(log/debug "Checking roles annotated in routes ...")
(let [lookup (cached-admin-role system user-token user-id)
errors (:errors lookup)]
(if errors
(do
(log/error errors/no-permissions)
(response/not-allowed errors/no-permissions errors))
(if (admin-role? route-roles lookup)
(handler request)
(response/not-allowed errors/no-permissions)))))
(defn check-permissions
"A supporting function for `check-roles-permissions` that handles the
permissions side of things."
[system handler request route-permissions user-token user-id]
(let [concept-id (permissions/route-concept-id request)
lookup (cached-concept-permission
system user-token user-id concept-id)
errors (:errors lookup)]
(log/debug "Checking permissions annotated in routes ...")
(if errors
(do
(log/error errors/no-permissions)
(response/not-allowed errors/no-permissions errors))
(if (concept-permission? route-permissions
lookup
concept-id)
(handler request)
(response/not-allowed errors/no-permissions)))))
(defn check-roles-permissions
"A supporting function for `check-route-access` that handles the actual
checking."
[system handler request route-roles route-permissions]
(if-let [user-token (token/extract request)]
(let [user-lookup (cached-user system user-token)
errors (:errors user-lookup)]
(log/debug "ECHO token provided; proceeding ...")
(log/trace "user-lookup:" user-lookup)
(if errors
(do
(log/error errors/token-required)
(response/not-allowed errors/token-required errors))
(do
(log/trace "user-token: [REDACTED]")
(log/trace "user-id:" user-lookup)
XXX For now , there is only the admin role in the CMR , so
route-roles
(check-roles
system handler request route-roles user-token user-lookup)
route-permissions
(check-permissions system
handler
request
route-permissions
user-token
user-lookup)))))
(do
(log/warn "ECHO token not provided for protected resource")
(response/not-allowed errors/token-required))))
(defn check-route-access
"This is the primary function for this namespace, utilized directly by CMR
OPeNDAP's authorization middleware. Given a request which contains
route-specific authorization requirements and potentially a user token,
it checks against these as well as the level of access require for any
requested concepts."
[system handler request]
Before performing any GETs / POSTs against CMR Access Control or ECHO ,
(let [route-roles (roles/route-annotation request)
route-permissions (permissions/route-annotation request)]
(if (or route-roles route-permissions)
(do
(log/debug (str "Either roles or permissions were annotated in "
"routes; checking ACLs ..."))
(log/debug "route-roles:" route-roles)
(log/debug "route-permissions:" route-permissions)
(check-roles-permissions
system handler request route-roles route-permissions))
(do
(log/debug (str "Neither roles nor permissions were annotated in "
"the routes; skipping ACL check ..."))
(handler request)))))
(defrecord Authz [])
(defn start
[this]
(log/info "Starting authorization component ...")
(log/debug "Started authorization component.")
this)
(defn stop
[this]
(log/info "Stopping authorization component ...")
(log/debug "Stopped authorization component.")
this)
(def lifecycle-behaviour
{:start start
:stop stop})
(extend Authz
component/Lifecycle
lifecycle-behaviour)
(defn create-component
""
[]
(map->Authz {}))
|
3321e93bde4abdea7098b25ed9e9318b093f796aa2b2bc4156fdc93522add825 | basho/riak_cs_auth | riak_cs_s3_auth.erl | %% ---------------------------------------------------------------------
%%
Copyright ( c ) 2007 - 2013 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------
-module(riak_cs_s3_auth).
-behavior(riak_cs_auth).
-export([identify/2, authenticate/4]).
-include_lib("riak_cs_core/include/riak_cs.hrl").
-include_lib("riak_cs_core/include/s3_api.hrl").
-include_lib("webmachine/include/webmachine.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-define(QS_KEYID, "AWSAccessKeyId").
-define(QS_SIGNATURE, "Signature").
%% ===================================================================
%% Public API
%% ===================================================================
%% @doc Extract the access key id and signature from a request: from the
%% "authorization" header ("AWS KeyId:Signature") when present, otherwise
%% from the pre-signed-URL query-string parameters. Either element may be
%% `undefined' when absent or malformed (both `parse_auth_header/1' and
%% `wrq:get_qs_value/2' can yield `undefined'), so the spec reflects that.
-spec identify(term(), term()) ->
          {string() | undefined, string() | undefined}.
identify(RD,_Ctx) ->
    case wrq:get_req_header("authorization", RD) of
        undefined ->
            {wrq:get_qs_value(?QS_KEYID, RD), wrq:get_qs_value(?QS_SIGNATURE, RD)};
        AuthHeader ->
            parse_auth_header(AuthHeader)
    end.
%% @doc Authenticate a request by recomputing the signature with the
%% user's secret key and comparing it against the presented signature.
%% For pre-signed URLs (an "Expires" query parameter is present) the
%% expiry time is additionally checked against the current wall clock.
-spec authenticate(rcs_user(), string(), term(), term()) -> ok | {error, atom()}.
authenticate(User, Signature, RD, _Ctx) ->
    CalculatedSignature = calculate_signature(User?RCS_USER.key_secret, RD),
    case check_auth(Signature, CalculatedSignature) of
        true ->
            Expires = wrq:get_qs_value("Expires", RD),
            case Expires of
                undefined ->
                    ok;
                _ ->
                    %% Seconds since the epoch, from the wall clock.
                    {MegaSecs, Secs, _} = os:timestamp(),
                    Now = (MegaSecs * 1000000) + Secs,
                    case Now > list_to_integer(Expires) of
                        true ->
                            %% @TODO Not sure if this is the proper error
                            %% to return; will have to check after testing.
                            {error, invalid_authentication};
                        false ->
                            ok
                    end
            end;
        _ ->
            {error, invalid_authentication}
    end.
%% ===================================================================
Internal functions
%% ===================================================================
%% Split an "AWS KeyId:Signature" authorization header into its two
%% parts. Anything that does not match that shape — a different scheme,
%% or a credential without exactly one ":" separator — yields
%% {undefined, undefined}.
parse_auth_header("AWS " ++ Credential) ->
    case string:tokens(Credential, ":") of
        [AccessKeyId, SignatureData] ->
            {AccessKeyId, SignatureData};
        _Malformed ->
            {undefined, undefined}
    end;
parse_auth_header(_OtherScheme) ->
    {undefined, undefined}.
%% @doc Build the AWS signature-v2 string-to-sign from the request and
%% return the base64-encoded HMAC-SHA1 of it under the user's secret key.
%% The string-to-sign is:
%%   Method \n Content-MD5 \n Content-Type \n Date \n AmzHeaders Resource
%% For pre-signed URLs the "Expires" query value takes the place of the
%% Date line; when an x-amz-date header is present the Date line is empty
%% (the amz header carries the timestamp instead).
%% Refactored to bind Date/CMD5/ContentType from case expressions rather
%% than assigning inside case branches (behavior unchanged).
calculate_signature(KeyData, RD) ->
    Headers = riak_cs_wm_utils:normalize_headers(RD),
    AmazonHeaders = riak_cs_wm_utils:extract_amazon_headers(Headers),
    OriginalResource = riak_cs_s3_rewrite:original_resource(RD),
    Resource = case OriginalResource of
                   undefined -> []; %% TODO: get noisy here?
                   {Path,QS} -> [Path, canonicalize_qs(QS)]
               end,
    Date = case wrq:get_qs_value("Expires", RD) of
               undefined ->
                   case proplists:is_defined("x-amz-date", Headers) of
                       true  -> "\n";
                       false -> [wrq:get_req_header("date", RD), "\n"]
                   end;
               Expires ->
                   Expires ++ "\n"
           end,
    CMD5 = case wrq:get_req_header("content-md5", RD) of
               undefined -> [];
               MD5 -> MD5
           end,
    ContentType = case wrq:get_req_header("content-type", RD) of
                      undefined -> [];
                      CType -> CType
                  end,
    STS = [atom_to_list(wrq:method(RD)), "\n",
           CMD5,
           "\n",
           ContentType,
           "\n",
           Date,
           AmazonHeaders,
           Resource],
    base64:encode_to_string(
      crypto:sha_mac(KeyData, STS)).
%% True iff the signature presented by the client equals the one we
%% computed (arithmetic equality `==', matching the original behavior).
check_auth(PresentedSignature, CalculatedSignature)
  when PresentedSignature == CalculatedSignature ->
    true;
check_auth(_PresentedSignature, _CalculatedSignature) ->
    false.
%% Canonicalize the subresource query parameters for the string-to-sign.
%% The parameters must be in sorted order; since `canonicalize_qs/2'
%% builds its accumulator with cons (reversing as it goes), we sort and
%% then reverse up front so the result comes out ascending.
canonicalize_qs(QS) ->
    canonicalize_qs(lists:reverse(lists:sort(QS)), []).
%% Fold the (reverse-sorted) query parameters into a canonical query
%% string, keeping only AWS subresource keys (?SUBRESOURCES). A key with
%% an empty value renders as "key"; otherwise "key=value". Parameters
%% after the first are joined with "&"; a non-empty result is prefixed
%% with "?". The two {K,[]} / {K,V} clauses of the original are merged
%% to remove the duplicated separator logic (behavior unchanged).
canonicalize_qs([], []) ->
    [];
canonicalize_qs([], Acc) ->
    lists:flatten(["?", Acc]);
canonicalize_qs([{K, V}|T], Acc) ->
    case lists:member(K, ?SUBRESOURCES) of
        true ->
            Amp = if Acc == [] -> "";
                     true -> "&"
                  end,
            Param = case V of
                        [] -> [K, Amp];
                        _  -> [K, "=", V, Amp]
                    end,
            canonicalize_qs(T, [Param|Acc]);
        false ->
            canonicalize_qs(T, Acc)
    end.
%% ===================================================================
Eunit tests
%% ===================================================================
-ifdef(TEST).
%% Test cases for the signing examples provided by Amazon's S3 developer
%% guide ("Signing and Authenticating REST Requests",
%% http://docs.aws.amazon.com/AmazonS3/latest/dev/RESTAuthentication.html).
%% Each case builds a webmachine request and compares the computed
%% signature against Amazon's published value.
auth_test_() ->
    {spawn,
     [
      {setup,
       fun setup/0,
       fun teardown/1,
       fun(_X) ->
               [
                example_get_object(),
                example_put_object(),
                example_list(),
                example_fetch(),
                example_delete(),
                example_upload(),
                example_list_all_buckets(),
                example_unicode_keys()
               ]
       end
      }]}.
%% Set the root host the URL rewriter expects for the examples.
setup() ->
    application:set_env(riak_cs, cs_root_host, ?ROOT_HOST).

teardown(_) ->
    application:unset_env(riak_cs, cs_root_host).

%% Wrap a signature comparison as a labelled eunit assertion.
test_fun(Desc, ExpectedSignature, CalculatedSignature) ->
    {Desc, ?_assert(check_auth(ExpectedSignature,CalculatedSignature))}.
%% GET of an object: only Host, Date and the rewrite path are signed.
example_get_object() ->
    KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
    Method = 'GET',
    Version = {1, 1},
    OrigPath = "/johnsmith/photos/puppy.jpg",
    Path = "/buckets/johnsmith/objects/photos/puppy.jpg",
    Headers =
        mochiweb_headers:make([{"Host", "s3.amazonaws.com"},
                               {"Date", "Tue, 27 Mar 2007 19:36:42 +0000"},
                               {"x-rcs-rewrite-path", OrigPath}]),
    RD = wrq:create(Method, Version, Path, Headers),
    ExpectedSignature = "xXjDGYUmKxnwqr5KXNPGldn5LbA=",
    CalculatedSignature = calculate_signature(KeyData, RD),
    test_fun("example get object test", ExpectedSignature, CalculatedSignature).

%% PUT of an object: Content-Type participates in the string-to-sign.
example_put_object() ->
    KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
    Method = 'PUT',
    Version = {1, 1},
    OrigPath = "/johnsmith/photos/puppy.jpg",
    Path = "/buckets/johnsmith/objects/photos/puppy.jpg",
    Headers =
        mochiweb_headers:make([{"Host", "s3.amazonaws.com"},
                               {"Content-Type", "image/jpeg"},
                               {"x-rcs-rewrite-path", OrigPath},
                               {"Content-Length", 94328},
                               {"Date", "Tue, 27 Mar 2007 21:15:45 +0000"}]),
    RD = wrq:create(Method, Version, Path, Headers),
    ExpectedSignature = "hcicpDDvL9SsO6AkvxqmIWkmOuQ=",
    CalculatedSignature = calculate_signature(KeyData, RD),
    test_fun("example put object test", ExpectedSignature, CalculatedSignature).

%% Bucket listing: query parameters are present but are not
%% subresources, so they do not enter the canonical resource.
example_list() ->
    KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
    Method = 'GET',
    Version = {1, 1},
    OrigPath = "/johnsmith/?prefix=photos&max-keys=50&marker=puppy",
    Path = "/buckets/johnsmith/objects?prefix=photos&max-keys=50&marker=puppy",
    Headers =
        mochiweb_headers:make([{"User-Agent", "Mozilla/5.0"},
                               {"Host", "johnsmith.s3.amazonaws.com"},
                               {"x-rcs-rewrite-path", OrigPath},
                               {"Date", "Tue, 27 Mar 2007 19:42:41 +0000"}]),
    RD = wrq:create(Method, Version, Path, Headers),
    ExpectedSignature = "jsRt/rhG+Vtp88HrYL706QhE4w4=",
    CalculatedSignature = calculate_signature(KeyData, RD),
    test_fun("example list test", ExpectedSignature, CalculatedSignature).

%% Fetching the bucket ACL: "?acl" is a subresource and is signed.
example_fetch() ->
    KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
    Method = 'GET',
    Version = {1, 1},
    OrigPath = "/johnsmith/?acl",
    Path = "/buckets/johnsmith/acl",
    Headers =
        mochiweb_headers:make([{"Host", "johnsmith.s3.amazonaws.com"},
                               {"x-rcs-rewrite-path", OrigPath},
                               {"Date", "Tue, 27 Mar 2007 19:44:46 +0000"}]),
    RD = wrq:create(Method, Version, Path, Headers),
    ExpectedSignature = "thdUi9VAkzhkniLj96JIrOPGi0g=",
    CalculatedSignature = calculate_signature(KeyData, RD),
    test_fun("example fetch test", ExpectedSignature, CalculatedSignature).

%% DELETE with an x-amz-date header: the Date line of the
%% string-to-sign is left empty and the amz header is signed instead.
example_delete() ->
    KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
    Method = 'DELETE',
    Version = {1, 1},
    OrigPath = "/johnsmith/photos/puppy.jpg",
    Path = "/buckets/johnsmith/objects/photos/puppy.jpg",
    Headers =
        mochiweb_headers:make([{"User-Agent", "dotnet"},
                               {"Host", "s3.amazonaws.com"},
                               {"x-rcs-rewrite-path", OrigPath},
                               {"Date", "Tue, 27 Mar 2007 21:20:27 +0000"},
                               {"x-amz-date", "Tue, 27 Mar 2007 21:20:26 +0000"}]),
    RD = wrq:create(Method, Version, Path, Headers),
    ExpectedSignature = "k3nL7gH3+PadhTEVn5Ip83xlYzk=",
    CalculatedSignature = calculate_signature(KeyData, RD),
    test_fun("example delete test", ExpectedSignature, CalculatedSignature).

%% @TODO This test case should be specified using two separate
%% X-Amz-Meta-ReviewedBy headers, but Amazon strictly interprets
%% section 4.2 of RFC 2616 and forbids anything but commas separating
%% field values of headers with the same field name, whereas webmachine
%% inserts a comma and a space between the field values. This is
%% probably something that can be changed in webmachine without any
%% ill effect, but that needs to be verified. For now, the test case
%% is specified using a single X-Amz-Meta-ReviewedBy header with
%% multiple field values.
example_upload() ->
    KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
    Method = 'PUT',
    Version = {1, 1},
    OrigPath = "/static.johnsmith.net/db-backup.dat.gz",
    Path = "/buckets/static.johnsmith.net/objects/db-backup.dat.gz",
    Headers =
        mochiweb_headers:make([{"User-Agent", "curl/7.15.5"},
                               {"Host", "static.johnsmith.net:8080"},
                               {"Date", "Tue, 27 Mar 2007 21:06:08 +0000"},
                               {"x-rcs-rewrite-path", OrigPath},
                               {"x-amz-acl", "public-read"},
                               {"content-type", "application/x-download"},
                               {"Content-MD5", "4gJE4saaMU4BqNR0kLY+lw=="},
                               {"X-Amz-Meta-ReviewedBy", ","},
                               %% {"X-Amz-Meta-ReviewedBy", ""},
                               {"X-Amz-Meta-FileChecksum", "0x02661779"},
                               {"X-Amz-Meta-ChecksumAlgorithm", "crc32"},
                               {"Content-Disposition", "attachment; filename=database.dat"},
                               {"Content-Encoding", "gzip"},
                               {"Content-Length", 5913339}]),
    RD = wrq:create(Method, Version, Path, Headers),
    ExpectedSignature = "C0FlOtU8Ylb9KDTpZqYkZPX91iI=",
    CalculatedSignature = calculate_signature(KeyData, RD),
    test_fun("example upload test", ExpectedSignature, CalculatedSignature).

%% GET of the service root (list all buckets).
example_list_all_buckets() ->
    KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
    Method = 'GET',
    Version = {1, 1},
    Path = "/",
    Headers =
        mochiweb_headers:make([{"Host", "s3.amazonaws.com"},
                               {"x-rcs-rewrite-path", Path},
                               {"Date", "Wed, 28 Mar 2007 01:29:59 +0000"}]),
    RD = wrq:create(Method, Version, Path, Headers),
    ExpectedSignature = "Db+gepJSUbZKwpx1FR0DLtEYoZA=",
    CalculatedSignature = calculate_signature(KeyData, RD),
    test_fun("example list all buckts test", ExpectedSignature, CalculatedSignature).

%% Percent-encoded (UTF-8) key: the encoded path is signed verbatim.
example_unicode_keys() ->
    KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
    Method = 'GET',
    Version = {1, 1},
    OrigPath = "/dictionary/fran%C3%A7ais/pr%c3%a9f%c3%a8re",
    Path = "/buckets/dictionary/objects/fran%C3%A7ais/pr%c3%a9f%c3%a8re",
    Headers =
        mochiweb_headers:make([{"Host", "s3.amazonaws.com"},
                               {"x-rcs-rewrite-path", OrigPath},
                               {"Date", "Wed, 28 Mar 2007 01:49:49 +0000"}]),
    RD = wrq:create(Method, Version, Path, Headers),
    ExpectedSignature = "dxhSBHoI6eVSPcXJqEghlUzZMnY=",
    CalculatedSignature = calculate_signature(KeyData, RD),
    test_fun("example unicode keys test", ExpectedSignature, CalculatedSignature).
-endif.
| null | https://raw.githubusercontent.com/basho/riak_cs_auth/906f3e6f61c34c1c52a677f8ac81d265e6ebb5fd/src/riak_cs_s3_auth.erl | erlang | ---------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
---------------------------------------------------------------------
===================================================================
Public API
===================================================================
to return; will have to check after testing.
===================================================================
===================================================================
TODO: get noisy here?
accumulator with cons, it comes back in reverse
order. So we'll sort then reverise, so cons'ing
actually puts it back in the correct order
===================================================================
===================================================================
inserts a comma and a space between the field values. This is
probably something that can be changed in webmachine without any
ill effect, but that needs to be verified. For now, the test case
multiple field values.
{"X-Amz-Meta-ReviewedBy", ""}, | Copyright ( c ) 2007 - 2013 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_cs_s3_auth).
-behavior(riak_cs_auth).
-export([identify/2, authenticate/4]).
-include_lib("riak_cs_core/include/riak_cs.hrl").
-include_lib("riak_cs_core/include/s3_api.hrl").
-include_lib("webmachine/include/webmachine.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-define(QS_KEYID, "AWSAccessKeyId").
-define(QS_SIGNATURE, "Signature").
-spec identify(term(), term()) -> {string() | undefined , string()}.
identify(RD,_Ctx) ->
case wrq:get_req_header("authorization", RD) of
undefined ->
{wrq:get_qs_value(?QS_KEYID, RD), wrq:get_qs_value(?QS_SIGNATURE, RD)};
AuthHeader ->
parse_auth_header(AuthHeader)
end.
-spec authenticate(rcs_user(), string(), term(), term()) -> ok | {error, atom()}.
authenticate(User, Signature, RD, _Ctx) ->
CalculatedSignature = calculate_signature(User?RCS_USER.key_secret, RD),
case check_auth(Signature, CalculatedSignature) of
true ->
Expires = wrq:get_qs_value("Expires", RD),
case Expires of
undefined ->
ok;
_ ->
{MegaSecs, Secs, _} = os:timestamp(),
Now = (MegaSecs * 1000000) + Secs,
case Now > list_to_integer(Expires) of
true ->
@TODO Not sure if this is the proper error
{error, invalid_authentication};
false ->
ok
end
end;
_ ->
{error, invalid_authentication}
end.
Internal functions
parse_auth_header("AWS " ++ Key) ->
case string:tokens(Key, ":") of
[KeyId, KeyData] ->
{KeyId, KeyData};
_ -> {undefined, undefined}
end;
parse_auth_header(_) ->
{undefined, undefined}.
calculate_signature(KeyData, RD) ->
Headers = riak_cs_wm_utils:normalize_headers(RD),
AmazonHeaders = riak_cs_wm_utils:extract_amazon_headers(Headers),
OriginalResource = riak_cs_s3_rewrite:original_resource(RD),
Resource = case OriginalResource of
{Path,QS} -> [Path, canonicalize_qs(QS)]
end,
Expires = wrq:get_qs_value("Expires", RD),
case Expires of
undefined ->
case proplists:is_defined("x-amz-date", Headers) of
true ->
Date = "\n";
false ->
Date = [wrq:get_req_header("date", RD), "\n"]
end;
_ ->
Date = Expires ++ "\n"
end,
case wrq:get_req_header("content-md5", RD) of
undefined ->
CMD5 = [];
CMD5 ->
ok
end,
case wrq:get_req_header("content-type", RD) of
undefined ->
ContentType = [];
ContentType ->
ok
end,
STS = [atom_to_list(wrq:method(RD)), "\n",
CMD5,
"\n",
ContentType,
"\n",
Date,
AmazonHeaders,
Resource],
base64:encode_to_string(
crypto:sha_mac(KeyData, STS)).
check_auth(PresentedSignature, CalculatedSignature) ->
PresentedSignature == CalculatedSignature.
canonicalize_qs(QS) ->
The QS must be sorted be canonicalized ,
and since ` canonicalize_qs/2 ` builds up the
ReversedSorted = lists:reverse(lists:sort(QS)),
canonicalize_qs(ReversedSorted, []).
canonicalize_qs([], []) ->
[];
canonicalize_qs([], Acc) ->
lists:flatten(["?", Acc]);
canonicalize_qs([{K, []}|T], Acc) ->
case lists:member(K, ?SUBRESOURCES) of
true ->
Amp = if Acc == [] -> "";
true -> "&"
end,
canonicalize_qs(T, [[K, Amp]|Acc]);
false ->
canonicalize_qs(T, Acc)
end;
canonicalize_qs([{K, V}|T], Acc) ->
case lists:member(K, ?SUBRESOURCES) of
true ->
Amp = if Acc == [] -> "";
true -> "&"
end,
canonicalize_qs(T, [[K, "=", V, Amp]|Acc]);
false ->
canonicalize_qs(T, Acc)
end.
Eunit tests
-ifdef(TEST).
Test cases for the examples provided by Amazon here :
auth_test_() ->
{spawn,
[
{setup,
fun setup/0,
fun teardown/1,
fun(_X) ->
[
example_get_object(),
example_put_object(),
example_list(),
example_fetch(),
example_delete(),
example_upload(),
example_list_all_buckets(),
example_unicode_keys()
]
end
}]}.
setup() ->
application:set_env(riak_cs, cs_root_host, ?ROOT_HOST).
teardown(_) ->
application:unset_env(riak_cs, cs_root_host).
test_fun(Desc, ExpectedSignature, CalculatedSignature) ->
{Desc, ?_assert(check_auth(ExpectedSignature,CalculatedSignature))}.
example_get_object() ->
KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
Method = 'GET',
Version = {1, 1},
OrigPath = "/johnsmith/photos/puppy.jpg",
Path = "/buckets/johnsmith/objects/photos/puppy.jpg",
Headers =
mochiweb_headers:make([{"Host", "s3.amazonaws.com"},
{"Date", "Tue, 27 Mar 2007 19:36:42 +0000"},
{"x-rcs-rewrite-path", OrigPath}]),
RD = wrq:create(Method, Version, Path, Headers),
ExpectedSignature = "xXjDGYUmKxnwqr5KXNPGldn5LbA=",
CalculatedSignature = calculate_signature(KeyData, RD),
test_fun("example get object test", ExpectedSignature, CalculatedSignature).
example_put_object() ->
KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
Method = 'PUT',
Version = {1, 1},
OrigPath = "/johnsmith/photos/puppy.jpg",
Path = "/buckets/johnsmith/objects/photos/puppy.jpg",
Headers =
mochiweb_headers:make([{"Host", "s3.amazonaws.com"},
{"Content-Type", "image/jpeg"},
{"x-rcs-rewrite-path", OrigPath},
{"Content-Length", 94328},
{"Date", "Tue, 27 Mar 2007 21:15:45 +0000"}]),
RD = wrq:create(Method, Version, Path, Headers),
ExpectedSignature = "hcicpDDvL9SsO6AkvxqmIWkmOuQ=",
CalculatedSignature = calculate_signature(KeyData, RD),
test_fun("example put object test", ExpectedSignature, CalculatedSignature).
example_list() ->
KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
Method = 'GET',
Version = {1, 1},
OrigPath = "/johnsmith/?prefix=photos&max-keys=50&marker=puppy",
Path = "/buckets/johnsmith/objects?prefix=photos&max-keys=50&marker=puppy",
Headers =
mochiweb_headers:make([{"User-Agent", "Mozilla/5.0"},
{"Host", "johnsmith.s3.amazonaws.com"},
{"x-rcs-rewrite-path", OrigPath},
{"Date", "Tue, 27 Mar 2007 19:42:41 +0000"}]),
RD = wrq:create(Method, Version, Path, Headers),
ExpectedSignature = "jsRt/rhG+Vtp88HrYL706QhE4w4=",
CalculatedSignature = calculate_signature(KeyData, RD),
test_fun("example list test", ExpectedSignature, CalculatedSignature).
example_fetch() ->
KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
Method = 'GET',
Version = {1, 1},
OrigPath = "/johnsmith/?acl",
Path = "/buckets/johnsmith/acl",
Headers =
mochiweb_headers:make([{"Host", "johnsmith.s3.amazonaws.com"},
{"x-rcs-rewrite-path", OrigPath},
{"Date", "Tue, 27 Mar 2007 19:44:46 +0000"}]),
RD = wrq:create(Method, Version, Path, Headers),
ExpectedSignature = "thdUi9VAkzhkniLj96JIrOPGi0g=",
CalculatedSignature = calculate_signature(KeyData, RD),
test_fun("example fetch test", ExpectedSignature, CalculatedSignature).
example_delete() ->
KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
Method = 'DELETE',
Version = {1, 1},
OrigPath = "/johnsmith/photos/puppy.jpg",
Path = "/buckets/johnsmith/objects/photos/puppy.jpg",
Headers =
mochiweb_headers:make([{"User-Agent", "dotnet"},
{"Host", "s3.amazonaws.com"},
{"x-rcs-rewrite-path", OrigPath},
{"Date", "Tue, 27 Mar 2007 21:20:27 +0000"},
{"x-amz-date", "Tue, 27 Mar 2007 21:20:26 +0000"}]),
RD = wrq:create(Method, Version, Path, Headers),
ExpectedSignature = "k3nL7gH3+PadhTEVn5Ip83xlYzk=",
CalculatedSignature = calculate_signature(KeyData, RD),
test_fun("example delete test", ExpectedSignature, CalculatedSignature).
@TODO This test case should be specified using two separate
X - Amz - Meta - ReviewedBy headers , but Amazon strictly interprets
section 4.2 of RFC 2616 and forbids anything but commas seperating
field values of headers with the same field name whereas
is specified using a singled X - Amz - Meta - ReviewedBy header with
example_upload() ->
KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
Method = 'PUT',
Version = {1, 1},
OrigPath = "/static.johnsmith.net/db-backup.dat.gz",
Path = "/buckets/static.johnsmith.net/objects/db-backup.dat.gz",
Headers =
mochiweb_headers:make([{"User-Agent", "curl/7.15.5"},
{"Host", "static.johnsmith.net:8080"},
{"Date", "Tue, 27 Mar 2007 21:06:08 +0000"},
{"x-rcs-rewrite-path", OrigPath},
{"x-amz-acl", "public-read"},
{"content-type", "application/x-download"},
{"Content-MD5", "4gJE4saaMU4BqNR0kLY+lw=="},
{"X-Amz-Meta-ReviewedBy", ","},
{"X-Amz-Meta-FileChecksum", "0x02661779"},
{"X-Amz-Meta-ChecksumAlgorithm", "crc32"},
{"Content-Disposition", "attachment; filename=database.dat"},
{"Content-Encoding", "gzip"},
{"Content-Length", 5913339}]),
RD = wrq:create(Method, Version, Path, Headers),
ExpectedSignature = "C0FlOtU8Ylb9KDTpZqYkZPX91iI=",
CalculatedSignature = calculate_signature(KeyData, RD),
test_fun("example upload test", ExpectedSignature, CalculatedSignature).
example_list_all_buckets() ->
KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
Method = 'GET',
Version = {1, 1},
Path = "/",
Headers =
mochiweb_headers:make([{"Host", "s3.amazonaws.com"},
{"x-rcs-rewrite-path", Path},
{"Date", "Wed, 28 Mar 2007 01:29:59 +0000"}]),
RD = wrq:create(Method, Version, Path, Headers),
ExpectedSignature = "Db+gepJSUbZKwpx1FR0DLtEYoZA=",
CalculatedSignature = calculate_signature(KeyData, RD),
test_fun("example list all buckts test", ExpectedSignature, CalculatedSignature).
example_unicode_keys() ->
KeyData = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o",
Method = 'GET',
Version = {1, 1},
OrigPath = "/dictionary/fran%C3%A7ais/pr%c3%a9f%c3%a8re",
Path = "/buckets/dictionary/objects/fran%C3%A7ais/pr%c3%a9f%c3%a8re",
Headers =
mochiweb_headers:make([{"Host", "s3.amazonaws.com"},
{"x-rcs-rewrite-path", OrigPath},
{"Date", "Wed, 28 Mar 2007 01:49:49 +0000"}]),
RD = wrq:create(Method, Version, Path, Headers),
ExpectedSignature = "dxhSBHoI6eVSPcXJqEghlUzZMnY=",
CalculatedSignature = calculate_signature(KeyData, RD),
test_fun("example unicode keys test", ExpectedSignature, CalculatedSignature).
-endif.
|
d2f0297e492d7e841e707915dcbd21ee569cf599863362f6ddf9b060c4712c06 | dradtke/Lisp-Text-Editor | transformations.lisp | (in-package :cl-cairo2)
Notes
;;;;
cairo - matrix - init is not defined , as we have a structure in lisp
;;;; with an appropriate constructor
;;;;
;;;; cairo_identity_matrix is reset-trans-matrix
;;;;
;;;; functions that manipulate transformation matrices have
;;;; trans-matrix instead of matrix in their name
;;;;
;;;; cairo_matrix_transform_distance and cairo_matrix_transform_point
;;;; are simply transform-distance and transform-point
;;;;
;;;; cairo_matrix_init is not defined, make-trans-matrix will give
;;;; you an identity matrix
;;;;
;;;; simple functions
;;;;
(define-many-with-default-context
(translate tx ty)
(scale sx sy)
(rotate angle))
(define-flexible (reset-trans-matrix pointer)
(cairo_identity_matrix pointer))
;;;;
;;;; transition matrix structure and helper functions/macros
;;;;
(defstruct trans-matrix
(xx 1d0 :type double-float)
(yx 0d0 :type double-float)
(xy 0d0 :type double-float)
(yy 1d0 :type double-float)
(x0 0d0 :type double-float)
(y0 0d0 :type double-float))
(defun trans-matrix-copy-in (pointer matrix)
"Copy matrix to a memory location."
(with-foreign-slots ((xx yx xy yy x0 y0) pointer cairo_matrix_t)
(setf xx (trans-matrix-xx matrix)
yx (trans-matrix-yx matrix)
xy (trans-matrix-xy matrix)
yy (trans-matrix-yy matrix)
x0 (trans-matrix-x0 matrix)
y0 (trans-matrix-y0 matrix))))
(defun trans-matrix-copy-out (pointer matrix)
"Copy contents of a memory location to a transition matrix."
(with-foreign-slots ((xx yx xy yy x0 y0) pointer cairo_matrix_t)
(setf (trans-matrix-xx matrix) xx
(trans-matrix-yx matrix) yx
(trans-matrix-xy matrix) xy
(trans-matrix-yy matrix) yy
(trans-matrix-x0 matrix) x0
(trans-matrix-y0 matrix) y0)))
(defmacro with-trans-matrix-in (matrix pointer &body body)
"Execute body with pointer pointing to a memory location with matrix."
`(with-foreign-pointer (,pointer (foreign-type-size 'cairo_matrix_t))
(trans-matrix-copy-in ,pointer ,matrix)
,@body))
(defmacro with-trans-matrix-out (pointer &body body)
"Execute body with pointer pointing to an uninitialized location,
then copy this to matrix and return the matrix."
(let ((matrix-name (gensym)))
`(with-foreign-pointer (,pointer (foreign-type-size 'cairo_matrix_t))
(let ((,matrix-name (make-trans-matrix)))
,@body
(trans-matrix-copy-out ,pointer ,matrix-name)
,matrix-name))))
(defmacro with-trans-matrix-in-out (matrix pointer &body body)
(let ((matrix-name (gensym)))
`(with-foreign-pointer (,pointer (foreign-type-size 'cairo_matrix_t))
(let ((,matrix-name (make-trans-matrix)))
(trans-matrix-copy-in ,pointer ,matrix)
,@body
(trans-matrix-copy-out ,pointer ,matrix-name)
,matrix-name))))
(defmacro with-x-y (&body body)
"Creates temporary variables on the stack with pointers xp and yp,
and copies x and y in/out before/after (respectively) the
execution of body."
`(with-foreign-objects ((xp :double) (yp :double))
(setf (mem-ref xp :double) (coerce x 'double-float)
(mem-ref yp :double) (coerce y 'double-float))
,@body
(values (mem-ref xp :double) (mem-ref yp :double))))
(defmacro define-with-x-y (name)
"Defines a function that is called with context, x and y, and
returns the latter two."
`(define-flexible (,name pointer x y)
(with-x-y
(,(prepend-intern "cairo_" name) pointer xp yp))))
;;;;
;;;; transformation and conversion functions
;;;;
(define-flexible (transform pointer matrix)
(with-trans-matrix-in matrix matrix-pointer
(cairo_transform pointer matrix-pointer)))
(define-flexible (set-trans-matrix pointer matrix)
(with-trans-matrix-in matrix matrix-pointer
(cairo_set_matrix pointer matrix-pointer)))
(define-flexible (get-trans-matrix pointer)
(with-trans-matrix-out matrix-pointer
(cairo_get_matrix pointer matrix-pointer)))
(define-with-x-y user-to-device)
(define-with-x-y user-to-device-distance)
(define-with-x-y device-to-user)
(define-with-x-y device-to-user-distance)
;;;;
;;;; transformations
;;;;
(defmacro define-matrix-init (name &rest args)
"Define a matrix initializer function with args, which returns the
new matrix."
`(defun ,(prepend-intern "trans-matrix-init-" name :replace-dash nil) ,args
(with-trans-matrix-out matrix-pointer
(,(prepend-intern "cairo_matrix_init_" name)
matrix-pointer
,@args))))
(define-matrix-init translate tx ty)
(define-matrix-init scale sx sy)
(define-matrix-init rotate radians)
(defmacro define-matrix-transformation (name &rest args)
"Define a matrix transformation function with matrix and args,
which returns the new matrix."
`(export
(defun ,(prepend-intern "trans-matrix-" name :replace-dash nil) (matrix ,@args)
(with-trans-matrix-in-out matrix matrix-pointer
(,(prepend-intern "cairo_matrix_" name)
matrix-pointer
,@args)))))
(define-matrix-transformation translate tx ty)
(define-matrix-transformation scale sx sy)
(define-matrix-transformation rotate radians)
(define-matrix-transformation invert)
(defun trans-matrix-multiply (a b)
(with-trans-matrix-in a a-pointer
(with-trans-matrix-in b b-pointer
(with-trans-matrix-out result-pointer
(cairo_matrix_multiply result-pointer
a-pointer
b-pointer)))))
(defun transform-distance (matrix x y)
(with-trans-matrix-in matrix matrix-pointer
(with-x-y
(cairo_matrix_transform_distance matrix-pointer xp yp))))
(defun transform-point (matrix x y)
(with-trans-matrix-in matrix matrix-pointer
(with-x-y
(cairo_matrix_transform_point matrix-pointer xp yp))))
| null | https://raw.githubusercontent.com/dradtke/Lisp-Text-Editor/b0947828eda82d7edd0df8ec2595e7491a633580/quicklisp/dists/quicklisp/software/cl-cairo2-20120208-git/src/transformations.lisp | lisp |
with an appropriate constructor
cairo_identity_matrix is reset-trans-matrix
functions that manipulate transformation matrices have
trans-matrix instead of matrix in their name
cairo_matrix_transform_distance and cairo_matrix_transform_point
are simply transform-distance and transform-point
cairo_matrix_init is not defined, make-trans-matrix will give
you an identity matrix
simple functions
transition matrix structure and helper functions/macros
transformation and conversion functions
transformations
| (in-package :cl-cairo2)
Notes
cairo - matrix - init is not defined , as we have a structure in lisp
(define-many-with-default-context
(translate tx ty)
(scale sx sy)
(rotate angle))
(define-flexible (reset-trans-matrix pointer)
(cairo_identity_matrix pointer))
(defstruct trans-matrix
(xx 1d0 :type double-float)
(yx 0d0 :type double-float)
(xy 0d0 :type double-float)
(yy 1d0 :type double-float)
(x0 0d0 :type double-float)
(y0 0d0 :type double-float))
(defun trans-matrix-copy-in (pointer matrix)
"Copy matrix to a memory location."
(with-foreign-slots ((xx yx xy yy x0 y0) pointer cairo_matrix_t)
(setf xx (trans-matrix-xx matrix)
yx (trans-matrix-yx matrix)
xy (trans-matrix-xy matrix)
yy (trans-matrix-yy matrix)
x0 (trans-matrix-x0 matrix)
y0 (trans-matrix-y0 matrix))))
(defun trans-matrix-copy-out (pointer matrix)
"Copy contents of a memory location to a transition matrix."
(with-foreign-slots ((xx yx xy yy x0 y0) pointer cairo_matrix_t)
(setf (trans-matrix-xx matrix) xx
(trans-matrix-yx matrix) yx
(trans-matrix-xy matrix) xy
(trans-matrix-yy matrix) yy
(trans-matrix-x0 matrix) x0
(trans-matrix-y0 matrix) y0)))
(defmacro with-trans-matrix-in (matrix pointer &body body)
"Execute body with pointer pointing to a memory location with matrix."
`(with-foreign-pointer (,pointer (foreign-type-size 'cairo_matrix_t))
(trans-matrix-copy-in ,pointer ,matrix)
,@body))
(defmacro with-trans-matrix-out (pointer &body body)
"Execute body with pointer pointing to an uninitialized location,
then copy this to matrix and return the matrix."
(let ((matrix-name (gensym)))
`(with-foreign-pointer (,pointer (foreign-type-size 'cairo_matrix_t))
(let ((,matrix-name (make-trans-matrix)))
,@body
(trans-matrix-copy-out ,pointer ,matrix-name)
,matrix-name))))
(defmacro with-trans-matrix-in-out (matrix pointer &body body)
(let ((matrix-name (gensym)))
`(with-foreign-pointer (,pointer (foreign-type-size 'cairo_matrix_t))
(let ((,matrix-name (make-trans-matrix)))
(trans-matrix-copy-in ,pointer ,matrix)
,@body
(trans-matrix-copy-out ,pointer ,matrix-name)
,matrix-name))))
(defmacro with-x-y (&body body)
"Creates temporary variables on the stack with pointers xp and yp,
and copies x and y in/out before/after (respectively) the
execution of body."
`(with-foreign-objects ((xp :double) (yp :double))
(setf (mem-ref xp :double) (coerce x 'double-float)
(mem-ref yp :double) (coerce y 'double-float))
,@body
(values (mem-ref xp :double) (mem-ref yp :double))))
(defmacro define-with-x-y (name)
"Defines a function that is called with context, x and y, and
returns the latter two."
`(define-flexible (,name pointer x y)
(with-x-y
(,(prepend-intern "cairo_" name) pointer xp yp))))
(define-flexible (transform pointer matrix)
(with-trans-matrix-in matrix matrix-pointer
(cairo_transform pointer matrix-pointer)))
(define-flexible (set-trans-matrix pointer matrix)
(with-trans-matrix-in matrix matrix-pointer
(cairo_set_matrix pointer matrix-pointer)))
(define-flexible (get-trans-matrix pointer)
(with-trans-matrix-out matrix-pointer
(cairo_get_matrix pointer matrix-pointer)))
(define-with-x-y user-to-device)
(define-with-x-y user-to-device-distance)
(define-with-x-y device-to-user)
(define-with-x-y device-to-user-distance)
(defmacro define-matrix-init (name &rest args)
"Define a matrix initializer function with args, which returns the
new matrix."
`(defun ,(prepend-intern "trans-matrix-init-" name :replace-dash nil) ,args
(with-trans-matrix-out matrix-pointer
(,(prepend-intern "cairo_matrix_init_" name)
matrix-pointer
,@args))))
(define-matrix-init translate tx ty)
(define-matrix-init scale sx sy)
(define-matrix-init rotate radians)
(defmacro define-matrix-transformation (name &rest args)
"Define a matrix transformation function with matrix and args,
which returns the new matrix."
`(export
(defun ,(prepend-intern "trans-matrix-" name :replace-dash nil) (matrix ,@args)
(with-trans-matrix-in-out matrix matrix-pointer
(,(prepend-intern "cairo_matrix_" name)
matrix-pointer
,@args)))))
(define-matrix-transformation translate tx ty)
(define-matrix-transformation scale sx sy)
(define-matrix-transformation rotate radians)
(define-matrix-transformation invert)
(defun trans-matrix-multiply (a b)
(with-trans-matrix-in a a-pointer
(with-trans-matrix-in b b-pointer
(with-trans-matrix-out result-pointer
(cairo_matrix_multiply result-pointer
a-pointer
b-pointer)))))
(defun transform-distance (matrix x y)
(with-trans-matrix-in matrix matrix-pointer
(with-x-y
(cairo_matrix_transform_distance matrix-pointer xp yp))))
(defun transform-point (matrix x y)
(with-trans-matrix-in matrix matrix-pointer
(with-x-y
(cairo_matrix_transform_point matrix-pointer xp yp))))
|
fb09a8c22efa94d5f197ade45a9c268daae81be6735f89b2642bbf9e14c01da9 | finnishtransportagency/harja | tieluvat_test.clj | (ns harja.palvelin.integraatiot.api.tieluvat-test
(:require [clojure.test :refer [deftest is use-fixtures]]
[harja.testi :refer :all]
[com.stuartsierra.component :as component]
[harja.palvelin.komponentit.liitteet :as liitteet]
[harja.palvelin.integraatiot.api.tieluvat :as tieluvat]
[harja.domain.tielupa :as tielupa]
[harja.domain.muokkaustiedot :as muokkaustiedot]
[harja.palvelin.integraatiot.api.tyokalut :as tyokalut]
[harja.palvelin.integraatiot.api.tyokalut.virheet :as virheet]
[harja.kyselyt.tielupa-kyselyt :as tielupa-q]
[slingshot.slingshot :refer [throw+]]
[slingshot.test]
[harja.palvelin.integraatiot.api.tyokalut :as api-tyokalut]))
(def kayttaja "livi")
(def jarjestelma-fixture
(laajenna-integraatiojarjestelmafixturea
kayttaja
:liitteiden-hallinta (component/using (liitteet/->Liitteet nil) [:db])
:api-tieluvat (component/using (tieluvat/->Tieluvat)
[:http-palvelin :db :integraatioloki :liitteiden-hallinta])))
(use-fixtures :once jarjestelma-fixture)
(deftest hae-ely
(let [db (luo-testitietokanta)
uudenmaan-elyn-id (ffirst (q "select id from organisaatio where elynumero = 1;"))]
(is (= {::tielupa/ely uudenmaan-elyn-id} (tieluvat/hae-ely db "Uusimaa" {})))
(is (thrown+?
#(tyokalut/tasmaa-poikkeus
%
virheet/+viallinen-kutsu+
virheet/+tuntematon-ely+
"Tuntematon ELY Tuntematon")
(tieluvat/hae-ely db "Tuntematon" {})))))
(deftest hae-sijainnit
(let [db (luo-testitietokanta)
tielupa-pistesijainnilla {::tielupa/sijainnit [{:harja.domain.tielupa/tie 20
:harja.domain.tielupa/aet 1
:harja.domain.tielupa/aosa 1}]}
tielupa-pistesijainteineen (tieluvat/hae-tieluvan-sijainnit db tielupa-pistesijainnilla)
tielupa-sijaintivalilla {::tielupa/sijainnit [{:harja.domain.tielupa/tie 20
:harja.domain.tielupa/aet 1
:harja.domain.tielupa/aosa 1
:losa 1
:let 300}]}
tielupa-sijaintivaleineen (tieluvat/hae-tieluvan-sijainnit db tielupa-sijaintivalilla)
tarkasta-tielupa (fn [ilman-sijainti sijainnin-kanssa]
(let [avaimet (fn [tielupa] (mapv #(select-keys % [::tielupa/tie ::tielupa/aosa ::tielupa/aet])
(::tielupa/sijainnit tielupa)))]
(is (= (avaimet ilman-sijainti) (avaimet sijainnin-kanssa))))
(is (every? #(not (nil? (::tielupa/geometria %))) (::tielupa/sijainnit sijainnin-kanssa))))]
(tarkasta-tielupa tielupa-pistesijainnilla tielupa-pistesijainteineen)
(tarkasta-tielupa tielupa-sijaintivalilla tielupa-sijaintivaleineen)))
(deftest kirjaa-uusi-mainoslupa
(let [db (luo-testitietokanta)
tunniste 3453455
tielupa-json (.replace (slurp "test/resurssit/api/tieluvan-kirjaus-mainoslupa.json") "<TUNNISTE>" (str tunniste))
odotettu {::tielupa/tienpitoviranomainen-sahkopostiosoite ""
::tielupa/kohde-postitoimipaikka "Kiiminki"
::tielupa/liikenneohjaajan-sahkopostiosoite ""
::tielupa/liikenneohjaajan-yhteyshenkilo "Lilli Liikenteenohjaaja"
::tielupa/tienpitoviranomainen-puhelinnumero "987-7889087"
::tielupa/voimassaolon-alkupvm #inst "2020-09-21T21:00:00.000-00:00"
::tielupa/tienpitoviranomainen-yhteyshenkilo "Teijo Tienpitäjä"
::tielupa/kunta "Kiiminki"
::tielupa/kohde-lahiosoite "Tie 123"
::tielupa/liikenneohjaajan-nimi "Liikenneohjaus Oy"
::tielupa/paatoksen-diaarinumero "123456789"
::tielupa/hakija-tyyppi "kotitalous"
::tielupa/urakat [4]
::tielupa/kaapeliasennukset []
::tielupa/urakoitsija-sahkopostiosoite ""
::tielupa/hakija-postinumero "90900"
::tielupa/sijainnit
[{::tielupa/ajorata 0
::tielupa/tie 20
::tielupa/aosa 6
::tielupa/aet 2631
::tielupa/kaista 11}]
::tielupa/urakoitsija-puhelinnumero "987-7889087"
::tielupa/otsikko "Lupa lisätä mainos tielle"
::tielupa/hakija-postinosoite "Liitintie 1"
::tielupa/urakoiden-nimet ["Oulun alueurakka"]
::tielupa/ely 12
::tielupa/kohde-postinumero "90900"
::tielupa/ulkoinen-tunniste 3453455
::tielupa/saapumispvm #inst "2017-09-21T21:00:00.000-00:00"
::tielupa/liikenneohjaajan-puhelinnumero "987-7889087"
::tielupa/katselmus-url ""
::tielupa/voimassaolon-loppupvm #inst "2020-09-21T21:00:00.000-00:00"
::tielupa/hakija-nimi "Henna Hakija"
::tielupa/myontamispvm #inst "2018-09-21T21:00:00.000-00:00"
::tielupa/tyyppi :mainoslupa
::tielupa/hakija-sahkopostiosoite ""
::tielupa/hakija-puhelinnumero "987-7889087"
::tielupa/tien-nimi "Kuusamontie"
::tielupa/urakoitsija-yhteyshenkilo "Yrjänä Yhteyshenkilo"
::tielupa/urakoitsija-nimi "Puulaaki Oy"}]
(api-tyokalut/post-kutsu ["/api/tieluvat"] kayttaja portti tielupa-json)
(let [haettu-tielupa (first (tielupa-q/hae-tieluvat db {::tielupa/ulkoinen-tunniste tunniste}))
haettu-tielupa (-> haettu-tielupa
(dissoc ::muokkaustiedot/luotu)
(assoc ::tielupa/sijainnit (map #(dissoc % ::tielupa/geometria) (::tielupa/sijainnit haettu-tielupa))))]
(tarkista-map-arvot odotettu haettu-tielupa))))
(deftest kirjaa-uusi-suojaalue-lupa
(let [db (luo-testitietokanta)
tunniste 373773
tielupa-json (.replace (slurp "test/resurssit/api/tieluvan-kirjaus-suojaalue.json") "<TUNNISTE>" (str tunniste))
odotettu #:harja.domain.tielupa{:urakoitsija-yhteyshenkilo
"Yrjänä Yhteyshenkilo",
:tienpitoviranomainen-sahkopostiosoite
"",
:voimassaolon-alkupvm
#inst "2020-09-21T21:00:00.000-00:00",
:kohde-postitoimipaikka "Kiiminki",
:kohde-lahiosoite "Tie 123",
:liikenneohjaajan-yhteyshenkilo
"Lilli Liikenteenohjaaja",
:hakija-postinumero "90900",
:kunta "Kiiminki",
:liikenneohjaajan-sahkopostiosoite
"",
:urakoitsija-sahkopostiosoite
"",
:tienpitoviranomainen-yhteyshenkilo
"Teijo Tienpitäjä",
:tienpitoviranomainen-puhelinnumero
"987-7889087",
:sijainnit
[#:harja.domain.tielupa {:tie 20,
:aosa 6,
:aet 2631,
:ajorata 0,
:kaista 1}],
:hakija-tyyppi "kotitalous",
:kaapeliasennukset [],
:liikenneohjaajan-nimi "Liikenneohjaus Oy",
:paatoksen-diaarinumero "123456789",
:saapumispvm
#inst "2017-09-21T21:00:00.000-00:00",
:otsikko "Lupa rakentaa aitta suoja-alueelle",
:katselmus-url "",
:urakoiden-nimet ["Oulun alueurakka"],
:hakija-postinosoite "Liitintie 1",
:urakoitsija-puhelinnumero "987-7889087",
:kohde-postinumero "90900",
:hakija-puhelinnumero "987-7889087",
:ulkoinen-tunniste 373773,
:liikenneohjaajan-puhelinnumero "987-7889087",
:tien-nimi "Kuusamontie",
:hakija-nimi "Henna Hakija",
:myontamispvm
#inst "2018-09-21T21:00:00.000-00:00",
:hakija-sahkopostiosoite
"",
:tyyppi :suoja-aluerakentamislupa,
:urakoitsija-nimi "Puulaaki Oy",
:voimassaolon-loppupvm
#inst "2020-09-21T21:00:00.000-00:00"}]
(api-tyokalut/post-kutsu ["/api/tieluvat"] kayttaja portti tielupa-json)
(let [haettu-tielupa (first (tielupa-q/hae-tieluvat db {::tielupa/ulkoinen-tunniste tunniste}))
_ (prn haettu-tielupa)
haettu-tielupa (-> haettu-tielupa
(dissoc ::muokkaustiedot/luotu)
(assoc ::tielupa/sijainnit (map #(dissoc % ::tielupa/geometria) (::tielupa/sijainnit haettu-tielupa))))]
(tarkista-map-arvot odotettu haettu-tielupa))))
(deftest kirjaa-uusi-liittymalupalupa
(let [db (luo-testitietokanta)
tunniste 43858
tielupa-json (.replace (slurp "test/resurssit/api/tieluvan-kirjaus-liittymalupa.json") "<TUNNISTE>" (str tunniste))
odotettu #:harja.domain.tielupa{:urakoitsija-yhteyshenkilo
"Yrjänä Yhteyshenkilo",
:tienpitoviranomainen-sahkopostiosoite
"",
:voimassaolon-alkupvm
#inst "2020-09-21T21:00:00.000-00:00",
:kohde-postitoimipaikka "Kiiminki",
:kohde-lahiosoite "Tie 123",
:liikenneohjaajan-yhteyshenkilo
"Lilli Liikenteenohjaaja",
:hakija-postinumero "90900",
:kunta "Kiiminki",
:liikenneohjaajan-sahkopostiosoite
"",
:urakoitsija-sahkopostiosoite
"",
:tienpitoviranomainen-yhteyshenkilo
"Teijo Tienpitäjä",
:tienpitoviranomainen-puhelinnumero
"987-7889087",
:sijainnit
[#:harja.domain.tielupa {:tie 20,
:aosa 6,
:aet 2631,
:ajorata 0,
:kaista 11}],
:hakija-tyyppi "kotitalous",
:kaapeliasennukset [],
:liikenneohjaajan-nimi "Liikenneohjaus Oy",
:paatoksen-diaarinumero "123456789",
:saapumispvm
#inst "2017-09-21T21:00:00.000-00:00",
:otsikko
"Lupa rakentaa uusi liittymä mökkitielle",
:katselmus-url "",
:urakoiden-nimet ["Oulun alueurakka"],
:hakija-postinosoite "Liitintie 1",
:urakoitsija-puhelinnumero "987-7889087",
:kohde-postinumero "90900",
:hakija-puhelinnumero "987-7889087",
:ulkoinen-tunniste 43858,
:liikenneohjaajan-puhelinnumero "987-7889087",
:tien-nimi "Kuusamontie",
:hakija-nimi "Henna Liittymä",
:myontamispvm
#inst "2018-09-21T21:00:00.000-00:00",
:hakija-sahkopostiosoite
"",
:tyyppi :liittymalupa,
:urakoitsija-nimi "Puulaaki Oy",
:voimassaolon-loppupvm
#inst "2020-09-21T21:00:00.000-00:00"}
]
(api-tyokalut/post-kutsu ["/api/tieluvat"] kayttaja portti tielupa-json)
(let [haettu-tielupa (first (tielupa-q/hae-tieluvat db {::tielupa/ulkoinen-tunniste tunniste}))
_ (prn "haettu-tielpa:" haettu-tielupa)
haettu-tielupa (-> haettu-tielupa
(dissoc ::muokkaustiedot/luotu)
(assoc ::tielupa/sijainnit (map #(dissoc % ::tielupa/geometria) (::tielupa/sijainnit haettu-tielupa))))]
(tarkista-map-arvot odotettu haettu-tielupa))))
| null | https://raw.githubusercontent.com/finnishtransportagency/harja/110c979dc11526dcde966445e82d326b4ee05991/test/clj/harja/palvelin/integraatiot/api/tieluvat_test.clj | clojure | (ns harja.palvelin.integraatiot.api.tieluvat-test
(:require [clojure.test :refer [deftest is use-fixtures]]
[harja.testi :refer :all]
[com.stuartsierra.component :as component]
[harja.palvelin.komponentit.liitteet :as liitteet]
[harja.palvelin.integraatiot.api.tieluvat :as tieluvat]
[harja.domain.tielupa :as tielupa]
[harja.domain.muokkaustiedot :as muokkaustiedot]
[harja.palvelin.integraatiot.api.tyokalut :as tyokalut]
[harja.palvelin.integraatiot.api.tyokalut.virheet :as virheet]
[harja.kyselyt.tielupa-kyselyt :as tielupa-q]
[slingshot.slingshot :refer [throw+]]
[slingshot.test]
[harja.palvelin.integraatiot.api.tyokalut :as api-tyokalut]))
(def kayttaja "livi")
(def jarjestelma-fixture
(laajenna-integraatiojarjestelmafixturea
kayttaja
:liitteiden-hallinta (component/using (liitteet/->Liitteet nil) [:db])
:api-tieluvat (component/using (tieluvat/->Tieluvat)
[:http-palvelin :db :integraatioloki :liitteiden-hallinta])))
(use-fixtures :once jarjestelma-fixture)
(deftest hae-ely
(let [db (luo-testitietokanta)
uudenmaan-elyn-id (ffirst (q "select id from organisaatio where elynumero = 1;"))]
(is (= {::tielupa/ely uudenmaan-elyn-id} (tieluvat/hae-ely db "Uusimaa" {})))
(is (thrown+?
#(tyokalut/tasmaa-poikkeus
%
virheet/+viallinen-kutsu+
virheet/+tuntematon-ely+
"Tuntematon ELY Tuntematon")
(tieluvat/hae-ely db "Tuntematon" {})))))
(deftest hae-sijainnit
(let [db (luo-testitietokanta)
tielupa-pistesijainnilla {::tielupa/sijainnit [{:harja.domain.tielupa/tie 20
:harja.domain.tielupa/aet 1
:harja.domain.tielupa/aosa 1}]}
tielupa-pistesijainteineen (tieluvat/hae-tieluvan-sijainnit db tielupa-pistesijainnilla)
tielupa-sijaintivalilla {::tielupa/sijainnit [{:harja.domain.tielupa/tie 20
:harja.domain.tielupa/aet 1
:harja.domain.tielupa/aosa 1
:losa 1
:let 300}]}
tielupa-sijaintivaleineen (tieluvat/hae-tieluvan-sijainnit db tielupa-sijaintivalilla)
tarkasta-tielupa (fn [ilman-sijainti sijainnin-kanssa]
(let [avaimet (fn [tielupa] (mapv #(select-keys % [::tielupa/tie ::tielupa/aosa ::tielupa/aet])
(::tielupa/sijainnit tielupa)))]
(is (= (avaimet ilman-sijainti) (avaimet sijainnin-kanssa))))
(is (every? #(not (nil? (::tielupa/geometria %))) (::tielupa/sijainnit sijainnin-kanssa))))]
(tarkasta-tielupa tielupa-pistesijainnilla tielupa-pistesijainteineen)
(tarkasta-tielupa tielupa-sijaintivalilla tielupa-sijaintivaleineen)))
(deftest kirjaa-uusi-mainoslupa
(let [db (luo-testitietokanta)
tunniste 3453455
tielupa-json (.replace (slurp "test/resurssit/api/tieluvan-kirjaus-mainoslupa.json") "<TUNNISTE>" (str tunniste))
odotettu {::tielupa/tienpitoviranomainen-sahkopostiosoite ""
::tielupa/kohde-postitoimipaikka "Kiiminki"
::tielupa/liikenneohjaajan-sahkopostiosoite ""
::tielupa/liikenneohjaajan-yhteyshenkilo "Lilli Liikenteenohjaaja"
::tielupa/tienpitoviranomainen-puhelinnumero "987-7889087"
::tielupa/voimassaolon-alkupvm #inst "2020-09-21T21:00:00.000-00:00"
::tielupa/tienpitoviranomainen-yhteyshenkilo "Teijo Tienpitäjä"
::tielupa/kunta "Kiiminki"
::tielupa/kohde-lahiosoite "Tie 123"
::tielupa/liikenneohjaajan-nimi "Liikenneohjaus Oy"
::tielupa/paatoksen-diaarinumero "123456789"
::tielupa/hakija-tyyppi "kotitalous"
::tielupa/urakat [4]
::tielupa/kaapeliasennukset []
::tielupa/urakoitsija-sahkopostiosoite ""
::tielupa/hakija-postinumero "90900"
::tielupa/sijainnit
[{::tielupa/ajorata 0
::tielupa/tie 20
::tielupa/aosa 6
::tielupa/aet 2631
::tielupa/kaista 11}]
::tielupa/urakoitsija-puhelinnumero "987-7889087"
::tielupa/otsikko "Lupa lisätä mainos tielle"
::tielupa/hakija-postinosoite "Liitintie 1"
::tielupa/urakoiden-nimet ["Oulun alueurakka"]
::tielupa/ely 12
::tielupa/kohde-postinumero "90900"
::tielupa/ulkoinen-tunniste 3453455
::tielupa/saapumispvm #inst "2017-09-21T21:00:00.000-00:00"
::tielupa/liikenneohjaajan-puhelinnumero "987-7889087"
::tielupa/katselmus-url ""
::tielupa/voimassaolon-loppupvm #inst "2020-09-21T21:00:00.000-00:00"
::tielupa/hakija-nimi "Henna Hakija"
::tielupa/myontamispvm #inst "2018-09-21T21:00:00.000-00:00"
::tielupa/tyyppi :mainoslupa
::tielupa/hakija-sahkopostiosoite ""
::tielupa/hakija-puhelinnumero "987-7889087"
::tielupa/tien-nimi "Kuusamontie"
::tielupa/urakoitsija-yhteyshenkilo "Yrjänä Yhteyshenkilo"
::tielupa/urakoitsija-nimi "Puulaaki Oy"}]
(api-tyokalut/post-kutsu ["/api/tieluvat"] kayttaja portti tielupa-json)
(let [haettu-tielupa (first (tielupa-q/hae-tieluvat db {::tielupa/ulkoinen-tunniste tunniste}))
haettu-tielupa (-> haettu-tielupa
(dissoc ::muokkaustiedot/luotu)
(assoc ::tielupa/sijainnit (map #(dissoc % ::tielupa/geometria) (::tielupa/sijainnit haettu-tielupa))))]
(tarkista-map-arvot odotettu haettu-tielupa))))
(deftest kirjaa-uusi-suojaalue-lupa
(let [db (luo-testitietokanta)
tunniste 373773
tielupa-json (.replace (slurp "test/resurssit/api/tieluvan-kirjaus-suojaalue.json") "<TUNNISTE>" (str tunniste))
odotettu #:harja.domain.tielupa{:urakoitsija-yhteyshenkilo
"Yrjänä Yhteyshenkilo",
:tienpitoviranomainen-sahkopostiosoite
"",
:voimassaolon-alkupvm
#inst "2020-09-21T21:00:00.000-00:00",
:kohde-postitoimipaikka "Kiiminki",
:kohde-lahiosoite "Tie 123",
:liikenneohjaajan-yhteyshenkilo
"Lilli Liikenteenohjaaja",
:hakija-postinumero "90900",
:kunta "Kiiminki",
:liikenneohjaajan-sahkopostiosoite
"",
:urakoitsija-sahkopostiosoite
"",
:tienpitoviranomainen-yhteyshenkilo
"Teijo Tienpitäjä",
:tienpitoviranomainen-puhelinnumero
"987-7889087",
:sijainnit
[#:harja.domain.tielupa {:tie 20,
:aosa 6,
:aet 2631,
:ajorata 0,
:kaista 1}],
:hakija-tyyppi "kotitalous",
:kaapeliasennukset [],
:liikenneohjaajan-nimi "Liikenneohjaus Oy",
:paatoksen-diaarinumero "123456789",
:saapumispvm
#inst "2017-09-21T21:00:00.000-00:00",
:otsikko "Lupa rakentaa aitta suoja-alueelle",
:katselmus-url "",
:urakoiden-nimet ["Oulun alueurakka"],
:hakija-postinosoite "Liitintie 1",
:urakoitsija-puhelinnumero "987-7889087",
:kohde-postinumero "90900",
:hakija-puhelinnumero "987-7889087",
:ulkoinen-tunniste 373773,
:liikenneohjaajan-puhelinnumero "987-7889087",
:tien-nimi "Kuusamontie",
:hakija-nimi "Henna Hakija",
:myontamispvm
#inst "2018-09-21T21:00:00.000-00:00",
:hakija-sahkopostiosoite
"",
:tyyppi :suoja-aluerakentamislupa,
:urakoitsija-nimi "Puulaaki Oy",
:voimassaolon-loppupvm
#inst "2020-09-21T21:00:00.000-00:00"}]
(api-tyokalut/post-kutsu ["/api/tieluvat"] kayttaja portti tielupa-json)
(let [haettu-tielupa (first (tielupa-q/hae-tieluvat db {::tielupa/ulkoinen-tunniste tunniste}))
_ (prn haettu-tielupa)
haettu-tielupa (-> haettu-tielupa
(dissoc ::muokkaustiedot/luotu)
(assoc ::tielupa/sijainnit (map #(dissoc % ::tielupa/geometria) (::tielupa/sijainnit haettu-tielupa))))]
(tarkista-map-arvot odotettu haettu-tielupa))))
(deftest kirjaa-uusi-liittymalupalupa
(let [db (luo-testitietokanta)
tunniste 43858
tielupa-json (.replace (slurp "test/resurssit/api/tieluvan-kirjaus-liittymalupa.json") "<TUNNISTE>" (str tunniste))
odotettu #:harja.domain.tielupa{:urakoitsija-yhteyshenkilo
"Yrjänä Yhteyshenkilo",
:tienpitoviranomainen-sahkopostiosoite
"",
:voimassaolon-alkupvm
#inst "2020-09-21T21:00:00.000-00:00",
:kohde-postitoimipaikka "Kiiminki",
:kohde-lahiosoite "Tie 123",
:liikenneohjaajan-yhteyshenkilo
"Lilli Liikenteenohjaaja",
:hakija-postinumero "90900",
:kunta "Kiiminki",
:liikenneohjaajan-sahkopostiosoite
"",
:urakoitsija-sahkopostiosoite
"",
:tienpitoviranomainen-yhteyshenkilo
"Teijo Tienpitäjä",
:tienpitoviranomainen-puhelinnumero
"987-7889087",
:sijainnit
[#:harja.domain.tielupa {:tie 20,
:aosa 6,
:aet 2631,
:ajorata 0,
:kaista 11}],
:hakija-tyyppi "kotitalous",
:kaapeliasennukset [],
:liikenneohjaajan-nimi "Liikenneohjaus Oy",
:paatoksen-diaarinumero "123456789",
:saapumispvm
#inst "2017-09-21T21:00:00.000-00:00",
:otsikko
"Lupa rakentaa uusi liittymä mökkitielle",
:katselmus-url "",
:urakoiden-nimet ["Oulun alueurakka"],
:hakija-postinosoite "Liitintie 1",
:urakoitsija-puhelinnumero "987-7889087",
:kohde-postinumero "90900",
:hakija-puhelinnumero "987-7889087",
:ulkoinen-tunniste 43858,
:liikenneohjaajan-puhelinnumero "987-7889087",
:tien-nimi "Kuusamontie",
:hakija-nimi "Henna Liittymä",
:myontamispvm
#inst "2018-09-21T21:00:00.000-00:00",
:hakija-sahkopostiosoite
"",
:tyyppi :liittymalupa,
:urakoitsija-nimi "Puulaaki Oy",
:voimassaolon-loppupvm
#inst "2020-09-21T21:00:00.000-00:00"}
]
(api-tyokalut/post-kutsu ["/api/tieluvat"] kayttaja portti tielupa-json)
(let [haettu-tielupa (first (tielupa-q/hae-tieluvat db {::tielupa/ulkoinen-tunniste tunniste}))
_ (prn "haettu-tielpa:" haettu-tielupa)
haettu-tielupa (-> haettu-tielupa
(dissoc ::muokkaustiedot/luotu)
(assoc ::tielupa/sijainnit (map #(dissoc % ::tielupa/geometria) (::tielupa/sijainnit haettu-tielupa))))]
(tarkista-map-arvot odotettu haettu-tielupa))))
|
|
45feeadcb15e2731e7023ea5532b60515d5e82ebc5a840a5630a29faa65845b1 | TomerAberbach/programming-in-haskell-exercises | 8.hs | luhnDouble :: Int -> Int
luhnDouble n = if doubled > 9 then doubled - 9 else doubled
where
doubled = n * 2
luhn :: Int -> Int -> Int -> Int -> Bool
luhn a b c d = (luhnDouble a + b + luhnDouble c + d) `mod` 10 == 0
| null | https://raw.githubusercontent.com/TomerAberbach/programming-in-haskell-exercises/a66830529ebc9c4d84d0e4c6e0ad58041b46bc32/parts/1/chapters/4/8.hs | haskell | luhnDouble :: Int -> Int
luhnDouble n = if doubled > 9 then doubled - 9 else doubled
where
doubled = n * 2
luhn :: Int -> Int -> Int -> Int -> Bool
luhn a b c d = (luhnDouble a + b + luhnDouble c + d) `mod` 10 == 0
|
|
2357e689a2ddd536a48448c51c70592604e8551dec6922c52d6ad9d85efd57d4 | haskell-nix/hnix-store | Logger.hs | # language RankNTypes #
module System.Nix.Store.Remote.Logger
( Logger(..)
, Field(..)
, processOutput
)
where
import Prelude hiding ( Last )
import Control.Monad.Except ( throwError )
import Data.Binary.Get
import Network.Socket.ByteString ( recv )
import System.Nix.Store.Remote.Binary
import System.Nix.Store.Remote.Types
import System.Nix.Store.Remote.Util
controlParser :: Get Logger
controlParser = do
ctrl <- getInt
case (ctrl :: Int) of
0x6f6c6d67 -> Next <$> getByteStringLen
0x64617461 -> Read <$> getInt
0x64617416 -> Write <$> getByteStringLen
0x616c7473 -> pure Last
0x63787470 -> flip Error <$> getByteStringLen
<*> getInt
0x53545254 -> StartActivity <$> getInt
<*> getInt
<*> getInt
<*> getByteStringLen
<*> getFields
<*> getInt
0x53544f50 -> StopActivity <$> getInt
0x52534c54 -> Result <$> getInt
<*> getInt
<*> getFields
x -> fail $ "Invalid control message received:" <> show x
processOutput :: MonadStore [Logger]
processOutput = go decoder
where
decoder = runGetIncremental controlParser
go :: Decoder Logger -> MonadStore [Logger]
go (Done _leftover _consumed ctrl) = do
case ctrl of
e@(Error _ _) -> pure [e]
Last -> pure [Last]
Read _n -> do
(mdata, _) <- get
case mdata of
Nothing -> throwError "No data to read provided"
Just part -> do
-- XXX: we should check/assert part size against n of (Read n)
sockPut $ putByteStringLen part
clearData
go decoder
-- we should probably handle Read here as well
x -> do
next <- go decoder
pure $ x : next
go (Partial k) = do
soc <- asks storeSocket
chunk <- liftIO (Just <$> recv soc 8)
go (k chunk)
go (Fail _leftover _consumed msg) = error $ fromString msg
getFields :: Get [Field]
getFields = do
cnt <- getInt
replicateM cnt getField
getField :: Get Field
getField = do
typ <- getInt
case (typ :: Int) of
0 -> LogInt <$> getInt
1 -> LogStr <$> getByteStringLen
x -> fail $ "Unknown log type: " <> show x
| null | https://raw.githubusercontent.com/haskell-nix/hnix-store/5e55781516178939bf9a86f943e120e6ad775b9d/hnix-store-remote/src/System/Nix/Store/Remote/Logger.hs | haskell | XXX: we should check/assert part size against n of (Read n)
we should probably handle Read here as well | # language RankNTypes #
module System.Nix.Store.Remote.Logger
( Logger(..)
, Field(..)
, processOutput
)
where
import Prelude hiding ( Last )
import Control.Monad.Except ( throwError )
import Data.Binary.Get
import Network.Socket.ByteString ( recv )
import System.Nix.Store.Remote.Binary
import System.Nix.Store.Remote.Types
import System.Nix.Store.Remote.Util
controlParser :: Get Logger
controlParser = do
ctrl <- getInt
case (ctrl :: Int) of
0x6f6c6d67 -> Next <$> getByteStringLen
0x64617461 -> Read <$> getInt
0x64617416 -> Write <$> getByteStringLen
0x616c7473 -> pure Last
0x63787470 -> flip Error <$> getByteStringLen
<*> getInt
0x53545254 -> StartActivity <$> getInt
<*> getInt
<*> getInt
<*> getByteStringLen
<*> getFields
<*> getInt
0x53544f50 -> StopActivity <$> getInt
0x52534c54 -> Result <$> getInt
<*> getInt
<*> getFields
x -> fail $ "Invalid control message received:" <> show x
processOutput :: MonadStore [Logger]
processOutput = go decoder
where
decoder = runGetIncremental controlParser
go :: Decoder Logger -> MonadStore [Logger]
go (Done _leftover _consumed ctrl) = do
case ctrl of
e@(Error _ _) -> pure [e]
Last -> pure [Last]
Read _n -> do
(mdata, _) <- get
case mdata of
Nothing -> throwError "No data to read provided"
Just part -> do
sockPut $ putByteStringLen part
clearData
go decoder
x -> do
next <- go decoder
pure $ x : next
go (Partial k) = do
soc <- asks storeSocket
chunk <- liftIO (Just <$> recv soc 8)
go (k chunk)
go (Fail _leftover _consumed msg) = error $ fromString msg
getFields :: Get [Field]
getFields = do
cnt <- getInt
replicateM cnt getField
getField :: Get Field
getField = do
typ <- getInt
case (typ :: Int) of
0 -> LogInt <$> getInt
1 -> LogStr <$> getByteStringLen
x -> fail $ "Unknown log type: " <> show x
|
aeac387b59cbcaf91a0f219635c0e29e353ea3c7d2ed7006982661dae2a65feb | takikawa/racket-ppa | whitespace.rkt | #lang racket/base
(require "../common/struct-star.rkt"
"config.rkt"
"special.rkt"
"readtable.rkt"
"consume.rkt"
"error.rkt"
"location.rkt"
"special.rkt"
"special-comment.rkt")
(provide read-char/skip-whitespace-and-comments
special-comment-via-readtable?)
;; Skip most whitespace, including non-character values that are
;; `special-comment?`s --- but return a special comment (always
;; `special`-wrapped) if `(read-config-keep-comment? config)`. The
;; result is a character that has been consumed.
- based special comments are not skipped ; those must be
;; handled directly, possibly via `special-comment-via-readtable?`.
(define (read-char/skip-whitespace-and-comments init-c read-one in config)
(define rt (read-config-readtable config))
(define source (read-config-source config))
(let skip-loop ([init-c init-c])
(define c (or init-c
(read-char/special in config source)))
(define ec (readtable-effective-char rt c))
(cond
[(eof-object? ec) c]
[(not (char? ec))
(define v (special-value c))
(cond
[(and (special-comment? v)
(not (read-config-keep-comment? config)))
(skip-loop #f)]
[else c])]
[(or (char-whitespace? ec)
;; treat BOM as whitespace in the same sense as a comment:
(eqv? #\uFEFF ec))
(skip-loop #f)]
ec )
(let loop ()
(define c (read-char/special in config source))
(unless (or (eof-object? c)
(eqv? #\newline (effective-char c config)))
(loop)))
(if (read-config-keep-comment? config)
(result-special-comment)
(skip-loop #f))]
[(and (char=? #\# ec)
(eqv? #\| (readtable-effective-char/# rt (peek-char/special in config 0 source))))
(skip-pipe-comment! c in config)
(if (read-config-keep-comment? config)
(result-special-comment)
(skip-loop #f))]
[(and (char=? #\# ec)
(eqv? #\! (readtable-effective-char/# rt (peek-char/special in config 0 source)))
(let ([c3 (peek-char/special in config 1 source)])
(or (eqv? #\space c3)
(eqv? #\/ c3))))
(skip-unix-line-comment! in config)
(if (read-config-keep-comment? config)
(result-special-comment)
(skip-loop #f))]
[(and (char=? #\# ec)
(eqv? #\; (readtable-effective-char/# rt (peek-char/special in config 0 source))))
(consume-char in #\;)
(define v (read-one #f in config))
(when (eof-object? v)
(reader-error in config
#:due-to v
"expected a commented-out element for `~a;`, but found end-of-file"
ec))
(if (read-config-keep-comment? config)
(result-special-comment)
(skip-loop #f))]
[else c])))
;; For returning a comment as a result:
(define (result-special-comment)
(special (make-special-comment #f)))
;; Skips balanced pipe comments
(define (skip-pipe-comment! init-c in config)
(define source (read-config-source config))
(define-values (line col pos) (port-next-location in))
(consume-char in #\|)
(let loop ([prev-c #f] [depth 0])
(define c (read-char/special in config source))
(cond
[(eof-object? c)
(reader-error in (reading-at config line col pos)
#:due-to c
"end of file in `#|` comment")]
[(not (char? c))
(loop #f depth)]
[(and (char=? #\| c) (eqv? prev-c #\#))
(loop #f (add1 depth))]
[(and (char=? #\# c) (eqv? prev-c #\|))
(when (positive? depth)
(loop #f (sub1 depth)))]
[else (loop c depth)])))
;; Skips a comment that starts #! and runs to the end of the line, but
;; can be continued with `\` at the end of the line
(define (skip-unix-line-comment! in config)
(let loop ([backslash? #f])
(define c (read-char/special in config))
(cond
[(eof-object? c) (void)]
[(not (char? c)) (loop #f)]
[(char=? c #\newline)
(when backslash?
(loop #f))]
[(char=? c #\\)
(loop #t)]
[else (loop #f)])))
(define (special-comment-via-readtable? c read-one in config)
;; If we have a readtable, we may need to read ahead to make sure
;; that `c` doesn't start a comment. Always reading would be more
;; consistent in some ways, it works better in other ways to limit
;; reading and only read if a readtable callback that could produce
;; a comment.
(define v
(cond
[(and (char? c)
(let ([ec (readtable-effective-char (read-config-readtable config) c #f)])
(or (not ec)
(and (char=? ec #\#)
(let ([c2 (peek-char in)])
(and (char? c2)
(not (readtable-effective-char/# (read-config-readtable config) c2))))))))
(read-one c in (keep-comment config))]
[else c]))
(and (special-comment? v) v))
| null | https://raw.githubusercontent.com/takikawa/racket-ppa/5f2031309f6359c61a8dfd1fec0b77bbf9fb78df/src/expander/read/whitespace.rkt | racket | Skip most whitespace, including non-character values that are
`special-comment?`s --- but return a special comment (always
`special`-wrapped) if `(read-config-keep-comment? config)`. The
result is a character that has been consumed.
those must be
handled directly, possibly via `special-comment-via-readtable?`.
treat BOM as whitespace in the same sense as a comment:
(readtable-effective-char/# rt (peek-char/special in config 0 source))))
)
For returning a comment as a result:
Skips balanced pipe comments
Skips a comment that starts #! and runs to the end of the line, but
can be continued with `\` at the end of the line
If we have a readtable, we may need to read ahead to make sure
that `c` doesn't start a comment. Always reading would be more
consistent in some ways, it works better in other ways to limit
reading and only read if a readtable callback that could produce
a comment. | #lang racket/base
(require "../common/struct-star.rkt"
"config.rkt"
"special.rkt"
"readtable.rkt"
"consume.rkt"
"error.rkt"
"location.rkt"
"special.rkt"
"special-comment.rkt")
(provide read-char/skip-whitespace-and-comments
special-comment-via-readtable?)
(define (read-char/skip-whitespace-and-comments init-c read-one in config)
(define rt (read-config-readtable config))
(define source (read-config-source config))
(let skip-loop ([init-c init-c])
(define c (or init-c
(read-char/special in config source)))
(define ec (readtable-effective-char rt c))
(cond
[(eof-object? ec) c]
[(not (char? ec))
(define v (special-value c))
(cond
[(and (special-comment? v)
(not (read-config-keep-comment? config)))
(skip-loop #f)]
[else c])]
[(or (char-whitespace? ec)
(eqv? #\uFEFF ec))
(skip-loop #f)]
ec )
(let loop ()
(define c (read-char/special in config source))
(unless (or (eof-object? c)
(eqv? #\newline (effective-char c config)))
(loop)))
(if (read-config-keep-comment? config)
(result-special-comment)
(skip-loop #f))]
[(and (char=? #\# ec)
(eqv? #\| (readtable-effective-char/# rt (peek-char/special in config 0 source))))
(skip-pipe-comment! c in config)
(if (read-config-keep-comment? config)
(result-special-comment)
(skip-loop #f))]
[(and (char=? #\# ec)
(eqv? #\! (readtable-effective-char/# rt (peek-char/special in config 0 source)))
(let ([c3 (peek-char/special in config 1 source)])
(or (eqv? #\space c3)
(eqv? #\/ c3))))
(skip-unix-line-comment! in config)
(if (read-config-keep-comment? config)
(result-special-comment)
(skip-loop #f))]
[(and (char=? #\# ec)
(define v (read-one #f in config))
(when (eof-object? v)
(reader-error in config
#:due-to v
"expected a commented-out element for `~a;`, but found end-of-file"
ec))
(if (read-config-keep-comment? config)
(result-special-comment)
(skip-loop #f))]
[else c])))
(define (result-special-comment)
(special (make-special-comment #f)))
(define (skip-pipe-comment! init-c in config)
(define source (read-config-source config))
(define-values (line col pos) (port-next-location in))
(consume-char in #\|)
(let loop ([prev-c #f] [depth 0])
(define c (read-char/special in config source))
(cond
[(eof-object? c)
(reader-error in (reading-at config line col pos)
#:due-to c
"end of file in `#|` comment")]
[(not (char? c))
(loop #f depth)]
[(and (char=? #\| c) (eqv? prev-c #\#))
(loop #f (add1 depth))]
[(and (char=? #\# c) (eqv? prev-c #\|))
(when (positive? depth)
(loop #f (sub1 depth)))]
[else (loop c depth)])))
(define (skip-unix-line-comment! in config)
(let loop ([backslash? #f])
(define c (read-char/special in config))
(cond
[(eof-object? c) (void)]
[(not (char? c)) (loop #f)]
[(char=? c #\newline)
(when backslash?
(loop #f))]
[(char=? c #\\)
(loop #t)]
[else (loop #f)])))
(define (special-comment-via-readtable? c read-one in config)
(define v
(cond
[(and (char? c)
(let ([ec (readtable-effective-char (read-config-readtable config) c #f)])
(or (not ec)
(and (char=? ec #\#)
(let ([c2 (peek-char in)])
(and (char? c2)
(not (readtable-effective-char/# (read-config-readtable config) c2))))))))
(read-one c in (keep-comment config))]
[else c]))
(and (special-comment? v) v))
|
b7a23f80ffa661d99fac668961bef2f346af046faaad54d2a8714d9d92c9a649 | 2600hz/kazoo | kapi_asr.erl | %%%-----------------------------------------------------------------------------
( C ) 2011 - 2020 , 2600Hz
%%% @doc ASR requests, responses, and errors AMQP API.
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(kapi_asr).
-compile({no_auto_import, [error/1]}).
-export([api_definitions/0, api_definition/1]).
-export([bind_q/2, unbind_q/1]).
-export([declare_exchanges/0]).
-export([req/1, req_v/1]).
-export([resp/1, resp_v/1]).
-export([error/1, error_v/1]).
-export([publish_req/1, publish_req/2]).
-export([publish_resp/2, publish_resp/3]).
-export([publish_error/2, publish_error/3]).
-include_lib("kz_amqp_util.hrl").
-define(CATEGORY, <<"asr">>).
-define(KEY_ASR_REQ, <<"asr.req">>).
%%------------------------------------------------------------------------------
%% @doc Get all API definitions of this module.
%% @end
%%------------------------------------------------------------------------------
-spec api_definitions() -> kapi_definition:apis().
api_definitions() ->
[req_definition()
,resp_definition()
,error_definition()
].
%%------------------------------------------------------------------------------
%% @doc Get API definition of the given `Name'.
%% @see api_definitions/0
%% @end
%%------------------------------------------------------------------------------
-spec api_definition(kz_term:text()) -> kapi_definition:api().
api_definition(Name) when not is_binary(Name) ->
api_definition(kz_term:to_binary(Name));
api_definition(<<"req">>) ->
req_definition();
api_definition(<<"resp">>) ->
resp_definition();
api_definition(<<"error">>) ->
error_definition().
%%------------------------------------------------------------------------------
%% @doc Request ASR.
%% @end
%%------------------------------------------------------------------------------
-spec req_definition() -> kapi_definition:api().
req_definition() ->
EventName = <<"req">>,
Setters = [{fun kapi_definition:set_name/2, EventName}
,{fun kapi_definition:set_friendly_name/2, <<"ASR Req">>}
,{fun kapi_definition:set_description/2, <<"A request for ASR services">>}
,{fun kapi_definition:set_category/2, ?CATEGORY}
,{fun kapi_definition:set_build_fun/2, fun req/1}
,{fun kapi_definition:set_validate_fun/2, fun req_v/1}
,{fun kapi_definition:set_publish_fun/2, fun publish_req/1}
,{fun kapi_definition:set_binding/2, ?KEY_ASR_REQ}
,{fun kapi_definition:set_required_headers/2, [<<"ASR-Endpoint">>
,<<"ASR-Account-ID">>
,<<"ASR-Account-Password">>
,<<"Call-ID">>
,<<"Control-Queue">>
]}
,{fun kapi_definition:set_optional_headers/2, [<<"Language">>
,<<"Stream-Response">>
]}
,{fun kapi_definition:set_values/2
,kapi_definition:event_type_headers(?CATEGORY, EventName)
}
,{fun kapi_definition:set_types/2, []}
],
kapi_definition:setters(Setters).
-spec resp_definition() -> kapi_definition:api().
resp_definition() ->
EventName = <<"resp">>,
Setters = [{fun kapi_definition:set_name/2, EventName}
,{fun kapi_definition:set_friendly_name/2, <<"ASR Response">>}
,{fun kapi_definition:set_description/2, <<"An ASR Response with detected text">>}
,{fun kapi_definition:set_category/2, ?CATEGORY}
,{fun kapi_definition:set_build_fun/2, fun resp/1}
,{fun kapi_definition:set_validate_fun/2, fun resp_v/1}
,{fun kapi_definition:set_publish_fun/2, fun publish_resp/2}
,{fun kapi_definition:set_required_headers/2, []}
,{fun kapi_definition:set_optional_headers/2, [<<"Response-Text">>]}
,{fun kapi_definition:set_values/2
,kapi_definition:event_type_headers(?CATEGORY, EventName)
}
,{fun kapi_definition:set_types/2, []}
],
kapi_definition:setters(Setters).
-spec error_definition() -> kapi_definition:api().
error_definition() ->
EventName = <<"error">>,
Setters = [{fun kapi_definition:set_name/2, EventName}
,{fun kapi_definition:set_friendly_name/2, <<"ASR Error">>}
,{fun kapi_definition:set_description/2, <<"An ASR Error when converting speech to text">>}
,{fun kapi_definition:set_category/2, ?CATEGORY}
,{fun kapi_definition:set_build_fun/2, fun error/1}
,{fun kapi_definition:set_validate_fun/2, fun error_v/1}
,{fun kapi_definition:set_publish_fun/2, fun publish_error/2}
,{fun kapi_definition:set_required_headers/2, []}
,{fun kapi_definition:set_optional_headers/2, [<<"Error-Code">>
,<<"Error-Msg">>
]}
,{fun kapi_definition:set_values/2
,kapi_definition:event_type_headers(?CATEGORY, EventName)
}
,{fun kapi_definition:set_types/2, []}
],
kapi_definition:setters(Setters).
%%------------------------------------------------------------------------------
%% @doc Send request ASR.
Takes { @link kz_term : ( ) } , creates JSON string or error .
%% @end
%%------------------------------------------------------------------------------
-spec req(kz_term:api_terms()) -> kz_api:api_formatter_return().
req(Req) ->
kapi_definition:build_message(Req, req_definition()).
-spec req_v(kz_term:api_terms()) -> boolean().
req_v(Req) ->
kapi_definition:validate(Req, req_definition()).
%%------------------------------------------------------------------------------
%% @doc Prepare and publish an ASR request.
%% @end
%%------------------------------------------------------------------------------
-spec publish_req(kz_term:api_terms()) -> 'ok'.
publish_req(JObj) ->
publish_req(JObj, ?DEFAULT_CONTENT_TYPE).
-spec publish_req(kz_term:api_terms(), kz_term:ne_binary()) -> 'ok'.
publish_req(Req, ContentType) ->
Definition = resp_definition(),
{'ok', Payload} = kz_api:prepare_api_payload(Req
,kapi_definition:values(Definition)
,kapi_definition:build_fun(Definition)
),
kz_amqp_util:callctl_publish(kapi_definition:binding(Definition), Payload, ContentType).
%%------------------------------------------------------------------------------
%% @doc Response with ASR.
Takes { @link kz_term : ( ) } , creates JSON string or error .
%% @end
%%------------------------------------------------------------------------------
-spec resp(kz_term:api_terms()) -> kz_api:api_formatter_return().
resp(Req) ->
kapi_definition:build_message(Req, resp_definition()).
-spec resp_v(kz_term:api_terms()) -> boolean().
resp_v(Req) ->
kapi_definition:validate(Req, resp_definition()).
%%------------------------------------------------------------------------------
%% @doc Prepare and publish an ASR response.
%% @end
%%------------------------------------------------------------------------------
-spec publish_resp(kz_term:ne_binary(), kz_term:api_terms()) -> 'ok'.
publish_resp(Queue, JObj) ->
publish_resp(Queue, JObj, ?DEFAULT_CONTENT_TYPE).
-spec publish_resp(kz_term:ne_binary(), kz_term:api_terms(), kz_term:ne_binary()) -> 'ok'.
publish_resp(Queue, Resp, ContentType) ->
Definition = resp_definition(),
{'ok', Payload} = kz_api:prepare_api_payload(Resp
,kapi_definition:values(Definition)
,kapi_definition:build_fun(Definition)
),
kz_amqp_util:targeted_publish(Queue, Payload, ContentType).
%%------------------------------------------------------------------------------
%% @doc Asr error.
Takes { @link kz_term : ( ) } , creates JSON string or error .
%% @end
%%------------------------------------------------------------------------------
-spec error(kz_term:api_terms()) -> kz_api:api_formatter_return().
error(Req) ->
kapi_definition:build_message(Req, error_definition()).
-spec error_v(kz_term:api_terms()) -> boolean().
error_v(Req) ->
kapi_definition:validate(Req, error_definition()).
%%------------------------------------------------------------------------------
%% @doc Prepare and publish an ASR error.
%% @end
%%------------------------------------------------------------------------------
-spec publish_error(kz_term:ne_binary(), kz_term:api_terms()) -> 'ok'.
publish_error(Queue, JObj) ->
publish_error(Queue, JObj, ?DEFAULT_CONTENT_TYPE).
-spec publish_error(kz_term:ne_binary(), kz_term:api_terms(), kz_term:ne_binary()) -> 'ok'.
publish_error(Queue, Error, ContentType) ->
Definition = error_definition(),
{'ok', Payload} = kz_api:prepare_api_payload(Error
,kapi_definition:values(Definition)
,kapi_definition:build_fun(Definition)
),
kz_amqp_util:targeted_publish(Queue, Payload, ContentType).
%%------------------------------------------------------------------------------
%% @doc Bind to a queue to the ASR exchange and events.
%% @end
%%------------------------------------------------------------------------------
-spec bind_q(binary(), kz_term:proplist()) -> 'ok'.
bind_q(Queue, _Props) ->
kz_amqp_util:bind_q_to_callctl(Queue, ?KEY_ASR_REQ).
%%------------------------------------------------------------------------------
%% @doc Unbind from a queue to the ASR exchange and events.
%% @end
%%------------------------------------------------------------------------------
-spec unbind_q(binary()) -> 'ok'.
unbind_q(Queue) ->
kz_amqp_util:unbind_q_from_callctl(Queue).
%%------------------------------------------------------------------------------
%% @doc Declare the exchanges used by this API.
%% @end
%%------------------------------------------------------------------------------
-spec declare_exchanges() -> 'ok'.
declare_exchanges() ->
kz_amqp_util:callctl_exchange().
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/core/kazoo_amqp/src/api/kapi_asr.erl | erlang | -----------------------------------------------------------------------------
@doc ASR requests, responses, and errors AMQP API.
@end
-----------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Get all API definitions of this module.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Get API definition of the given `Name'.
@see api_definitions/0
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Request ASR.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Send request ASR.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Prepare and publish an ASR request.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Response with ASR.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Prepare and publish an ASR response.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Asr error.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Prepare and publish an ASR error.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Bind to a queue to the ASR exchange and events.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Unbind from a queue to the ASR exchange and events.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Declare the exchanges used by this API.
@end
------------------------------------------------------------------------------ | ( C ) 2011 - 2020 , 2600Hz
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(kapi_asr).
-compile({no_auto_import, [error/1]}).
-export([api_definitions/0, api_definition/1]).
-export([bind_q/2, unbind_q/1]).
-export([declare_exchanges/0]).
-export([req/1, req_v/1]).
-export([resp/1, resp_v/1]).
-export([error/1, error_v/1]).
-export([publish_req/1, publish_req/2]).
-export([publish_resp/2, publish_resp/3]).
-export([publish_error/2, publish_error/3]).
-include_lib("kz_amqp_util.hrl").
-define(CATEGORY, <<"asr">>).
-define(KEY_ASR_REQ, <<"asr.req">>).
%% @doc All AMQP API definitions exposed by this module
%% (request, response and error).
-spec api_definitions() -> kapi_definition:apis().
api_definitions() ->
    [Build() || Build <- [fun req_definition/0
                         ,fun resp_definition/0
                         ,fun error_definition/0
                         ]].

%% @doc Look up a single API definition by name. Non-binary names are
%% first normalized to binaries; unknown names crash with function_clause.
-spec api_definition(kz_term:text()) -> kapi_definition:api().
api_definition(<<"req">>) -> req_definition();
api_definition(<<"resp">>) -> resp_definition();
api_definition(<<"error">>) -> error_definition();
api_definition(Name) when not is_binary(Name) ->
    api_definition(kz_term:to_binary(Name)).
%% @doc Definition of the ASR request API: event metadata, builder and
%% validator funs, publish fun, AMQP binding and header schema.
-spec req_definition() -> kapi_definition:api().
req_definition() ->
    EventName = <<"req">>,
    %% Each setter configures one aspect of the kapi_definition record.
    Setters = [{fun kapi_definition:set_name/2, EventName}
              ,{fun kapi_definition:set_friendly_name/2, <<"ASR Req">>}
              ,{fun kapi_definition:set_description/2, <<"A request for ASR services">>}
              ,{fun kapi_definition:set_category/2, ?CATEGORY}
              ,{fun kapi_definition:set_build_fun/2, fun req/1}
              ,{fun kapi_definition:set_validate_fun/2, fun req_v/1}
              ,{fun kapi_definition:set_publish_fun/2, fun publish_req/1}
              ,{fun kapi_definition:set_binding/2, ?KEY_ASR_REQ}
              ,{fun kapi_definition:set_required_headers/2, [<<"ASR-Endpoint">>
                                                            ,<<"ASR-Account-ID">>
                                                            ,<<"ASR-Account-Password">>
                                                            ,<<"Call-ID">>
                                                            ,<<"Control-Queue">>
                                                            ]}
              ,{fun kapi_definition:set_optional_headers/2, [<<"Language">>
                                                            ,<<"Stream-Response">>
                                                            ]}
              ,{fun kapi_definition:set_values/2
               ,kapi_definition:event_type_headers(?CATEGORY, EventName)
               }
              ,{fun kapi_definition:set_types/2, []}
              ],
    kapi_definition:setters(Setters).

%% @doc Definition of the ASR response API. No required headers;
%% the detected text travels in the optional Response-Text header.
-spec resp_definition() -> kapi_definition:api().
resp_definition() ->
    EventName = <<"resp">>,
    Setters = [{fun kapi_definition:set_name/2, EventName}
              ,{fun kapi_definition:set_friendly_name/2, <<"ASR Response">>}
              ,{fun kapi_definition:set_description/2, <<"An ASR Response with detected text">>}
              ,{fun kapi_definition:set_category/2, ?CATEGORY}
              ,{fun kapi_definition:set_build_fun/2, fun resp/1}
              ,{fun kapi_definition:set_validate_fun/2, fun resp_v/1}
              ,{fun kapi_definition:set_publish_fun/2, fun publish_resp/2}
              ,{fun kapi_definition:set_required_headers/2, []}
              ,{fun kapi_definition:set_optional_headers/2, [<<"Response-Text">>]}
              ,{fun kapi_definition:set_values/2
               ,kapi_definition:event_type_headers(?CATEGORY, EventName)
               }
              ,{fun kapi_definition:set_types/2, []}
              ],
    kapi_definition:setters(Setters).

%% @doc Definition of the ASR error API. Error code and message are both
%% optional headers.
-spec error_definition() -> kapi_definition:api().
error_definition() ->
    EventName = <<"error">>,
    Setters = [{fun kapi_definition:set_name/2, EventName}
              ,{fun kapi_definition:set_friendly_name/2, <<"ASR Error">>}
              ,{fun kapi_definition:set_description/2, <<"An ASR Error when converting speech to text">>}
              ,{fun kapi_definition:set_category/2, ?CATEGORY}
              ,{fun kapi_definition:set_build_fun/2, fun error/1}
              ,{fun kapi_definition:set_validate_fun/2, fun error_v/1}
              ,{fun kapi_definition:set_publish_fun/2, fun publish_error/2}
              ,{fun kapi_definition:set_required_headers/2, []}
              ,{fun kapi_definition:set_optional_headers/2, [<<"Error-Code">>
                                                            ,<<"Error-Msg">>
                                                            ]}
              ,{fun kapi_definition:set_values/2
               ,kapi_definition:event_type_headers(?CATEGORY, EventName)
               }
              ,{fun kapi_definition:set_types/2, []}
              ],
    kapi_definition:setters(Setters).
Takes { @link kz_term : ( ) } , creates JSON string or error .
-spec req(kz_term:api_terms()) -> kz_api:api_formatter_return().
req(Req) ->
kapi_definition:build_message(Req, req_definition()).
-spec req_v(kz_term:api_terms()) -> boolean().
req_v(Req) ->
kapi_definition:validate(Req, req_definition()).
-spec publish_req(kz_term:api_terms()) -> 'ok'.
publish_req(JObj) ->
publish_req(JObj, ?DEFAULT_CONTENT_TYPE).
%% @doc Prepare an ASR request payload and publish it on the callctl
%% exchange using the request definition's binding (asr.req).
-spec publish_req(kz_term:api_terms(), kz_term:ne_binary()) -> 'ok'.
publish_req(Req, ContentType) ->
    %% Fix: this previously fetched resp_definition/0, which validates the
    %% request against the *response* schema and never sets an AMQP binding;
    %% requests must be built and published via req_definition/0.
    Definition = req_definition(),
    {'ok', Payload} = kz_api:prepare_api_payload(Req
                                                ,kapi_definition:values(Definition)
                                                ,kapi_definition:build_fun(Definition)
                                                ),
    kz_amqp_util:callctl_publish(kapi_definition:binding(Definition), Payload, ContentType).
Takes { @link kz_term : ( ) } , creates JSON string or error .
-spec resp(kz_term:api_terms()) -> kz_api:api_formatter_return().
resp(Req) ->
kapi_definition:build_message(Req, resp_definition()).
-spec resp_v(kz_term:api_terms()) -> boolean().
resp_v(Req) ->
kapi_definition:validate(Req, resp_definition()).
-spec publish_resp(kz_term:ne_binary(), kz_term:api_terms()) -> 'ok'.
publish_resp(Queue, JObj) ->
publish_resp(Queue, JObj, ?DEFAULT_CONTENT_TYPE).
-spec publish_resp(kz_term:ne_binary(), kz_term:api_terms(), kz_term:ne_binary()) -> 'ok'.
publish_resp(Queue, Resp, ContentType) ->
Definition = resp_definition(),
{'ok', Payload} = kz_api:prepare_api_payload(Resp
,kapi_definition:values(Definition)
,kapi_definition:build_fun(Definition)
),
kz_amqp_util:targeted_publish(Queue, Payload, ContentType).
Takes { @link kz_term : ( ) } , creates JSON string or error .
-spec error(kz_term:api_terms()) -> kz_api:api_formatter_return().
error(Req) ->
kapi_definition:build_message(Req, error_definition()).
-spec error_v(kz_term:api_terms()) -> boolean().
error_v(Req) ->
kapi_definition:validate(Req, error_definition()).
-spec publish_error(kz_term:ne_binary(), kz_term:api_terms()) -> 'ok'.
publish_error(Queue, JObj) ->
publish_error(Queue, JObj, ?DEFAULT_CONTENT_TYPE).
-spec publish_error(kz_term:ne_binary(), kz_term:api_terms(), kz_term:ne_binary()) -> 'ok'.
publish_error(Queue, Error, ContentType) ->
Definition = error_definition(),
{'ok', Payload} = kz_api:prepare_api_payload(Error
,kapi_definition:values(Definition)
,kapi_definition:build_fun(Definition)
),
kz_amqp_util:targeted_publish(Queue, Payload, ContentType).
%% @doc Bind Q to the callctl exchange for ASR requests. The options
%% proplist is accepted for API symmetry but not used.
-spec bind_q(binary(), kz_term:proplist()) -> 'ok'.
bind_q(Q, _Options) ->
    kz_amqp_util:bind_q_to_callctl(Q, ?KEY_ASR_REQ).

%% @doc Remove Q's binding from the callctl exchange.
-spec unbind_q(binary()) -> 'ok'.
unbind_q(Q) ->
    kz_amqp_util:unbind_q_from_callctl(Q).

%% @doc Declare the AMQP exchanges used by this API.
-spec declare_exchanges() -> 'ok'.
declare_exchanges() ->
    kz_amqp_util:callctl_exchange().
|
e8836df94cbfd56b37d8d7a2d072274e23f203ae73fadb89cdb757ec2841301e | facebookincubator/hsthrift | Client.hs | -----------------------------------------------------------------
Autogenerated by Thrift
--
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
@generated
-----------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - overlapping - patterns #
# OPTIONS_GHC -fno - warn - incomplete - patterns #
# OPTIONS_GHC -fno - warn - incomplete - uni - patterns #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
module Service.X.Client
(X, testFunc, testFuncIO, send_testFunc, _build_testFunc,
recv_testFunc, _parse_testFunc)
where
import qualified Control.Arrow as Arrow
import qualified Control.Concurrent as Concurrent
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad
import qualified Control.Monad.Trans.Class as Trans
import qualified Control.Monad.Trans.Reader as Reader
import qualified Data.ByteString.Builder as ByteString
import qualified Data.ByteString.Lazy as LBS
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Int as Int
import qualified Data.List as List
import qualified Data.Proxy as Proxy
import qualified Prelude as Prelude
import qualified Thrift.Binary.Parser as Parser
import qualified Thrift.Codegen as Thrift
import qualified Thrift.Protocol.ApplicationException.Types
as Thrift
import Data.Monoid ((<>))
import Prelude ((==), (=<<), (>>=), (<$>), (.))
import Service.Types
-- Phantom service tag; has no constructors and is used only at the type level.
data X

-- | Call @testFunc@ inside the Thrift client monad, pulling the protocol
-- proxy, channel, RPC options and sequence-number counter from the
-- 'Thrift.ThriftEnv'.
testFunc ::
  (Thrift.Protocol p, Thrift.ClientChannel c, (Thrift.<:) s X) =>
  Thrift.ThriftM p c s Int.Int32
testFunc
  = do Thrift.ThriftEnv _proxy _channel _opts _counter <- Reader.ask
       Trans.lift (testFuncIO _proxy _channel _counter _opts)

-- | IO-level variant of 'testFunc': registers send/receive callbacks,
-- dispatches the request and blocks until the reply is decoded.
testFuncIO ::
  (Thrift.Protocol p, Thrift.ClientChannel c, (Thrift.<:) s X) =>
  Proxy.Proxy p ->
  c s -> Thrift.Counter -> Thrift.RpcOptions -> Prelude.IO Int.Int32
testFuncIO _proxy _channel _counter _opts
  = do (_handle, _sendCob, _recvCob) <- Thrift.mkCallbacks
                                          (recv_testFunc _proxy)
       send_testFunc _proxy _channel _counter _sendCob _recvCob _opts
       Thrift.wait _handle

-- | Serialize the call message and hand it to the channel; the request is
-- sent with 'Thrift.High' priority.
send_testFunc ::
  (Thrift.Protocol p, Thrift.ClientChannel c, (Thrift.<:) s X) =>
  Proxy.Proxy p ->
  c s ->
  Thrift.Counter ->
  Thrift.SendCallback ->
  Thrift.RecvCallback -> Thrift.RpcOptions -> Prelude.IO ()
send_testFunc _proxy _channel _counter _sendCob _recvCob _rpcOpts
  = do _seqNum <- _counter
       let
         _callMsg
           = LBS.toStrict
               (ByteString.toLazyByteString (_build_testFunc _proxy _seqNum))
       Thrift.sendRequest _channel
         (Thrift.Request _callMsg
            (Thrift.setRpcPriority _rpcOpts Thrift.High))
         _sendCob
         _recvCob

-- | Decode a raw response into either the Int32 result or an exception;
-- parse failures are wrapped as 'Thrift.ProtocolException'.
recv_testFunc ::
  (Thrift.Protocol p) =>
  Proxy.Proxy p ->
  Thrift.Response -> Prelude.Either Exception.SomeException Int.Int32
recv_testFunc _proxy (Thrift.Response _response _)
  = Monad.join
      (Arrow.left (Exception.SomeException . Thrift.ProtocolException)
         (Parser.parse (_parse_testFunc _proxy) _response))

-- | Build the wire message for a @testFunc@ call (empty argument struct).
_build_testFunc ::
  Thrift.Protocol p =>
  Proxy.Proxy p -> Int.Int32 -> ByteString.Builder
_build_testFunc _proxy _seqNum
  = Thrift.genMsgBegin _proxy "testFunc" 1 _seqNum <>
      Thrift.genStruct _proxy []
      <> Thrift.genMsgEnd _proxy

-- | Parse a @testFunc@ reply. Message type 2 carries the success field
-- (id 0, i32); type 3 is a server-side ApplicationException; other
-- message types are protocol errors.
_parse_testFunc ::
  Thrift.Protocol p =>
  Proxy.Proxy p ->
  Parser.Parser (Prelude.Either Exception.SomeException Int.Int32)
_parse_testFunc _proxy
  = do Thrift.MsgBegin _name _msgTy _ <- Thrift.parseMsgBegin _proxy
       _result <- case _msgTy of
                    1 -> Prelude.fail "testFunc: expected reply but got function call"
                    2 | _name == "testFunc" ->
                        do let
                             _idMap = HashMap.fromList [("testFunc_success", 0)]
                           _fieldBegin <- Thrift.parseFieldBegin _proxy 0 _idMap
                           case _fieldBegin of
                             Thrift.FieldBegin _type _id _bool -> do case _id of
                                                                       0 | _type ==
                                                                             Thrift.getI32Type
                                                                               _proxy
                                                                           ->
                                                                           Prelude.fmap
                                                                             Prelude.Right
                                                                             (Thrift.parseI32
                                                                                _proxy)
                                                                       _ -> Prelude.fail
                                                                              (Prelude.unwords
                                                                                 ["unrecognized exception, type:",
                                                                                  Prelude.show
                                                                                    _type,
                                                                                  "field id:",
                                                                                  Prelude.show _id])
                             Thrift.FieldEnd -> Prelude.fail "no response"
                      | Prelude.otherwise -> Prelude.fail "reply function does not match"
                    3 -> Prelude.fmap (Prelude.Left . Exception.SomeException)
                           (Thrift.parseStruct _proxy ::
                              Parser.Parser Thrift.ApplicationException)
                    4 -> Prelude.fail
                           "testFunc: expected reply but got oneway function call"
                    _ -> Prelude.fail "testFunc: invalid message type"
       Thrift.parseMsgEnd _proxy
       Prelude.return _result
DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
---------------------------------------------------------------
# LANGUAGE OverloadedStrings #
# LANGUAGE BangPatterns # | Autogenerated by Thrift
@generated
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - overlapping - patterns #
# OPTIONS_GHC -fno - warn - incomplete - patterns #
# OPTIONS_GHC -fno - warn - incomplete - uni - patterns #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
module Service.X.Client
(X, testFunc, testFuncIO, send_testFunc, _build_testFunc,
recv_testFunc, _parse_testFunc)
where
import qualified Control.Arrow as Arrow
import qualified Control.Concurrent as Concurrent
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad
import qualified Control.Monad.Trans.Class as Trans
import qualified Control.Monad.Trans.Reader as Reader
import qualified Data.ByteString.Builder as ByteString
import qualified Data.ByteString.Lazy as LBS
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Int as Int
import qualified Data.List as List
import qualified Data.Proxy as Proxy
import qualified Prelude as Prelude
import qualified Thrift.Binary.Parser as Parser
import qualified Thrift.Codegen as Thrift
import qualified Thrift.Protocol.ApplicationException.Types
as Thrift
import Data.Monoid ((<>))
import Prelude ((==), (=<<), (>>=), (<$>), (.))
import Service.Types
data X
testFunc ::
(Thrift.Protocol p, Thrift.ClientChannel c, (Thrift.<:) s X) =>
Thrift.ThriftM p c s Int.Int32
testFunc
= do Thrift.ThriftEnv _proxy _channel _opts _counter <- Reader.ask
Trans.lift (testFuncIO _proxy _channel _counter _opts)
testFuncIO ::
(Thrift.Protocol p, Thrift.ClientChannel c, (Thrift.<:) s X) =>
Proxy.Proxy p ->
c s -> Thrift.Counter -> Thrift.RpcOptions -> Prelude.IO Int.Int32
testFuncIO _proxy _channel _counter _opts
= do (_handle, _sendCob, _recvCob) <- Thrift.mkCallbacks
(recv_testFunc _proxy)
send_testFunc _proxy _channel _counter _sendCob _recvCob _opts
Thrift.wait _handle
send_testFunc ::
(Thrift.Protocol p, Thrift.ClientChannel c, (Thrift.<:) s X) =>
Proxy.Proxy p ->
c s ->
Thrift.Counter ->
Thrift.SendCallback ->
Thrift.RecvCallback -> Thrift.RpcOptions -> Prelude.IO ()
send_testFunc _proxy _channel _counter _sendCob _recvCob _rpcOpts
= do _seqNum <- _counter
let
_callMsg
= LBS.toStrict
(ByteString.toLazyByteString (_build_testFunc _proxy _seqNum))
Thrift.sendRequest _channel
(Thrift.Request _callMsg
(Thrift.setRpcPriority _rpcOpts Thrift.High))
_sendCob
_recvCob
recv_testFunc ::
(Thrift.Protocol p) =>
Proxy.Proxy p ->
Thrift.Response -> Prelude.Either Exception.SomeException Int.Int32
recv_testFunc _proxy (Thrift.Response _response _)
= Monad.join
(Arrow.left (Exception.SomeException . Thrift.ProtocolException)
(Parser.parse (_parse_testFunc _proxy) _response))
_build_testFunc ::
Thrift.Protocol p =>
Proxy.Proxy p -> Int.Int32 -> ByteString.Builder
_build_testFunc _proxy _seqNum
= Thrift.genMsgBegin _proxy "testFunc" 1 _seqNum <>
Thrift.genStruct _proxy []
<> Thrift.genMsgEnd _proxy
_parse_testFunc ::
Thrift.Protocol p =>
Proxy.Proxy p ->
Parser.Parser (Prelude.Either Exception.SomeException Int.Int32)
_parse_testFunc _proxy
= do Thrift.MsgBegin _name _msgTy _ <- Thrift.parseMsgBegin _proxy
_result <- case _msgTy of
1 -> Prelude.fail "testFunc: expected reply but got function call"
2 | _name == "testFunc" ->
do let
_idMap = HashMap.fromList [("testFunc_success", 0)]
_fieldBegin <- Thrift.parseFieldBegin _proxy 0 _idMap
case _fieldBegin of
Thrift.FieldBegin _type _id _bool -> do case _id of
0 | _type ==
Thrift.getI32Type
_proxy
->
Prelude.fmap
Prelude.Right
(Thrift.parseI32
_proxy)
_ -> Prelude.fail
(Prelude.unwords
["unrecognized exception, type:",
Prelude.show
_type,
"field id:",
Prelude.show _id])
Thrift.FieldEnd -> Prelude.fail "no response"
| Prelude.otherwise -> Prelude.fail "reply function does not match"
3 -> Prelude.fmap (Prelude.Left . Exception.SomeException)
(Thrift.parseStruct _proxy ::
Parser.Parser Thrift.ApplicationException)
4 -> Prelude.fail
"testFunc: expected reply but got oneway function call"
_ -> Prelude.fail "testFunc: invalid message type"
Thrift.parseMsgEnd _proxy
Prelude.return _result |
b5f6fbb8b9509a06ae17d8cb60ca897d656dc08cb8d918445dde7765205b95b6 | Atry/Control.Dsl | Monadic.hs | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE RebindableSyntax #
module Control.Dsl.Monadic where
import Control.Dsl.PolyCont
import qualified Prelude
| This @Monadic@ keyword extracts the monadic value of a monadic expression .
newtype Monadic m r a = Monadic (m a)
instance Prelude.Monad m => PolyCont (Monadic m) (m b) a where
runPolyCont (Monadic k) = (Prelude.>>=) k
| null | https://raw.githubusercontent.com/Atry/Control.Dsl/f19da265c8ea537af95e448e6107fa503d5363c2/src/Control/Dsl/Monadic.hs | haskell | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE RebindableSyntax #
module Control.Dsl.Monadic where
import Control.Dsl.PolyCont
import qualified Prelude
| This @Monadic@ keyword extracts the monadic value of a monadic expression .
newtype Monadic m r a = Monadic (m a)
instance Prelude.Monad m => PolyCont (Monadic m) (m b) a where
runPolyCont (Monadic k) = (Prelude.>>=) k
|
|
2f958798e50887abfbe714390282a91096966aa6d567eee59f55c83b6729a57d | lordi/haskell-terminal | ParserUtils.hs | -- General parsec helpers
module Terminal.ParserUtils where
import Text.Parsec
import Text.Parsec.String
import Data.Maybe (catMaybes)
-- |Run a parser and also return exactly the text it consumed, computed by
-- diffing the remaining-input length before and after the parse.
annotate parser = do
    inputBefore <- getInput
    value <- parser
    inputAfter <- getInput
    let consumedLen = length inputBefore - length inputAfter
    return (value, take consumedLen inputBefore)
-- |Run the parser as many times as it succeeds and pair the collected
-- results with whatever input remains unconsumed.
manyWithLeftover parser = do
    matches <- many parser
    leftover <- getInput
    return (matches, leftover)
-- |Apply parser p at least n and up to m times
-- NOTE(review): after the n mandatory matches, each of the remaining
-- (m - n) slots is tried independently via optionMaybe, so one failing
-- slot does not stop later slots from matching — confirm this
-- gap-tolerant behavior is intended, and that p does not consume input
-- on failure (otherwise optionMaybe itself fails).
manyUpTo n m p = do
    first <- count n p
    rest <- count (m - n) (optionMaybe p)
    return (first ++ (catMaybes rest))
| null | https://raw.githubusercontent.com/lordi/haskell-terminal/037a1374c3e28a9cc00f9da0ec8b2887944ab943/src/Terminal/ParserUtils.hs | haskell | General parsec helpers
|Apply parser p and return its result together with the string that has been
parsed.
|Apply parser p as often as possible and return the matches together with the
bytes that are not successfully parsed (that are left over)
|Apply parser p at least n and up to m times | module Terminal.ParserUtils where
import Text.Parsec
import Text.Parsec.String
import Data.Maybe (catMaybes)
annotate p = do
before <- getInput
result <- p
after <- getInput
return (result, take (length before - length after) before)
manyWithLeftover p = do
x <- many p
i <- getInput
return (x, i)
manyUpTo n m p = do
first <- count n p
rest <- count (m - n) (optionMaybe p)
return (first ++ (catMaybes rest))
|
a1ed969ea0b7ae814398f0506392582890e96c268e85ed68330658748b3f38b3 | tommay/pokemon-go | Epic.hs | module Epic (
Epic.catch,
Epic.fail,
toEpic,
E.MonadCatch,
) where
import qualified Control.Monad.Catch as E
-- Message-only exception type; Show renders just the message so an
-- uncaught EpicException reads as plain text.
newtype EpicException = EpicException String
instance Show EpicException where
  show (EpicException string) = string
instance E.Exception EpicException
-- | Abort the computation by throwing an 'EpicException' with the message.
fail :: E.MonadThrow m => String -> m a
fail message = E.throwM (EpicException message)
-- | Run an action, passing the message of any 'EpicException' it throws
-- to the handler. Other exception types propagate unchanged.
catch :: E.MonadCatch m => m a -> (String -> m a) -> m a
catch action handler =
  E.catch action (\(EpicException message) -> handler message)
-- | Lift an 'Either' into a throwing monad: a 'Left' becomes an
-- 'EpicException' built from its 'show'n value, a 'Right' is returned.
toEpic :: (Show a, E.MonadCatch m) => Either a b -> m b
toEpic = either (Epic.fail . show) return
| null | https://raw.githubusercontent.com/tommay/pokemon-go/6b0132f03a1d50ad5513b70fe715e57879dfa597/src/Epic.hs | haskell | module Epic (
Epic.catch,
Epic.fail,
toEpic,
E.MonadCatch,
) where
import qualified Control.Monad.Catch as E
newtype EpicException = EpicException String
instance Show EpicException where
show (EpicException string) = string
instance E.Exception EpicException
fail :: E.MonadThrow m => String -> m a
fail = E.throwM . EpicException
catch :: E.MonadCatch m => m a -> (String -> m a) -> m a
catch expr handler =
E.catch expr (\ (EpicException ex) -> handler ex)
toEpic :: (Show a, E.MonadCatch m) => Either a b -> m b
toEpic either =
case either of
Left err -> Epic.fail $ show err
Right val -> return val
|
|
6473359633a2b5a850e038cf9a608715164c4cfac981b2a9b2b7ee1448f905c2 | ekoontz/menard | grammar.cljc | (ns menard.grammar
(:require [menard.exception :refer [exception]]
#?(:clj [clojure.java.io :as io :refer [resource]])
[menard.morphology :as m]
[clojure.string :as string]
#?(:clj [clojure.tools.logging :as log])
#?(:cljs [cljslog.core :as log])
[dag_unify.core :as u :refer [unify]]
[dag_unify.diagnostics :refer [fail-path]]
[dag_unify.serialization :as s :refer [serialize]]))
(defn list-as-map-to-list
  "turn a map represention of a list: e.g. {:first :a :rest {:first :b}}
  into a list: e.g. [a b]"
  [m]
  ;; Walk down :rest emitting each :first; stops (returns nil) as soon as
  ;; a node has no :first value.
  (when-let [head (u/get-in m [:first])]
    (cons head (list-as-map-to-list (u/get-in m [:rest])))))
(defn process-options
  "Expand each rule's :options (a form that evals to a seq of dags,
   defaulting to [:top]) into one concrete rule per option, dropping
   options that fail to unify and stripping bookkeeping keys."
  [input-grammar]
  (log/info (str "process-options: input-grammar: " (count input-grammar) " rules."))
  (let [output-grammar
        (->> input-grammar
             (mapcat (fn [base-rule]
                       (let [result
                             (->> (eval (:options base-rule [:top]))
                                  (map (fn [option]
                                         (unify base-rule option)))
                                  (filter #(not (= % :fail)))
                                  ;; remove keys that should not survive expansion
                                  (map (fn [each]
                                         (-> each
                                             (dissoc :options)
                                             (dissoc :dag_unify.serialization/serialized)))))]
                         result))))]
    (log/info (str "process-options: output-grammar: " (count output-grammar) " rules."))
    output-grammar))

(defn filter-rules-by-firstness
  "Keep only rules whose :head is unified with :1 (head-first) or :2
   (head-last); any rule that specifies neither is treated as a fatal
   grammar error."
  [input-grammar]
  (log/info (str "filter-rules: input-grammar: " (count input-grammar) " rules."))
  (let [output-grammar
        (->> input-grammar
             (filter (fn [input-rule]
                       (cond (= (get input-rule :head)
                                (get input-rule :1))
                             (do (log/debug (str "rule is ok: head is first: " (u/get-in input-rule [:rule])))
                                 true)
                             (= (get input-rule :head)
                                (get input-rule :2))
                             (do (log/debug (str "rule is ok: head is last: " (u/get-in input-rule [:rule])))
                                 true)
                             :else
                             ;; neither position holds the head: abort loading
                             (let [error-message (str "rule: " (u/get-in input-rule [:rule]) ": does not specify if the head is first or last.")]
                               (log/error error-message)
                               (exception error-message))))))]
    (log/info (str "filter-rules-by-firstness: output-grammar: " (count output-grammar) " rules."))
    output-grammar))

(defn warn-rules-by-catness
  "Log a warning for every rule without a concrete :cat value.
   The predicate always returns true, so no rule is removed; the filter
   is used only for its logging side effect. NOTE(review): the filtered
   seq is lazy, so warnings fire only when the result is consumed —
   confirm this is intended."
  [input-grammar]
  (log/info (str "warn-rules-by-catness: input-grammar: " (count input-grammar) " rules."))
  (let [output-grammar
        (->> input-grammar
             (filter (fn [input-rule]
                       (cond (and (keyword? (u/get-in input-rule [:cat]))
                                  (not (= :top (u/get-in input-rule [:cat]))))
                             (do (log/debug (str "rule: " (u/get-in input-rule [:rule]) " is ok: :cat is specified to: " (u/get-in input-rule [:cat])))
                                 true)
                             :else
                             (let [warn-message (str "rule: " (u/get-in input-rule [:rule]) " has no :cat value specified: might overgeneralize unexpectedly.")]
                               (log/warn warn-message)
                               true)))))]
    (log/info (str "warn-rules-by-catness: output-grammar: " (count output-grammar) " rules."))
    output-grammar))
(defn apply-processing-rules-to
  "Fold processing-rules (maps with :rule/:if/:then) over rule: when rule
   unifies with a rule's :if antecedent, replace it by its unification
   with the :then consequent; on antecedent or consequent failure the
   rule passes through unchanged."
  [rule processing-rules]
  (log/debug (str "looking at rule: " (:rule rule) " and applying this many processing-rules: " (count processing-rules)))
  (if (empty? processing-rules)
    rule
    (let [processing-rule (first processing-rules)
          result (let [{processing-rule-name :rule
                        antecedent :if
                        consequent :then} processing-rule
                       result-if (unify rule antecedent)]
                   (if (not (= :fail result-if))
                     (let [unify-result (unify rule consequent)]
                       (if (= :fail unify-result)
                         (do
                           (log/warn (str "postprocessing-rule: " processing-rule " failed for rule: " (:rule rule) ": leaving rule unmodified by that postprocessing rule."))
                           rule)
                         ;; antecedent succeeded and unify with consequent succeeded; continue with the rest of the rules.
                         unify-result))
                     ;; antecedent failed; continue with the rest of the rules.
                     rule))]
      (apply-processing-rules-to result (rest processing-rules)))))
(defn process
  "Run the full grammar pipeline: merge each rule's :unify dags, expand
   :options, validate head position and :cat, apply optional
   grammar-processing-rules, and mark every surviving rule as phrasal."
  [grammar & [grammar-processing-rules]]
  (log/info (str "grammar/process: processing " (count grammar) " rule" (if (not (= (count grammar) 1)) "s") "."))
  (->> grammar
       ;; each member of :unify in a rule is a symbol.
       ;; evaluate each symbol, which should be a dag, and
       ;; combine all of them with the original rule:
       (map #(reduce unify
                     (cons (dissoc % :unify)
                           (map (fn [each]
                                  (eval each))
                                (:unify %)))))
       process-options
       filter-rules-by-firstness
       warn-rules-by-catness
       (map #(if (seq grammar-processing-rules)
               ;; some processing rules: apply each to this grammar rule:
               (apply-processing-rules-to % grammar-processing-rules)
               ;; no processing rules: just return the grammar rule:
               %))
       (remove #(= :fail %))
       (map #(u/assoc-in % [:phrasal?] true))
       (map #(u/assoc-in % [:menard.generate/started?] true))))
#?(:clj
   (defn write-compiled-grammar
     "Serialize every rule in grammar and spit the resulting vector of
     vectors to write-to-file."
     [grammar write-to-file]
     (let [serialized (mapv (comp vec serialize) grammar)]
       (spit write-to-file serialized))))
;; Read a pre-compiled (serialized) grammar from the classpath at
;; macro-expansion time and embed the parsed form in the compiled code.
(defmacro read-compiled-grammar [filename]
  `~(-> filename
        resource
        slurp
        read-string))

;; Read a file of expressions from the classpath at macro-expansion time.
(defmacro read-expressions [filename]
  `~(-> filename
        resource
        slurp
        read-string))

;; Read a source grammar from the classpath at macro-expansion time.
(defmacro read-grammar [filename]
  `~(-> filename
        resource
        slurp
        read-string))

(defn read-grammar-fn
  "Runtime counterpart of read-grammar: slurp and parse filename.
   Paths starting with file:/// are read directly; anything else is
   resolved as a classpath resource."
  [filename]
  (-> filename
      ((fn [filename]
         (if (re-find #"^file:///" filename)
           (do
             (log/debug (str "read-grammar-fn: reading a file:/// filename:" filename))
             filename)
           (do
             (log/debug (str "read-grammar-fn: reading a non-file:/// filename:" filename))
             (resource filename)))))
      slurp
      read-string))
| null | https://raw.githubusercontent.com/ekoontz/menard/d0702dc2d775a967865240e85d9d8683a47e6616/src/menard/grammar.cljc | clojure | antecedent succeeded and unify with consequent succeeded; continue with the rest of the rules.
antecedent failed; continue with the rest of the rules.
each member of :unify in a rule is a symbol.
combine all of them with the original rule:
some processing rules: apply each to this grammar rule: | (ns menard.grammar
(:require [menard.exception :refer [exception]]
#?(:clj [clojure.java.io :as io :refer [resource]])
[menard.morphology :as m]
[clojure.string :as string]
#?(:clj [clojure.tools.logging :as log])
#?(:cljs [cljslog.core :as log])
[dag_unify.core :as u :refer [unify]]
[dag_unify.diagnostics :refer [fail-path]]
[dag_unify.serialization :as s :refer [serialize]]))
(defn list-as-map-to-list
"turn a map represention of a list: e.g. {:first :a :rest {:first :b}}
into a list: e.g. [a b]"
[m]
(when (u/get-in m [:first])
(cons (u/get-in m [:first])
(list-as-map-to-list (u/get-in m [:rest])))))
(defn process-options [input-grammar]
(log/info (str "process-options: input-grammar: " (count input-grammar) " rules."))
(let [output-grammar
(->> input-grammar
(mapcat (fn [base-rule]
(let [result
(->> (eval (:options base-rule [:top]))
(map (fn [option]
(unify base-rule option)))
(filter #(not (= % :fail)))
(map (fn [each]
(-> each
(dissoc :options)
(dissoc :dag_unify.serialization/serialized)))))]
result))))]
(log/info (str "process-options: output-grammar: " (count output-grammar) " rules."))
output-grammar))
(defn filter-rules-by-firstness [input-grammar]
(log/info (str "filter-rules: input-grammar: " (count input-grammar) " rules."))
(let [output-grammar
(->> input-grammar
(filter (fn [input-rule]
(cond (= (get input-rule :head)
(get input-rule :1))
(do (log/debug (str "rule is ok: head is first: " (u/get-in input-rule [:rule])))
true)
(= (get input-rule :head)
(get input-rule :2))
(do (log/debug (str "rule is ok: head is last: " (u/get-in input-rule [:rule])))
true)
:else
(let [error-message (str "rule: " (u/get-in input-rule [:rule]) ": does not specify if the head is first or last.")]
(log/error error-message)
(exception error-message))))))]
(log/info (str "filter-rules-by-firstness: output-grammar: " (count output-grammar) " rules."))
output-grammar))
(defn warn-rules-by-catness [input-grammar]
(log/info (str "warn-rules-by-catness: input-grammar: " (count input-grammar) " rules."))
(let [output-grammar
(->> input-grammar
(filter (fn [input-rule]
(cond (and (keyword? (u/get-in input-rule [:cat]))
(not (= :top (u/get-in input-rule [:cat]))))
(do (log/debug (str "rule: " (u/get-in input-rule [:rule]) " is ok: :cat is specified to: " (u/get-in input-rule [:cat])))
true)
:else
(let [warn-message (str "rule: " (u/get-in input-rule [:rule]) " has no :cat value specified: might overgeneralize unexpectedly.")]
(log/warn warn-message)
true)))))]
(log/info (str "warn-rules-by-catness: output-grammar: " (count output-grammar) " rules."))
output-grammar))
(defn apply-processing-rules-to [rule processing-rules]
(log/debug (str "looking at rule: " (:rule rule) " and applying this many processing-rules: " (count processing-rules)))
(if (empty? processing-rules)
rule
(let [processing-rule (first processing-rules)
result (let [{processing-rule-name :rule
antecedent :if
consequent :then} processing-rule
result-if (unify rule antecedent)]
(if (not (= :fail result-if))
(let [unify-result (unify rule consequent)]
(if (= :fail unify-result)
(do
(log/warn (str "postprocessing-rule: " processing-rule " failed for rule: " (:rule rule) ": leaving rule unmodified by that postprocessing rule."))
rule)
unify-result))
rule))]
(apply-processing-rules-to result (rest processing-rules)))))
(defn process [grammar & [grammar-processing-rules]]
(log/info (str "grammar/process: processing " (count grammar) " rule" (if (not (= (count grammar) 1)) "s") "."))
(->> grammar
evaluate each symbol , which should be a dag , and
(map #(reduce unify
(cons (dissoc % :unify)
(map (fn [each]
(eval each))
(:unify %)))))
process-options
filter-rules-by-firstness
warn-rules-by-catness
(map #(if (seq grammar-processing-rules)
(apply-processing-rules-to % grammar-processing-rules)
no processing rules : just return the gramamr rule :
%))
(remove #(= :fail %))
(map #(u/assoc-in % [:phrasal?] true))
(map #(u/assoc-in % [:menard.generate/started?] true))))
#?(:clj
(defn write-compiled-grammar [grammar write-to-file]
(spit write-to-file (vec (->> grammar (map serialize) (map vec))))))
(defmacro read-compiled-grammar [filename]
`~(-> filename
resource
slurp
read-string))
(defmacro read-expressions [filename]
`~(-> filename
resource
slurp
read-string))
(defmacro read-grammar [filename]
`~(-> filename
resource
slurp
read-string))
(defn read-grammar-fn [filename]
(-> filename
((fn [filename]
(if (re-find #"^file:///" filename)
(do
(log/debug (str "read-grammar-fn: reading a file:/// filename:" filename))
filename)
(do
(log/debug (str "read-grammar-fn: reading a non-file:/// filename:" filename))
(resource filename)))))
slurp
read-string))
|
fa87b674c497a63c6f3c9454ef2197c9b821330cd0b8d9c6bd8f0fcc5860c14b | erikd/haskell-big-integer-experiment | sanity-test.hs |
import Control.Monad (when)
import New.Integer ()
import qualified GMP.Integer as G
import qualified New.GHC.Integer.Type as N
main :: IO ()
main = do
    -- Cross-check the new Integer implementation (N) against GMP (G) on a
    -- few boundary values; pairs are printed side by side for comparison.
    print $ (G.mkInteger False [0x7fffffff], N.mkInteger False [0x7fffffff])
    print $ (G.shiftLInteger (G.mkInteger False [0x7fffffff]) 1#, N.shiftLInteger (N.mkInteger False [0x7fffffff]) 1#)
    print $ (G.mkInteger False [-10], N.mkInteger False [-10])
    -- Older scratchpad checks, disabled by default (flip to True to run).
    when False $ do
        putStrLn "Small"
        print $ (N.Small 0x1234)
        print $ (N.Small 0x12346789)
        putStrLn "\nmkInteger"
        print $ N.mkInteger True [1]
        print $ N.mkInteger True [0, 2]
        print $ N.mkInteger False [1]
        print $ N.negateInteger (N.mkInteger False [1])
        print $ N.mkInteger True [1000000000000000000]
        print $ N.mkInteger True [0x7fffffff]
        print $ N.mkInteger True [0x80000000]
        putStrLn "\nshiftLInteger"
        print $ N.shiftLInteger (N.mkInteger True [0]) 0#
        print $ N.shiftLInteger (N.mkInteger False [0]) 1#
        print $ N.shiftLInteger (N.mkInteger False [1]) 1#
        print $ N.shiftLInteger (N.mkInteger True [0x10000000]) 1#
        print $ N.shiftLInteger (N.mkInteger True [0x10000000]) 2#
        print $ N.shiftLInteger (N.mkInteger True [0x10000000]) 3#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 4#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 8#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 12#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 16#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 32#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 60#
        print $ N.shiftLInteger (N.mkInteger False [0x7fffffff00000001]) 1#
        print $ N.shiftLInteger (N.mkInteger False [0x7fffffff00000001]) 2#
        print $ N.shiftLInteger (N.mkInteger False [0x7fffffff00000001]) 4#
        putStrLn "\ntimesLInteger"
        printProduct (N.mkInteger True [1]) (N.Small 1)
        printProduct (N.mkInteger True [0x1001]) (N.Small 0x1001)
        printProduct (N.mkInteger True [0x100000001]) (N.Small 0x100000001)
-- | Print "a * b = product" using the implementation under test.
printProduct :: N.Integer -> N.Integer -> IO ()
printProduct a b =
    putStrLn (concat [show a, " * ", show b, " = ", show (N.timesInteger a b)])
| null | https://raw.githubusercontent.com/erikd/haskell-big-integer-experiment/7841ec3fcc5be219fa16963849bd12137112f8a9/sanity-test.hs | haskell |
import Control.Monad (when)
import New.Integer ()
import qualified GMP.Integer as G
import qualified New.GHC.Integer.Type as N
main :: IO ()
-- Entry point of the sanity test: compares the reference implementation
-- (module G, GMP-backed) against the implementation under test (module N)
-- by printing paired results -- each printed tuple should show two equal
-- values.  The large 'when False' block is a disabled set of N-only
-- probes; flip the literal to True to run them.
main = do
    -- Paired G/N sanity checks on mkInteger and shiftLInteger.
    print $ (G.mkInteger False [0x7fffffff], N.mkInteger False [0x7fffffff])
    print $ (G.shiftLInteger (G.mkInteger False [0x7fffffff]) 1#, N.shiftLInteger (N.mkInteger False [0x7fffffff]) 1#)
    print $ (G.mkInteger False [-10], N.mkInteger False [-10])
    -- Disabled N-only probe suite.
    when False $ do
        putStrLn "Small"
        print $ (N.Small 0x1234)
        print $ (N.Small 0x12346789)
        putStrLn "\nmkInteger"
        print $ N.mkInteger True [1]
        print $ N.mkInteger True [0, 2]
        print $ N.mkInteger False [1]
        print $ N.negateInteger (N.mkInteger False [1])
        print $ N.mkInteger True [1000000000000000000]
        print $ N.mkInteger True [0x7fffffff]
        print $ N.mkInteger True [0x80000000]
        putStrLn "\nshiftLInteger"
        -- Shift counts are unboxed Int# literals (MagicHash syntax).
        print $ N.shiftLInteger (N.mkInteger True [0]) 0#
        print $ N.shiftLInteger (N.mkInteger False [0]) 1#
        print $ N.shiftLInteger (N.mkInteger False [1]) 1#
        print $ N.shiftLInteger (N.mkInteger True [0x10000000]) 1#
        print $ N.shiftLInteger (N.mkInteger True [0x10000000]) 2#
        print $ N.shiftLInteger (N.mkInteger True [0x10000000]) 3#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 4#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 8#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 12#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 16#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 32#
        print $ N.shiftLInteger (N.mkInteger True [0x12345678]) 60#
        print $ N.shiftLInteger (N.mkInteger False [0x7fffffff00000001]) 1#
        print $ N.shiftLInteger (N.mkInteger False [0x7fffffff00000001]) 2#
        print $ N.shiftLInteger (N.mkInteger False [0x7fffffff00000001]) 4#
        putStrLn "\ntimesLInteger"
        printProduct (N.mkInteger True [1]) (N.Small 1)
        printProduct (N.mkInteger True [0x1001]) (N.Small 0x1001)
        printProduct (N.mkInteger True [0x100000001]) (N.Small 0x100000001)
printProduct (N.mkInteger True [0x100000001]) (N.Small 0x100000001)
-- | Print an equation of the form "x * y = product", where the product
-- is computed by the implementation under test (N.timesInteger).
printProduct :: N.Integer -> N.Integer -> IO ()
printProduct x y =
    putStrLn (show x ++ " * " ++ show y ++ " = " ++ show (N.timesInteger x y))
|
|
02f749ef3b19aa278298854285c7c8a939208884574fd16fcb75a5e8e35e5afc | Opetushallitus/ataru | pohjakoulutus_toinen_aste_handlers.cljs | (ns ataru.virkailija.application.pohjakoulutus-toinen-aste.pohjakoulutus-toinen-aste-handlers
(:require [re-frame.core :as re-frame :refer [subscribe]]
[ataru.tarjonta.haku :as haku]
[ataru.application.harkinnanvaraisuus.harkinnanvaraisuus-util :as hutil]
[ataru.application.harkinnanvaraisuus.harkinnanvaraisuus-types :refer [harkinnanvaraisuus-yksilollistetty-matikka-aikka-types
pohjakoulutus-harkinnanvarainen-types]]))
(re-frame/reg-event-fx
 :application/fetch-applicant-pohjakoulutus
 ;; GET the applicant's base-education (pohjakoulutus) data from
 ;; valintalaskentakoostepalvelu for the given haku and application.
 (fn [_ [_ haku-oid application-key]]
   (let [url (str "/lomake-editori/api/valintalaskentakoostepalvelu/suoritukset/haku/"
                  haku-oid
                  "/hakemus/"
                  application-key)]
     {:http {:method              :get
             :path                url
             :handler-or-dispatch :application/handle-fetch-applicant-pohjakoulutus-response
             :handler-args        application-key
             :override-args       {:error-handler
                                   (fn []
                                     (re-frame/dispatch
                                      [:application/handle-fetch-applicant-pohjakoulutus-error application-key]))}
             :id                  :fetch-applicant-pohjakoulutus}})))
(re-frame/reg-event-db
 :application/handle-fetch-applicant-pohjakoulutus-response
 ;; Store the fetched pohjakoulutus payload under the application key.
 (fn [db [_ response application-key]]
   (assoc-in db
             [:application :pohjakoulutus-by-application-key application-key]
             response)))
(re-frame/reg-event-db
 :application/handle-fetch-applicant-pohjakoulutus-error
 ;; Flag the pohjakoulutus fetch as failed for this application.
 (fn [db [_ application-key]]
   (assoc-in db
             [:application :pohjakoulutus-by-application-key application-key :error]
             true)))
(re-frame/reg-event-fx
 :application/fetch-application-valinnat
 ;; GET the application's selection (valinnat) results from tulos-service
 ;; for the given haku and application.
 (fn [_ [_ haku-oid application-key]]
   (let [url (str "/lomake-editori/api/tulos-service/haku/" haku-oid
                  "/hakemus/" application-key)]
     {:http {:method              :get
             :path                url
             :handler-or-dispatch :application/handle-fetch-application-valinnat-response
             :handler-args        application-key
             :override-args       {:error-handler
                                   (fn [resp]
                                     (re-frame/dispatch
                                      [:application/handle-fetch-application-valinnat-error application-key resp]))}
             :id                  :fetch-applicant-valinnat}})))
(re-frame/reg-event-db
 :application/handle-fetch-application-valinnat-response
 ;; Store the fetched valinnat payload under the application key.
 (fn [db [_ response application-key]]
   (assoc-in db
             [:application :valinnat-by-application-key application-key]
             response)))
(re-frame/reg-event-db
 :application/handle-fetch-application-valinnat-error
 ;; Record a fetch failure.  The backend message "Valinnan tulokset kesken"
 ;; (results still being computed) is mapped to a dedicated keyword so the
 ;; UI can distinguish it from a generic error (stored as true).
 (fn [db [_ application-key response]]
   (let [message (get-in response [:response :error])
         error   (if (= message "Valinnan tulokset kesken")
                   :valinnan-tulokset-kesken
                   true)]
     (assoc-in db
               [:application :valinnat-by-application-key application-key :error]
               error))))
(re-frame/reg-event-fx
 :application/fetch-applicant-harkinnanvaraisuus
 ;; GET the applicant's discretionary-admission (harkinnanvaraisuus) info
 ;; from valintalaskentakoostepalvelu for the given application.
 (fn [_ [_ application-key]]
   (let [url (str "/lomake-editori/api/valintalaskentakoostepalvelu/harkinnanvaraisuus/hakemus/"
                  application-key)]
     {:http {:method              :get
             :path                url
             :handler-or-dispatch :application/handle-fetch-applicant-harkinnanvaraisuus-response
             :handler-args        application-key
             :override-args       {:error-handler
                                   (fn []
                                     (re-frame/dispatch
                                      [:application/handle-fetch-applicant-harkinnanvaraisuus-error application-key]))}
             :id                  :fetch-applicant-harkinnanvaraisuus}})))
(re-frame/reg-event-db
 :application/handle-fetch-applicant-harkinnanvaraisuus-response
 ;; Derive three flags from the fetched harkinnanvaraisuus entries and
 ;; store each under the application key:
 ;;  - did the applicant get flagged via individualized math/mother-tongue,
 ;;  - did they get flagged via a harkinnanvarainen base education,
 ;;  - does the application itself claim a common harkinnanvaraisuus reason
 ;;    that Koski data does not confirm (third value may be non-boolean:
 ;;    it is whatever hutil/get-common-harkinnanvaraisuus-reason returns).
 ;; NOTE(review): dereferencing a subscription inside an event handler is
 ;; unusual for re-frame; kept as-is to preserve behavior.
 (fn [db [_ response application-key]]
   (let [answers @(subscribe [:application/selected-application-answers])
         reasons (map :harkinnanvaraisuudenSyy response)
         reason-in-group? (fn [group]
                            (boolean
                             (some (fn [reason] (some #{reason} group))
                                   reasons)))
         yksilollistetty-matikka-aikka?
         (reason-in-group? harkinnanvaraisuus-yksilollistetty-matikka-aikka-types)
         harkinnanvarainen-pohjakoulutus?
         (reason-in-group? pohjakoulutus-harkinnanvarainen-types)
         pick-value-fn (fn [answer-map field-key]
                         (:value (field-key answer-map)))
         harkinnanvarainen-application-but-not-according-to-koski?
         (and (not harkinnanvarainen-pohjakoulutus?)
              (hutil/get-common-harkinnanvaraisuus-reason answers pick-value-fn))]
     (-> db
         (assoc-in [:application :harkinnanvarainen-pohjakoulutus-by-application-key application-key]
                   harkinnanvarainen-pohjakoulutus?)
         (assoc-in [:application :yksilollistetty-matikka-aikka-by-application-key application-key]
                   yksilollistetty-matikka-aikka?)
         (assoc-in [:application :harkinnanvarainen-application-but-not-according-to-koski? application-key]
                   harkinnanvarainen-application-but-not-according-to-koski?)))))
(re-frame/reg-event-db
 :application/handle-fetch-applicant-harkinnanvaraisuus-error
 ;; Flag the harkinnanvaraisuus fetch as failed for this application.
 (fn [db [_ application-key]]
   (assoc-in db
             [:application :harkinnanvarainen-pohjakoulutus-by-application-key application-key :error]
             true)))
(defn create-fetch-applicant-pohjakoulutus-event-if-toisen-asteen-yhteishaku
  "For a second-degree joint application (toisen asteen yhteishaku),
  return the event vectors that fetch pohjakoulutus, harkinnanvaraisuus
  and valinnat data for the application; otherwise return nil."
  [application]
  (let [{haku-oid :haku application-key :key tarjonta :tarjonta} application]
    (when (haku/toisen-asteen-yhteishaku? tarjonta)
      [[:application/fetch-applicant-pohjakoulutus haku-oid application-key]
       [:application/fetch-applicant-harkinnanvaraisuus application-key]
       [:application/fetch-application-valinnat haku-oid application-key]])))
| null | https://raw.githubusercontent.com/Opetushallitus/ataru/27f650e0665e2735aab0c4059f766a3fb2826246/src/cljs/ataru/virkailija/application/pohjakoulutus_toinen_aste/pohjakoulutus_toinen_aste_handlers.cljs | clojure | (ns ataru.virkailija.application.pohjakoulutus-toinen-aste.pohjakoulutus-toinen-aste-handlers
(:require [re-frame.core :as re-frame :refer [subscribe]]
[ataru.tarjonta.haku :as haku]
[ataru.application.harkinnanvaraisuus.harkinnanvaraisuus-util :as hutil]
[ataru.application.harkinnanvaraisuus.harkinnanvaraisuus-types :refer [harkinnanvaraisuus-yksilollistetty-matikka-aikka-types
pohjakoulutus-harkinnanvarainen-types]]))
(re-frame/reg-event-fx
:application/fetch-applicant-pohjakoulutus
(fn [_ [_ haku-oid application-key]]
{:http {:method :get
:path (str "/lomake-editori/api/valintalaskentakoostepalvelu/suoritukset/haku/" haku-oid "/hakemus/" application-key)
:handler-or-dispatch :application/handle-fetch-applicant-pohjakoulutus-response
:handler-args application-key
:override-args {:error-handler #(re-frame/dispatch [:application/handle-fetch-applicant-pohjakoulutus-error application-key])}
:id :fetch-applicant-pohjakoulutus}}))
(re-frame/reg-event-db
:application/handle-fetch-applicant-pohjakoulutus-response
(fn [db [_ response application-key]]
(-> db
(assoc-in [:application :pohjakoulutus-by-application-key application-key] response))))
(re-frame/reg-event-db
:application/handle-fetch-applicant-pohjakoulutus-error
(fn [db [_ application-key]]
(-> db
(assoc-in [:application :pohjakoulutus-by-application-key application-key :error] true))))
(re-frame/reg-event-fx
:application/fetch-application-valinnat
(fn [_ [_ haku-oid application-key]]
{:http {:method :get
:path (str "/lomake-editori/api/tulos-service/haku/" haku-oid "/hakemus/" application-key)
:handler-or-dispatch :application/handle-fetch-application-valinnat-response
:handler-args application-key
:override-args {:error-handler #(re-frame/dispatch [:application/handle-fetch-application-valinnat-error application-key %])}
:id :fetch-applicant-valinnat}}))
(re-frame/reg-event-db
:application/handle-fetch-application-valinnat-response
(fn [db [_ response application-key]]
(-> db
(assoc-in [:application :valinnat-by-application-key application-key] response))))
(re-frame/reg-event-db
:application/handle-fetch-application-valinnat-error
(fn [db [_ application-key response]]
(let [error (-> response
:response
:error
(case
"Valinnan tulokset kesken" :valinnan-tulokset-kesken
true))]
(-> db
(assoc-in [:application :valinnat-by-application-key application-key :error] error)))))
(re-frame/reg-event-fx
:application/fetch-applicant-harkinnanvaraisuus
(fn [_ [_ application-key]]
{:http {:method :get
:path (str "/lomake-editori/api/valintalaskentakoostepalvelu/harkinnanvaraisuus/hakemus/" application-key)
:handler-or-dispatch :application/handle-fetch-applicant-harkinnanvaraisuus-response
:handler-args application-key
:override-args {:error-handler #(re-frame/dispatch [:application/handle-fetch-applicant-harkinnanvaraisuus-error application-key])}
:id :fetch-applicant-harkinnanvaraisuus}}))
(re-frame/reg-event-db
:application/handle-fetch-applicant-harkinnanvaraisuus-response
(fn [db [_ response application-key]]
(let [answers @(subscribe [:application/selected-application-answers])
has-harkinnanvaraisuus-reason-in-group (fn [resp group]
(->> resp
(map :harkinnanvaraisuudenSyy)
(some (fn [harkinnanvaraisuus]
(some #{harkinnanvaraisuus} group)) )
boolean))
yksilollistetty-matikka-aikka? (has-harkinnanvaraisuus-reason-in-group
response harkinnanvaraisuus-yksilollistetty-matikka-aikka-types)
harkinnanvarainen-pohjakoulutus? (has-harkinnanvaraisuus-reason-in-group
response pohjakoulutus-harkinnanvarainen-types)
pick-value-fn (fn [answers question]
(:value (question answers)))
harkinnanvarainen-application-but-not-according-to-koski? (and (not harkinnanvarainen-pohjakoulutus?)
(hutil/get-common-harkinnanvaraisuus-reason answers pick-value-fn))]
(-> db
(assoc-in [:application :harkinnanvarainen-pohjakoulutus-by-application-key application-key]
harkinnanvarainen-pohjakoulutus?)
(assoc-in [:application :yksilollistetty-matikka-aikka-by-application-key application-key]
yksilollistetty-matikka-aikka?)
(assoc-in [:application :harkinnanvarainen-application-but-not-according-to-koski? application-key]
harkinnanvarainen-application-but-not-according-to-koski?)))))
(re-frame/reg-event-db
:application/handle-fetch-applicant-harkinnanvaraisuus-error
(fn [db [_ application-key]]
(-> db
(assoc-in [:application :harkinnanvarainen-pohjakoulutus-by-application-key application-key :error] true))))
(defn create-fetch-applicant-pohjakoulutus-event-if-toisen-asteen-yhteishaku
[application]
(when (haku/toisen-asteen-yhteishaku? (:tarjonta application))
[[:application/fetch-applicant-pohjakoulutus (:haku application) (:key application)]
[:application/fetch-applicant-harkinnanvaraisuus (:key application)]
[:application/fetch-application-valinnat (:haku application) (:key application)]]))
|
|
bd458a58670be02cae4e8a0246a245c720fabfa702a30d558019dace4099efcc | ucsd-progsys/nate | symtable.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ I d : , v 1.39 2006/05/11 15:50:53 xleroy Exp $
(* To assign numbers to globals and primitives *)
open Misc
open Asttypes
open Lambda
open Cmo_format
(* Functions for batch linking *)
type error =
Undefined_global of string
| Unavailable_primitive of string
| Wrong_vm of string
| Uninitialized_global of string
exception Error of error
(* Tables for numbering objects *)
type 'a numtable =
{ num_cnt: int; (* The next number *)
num_tbl: ('a, int) Tbl.t } (* The table of already numbered objects *)
let empty_numtable = { num_cnt = 0; num_tbl = Tbl.empty }
(* Look up the number previously assigned to [key] in the numbering
   table; raises [Not_found] if the key was never entered. *)
let find_numtable nt key = Tbl.find key nt.num_tbl
(* Assign the next fresh number to [key], record the association in the
   mutable table reference [nt], and return the number. *)
let enter_numtable nt key =
  let fresh = !nt.num_cnt in
  nt := { num_cnt = fresh + 1; num_tbl = Tbl.add key fresh !nt.num_tbl };
  fresh
(* Consume the next fresh number without associating it to any key
   (used e.g. for anonymous literal slots) and return it. *)
let incr_numtable nt =
  let fresh = !nt.num_cnt in
  nt := { !nt with num_cnt = fresh + 1 };
  fresh
(* Global variables *)
let global_table = ref(empty_numtable : Ident.t numtable)
and literal_table = ref([] : (int * structured_constant) list)
let slot_for_getglobal id =
try
find_numtable !global_table id
with Not_found ->
raise(Error(Undefined_global(Ident.name id)))
let slot_for_setglobal id =
enter_numtable global_table id
let slot_for_literal cst =
let n = incr_numtable global_table in
literal_table := (n, cst) :: !literal_table;
n
(* The C primitives *)
let c_prim_table = ref(empty_numtable : string numtable)
let set_prim_table name =
ignore(enter_numtable c_prim_table name)
let num_of_prim name =
try
find_numtable !c_prim_table name
with Not_found ->
if !Clflags.custom_runtime then
enter_numtable c_prim_table name
else begin
let symb =
try Dll.find_primitive name
with Not_found -> raise(Error(Unavailable_primitive name)) in
let num = enter_numtable c_prim_table name in
Dll.synchronize_primitive num symb;
num
end
(* Ensure a C primitive is numbered, unless it is a compiler-internal
   primitive (those start with '%' and never reach the runtime table). *)
let require_primitive name =
  match name.[0] with
  | '%' -> ()
  | _ -> ignore (num_of_prim name)
let all_primitives () =
let prim = Array.create !c_prim_table.num_cnt "" in
Tbl.iter (fun name number -> prim.(number) <- name) !c_prim_table.num_tbl;
prim
(* Return all primitive names concatenated into one string, each name
   terminated by a NUL byte (the format expected by the runtime). *)
let data_primitive_names () =
  let prim = all_primitives () in
  let b = Buffer.create 512 in
  Array.iter
    (fun name -> Buffer.add_string b name; Buffer.add_char b '\000')
    prim;
  Buffer.contents b
let output_primitive_names outchan =
output_string outchan (data_primitive_names())
open Printf
let output_primitive_table outchan =
let prim = all_primitives() in
fprintf outchan "\
#ifdef __cplusplus\n\
extern \"C\" {\n\
#endif\n";
for i = 0 to Array.length prim - 1 do
fprintf outchan "extern long %s();\n" prim.(i)
done;
fprintf outchan "typedef long (*primitive)();\n";
fprintf outchan "primitive caml_builtin_cprim[] = {\n";
for i = 0 to Array.length prim - 1 do
fprintf outchan " %s,\n" prim.(i)
done;
fprintf outchan " (primitive) 0 };\n";
fprintf outchan "char * caml_names_of_builtin_cprim[] = {\n";
for i = 0 to Array.length prim - 1 do
fprintf outchan " \"%s\",\n" prim.(i)
done;
fprintf outchan " (char *) 0 };\n";
fprintf outchan "\
#ifdef __cplusplus\n\
}\n\
#endif\n"
(* Initialization for batch linking *)
let init () =
(* Enter the predefined exceptions *)
Array.iter
(fun name ->
let id =
try List.assoc name Predef.builtin_values
with Not_found -> fatal_error "Symtable.init" in
let c = slot_for_setglobal id in
let cst = Const_block(0, [Const_base(Const_string name)]) in
literal_table := (c, cst) :: !literal_table)
Runtimedef.builtin_exceptions;
Initialize the known C primitives
if String.length !Clflags.use_prims > 0 then begin
let ic = open_in !Clflags.use_prims in
try
while true do
set_prim_table (input_line ic)
done
with End_of_file -> close_in ic
| x -> close_in ic; raise x
end else if String.length !Clflags.use_runtime > 0 then begin
let primfile = Filename.temp_file "camlprims" "" in
try
if Sys.command(Printf.sprintf "%s -p > %s"
!Clflags.use_runtime primfile) <> 0
then raise(Error(Wrong_vm !Clflags.use_runtime));
let ic = open_in primfile in
try
while true do
set_prim_table (input_line ic)
done
with End_of_file -> close_in ic; remove_file primfile
| x -> close_in ic; raise x
with x -> remove_file primfile; raise x
end else begin
Array.iter set_prim_table Runtimedef.builtin_primitives
end
(* Relocate a block of object bytecode *)
Must use the unsafe String.set here because the block may be
a " fake " string as returned by Meta.static_alloc .
a "fake" string as returned by Meta.static_alloc. *)
(* Store the 32-bit little-endian encoding of [n] at byte offset [pos]
   in [buff].  Uses String.unsafe_set because [buff] may be a "fake"
   string returned by Meta.static_alloc (see the comment above). *)
let patch_int buff pos n =
  String.unsafe_set buff pos (Char.unsafe_chr n);
  String.unsafe_set buff (pos + 1) (Char.unsafe_chr (n asr 8));
  String.unsafe_set buff (pos + 2) (Char.unsafe_chr (n asr 16));
  String.unsafe_set buff (pos + 3) (Char.unsafe_chr (n asr 24))
(* Apply a relocation patch list to a block of object bytecode: for each
   (relocation, position) pair, resolve the relocation to a slot or
   primitive number and poke it into [buff] at that position. *)
let patch_object buff patchlist =
  List.iter
    (fun (reloc, pos) ->
      let value =
        match reloc with
          Reloc_literal sc -> slot_for_literal sc
        | Reloc_getglobal id -> slot_for_getglobal id
        | Reloc_setglobal id -> slot_for_setglobal id
        | Reloc_primitive name -> num_of_prim name in
      patch_int buff pos value)
    patchlist
(* Translate structured constants *)
(* Translate a structured constant into its runtime representation.
   Scalar constants map directly onto OCaml values via Obj.repr; blocks
   are allocated with the right tag and filled field by field,
   translating each field recursively. *)
let rec transl_const = function
    Const_base(Const_int i) -> Obj.repr i
  | Const_base(Const_char c) -> Obj.repr c
  | Const_base(Const_string s) -> Obj.repr s
  | Const_base(Const_float f) -> Obj.repr (float_of_string f)
  | Const_base(Const_int32 i) -> Obj.repr i
  | Const_base(Const_int64 i) -> Obj.repr i
  | Const_base(Const_nativeint i) -> Obj.repr i
  | Const_pointer i -> Obj.repr i
  | Const_immstring s -> Obj.repr s
  | Const_block(tag, fields) ->
      (* Allocate the block, then fill successive slots with the
         translated fields; [pos] tracks the next slot index. *)
      let block = Obj.new_block tag (List.length fields) in
      let pos = ref 0 in
      List.iter
        (fun c -> Obj.set_field block !pos (transl_const c); incr pos)
        fields;
      block
  | Const_float_array fields ->
      Obj.repr(Array.of_list(List.map (fun f -> float_of_string f) fields))
(* Build the initial table of globals *)
let initial_global_table () =
let glob = Array.create !global_table.num_cnt (Obj.repr 0) in
List.iter
(fun (slot, cst) -> glob.(slot) <- transl_const cst)
!literal_table;
literal_table := [];
glob
(* Save the table of globals *)
let output_global_map oc =
output_value oc !global_table
let data_global_map () =
Obj.repr !global_table
(* Functions for toplevel use *)
(* Update the in-core table of globals *)
let update_global_table () =
let ng = !global_table.num_cnt in
if ng > Array.length(Meta.global_data()) then Meta.realloc_global_data ng;
let glob = Meta.global_data() in
List.iter
(fun (slot, cst) -> glob.(slot) <- transl_const cst)
!literal_table;
literal_table := []
(* Recover data for toplevel initialization. Data can come either from
executable file (normal case) or from linked-in data (-output-obj). *)
type section_reader = {
read_string: string -> string;
read_struct: string -> Obj.t;
close_reader: unit -> unit
}
let read_sections () =
try
let sections = Meta.get_section_table () in
{ read_string =
(fun name -> (Obj.magic(List.assoc name sections) : string));
read_struct =
(fun name -> List.assoc name sections);
close_reader =
(fun () -> ()) }
with Not_found ->
let ic = open_in_bin Sys.executable_name in
Bytesections.read_toc ic;
{ read_string = Bytesections.read_section_string ic;
read_struct = Bytesections.read_section_struct ic;
close_reader = fun () -> close_in ic }
Initialize the linker for toplevel use
let init_toplevel () =
try
let sect = read_sections () in
(* Locations of globals *)
global_table := (Obj.magic (sect.read_struct "SYMB") : Ident.t numtable);
(* Primitives *)
let prims = sect.read_string "PRIM" in
c_prim_table := empty_numtable;
let pos = ref 0 in
while !pos < String.length prims do
let i = String.index_from prims !pos '\000' in
set_prim_table (String.sub prims !pos (i - !pos));
pos := i + 1
done;
(* DLL initialization *)
let dllpath = try sect.read_string "DLPT" with Not_found -> "" in
Dll.init_toplevel dllpath;
(* Recover CRC infos for interfaces *)
let crcintfs =
try (Obj.magic (sect.read_struct "CRCS") : (string * Digest.t) list)
with Not_found -> [] in
(* Done *)
sect.close_reader();
crcintfs
with Bytesections.Bad_magic_number | Not_found | Failure _ ->
fatal_error "Toplevel bytecode executable is corrupted"
(* Find the value of a global identifier *)
let get_global_position id = slot_for_getglobal id
let get_global_value id =
(Meta.global_data()).(slot_for_getglobal id)
let assign_global_value id v =
(Meta.global_data()).(slot_for_getglobal id) <- v
(* Check that all globals referenced in the given patch list
have been initialized already *)
let check_global_initialized patchlist =
First determine the globals we will define
let defined_globals =
List.fold_left
(fun accu rel ->
match rel with
(Reloc_setglobal id, pos) -> id :: accu
| _ -> accu)
[] patchlist in
(* Then check that all referenced, not defined globals have a value *)
let check_reference = function
(Reloc_getglobal id, pos) ->
if not (List.mem id defined_globals)
&& Obj.is_int (get_global_value id)
then raise (Error(Uninitialized_global(Ident.name id)))
| _ -> () in
List.iter check_reference patchlist
(* Save and restore the current state *)
type global_map = Ident.t numtable
let current_state () = !global_table
let restore_state st = global_table := st
let hide_additions st =
if st.num_cnt > !global_table.num_cnt then
fatal_error "Symtable.hide_additions";
global_table :=
{ num_cnt = !global_table.num_cnt;
num_tbl = st.num_tbl }
(* "Filter" the global map according to some predicate.
Used to expunge the global map for the toplevel. *)
let filter_global_map p gmap =
let newtbl = ref Tbl.empty in
Tbl.iter
(fun id num -> if p id then newtbl := Tbl.add id num !newtbl)
gmap.num_tbl;
{num_cnt = gmap.num_cnt; num_tbl = !newtbl}
(* Error report *)
open Format
(* Pretty-print a linker [error] (see the [error] type above) on the
   given formatter. *)
let report_error ppf = function
  | Undefined_global s ->
      fprintf ppf "Reference to undefined global `%s'" s
  | Unavailable_primitive s ->
      fprintf ppf "The external function `%s' is not available" s
  | Wrong_vm s ->
      fprintf ppf "Cannot find or execute the runtime system %s" s
  | Uninitialized_global s ->
      fprintf ppf "The value of the global `%s' is not yet computed" s
| null | https://raw.githubusercontent.com/ucsd-progsys/nate/8b1267cd8b10283d8bc239d16a28c654a4cb8942/eval/sherrloc/easyocaml%2B%2B/bytecomp/symtable.ml | ocaml | *********************************************************************
Objective Caml
*********************************************************************
To assign numbers to globals and primitives
Functions for batch linking
Tables for numbering objects
The next number
The table of already numbered objects
Global variables
The C primitives
Initialization for batch linking
Enter the predefined exceptions
Relocate a block of object bytecode
Translate structured constants
Build the initial table of globals
Save the table of globals
Functions for toplevel use
Update the in-core table of globals
Recover data for toplevel initialization. Data can come either from
executable file (normal case) or from linked-in data (-output-obj).
Locations of globals
Primitives
DLL initialization
Recover CRC infos for interfaces
Done
Find the value of a global identifier
Check that all globals referenced in the given patch list
have been initialized already
Then check that all referenced, not defined globals have a value
Save and restore the current state
"Filter" the global map according to some predicate.
Used to expunge the global map for the toplevel.
Error report | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : , v 1.39 2006/05/11 15:50:53 xleroy Exp $
open Misc
open Asttypes
open Lambda
open Cmo_format
type error =
Undefined_global of string
| Unavailable_primitive of string
| Wrong_vm of string
| Uninitialized_global of string
exception Error of error
type 'a numtable =
let empty_numtable = { num_cnt = 0; num_tbl = Tbl.empty }
let find_numtable nt key =
Tbl.find key nt.num_tbl
let enter_numtable nt key =
let n = !nt.num_cnt in
nt := { num_cnt = n + 1; num_tbl = Tbl.add key n !nt.num_tbl };
n
let incr_numtable nt =
let n = !nt.num_cnt in
nt := { num_cnt = n + 1; num_tbl = !nt.num_tbl };
n
let global_table = ref(empty_numtable : Ident.t numtable)
and literal_table = ref([] : (int * structured_constant) list)
let slot_for_getglobal id =
try
find_numtable !global_table id
with Not_found ->
raise(Error(Undefined_global(Ident.name id)))
let slot_for_setglobal id =
enter_numtable global_table id
let slot_for_literal cst =
let n = incr_numtable global_table in
literal_table := (n, cst) :: !literal_table;
n
let c_prim_table = ref(empty_numtable : string numtable)
let set_prim_table name =
ignore(enter_numtable c_prim_table name)
let num_of_prim name =
try
find_numtable !c_prim_table name
with Not_found ->
if !Clflags.custom_runtime then
enter_numtable c_prim_table name
else begin
let symb =
try Dll.find_primitive name
with Not_found -> raise(Error(Unavailable_primitive name)) in
let num = enter_numtable c_prim_table name in
Dll.synchronize_primitive num symb;
num
end
let require_primitive name =
if name.[0] <> '%' then ignore(num_of_prim name)
let all_primitives () =
let prim = Array.create !c_prim_table.num_cnt "" in
Tbl.iter (fun name number -> prim.(number) <- name) !c_prim_table.num_tbl;
prim
let data_primitive_names () =
let prim = all_primitives() in
let b = Buffer.create 512 in
for i = 0 to Array.length prim - 1 do
Buffer.add_string b prim.(i); Buffer.add_char b '\000'
done;
Buffer.contents b
let output_primitive_names outchan =
output_string outchan (data_primitive_names())
open Printf
let output_primitive_table outchan =
let prim = all_primitives() in
fprintf outchan "\
#ifdef __cplusplus\n\
extern \"C\" {\n\
#endif\n";
for i = 0 to Array.length prim - 1 do
fprintf outchan "extern long %s();\n" prim.(i)
done;
fprintf outchan "typedef long (*primitive)();\n";
fprintf outchan "primitive caml_builtin_cprim[] = {\n";
for i = 0 to Array.length prim - 1 do
fprintf outchan " %s,\n" prim.(i)
done;
fprintf outchan " (primitive) 0 };\n";
fprintf outchan "char * caml_names_of_builtin_cprim[] = {\n";
for i = 0 to Array.length prim - 1 do
fprintf outchan " \"%s\",\n" prim.(i)
done;
fprintf outchan " (char *) 0 };\n";
fprintf outchan "\
#ifdef __cplusplus\n\
}\n\
#endif\n"
let init () =
Array.iter
(fun name ->
let id =
try List.assoc name Predef.builtin_values
with Not_found -> fatal_error "Symtable.init" in
let c = slot_for_setglobal id in
let cst = Const_block(0, [Const_base(Const_string name)]) in
literal_table := (c, cst) :: !literal_table)
Runtimedef.builtin_exceptions;
Initialize the known C primitives
if String.length !Clflags.use_prims > 0 then begin
let ic = open_in !Clflags.use_prims in
try
while true do
set_prim_table (input_line ic)
done
with End_of_file -> close_in ic
| x -> close_in ic; raise x
end else if String.length !Clflags.use_runtime > 0 then begin
let primfile = Filename.temp_file "camlprims" "" in
try
if Sys.command(Printf.sprintf "%s -p > %s"
!Clflags.use_runtime primfile) <> 0
then raise(Error(Wrong_vm !Clflags.use_runtime));
let ic = open_in primfile in
try
while true do
set_prim_table (input_line ic)
done
with End_of_file -> close_in ic; remove_file primfile
| x -> close_in ic; raise x
with x -> remove_file primfile; raise x
end else begin
Array.iter set_prim_table Runtimedef.builtin_primitives
end
Must use the unsafe String.set here because the block may be
a " fake " string as returned by Meta.static_alloc .
a "fake" string as returned by Meta.static_alloc. *)
let patch_int buff pos n =
String.unsafe_set buff pos (Char.unsafe_chr n);
String.unsafe_set buff (pos + 1) (Char.unsafe_chr (n asr 8));
String.unsafe_set buff (pos + 2) (Char.unsafe_chr (n asr 16));
String.unsafe_set buff (pos + 3) (Char.unsafe_chr (n asr 24))
let patch_object buff patchlist =
List.iter
(function
(Reloc_literal sc, pos) ->
patch_int buff pos (slot_for_literal sc)
| (Reloc_getglobal id, pos) ->
patch_int buff pos (slot_for_getglobal id)
| (Reloc_setglobal id, pos) ->
patch_int buff pos (slot_for_setglobal id)
| (Reloc_primitive name, pos) ->
patch_int buff pos (num_of_prim name))
patchlist
let rec transl_const = function
Const_base(Const_int i) -> Obj.repr i
| Const_base(Const_char c) -> Obj.repr c
| Const_base(Const_string s) -> Obj.repr s
| Const_base(Const_float f) -> Obj.repr (float_of_string f)
| Const_base(Const_int32 i) -> Obj.repr i
| Const_base(Const_int64 i) -> Obj.repr i
| Const_base(Const_nativeint i) -> Obj.repr i
| Const_pointer i -> Obj.repr i
| Const_immstring s -> Obj.repr s
| Const_block(tag, fields) ->
let block = Obj.new_block tag (List.length fields) in
let pos = ref 0 in
List.iter
(fun c -> Obj.set_field block !pos (transl_const c); incr pos)
fields;
block
| Const_float_array fields ->
Obj.repr(Array.of_list(List.map (fun f -> float_of_string f) fields))
let initial_global_table () =
let glob = Array.create !global_table.num_cnt (Obj.repr 0) in
List.iter
(fun (slot, cst) -> glob.(slot) <- transl_const cst)
!literal_table;
literal_table := [];
glob
let output_global_map oc =
output_value oc !global_table
let data_global_map () =
Obj.repr !global_table
let update_global_table () =
let ng = !global_table.num_cnt in
if ng > Array.length(Meta.global_data()) then Meta.realloc_global_data ng;
let glob = Meta.global_data() in
List.iter
(fun (slot, cst) -> glob.(slot) <- transl_const cst)
!literal_table;
literal_table := []
type section_reader = {
read_string: string -> string;
read_struct: string -> Obj.t;
close_reader: unit -> unit
}
let read_sections () =
try
let sections = Meta.get_section_table () in
{ read_string =
(fun name -> (Obj.magic(List.assoc name sections) : string));
read_struct =
(fun name -> List.assoc name sections);
close_reader =
(fun () -> ()) }
with Not_found ->
let ic = open_in_bin Sys.executable_name in
Bytesections.read_toc ic;
{ read_string = Bytesections.read_section_string ic;
read_struct = Bytesections.read_section_struct ic;
close_reader = fun () -> close_in ic }
Initialize the linker for toplevel use
let init_toplevel () =
try
let sect = read_sections () in
global_table := (Obj.magic (sect.read_struct "SYMB") : Ident.t numtable);
let prims = sect.read_string "PRIM" in
c_prim_table := empty_numtable;
let pos = ref 0 in
while !pos < String.length prims do
let i = String.index_from prims !pos '\000' in
set_prim_table (String.sub prims !pos (i - !pos));
pos := i + 1
done;
let dllpath = try sect.read_string "DLPT" with Not_found -> "" in
Dll.init_toplevel dllpath;
let crcintfs =
try (Obj.magic (sect.read_struct "CRCS") : (string * Digest.t) list)
with Not_found -> [] in
sect.close_reader();
crcintfs
with Bytesections.Bad_magic_number | Not_found | Failure _ ->
fatal_error "Toplevel bytecode executable is corrupted"
let get_global_position id = slot_for_getglobal id
let get_global_value id =
(Meta.global_data()).(slot_for_getglobal id)
let assign_global_value id v =
(Meta.global_data()).(slot_for_getglobal id) <- v
let check_global_initialized patchlist =
First determine the globals we will define
let defined_globals =
List.fold_left
(fun accu rel ->
match rel with
(Reloc_setglobal id, pos) -> id :: accu
| _ -> accu)
[] patchlist in
let check_reference = function
(Reloc_getglobal id, pos) ->
if not (List.mem id defined_globals)
&& Obj.is_int (get_global_value id)
then raise (Error(Uninitialized_global(Ident.name id)))
| _ -> () in
List.iter check_reference patchlist
type global_map = Ident.t numtable
let current_state () = !global_table
let restore_state st = global_table := st
(* Forget the name->slot entries added since snapshot [st], while
   keeping the current slot counter so hidden slots are not reused.
   [st] must be an earlier snapshot (checked). *)
let hide_additions st =
  if st.num_cnt > !global_table.num_cnt then
    fatal_error "Symtable.hide_additions";
  global_table :=
    { num_cnt = !global_table.num_cnt;  (* keep the counter *)
      num_tbl = st.num_tbl }            (* revert to the older map *)
(* Keep only the entries of [gmap] whose identifier satisfies [p].
   The slot counter is preserved so slot numbering stays stable. *)
let filter_global_map p gmap =
  let newtbl = ref Tbl.empty in
  Tbl.iter
    (fun id num -> if p id then newtbl := Tbl.add id num !newtbl)
    gmap.num_tbl;
  {num_cnt = gmap.num_cnt; num_tbl = !newtbl}
open Format
let report_error ppf = function
| Undefined_global s ->
fprintf ppf "Reference to undefined global `%s'" s
| Unavailable_primitive s ->
fprintf ppf "The external function `%s' is not available" s
| Wrong_vm s ->
fprintf ppf "Cannot find or execute the runtime system %s" s
| Uninitialized_global s ->
fprintf ppf "The value of the global `%s' is not yet computed" s
|
32e407878574c8ff02a4ff2b6be827330372161975f89ee9a7074a14b909ec17 | fhunleth/relsync | target_syncer.erl | Copyright 2014
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%% @doc
%%% This module is sent to the target device to scan files locally and
%%% implement the updates as requested by the host device running relsync.
%%% @end
-module(target_syncer).
-include_lib("kernel/include/file.hrl").
-behaviour(gen_server).
%% API
-export([start_link/0, start_link/1,
get_file_listing/2,
set_hooks/2,
get_local_file_listing/1,
copy_file/4,
rm_file/2,
create_symlink_mirror/3,
notify_presync/1,
notify_postsync/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-record(state, {
% hooks holds the module name that provides an alternative
% implementation to the default synchronization
hooks}).
%%%===================================================================
%%% API
%%%===================================================================
%% Fetch the recursive file listing of Path from the remote Node's
%% registered target_syncer server.  Blocks for the default
%% gen_server:call timeout.
-spec get_file_listing(atom(), string()) -> [{string(), {integer(),binary()}}].
get_file_listing(Node, Path) ->
    gen_server:call({?SERVER, Node}, {get_file_listing, Path}).
% Traverse the directory specified by path and return the list of files
% with their info.
%% Walk Path recursively and return [{RelativePath, {Mode, Sha1}}] for
%% every regular file found.  RelativePath is Path-relative (the Path
%% prefix is stripped); ordering follows filelib:fold_files/5.
-spec get_local_file_listing(string()) -> [{string(), {integer(), binary()}}].
get_local_file_listing(Path) ->
    Skip = length(Path),
    Collect =
        fun(AbsName, Acc) ->
            RelName = lists:nthtail(Skip, AbsName),
            [{RelName, file_info(AbsName)} | Acc]
        end,
    filelib:fold_files(Path, ".*", true, Collect, []).
% Use the specified Module to customize the behavior of the
% synchronization process. The object code for the Module
% is sent to the remote Node as well.
%% Install (or, when ModuleName is undefined, clear) the hook module on
%% the remote Node.  The module's object code is obtained locally --
%% compiling on the fly if necessary -- and shipped with the call.
%% NOTE(review): the spec says atom() but maybe_compile/1 treats
%% ModuleName as a string -- confirm callers.
-spec set_hooks(atom(), atom()) -> ok.
set_hooks(Node, undefined) ->
    gen_server:call({?SERVER, Node}, clear_hooks);
set_hooks(Node, ModuleName) ->
    {Module, Bin, File} = maybe_compile(ModuleName),
    gen_server:call({?SERVER, Node}, {set_hooks, Module, Bin, File}).
%% Resolve ModuleName (a string) to {Module, Binary, Filename}: reuse
%% already-available object code when present, otherwise compile the
%% source file in memory (crashes on compile failure via badmatch).
%% NOTE(review): list_to_atom/1 is fine for this operator-supplied
%% input but would be unsafe on untrusted data.
maybe_compile(ModuleName) ->
    Module = list_to_atom(ModuleName),
    case code:get_object_code(Module) of
        {Module, Bin, File} ->
            {Module, Bin, File};
        _ ->
            {ok, CompiledModule, Bin} = compile:file(ModuleName, [binary]),
            {CompiledModule, Bin, ModuleName}
    end.
% Copy the Contents to the file specified by Path on Node, and
% then set the mode to Mode.
%% Write Contents to Path on Node, then chmod it to Mode.
-spec copy_file(atom(), string(), integer(), binary()) -> ok | {error, _}.
copy_file(Node, Path, Mode, Contents) ->
    gen_server:call({?SERVER, Node}, {copy_file, Path, Mode, Contents}).
% Remove the specified file from Node
%% Delete Path on Node; mirrors file:delete/1's return value.
-spec rm_file(atom(), string()) -> ok | {error, _}.
rm_file(Node, Path) ->
    gen_server:call({?SERVER, Node}, {rm_file, Path}).
% Create a symlink mirror of all files in Path in NewPath,
% but only if NewPath doesn't exist.
%% Ask Node to mirror Path into NewPath as symlinks (no-op when
%% NewPath already exists) and repoint the node's code path at the
%% mirror.
-spec create_symlink_mirror(atom(), string(), string())-> ok | {error, _}.
create_symlink_mirror(Node, Path, NewPath) ->
    gen_server:call({?SERVER, Node}, {create_symlink_mirror, Path, NewPath}).
% Called to let the remote node know that a synchronization
% run is coming.
%% Tell Node a synchronization pass is about to start; runs the hook
%% module's presync/0 if one is installed.
-spec notify_presync(atom()) -> ok.
notify_presync(Node) ->
    gen_server:call({?SERVER, Node}, notify_presync).
% Called to let the remote node know that a synchronization
% run has finished.
%% Tell Node a synchronization pass has finished; runs the hook
%% module's postsync/0 if one is installed.
-spec notify_postsync(atom()) -> ok.
notify_postsync(Node) ->
    gen_server:call({?SERVER, Node}, notify_postsync).
%%--------------------------------------------------------------------
%% @doc
%% Starts the server locally (called by the supervisor)
%%
%% @spec start_link() -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
start_link() ->
    %% Local instance, registered under ?SERVER (used on the target side).
    gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
%% Starts the server on the specified remote node and links the caller
%% to it on success.
%% Fix: a failed rpc (e.g. {badrpc, nodedown}) previously crashed here
%% with an obscure case_clause; surface it as an {error, _} tuple
%% instead.  Successful results are returned unchanged.
start_link(Node) ->
    case rpc:call(Node, gen_server, start, [{local, ?SERVER}, ?MODULE, [], []]) of
        {ok, Pid} = Result ->
            link(Pid),
            Result;
        {badrpc, Reason} ->
            {error, {badrpc, Reason}};
        Other ->
            Other
    end.
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Initializes the server
%%
%% @spec init(Args) -> {ok, State} |
%%                     {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%% @end
%%--------------------------------------------------------------------
init([]) ->
    %% No hook module installed until set_hooks/2 is called.
    {ok, #state{}}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling call messages
%%
%% @spec handle_call(Request, From, State) ->
%% {reply, Reply, State} |
%%                                   {reply, Reply, State, Timeout} |
%%                                   {noreply, State} |
%%                                   {noreply, State, Timeout} |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
%% Handle synchronous requests from the relsync host.
%% Fix: the extraction stripped the leading "%" from two comment lines
%% (the "delete the file first ..." and "Update Erlang's search paths
%% ..." comments), leaving bare words that do not parse; the markers
%% are restored here.
handle_call({get_file_listing, Path}, _From, State) ->
    Reply = get_local_file_listing(Path),
    {reply, Reply, State};
handle_call(clear_hooks, _From, State) ->
    NewState = State#state{hooks=undefined},
    {reply, ok, NewState};
handle_call({set_hooks, Module, Bin, File}, _From, State) ->
    %% Load the shipped hook module into this VM and remember it.
    code:load_binary(Module, File, Bin),
    NewState = State#state{hooks=Module},
    {reply, ok, NewState};
handle_call({copy_file, Path, Mode, Contents}, _From, State) ->
    ok = filelib:ensure_dir(Path),
    % delete the file first so that we write to a new inode. This is needed
    % for symlink mirrors, but also more gracefully handles the case where
    % someone else has the file opened.
    file:delete(Path),
    ok = file:write_file(Path, Contents),
    ok = file:change_mode(Path, Mode),
    maybe_update_beam(Path),
    {reply, ok, State};
handle_call({create_symlink_mirror, Path, NewPath}, _From, State) ->
    case filelib:is_dir(NewPath) of
        false ->
            ok = filelib:ensure_dir(NewPath),
            FromFiles = get_local_file_listing(Path),
            [ ok = symlink_files(Path ++ File, NewPath ++ File) || {File, _} <- FromFiles ],
            % Update Erlang's search paths to look in the mirror location now.
            ok = update_code_paths(Path, NewPath);
        true ->
            % Don't do anything, since the mirror already exists.
            ok
    end,
    {reply, ok, State};
handle_call({rm_file, Path}, _From, State) ->
    Reply = file:delete(Path),
    {reply, Reply, State};
handle_call(notify_presync, _From, State) ->
    #state{hooks=Hooks} = State,
    call_hook_or_not(Hooks, presync),
    {reply, ok, State};
handle_call(notify_postsync, _From, State) ->
    #state{hooks=Hooks} = State,
    call_hook_or_not(Hooks, postsync),
    {reply, ok, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling cast messages
%%
%% @spec handle_cast(Msg, State) -> {noreply, State} |
%%                                  {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_cast(_Msg, State) ->
    %% No casts are part of the protocol; ignore anything received.
    {noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling all non call/cast messages
%%
%% @spec handle_info(Info, State) -> {noreply, State} |
%%                                   {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_info(_Info, State) ->
    %% Drain unexpected raw messages so the mailbox cannot grow.
    {noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
%%
%% @spec terminate(Reason, State) -> void()
%% @end
%%--------------------------------------------------------------------
terminate(_Reason, _State) ->
    %% Nothing to clean up; copied files and loaded code stay in place.
    ok.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Convert process state when code is changed
%%
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @end
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
    %% State record layout is unchanged across upgrades.
    {ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% Return {Mode, Sha1Digest} for the file at Filename; crashes
%% (badmatch) if the file cannot be read or stat'ed.
-spec file_info(string()) -> {integer(), binary()}.
file_info(Filename) ->
    {ok, Contents} = file:read_file(Filename),
    {ok, #file_info{mode = Mode}} = file:read_file_info(Filename),
    Digest = crypto:hash(sha, Contents),
    {Mode, Digest}.
%%-spec call_hook_or_not(atom(), atom()) ->
%% Invoke the hook module's presync/postsync callback, or do nothing
%% for those phases when no hook module is configured.  Any other
%% module/function pair is applied dynamically.
call_hook_or_not(undefined, Phase) when Phase =:= presync; Phase =:= postsync ->
    ok;
call_hook_or_not(HookModule, Callback) ->
    HookModule:Callback().
%% If Path names a .beam file, reload the corresponding module in the
%% running VM; all other file types need no post-copy action.
maybe_update_beam(Path) ->
    case filename:extension(Path) of
        ".beam" ->
            update_beam(Path);
        _ ->
            ok
    end.
%% If the module named by the beam file is currently loaded, purge the
%% old code and reload it; sticky (OTP) modules are skipped and
%% not-yet-loaded modules are left for the VM to load on demand.
%% Fix: the extraction stripped the leading "%" from the two NOTE
%% comment lines, leaving bare words that do not parse; the markers
%% are restored here.
update_beam(Path) ->
    Module = list_to_atom(filename:rootname(filename:basename(Path))),
    case code:which(Module) of
        non_existing ->
            % Code not loaded yet. Let the VM load it on demand.
            ok;
        _ ->
            % NOTE: we don't check whether the old path (from code:which/1)
            % is the same as the new Path. Symlink mirroring would fail this even
            % though it is ok, but in general, we don't police module naming collisions.
            case code:is_sticky(Module) of
                true ->
                    io:format("Not reloading sticky module ~p.~n", [Module]);
                false ->
                    % Updating code that has been loaded
                    io:format("Reloading ~p...~n", [Module]),
                    code:purge(Module),
                    {module, Module} = code:load_file(Module)
            end
    end.
%% Create a symlink at To pointing to From, creating parent
%% directories as needed.  The ensure_dir result is deliberately
%% ignored; the make_symlink result is returned to the caller.
symlink_files(From, To) ->
    filelib:ensure_dir(To),
    file:make_symlink(From, To).
%% If From is a prefix of Path, substitute it with To; otherwise
%% return Path untouched.
replace_prefix(Path, From, To) ->
    case lists:prefix(From, Path) of
        true ->
            Tail = lists:nthtail(length(From), Path),
            To ++ Tail;
        false ->
            % Not affected, so don't update.
            Path
    end.
% Update the Erlang VM's code search path to the new directory prefix.
% This is called after mirroring the directory so that we can write to it.
update_code_paths(From, To) ->
    %% Rewrite every code-path entry under From to point into the
    %% mirror at To; entries outside From are left unchanged.
    NewPaths = [ replace_prefix(Path, From, To) || Path <- code:get_path() ],
    true = code:set_path(NewPaths),
    ok.
| null | https://raw.githubusercontent.com/fhunleth/relsync/4f49df183fa4b5cff5f7afb4818d907b65d9ab37/src/target_syncer.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc
This module is sent to the target device to scan files locally and
implement the updates as requested by the host device running relsync.
@end
API
gen_server callbacks
hooks holds the module name that provides an alternative
implementation to the default synchronization
===================================================================
API
===================================================================
with their info.
Use the specified Module to customize the behavior of the
synchronization process. The object code for the Module
is sent to the remote Node as well.
Copy the Contents to the file specified by Path on Node, and
then set the mode to Mode.
Remove the specified file from Node
Called to let the remote node know that a synchronization
run is coming.
Called to let the remote node know that a synchronization
run has finished.
--------------------------------------------------------------------
@doc
Starts the server locally (called by the supervisor)
@end
--------------------------------------------------------------------
Starts the server on the specified remote node.
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
@doc
Initializes the server
ignore |
{stop, Reason}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling call messages
{reply, Reply, State} |
{stop, Reason, Reply, State} |
{stop, Reason, State}
@end
--------------------------------------------------------------------
for symlink mirrors, but also more gracefully handles the case where
someone else has the file opened.
Don't do anything, since the mirror already exists.
--------------------------------------------------------------------
@doc
Handling cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling all non call/cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any
necessary cleaning up. When it returns, the gen_server terminates
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Convert process state when code is changed
@end
--------------------------------------------------------------------
===================================================================
===================================================================
-spec call_hook_or_not(atom(), atom()) ->
Code not loaded yet. Let the VM load it on demand.
though it is ok, but in general, we don't police module naming collisions.
Updating code that has been loaded
Not affected, so don't update.
Update the Erlang VM's code search path to the new directory prefix.
This is called after mirroring the directory so that we can write to it. | Copyright 2014
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(target_syncer).
-include_lib("kernel/include/file.hrl").
-behaviour(gen_server).
-export([start_link/0, start_link/1,
get_file_listing/2,
set_hooks/2,
get_local_file_listing/1,
copy_file/4,
rm_file/2,
create_symlink_mirror/3,
notify_presync/1,
notify_postsync/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-record(state, {
hooks}).
-spec get_file_listing(atom(), string()) -> [{string(), {integer(),binary()}}].
get_file_listing(Node, Path) ->
gen_server:call({?SERVER, Node}, {get_file_listing, Path}).
Traverse the directory specified by path and return the list of files
-spec get_local_file_listing(string()) -> [{string(), {integer(), binary()}}].
get_local_file_listing(Path) ->
PrefixLength = length(Path),
filelib:fold_files(Path, ".*", true,
fun(Y, Acc) -> [{lists:nthtail(PrefixLength, Y), file_info(Y)} | Acc] end,
[]).
-spec set_hooks(atom(), atom()) -> ok.
set_hooks(Node, undefined) ->
gen_server:call({?SERVER, Node}, clear_hooks);
set_hooks(Node, ModuleName) ->
{Module, Bin, File} = maybe_compile(ModuleName),
gen_server:call({?SERVER, Node}, {set_hooks, Module, Bin, File}).
maybe_compile(ModuleName) ->
Module = list_to_atom(ModuleName),
case code:get_object_code(Module) of
{Module, Bin, File} ->
{Module, Bin, File};
_ ->
{ok, CompiledModule, Bin} = compile:file(ModuleName, [binary]),
{CompiledModule, Bin, ModuleName}
end.
-spec copy_file(atom(), string(), integer(), binary()) -> ok | {error, _}.
copy_file(Node, Path, Mode, Contents) ->
gen_server:call({?SERVER, Node}, {copy_file, Path, Mode, Contents}).
-spec rm_file(atom(), string()) -> ok | {error, _}.
rm_file(Node, Path) ->
gen_server:call({?SERVER, Node}, {rm_file, Path}).
Create a symlink mirror of all files in Path in NewPath ,
but only if NewPath does n't exist .
-spec create_symlink_mirror(atom(), string(), string())-> ok | {error, _}.
create_symlink_mirror(Node, Path, NewPath) ->
gen_server:call({?SERVER, Node}, {create_symlink_mirror, Path, NewPath}).
-spec notify_presync(atom()) -> ok.
notify_presync(Node) ->
gen_server:call({?SERVER, Node}, notify_presync).
-spec notify_postsync(atom()) -> ok.
notify_postsync(Node) ->
gen_server:call({?SERVER, Node}, notify_postsync).
( ) - > { ok , Pid } | ignore | { error , Error }
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
start_link(Node) ->
Result = rpc:call(Node, gen_server, start, [{local, ?SERVER}, ?MODULE, [], []]),
case Result of
{ok, Pid} ->
link(Pid)
end,
Result.
@private
) - > { ok , State } |
{ ok , State , Timeout } |
init([]) ->
{ok, #state{}}.
@private
, From , State ) - >
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
handle_call({get_file_listing, Path}, _From, State) ->
Reply = get_local_file_listing(Path),
{reply, Reply, State};
handle_call(clear_hooks, _From, State) ->
NewState = State#state{hooks=undefined},
{reply, ok, NewState};
handle_call({set_hooks, Module, Bin, File}, _From, State) ->
code:load_binary(Module, File, Bin),
NewState = State#state{hooks=Module},
{reply, ok, NewState};
handle_call({copy_file, Path, Mode, Contents}, _From, State) ->
ok = filelib:ensure_dir(Path),
delete the file first so that we write to a new inode . This is needed
file:delete(Path),
ok = file:write_file(Path, Contents),
ok = file:change_mode(Path, Mode),
maybe_update_beam(Path),
{reply, ok, State};
handle_call({create_symlink_mirror, Path, NewPath}, _From, State) ->
case filelib:is_dir(NewPath) of
false ->
ok = filelib:ensure_dir(NewPath),
FromFiles = get_local_file_listing(Path),
[ ok = symlink_files(Path ++ File, NewPath ++ File) || {File, _} <- FromFiles ],
Update Erlang 's search paths to look in the mirror location now .
ok = update_code_paths(Path, NewPath);
true ->
ok
end,
{reply, ok, State};
handle_call({rm_file, Path}, _From, State) ->
Reply = file:delete(Path),
{reply, Reply, State};
handle_call(notify_presync, _From, State) ->
#state{hooks=Hooks} = State,
call_hook_or_not(Hooks, presync),
{reply, ok, State};
handle_call(notify_postsync, _From, State) ->
#state{hooks=Hooks} = State,
call_hook_or_not(Hooks, postsync),
{reply, ok, State}.
@private
@spec handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_cast(_Msg, State) ->
{noreply, State}.
@private
, State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_info(_Info, State) ->
{noreply, State}.
@private
with . The return value is ignored .
, State ) - > void ( )
terminate(_Reason, _State) ->
ok.
@private
, State , Extra ) - > { ok , NewState }
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
-spec file_info(string()) -> {integer(), binary()}.
file_info(Filename) ->
{ok, Data} = file:read_file(Filename),
Hash = crypto:hash(sha, Data),
{ok, #file_info{mode = Mode}} = file:read_file_info(Filename),
{Mode, Hash}.
call_hook_or_not(undefined, presync) ->
ok;
call_hook_or_not(undefined, postsync) ->
ok;
call_hook_or_not(M, F) ->
M:F().
maybe_update_beam(Path) ->
case filename:extension(Path) of
".beam" ->
update_beam(Path);
_ ->
ok
end.
update_beam(Path) ->
Module = list_to_atom(filename:rootname(filename:basename(Path))),
case code:which(Module) of
non_existing ->
ok;
_ ->
NOTE : we do n't check whether the old path ( from code : )
is the same as the new Path . Symlink mirroring would fail this even
case code:is_sticky(Module) of
true ->
io:format("Not reloading sticky module ~p.~n", [Module]);
false ->
io:format("Reloading ~p...~n", [Module]),
code:purge(Module),
{module, Module} = code:load_file(Module)
end
end.
symlink_files(From, To) ->
filelib:ensure_dir(To),
file:make_symlink(From, To).
replace_prefix(Path, From, To) ->
case lists:prefix(From, Path) of
false ->
Path;
true ->
To ++ lists:nthtail(length(From), Path)
end.
update_code_paths(From, To) ->
NewPaths = [ replace_prefix(Path, From, To) || Path <- code:get_path() ],
true = code:set_path(NewPaths),
ok.
|
98c497bd6103f3cd61a30f3ff703438ec67874d43c6b58a4cbb57a197c876f52 | cfcs/ocaml-socks | socks_client.ml | open Socks
open Lwt
open Cmdliner
open Rresult
type t = {
oc: Lwt_io.output Lwt_io.channel;
ic: Lwt_io.input Lwt_io.channel;
}
let of_socket socket =
let oc = Lwt_io.of_fd ~mode:Lwt_io.output socket in
let ic = Lwt_io.of_fd ~mode:Lwt_io.input socket in
{oc; ic}
let connect ~proxy ~host ~port =
let connect_str = R.get_ok (Socks.make_socks5_request (Connect {address = Domain_address host; port = port})) in
let socket = Lwt_unix.socket Lwt_unix.PF_INET Lwt_unix.SOCK_STREAM 0 in
let%lwt host_info = Lwt_unix.gethostbyname proxy in
let server_address = host_info.Lwt_unix.h_addr_list.(0) in
let%lwt () = Lwt_unix.connect socket (Lwt_unix.ADDR_INET (server_address, 1080)) in
Logs.info (fun m -> m "Connected to proxy %s" proxy);
return socket
let send_connect_request socket host port =
let connect_str = R.get_ok (Socks.make_socks5_request (Connect {address = Domain_address host; port = port})) in
Lwt_unix.write_string socket connect_str 0 (String.length connect_str)
let connect proxy host port =
let%lwt socket = connect_to_proxy proxy in
let auth_request = make_socks5_auth_request ~username_password:false in
let auth_response = Bytes.make 2 '\x00' in
let connect_response = Bytes.make 10 '\x00' in
let%lwt _ = Lwt_unix.write_string socket auth_request 0 (String.length auth_request) in
let%lwt _ = Lwt_unix.read socket auth_response 0 2 in
let auth_method = parse_socks5_auth_response (Bytes.to_string auth_response) in
begin match auth_method with
| No_acceptable_methods -> Logs.err (fun m -> m "No acceptable auth methods")
| _ -> Logs.info (fun m -> m "Auth OK")
end;
let%lwt _ = send_connect_request socket host port in
let%lwt _ = Lwt_unix.read socket connect_response 0 10 in
let c = parse_socks5_response (Bytes.to_string connect_response) in
begin match c with
| Ok _ -> Logs.info (fun m -> m "Connect request ok")
| Error _ -> Logs.err (fun m -> m "Connect failed")
end;
return socket
let http_head socket =
let conn = of_socket socket in
let%lwt () = Lwt_io.write conn.oc "HEAD / HTTP/1.0\r\n\r\n" in
let%lwt head = Lwt_io.read conn.ic in
Logs.info (fun m -> m "Head: %s" head);
return ()
let client _ proxy host port =
Lwt_main.run (let%lwt socket = connect proxy host port in http_head socket)
let setup_log style_renderer level =
Fmt_tty.setup_std_outputs ?style_renderer ();
Logs.set_level level;
Logs.set_reporter (Logs_fmt.reporter ());
()
let logging =
let env = Arg.env_var "SOCKS_CLIENT_VERBOSITY" in
Term.(const setup_log $ Fmt_cli.style_renderer () $ Logs_cli.level ~env ())
let proxy =
let doc = "Proxy" in
Arg.(required & pos 0 (some string) None & info [] ~docv:"PROXY" ~doc)
let host =
let doc = "Host" in
Arg.(required & pos 1 (some string) None & info [] ~docv:"HOST" ~doc)
let port =
let doc = "Port" in
Arg.(required & pos 2 (some int) None & info [] ~docv:"PORT" ~doc)
let cmd =
let doc = "SOCKS5 client" in
let exits = Term.default_exits in
Term.(const client $ logging $ proxy $ host $ port),
Term.info "socks_client" ~doc ~exits
let () = Term.(eval cmd |> exit)
| null | https://raw.githubusercontent.com/cfcs/ocaml-socks/00d2467f8e1ff2f369f4c8145654df51a35d8e08/test/socks_client.ml | ocaml | open Socks
open Lwt
open Cmdliner
open Rresult
type t = {
oc: Lwt_io.output Lwt_io.channel;
ic: Lwt_io.input Lwt_io.channel;
}
let of_socket socket =
let oc = Lwt_io.of_fd ~mode:Lwt_io.output socket in
let ic = Lwt_io.of_fd ~mode:Lwt_io.input socket in
{oc; ic}
let connect ~proxy ~host ~port =
let connect_str = R.get_ok (Socks.make_socks5_request (Connect {address = Domain_address host; port = port})) in
let socket = Lwt_unix.socket Lwt_unix.PF_INET Lwt_unix.SOCK_STREAM 0 in
let%lwt host_info = Lwt_unix.gethostbyname proxy in
let server_address = host_info.Lwt_unix.h_addr_list.(0) in
let%lwt () = Lwt_unix.connect socket (Lwt_unix.ADDR_INET (server_address, 1080)) in
Logs.info (fun m -> m "Connected to proxy %s" proxy);
return socket
let send_connect_request socket host port =
let connect_str = R.get_ok (Socks.make_socks5_request (Connect {address = Domain_address host; port = port})) in
Lwt_unix.write_string socket connect_str 0 (String.length connect_str)
let connect proxy host port =
let%lwt socket = connect_to_proxy proxy in
let auth_request = make_socks5_auth_request ~username_password:false in
let auth_response = Bytes.make 2 '\x00' in
let connect_response = Bytes.make 10 '\x00' in
let%lwt _ = Lwt_unix.write_string socket auth_request 0 (String.length auth_request) in
let%lwt _ = Lwt_unix.read socket auth_response 0 2 in
let auth_method = parse_socks5_auth_response (Bytes.to_string auth_response) in
begin match auth_method with
| No_acceptable_methods -> Logs.err (fun m -> m "No acceptable auth methods")
| _ -> Logs.info (fun m -> m "Auth OK")
end;
let%lwt _ = send_connect_request socket host port in
let%lwt _ = Lwt_unix.read socket connect_response 0 10 in
let c = parse_socks5_response (Bytes.to_string connect_response) in
begin match c with
| Ok _ -> Logs.info (fun m -> m "Connect request ok")
| Error _ -> Logs.err (fun m -> m "Connect failed")
end;
return socket
let http_head socket =
let conn = of_socket socket in
let%lwt () = Lwt_io.write conn.oc "HEAD / HTTP/1.0\r\n\r\n" in
let%lwt head = Lwt_io.read conn.ic in
Logs.info (fun m -> m "Head: %s" head);
return ()
let client _ proxy host port =
Lwt_main.run (let%lwt socket = connect proxy host port in http_head socket)
let setup_log style_renderer level =
Fmt_tty.setup_std_outputs ?style_renderer ();
Logs.set_level level;
Logs.set_reporter (Logs_fmt.reporter ());
()
let logging =
let env = Arg.env_var "SOCKS_CLIENT_VERBOSITY" in
Term.(const setup_log $ Fmt_cli.style_renderer () $ Logs_cli.level ~env ())
let proxy =
let doc = "Proxy" in
Arg.(required & pos 0 (some string) None & info [] ~docv:"PROXY" ~doc)
let host =
let doc = "Host" in
Arg.(required & pos 1 (some string) None & info [] ~docv:"HOST" ~doc)
let port =
let doc = "Port" in
Arg.(required & pos 2 (some int) None & info [] ~docv:"PORT" ~doc)
let cmd =
let doc = "SOCKS5 client" in
let exits = Term.default_exits in
Term.(const client $ logging $ proxy $ host $ port),
Term.info "socks_client" ~doc ~exits
let () = Term.(eval cmd |> exit)
|
|
029cce70ae1980e3d407cf0b6fb16364cdbe4dd404cfd98e3389f6b536b20398 | namin/biohacker | unify.lisp | ;; -*- Mode: Lisp; -*-
;;;; Variables and unification
Last edited 1/29/93 , by KDF
Copyright ( c ) 1988 - 1993 , , Northwestern University ,
and , the Xerox Corporation .
;;; All rights reserved.
;;; See the file legal.txt for a paragraph stating scope of permission
;;; and disclaimer of warranty. The above copyright notice and that
;;; paragraph must be included in any separate copy of this file.
(in-package :COMMON-LISP-USER)
(defun variable? (x)
A symbol whose first character is " ? "
(char= #\? (elt (symbol-name x) 0))))
(defun unify (a b &optional (bindings nil))
(cond ((equal a b) bindings)
((variable? a) (unify-variable a b bindings))
((variable? b) (unify-variable b a bindings))
((or (not (listp a)) (not (listp b))) :FAIL)
((not (eq :FAIL (setq bindings
(unify (car a) (car b) bindings))))
(unify (cdr a) (cdr b) bindings))
(t :FAIL)))
(defun unify-variable (var exp bindings &aux val)
;; Must distinguish no value from value of nil
(setq val (assoc var bindings))
(cond (val (unify (cdr val) exp bindings))
;; If safe, bind <var> to <exp>
((free-in? var exp bindings) (cons (cons var exp) bindings))
(t :FAIL)))
(defun free-in? (var exp bindings)
;; Returns nil if <var> occurs in <exp>, assuming <bindings>.
(cond ((null exp) t)
((equal var exp) nil)
((variable? exp)
(let ((val (assoc exp bindings)))
(if val
(free-in? var (cdr val) bindings)
t)))
((not (listp exp)) t)
((free-in? var (car exp) bindings)
(free-in? var (cdr exp) bindings))))
| null | https://raw.githubusercontent.com/namin/biohacker/6b5da4c51c9caa6b5e1a68b046af171708d1af64/BPS/atms/unify.lisp | lisp | -*- Mode: Lisp; -*-
Variables and unification
All rights reserved.
See the file legal.txt for a paragraph stating scope of permission
and disclaimer of warranty. The above copyright notice and that
paragraph must be included in any separate copy of this file.
Must distinguish no value from value of nil
If safe, bind <var> to <exp>
Returns nil if <var> occurs in <exp>, assuming <bindings>. |
Last edited 1/29/93 , by KDF
Copyright ( c ) 1988 - 1993 , , Northwestern University ,
and , the Xerox Corporation .
(in-package :COMMON-LISP-USER)
(defun variable? (x)
A symbol whose first character is " ? "
(char= #\? (elt (symbol-name x) 0))))
(defun unify (a b &optional (bindings nil))
(cond ((equal a b) bindings)
((variable? a) (unify-variable a b bindings))
((variable? b) (unify-variable b a bindings))
((or (not (listp a)) (not (listp b))) :FAIL)
((not (eq :FAIL (setq bindings
(unify (car a) (car b) bindings))))
(unify (cdr a) (cdr b) bindings))
(t :FAIL)))
(defun unify-variable (var exp bindings &aux val)
(setq val (assoc var bindings))
(cond (val (unify (cdr val) exp bindings))
((free-in? var exp bindings) (cons (cons var exp) bindings))
(t :FAIL)))
(defun free-in? (var exp bindings)
(cond ((null exp) t)
((equal var exp) nil)
((variable? exp)
(let ((val (assoc exp bindings)))
(if val
(free-in? var (cdr val) bindings)
t)))
((not (listp exp)) t)
((free-in? var (car exp) bindings)
(free-in? var (cdr exp) bindings))))
|
f3daaab437230703f7710a31b75bebcff36bedbf0b631a6704c118f5b9d8799e | processone/p1_pgsql | pgsql_sasl.erl | %%%-------------------------------------------------------------------
@author < >
%%%
Copyright ( C ) 2002 - 2021 ProcessOne , SARL . All Rights Reserved .
%%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% -2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%%-------------------------------------------------------------------
-module(pgsql_sasl).
-author('').
-export([client_new/3, client_step/2, client_finish/2]).
%% Client-side SCRAM-SHA-256 negotiation state, threaded through the
%% client_new/1 -> client_step/2 -> client_finish/2 sequence.
-record(sasl_state,
	{user :: binary(),          %% authentication identity (n=...)
	 password :: binary(),      %% clear-text password
	 nonce :: binary(),         %% client nonce; client_step/2 replaces
	                            %% it with the combined client+server nonce
	 verify = <<>> :: binary()  %% expected server signature, checked
	                            %% against v=... by client_finish/2
	}).
-spec client_new(binary(), binary(), list(binary())) ->
{ok, binary(), binary(), #sasl_state{}} |
{error, any()}.
%% Begin a SASL exchange: pick SCRAM-SHA-256 from the server-offered
%% mechanism list and build the GS2 client-first-message ("n,," means
%% no channel binding).  Returns {ok, Mech, InitialResponse, State}
%% or {error, Reason} when no supported mechanism is offered.
client_new(User, Password, Mechs) ->
    case lists:member(<<"SCRAM-SHA-256">>, Mechs) of
        false ->
            {error, "No supported SASL mechs"};
        true ->
            %% 18 random bytes, base64-encoded, as the client nonce.
            ClientNonce = base64:encode(crypto:strong_rand_bytes(18)),
            FirstBare = client_first_message_bare(User, ClientNonce),
            InitialResponse = <<"n,,", FirstBare/binary>>,
            NewState = #sasl_state{user = User,
                                   password = Password,
                                   nonce = ClientNonce},
            {ok, <<"SCRAM-SHA-256">>, InitialResponse, NewState}
    end.
-spec client_step(#sasl_state{}, binary()) ->
{ok, binary(), #sasl_state{}} |
{error, any()}.
%% Handle the server-first-message and build the client-final-message
%% (RFC 5802).  ServerResponse carries r=<combined nonce>,
%% s=<base64 salt>, i=<iteration count>.  On success returns
%% {ok, ClientFinal, State'} where State' remembers the combined nonce
%% and the server signature expected later by client_finish/2.
client_step(State, ServerResponse) ->
    case parse(ServerResponse) of
        SResp when is_list(SResp) ->
            %% Mandatory attributes; a missing one crashes here, which
            %% callers see as a badarg-style failure.
            I = binary_to_integer(proplists:get_value(<<"i">>, SResp)),
            R = proplists:get_value(<<"r">>, SResp),
            S = base64:decode(proplists:get_value(<<"s">>, SResp)),
            Nonce = State#sasl_state.nonce,
            NonceSize = size(Nonce),
            case R of
                %% The server nonce must be our nonce plus a suffix.
                <<Nonce:NonceSize/binary, _/binary>> ->
                    ClientMsg1 = client_first_message_bare(
                                   State#sasl_state.user,
                                   State#sasl_state.nonce),
                    %% "biws" = base64("n,,"): no channel binding.
                    ClientMsg2 = <<"c=biws,r=", R/binary>>,
                    %% AuthMessage = client-first-bare "," server-first
                    %% "," client-final-without-proof.
                    AuthMessage =
                        <<ClientMsg1/binary, ",",
                          ServerResponse/binary, ",",
                          ClientMsg2/binary>>,
                    Password = State#sasl_state.password,
                    SaltedPassword = scram:salted_password(
                                       sha256, Password, S, I),
                    ClientKey =
                        scram:client_key(sha256, SaltedPassword),
                    StoredKey = scram:stored_key(sha256, ClientKey),
                    ClientSignature =
                        scram:client_signature(sha256, StoredKey, AuthMessage),
                    %% ClientProof = ClientKey XOR ClientSignature.
                    ClientProof =
                        crypto:exor(ClientKey, ClientSignature),
                    P = base64:encode(ClientProof),
                    Msg = <<ClientMsg2/binary, ",p=", P/binary>>,
                    ServerKey =
                        scram:server_key(sha256, SaltedPassword),
                    %% Saved so client_finish/2 can verify v=...
                    V = scram:server_signature(sha256, ServerKey, AuthMessage),
                    {ok, Msg, State#sasl_state{nonce = R, verify = V}};
                _ ->
                    {error, "Bad SASL server nonce"}
            end;
        _ ->
            {error, {"Error parsing server response", ServerResponse}}
    end.
-spec client_finish(#sasl_state{}, binary()) -> ok | {error, any()}.
%% Verify the server-final-message: its v=... attribute must decode to
%% exactly the server signature computed in client_step/2.
client_finish(State, ServerResponse) ->
    case parse(ServerResponse) of
        Fields when is_list(Fields) ->
            Expected = State#sasl_state.verify,
            %% Match against the pinned Expected value; a missing or
            %% different signature falls through to the error branch.
            case base64:decode(proplists:get_value(<<"v">>, Fields)) of
                Expected ->
                    ok;
                _Other ->
                    {error, "SASL server verification failed"}
            end;
        _ ->
            {error, {"Error parsing server response", ServerResponse}}
    end.
%% RFC 5802 "client-first-message-bare": n=<user>,r=<client-nonce>.
client_first_message_bare(User, Nonce) ->
    iolist_to_binary([<<"n=">>, User, <<",r=">>, Nonce]).
%% Minimal parser for comma-separated "key=value" attribute strings as
%% used in SASL messages.  Values may be double-quoted with backslash
%% escapes.  Returns a proplist of {Key, Value} binaries in input
%% order, or the atom 'bad' on malformed input.
parse(S) -> parse1(S, <<>>, []).
%% parse1: scanning a key into S; '=' switches to value parsing.
%% ',' and ' ' are only skipped while the key accumulator is empty.
parse1(<<$=, Cs/binary>>, S, Ts) ->
    parse2(Cs, S, <<>>, Ts);
parse1(<<$,, Cs/binary>>, <<>>, Ts) -> parse1(Cs, <<>>, Ts);
parse1(<<$\s, Cs/binary>>, <<>>, Ts) -> parse1(Cs, <<>>, Ts);
parse1(<<C, Cs/binary>>, S, Ts) -> parse1(Cs, <<S/binary, C>>, Ts);
%% End of input is only legal between pairs (empty key accumulator).
parse1(<<>>, <<>>, T) -> lists:reverse(T);
parse1(<<>>, _S, _T) -> bad.
%% parse2: first character of a value decides quoted vs. bare.
parse2(<<$", Cs/binary>>, Key, Val, Ts) ->
    parse3(Cs, Key, Val, Ts);
parse2(<<C, Cs/binary>>, Key, Val, Ts) ->
    parse4(Cs, Key, <<Val/binary, C>>, Ts);
parse2(<<>>, _, _, _) -> bad.
%% parse3: inside a quoted value; '\' escapes the next character, a
%% closing '"' returns to bare-value scanning, EOF here is an error.
parse3(<<$", Cs/binary>>, Key, Val, Ts) ->
    parse4(Cs, Key, Val, Ts);
parse3(<<$\\, C, Cs/binary>>, Key, Val, Ts) ->
    parse3(Cs, Key, <<Val/binary, C>>, Ts);
parse3(<<C, Cs/binary>>, Key, Val, Ts) ->
    parse3(Cs, Key, <<Val/binary, C>>, Ts);
parse3(<<>>, _, _, _) -> bad.
%% parse4: bare value characters; ',' terminates the pair, spaces are
%% dropped, EOF closes the final pair.
parse4(<<$,, Cs/binary>>, Key, Val, Ts) ->
    parse1(Cs, <<>>, [{Key, Val} | Ts]);
parse4(<<$\s, Cs/binary>>, Key, Val, Ts) ->
    parse4(Cs, Key, Val, Ts);
parse4(<<C, Cs/binary>>, Key, Val, Ts) ->
    parse4(Cs, Key, <<Val/binary, C>>, Ts);
parse4(<<>>, Key, Val, Ts) ->
    parse1(<<>>, <<>>, [{Key, Val} | Ts]).
| null | https://raw.githubusercontent.com/processone/p1_pgsql/9fb6f7ee4e884e92b29e36bba1f1346eba261bff/src/pgsql_sasl.erl | erlang | -------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------------------------------- | @author < >
Copyright ( C ) 2002 - 2021 ProcessOne , SARL . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(pgsql_sasl).
-author('').
-export([client_new/3, client_step/2, client_finish/2]).
-record(sasl_state,
{user :: binary(),
password :: binary(),
nonce :: binary(),
verify = <<>> :: binary()
}).
-spec client_new(binary(), binary(), list(binary())) ->
{ok, binary(), binary(), #sasl_state{}} |
{error, any()}.
client_new(User, Password, Mechs) ->
case lists:member(<<"SCRAM-SHA-256">>, Mechs) of
true ->
Nonce = base64:encode(crypto:strong_rand_bytes(18)),
State = #sasl_state{user = User,
password = Password,
nonce = Nonce},
Msg = client_first_message_bare(User, Nonce),
Response = <<"n,,", Msg/binary>>,
{ok, <<"SCRAM-SHA-256">>, Response, State};
false ->
{error, "No supported SASL mechs"}
end.
-spec client_step(#sasl_state{}, binary()) ->
{ok, binary(), #sasl_state{}} |
{error, any()}.
client_step(State, ServerResponse) ->
case parse(ServerResponse) of
SResp when is_list(SResp) ->
I = binary_to_integer(proplists:get_value(<<"i">>, SResp)),
R = proplists:get_value(<<"r">>, SResp),
S = base64:decode(proplists:get_value(<<"s">>, SResp)),
Nonce = State#sasl_state.nonce,
NonceSize = size(Nonce),
case R of
<<Nonce:NonceSize/binary, _/binary>> ->
ClientMsg1 = client_first_message_bare(
State#sasl_state.user,
State#sasl_state.nonce),
ClientMsg2 = <<"c=biws,r=", R/binary>>,
AuthMessage =
<<ClientMsg1/binary, ",",
ServerResponse/binary, ",",
ClientMsg2/binary>>,
Password = State#sasl_state.password,
SaltedPassword = scram:salted_password(
sha256, Password, S, I),
ClientKey =
scram:client_key(sha256, SaltedPassword),
StoredKey = scram:stored_key(sha256, ClientKey),
ClientSignature =
scram:client_signature(sha256, StoredKey, AuthMessage),
ClientProof =
crypto:exor(ClientKey, ClientSignature),
P = base64:encode(ClientProof),
Msg = <<ClientMsg2/binary, ",p=", P/binary>>,
ServerKey =
scram:server_key(sha256, SaltedPassword),
V = scram:server_signature(sha256, ServerKey, AuthMessage),
{ok, Msg, State#sasl_state{nonce = R, verify = V}};
_ ->
{error, "Bad SASL server nonce"}
end;
_ ->
{error, {"Error parsing server response", ServerResponse}}
end.
-spec client_finish(#sasl_state{}, binary()) -> ok | {error, any()}.
client_finish(State, ServerResponse) ->
case parse(ServerResponse) of
SResp when is_list(SResp) ->
V = base64:decode(proplists:get_value(<<"v">>, SResp)),
if
State#sasl_state.verify == V ->
ok;
true ->
{error, "SASL server verification failed"}
end;
_ ->
{error, {"Error parsing server response", ServerResponse}}
end.
client_first_message_bare(User, Nonce) ->
<<"n=", User/binary, ",r=", Nonce/binary>>.
parse(S) -> parse1(S, <<>>, []).
parse1(<<$=, Cs/binary>>, S, Ts) ->
parse2(Cs, S, <<>>, Ts);
parse1(<<$,, Cs/binary>>, <<>>, Ts) -> parse1(Cs, <<>>, Ts);
parse1(<<$\s, Cs/binary>>, <<>>, Ts) -> parse1(Cs, <<>>, Ts);
parse1(<<C, Cs/binary>>, S, Ts) -> parse1(Cs, <<S/binary, C>>, Ts);
parse1(<<>>, <<>>, T) -> lists:reverse(T);
parse1(<<>>, _S, _T) -> bad.
parse2(<<$", Cs/binary>>, Key, Val, Ts) ->
parse3(Cs, Key, Val, Ts);
parse2(<<C, Cs/binary>>, Key, Val, Ts) ->
parse4(Cs, Key, <<Val/binary, C>>, Ts);
parse2(<<>>, _, _, _) -> bad.
parse3(<<$", Cs/binary>>, Key, Val, Ts) ->
parse4(Cs, Key, Val, Ts);
parse3(<<$\\, C, Cs/binary>>, Key, Val, Ts) ->
parse3(Cs, Key, <<Val/binary, C>>, Ts);
parse3(<<C, Cs/binary>>, Key, Val, Ts) ->
parse3(Cs, Key, <<Val/binary, C>>, Ts);
parse3(<<>>, _, _, _) -> bad.
parse4(<<$,, Cs/binary>>, Key, Val, Ts) ->
parse1(Cs, <<>>, [{Key, Val} | Ts]);
parse4(<<$\s, Cs/binary>>, Key, Val, Ts) ->
parse4(Cs, Key, Val, Ts);
parse4(<<C, Cs/binary>>, Key, Val, Ts) ->
parse4(Cs, Key, <<Val/binary, C>>, Ts);
parse4(<<>>, Key, Val, Ts) ->
parse1(<<>>, <<>>, [{Key, Val} | Ts]).
|
a588d91d1a23957cec145f5fbe05708d7fa5e06e82834c4cab5b625fa8d78c2e | 8c6794b6/haskell-sc-scratch | SparseByWeight.hs | |
Module : $ Header$
CopyRight : ( c ) 8c6794b6
License : :
Stability : unstable
Portability : non - portable
Scratch written while reading
/purely functional data structure/ , by .
This codes contains /sparse/ representation of binary numbers .
/Sparse/ means , the representation does not contain zero .
Module : $Header$
CopyRight : (c) 8c6794b6
License : BSD3
Maintainer :
Stability : unstable
Portability : non-portable
Scratch written while reading
/purely functional data structure/, by Chris Okasaki.
This codes contains /sparse/ representation of binary numbers.
/Sparse/ means, the representation does not contain zero.
-}
module BinNum.SparseByWeight where
-- | A natural number in sparse binary representation: a strictly
-- increasing list of the weights (powers of two) whose bits are one.
-- Zero is the empty list.
type Nat = [Int]

-- | Insert weight @w@, merging ("carrying") colliding equal weights
-- upward, exactly like adding a single bit in binary.
carry :: Int -> Nat -> Nat
carry w ws = case ws of
  []        -> [w]
  w' : rest
    | w < w'    -> w : ws
    | otherwise -> carry (2 * w) rest

-- | Remove weight @w@, "borrowing" (splitting larger weights down)
-- as needed.  Subtracting from zero is reported explicitly instead
-- of dying with an opaque pattern-match failure.
borrow :: Int -> Nat -> Nat
borrow _ [] = error "borrow: cannot subtract from zero"
borrow w ws@(w' : rest)
  | w == w'   = rest
  | otherwise = w : borrow (2 * w) ws

-- | Successor.
inc :: Nat -> Nat
inc = carry 1

-- | Predecessor; partial at zero (see 'borrow').
dec :: Nat -> Nat
dec = borrow 1

-- | Addition: merge the two sorted weight lists, carrying collisions.
add :: Nat -> Nat -> Nat
add xs ys = case (xs, ys) of
  (_, []) -> xs
  ([], _) -> ys
  (w1 : ws1, w2 : ws2)
    | w1 < w2   -> w1 : add ws1 ys
    | w1 > w2   -> w2 : add xs ws2
    | otherwise -> carry (2 * w1) (add ws1 ws2)

-- | Convert to an ordinary 'Int' by summing the weights.
n2i :: Nat -> Int
n2i = sum

-- | Convert a non-negative 'Int' by repeated increment from zero.
i2n :: Int -> Nat
i2n = (iterate inc [] !!)
| null | https://raw.githubusercontent.com/8c6794b6/haskell-sc-scratch/22de2199359fa56f256b544609cd6513b5e40f43/Scratch/FP/PFDS/BinNum/SparseByWeight.hs | haskell | |
Module : $ Header$
CopyRight : ( c ) 8c6794b6
License : :
Stability : unstable
Portability : non - portable
Scratch written while reading
/purely functional data structure/ , by .
This codes contains /sparse/ representation of binary numbers .
/Sparse/ means , the representation does not contain zero .
Module : $Header$
CopyRight : (c) 8c6794b6
License : BSD3
Maintainer :
Stability : unstable
Portability : non-portable
Scratch written while reading
/purely functional data structure/, by Chris Okasaki.
This codes contains /sparse/ representation of binary numbers.
/Sparse/ means, the representation does not contain zero.
-}
module BinNum.SparseByWeight where
type Nat = [Int]
carry :: Int -> Nat -> Nat
carry w ws = case ws of
[] -> [w]
w':rest | w < w' -> w:ws
| otherwise -> carry (2*w) rest
borrow :: Int -> Nat -> Nat
borrow w ws@(w':rest)
| w == w' = rest
| otherwise = w : borrow (2*w) ws
inc :: Nat -> Nat
inc = carry 1
dec :: Nat -> Nat
dec = borrow 1
add :: Nat -> Nat -> Nat
add xs ys = case (xs,ys) of
(_,[]) -> xs
([],_) -> ys
(w1:ws1,w2:ws2)
| w1 < w2 -> w1 : add ws1 ys
| w1 > w2 -> w2 : add xs ws2
| otherwise -> carry (2*w1) (add ws1 ws2)
n2i :: Nat -> Int
n2i = sum
i2n :: Int -> Nat
i2n = (iterate inc [] !!)
|
|
4b4afc7afda68b14cbe999e37a33df09e21a8876a613cf1453445059a915f1c9 | manuel-serrano/bigloo | gn.scm | ;*=====================================================================*/
* serrano / prgm / project / bigloo / / comptime / Globalize / gn.scm * /
;* ------------------------------------------------------------- */
* Author : * /
* Creation : Thu Jan 26 14:54:22 1995 * /
* Last change : We d Jun 16 16:02:00 2021 ( serrano ) * /
* Copyright : 1995 - 2021 , see LICENSE file * /
;* ------------------------------------------------------------- */
* We compute the G0 and G1 properties which is defined as follow : * /
;* */
* Let A(f , ) < = > f is a free function in g , called by g * /
;* E(f) <=> f is a function used as value */
;* */
* then G0(f , ) < = > E(f ) v ( # g , E(f ) ^ A(g , f ) ) * /
* G1(f , ) < = > G0(f ) ^ ! ( E(f ) ) * /
;* */
;* # = exists */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module globalize_gn
(include "Tools/trace.sch")
(import tools_shape
type_type
ast_var
ast_node
globalize_ginfo
globalize_globalize)
(export (Gn! local* ::node ::variable variable*)))
;*---------------------------------------------------------------------*/
;* Gn! ... */
;* ------------------------------------------------------------- */
* In order to compute the E property , we first compute the E * /
;* set, the set of all escaping functions. During this tree */
;* walk, we compute the call-graph (using the fun-Ginfo */
;* structure). */
;*---------------------------------------------------------------------*/
(define (Gn! args node caller g)
   ;; Seed G with the escaping functions computed by E, then close it
   ;; under the call graph: every function called by a member of G is
   ;; globalized too.  *G0* is the full closure (E plus the functions
   ;; added), *G1* only the ones added by closure (G0 minus E).
   (set! *E* (E node caller g))
   (let loop ((G *E*)
	      (G1 '()))
      (if (null? G)
	  (begin
	     (set! *G0* (append *E* G1))
	     (set! *G1* G1))
	  ;; G-from-cto marks and returns the not-yet-seen callees of
	  ;; (car G); they join both the worklist and G1.
	  (let ((new-G (G-from-cto (car G))))
	     (loop (append new-G (cdr G))
		   (append new-G G1))))))
;*---------------------------------------------------------------------*/
;* E ... */
;*---------------------------------------------------------------------*/
(define-generic (E node::node caller::variable g))
;*---------------------------------------------------------------------*/
;* E ::atom ... */
;*---------------------------------------------------------------------*/
;; Leaf nodes carry no function references: G is returned unchanged.
(define-method (E node::atom caller::variable g)
   g)
;*---------------------------------------------------------------------*/
;*    E ::kwote ...                                                    */
;*---------------------------------------------------------------------*/
(define-method (E node::kwote caller::variable g)
   g)
;*---------------------------------------------------------------------*/
;*    E ::var ...                                                      */
;*---------------------------------------------------------------------*/
;; A plain variable reference is not a closure creation (see ::closure).
(define-method (E node::var caller::variable g)
   g)
;*---------------------------------------------------------------------*/
;* E ::closure ... */
;*---------------------------------------------------------------------*/
(define-method (E node::closure caller::variable g)
   ;; A closure node is a function used as a value: record the use in
   ;; CALLER (for the A relation), and if the local really escapes and
   ;; is not yet marked, flag it G and add it to the escape set.
   (let ((var (var-variable node)))
      (save-fun! caller var)
      (if (and (local? var)
	       ;; due to cfa, `fun' may introduce non escaping functions
	       (local/Ginfo-escape? var)
	       (not (sfun/Ginfo-G? (local-value var))))
	  (begin
	     (sfun/Ginfo-G?-set! (local-value var) #t)
	     (cons var g))
	  g)))
;*---------------------------------------------------------------------*/
;* E ::sequence ... */
;*---------------------------------------------------------------------*/
;; Sequence: fold E over the subexpressions.
(define-method (E node::sequence caller g)
   (E* (sequence-nodes node) caller g))
;*---------------------------------------------------------------------*/
;*    E ::sync ...                                                     */
;*---------------------------------------------------------------------*/
;; Synchronization: scan the mutex, prelock and body expressions.
(define-method (E node::sync caller g)
   (E (sync-body node) caller
      (E (sync-prelock node) caller
	 (E (sync-mutex node) caller g))))
;*---------------------------------------------------------------------*/
;* E ::app ... */
;*---------------------------------------------------------------------*/
;; Direct call: record the caller->callee edge, then scan the args.
;; The operator itself is a direct reference, not an escaping value.
(define-method (E node::app caller g)
   (with-access::app node (fun args)
      (save-app! caller (var-variable fun))
      (E* args caller g)))
;*---------------------------------------------------------------------*/
;*    E ::app-ly ...                                                   */
;*---------------------------------------------------------------------*/
;; `apply': both the function expression and the argument are values.
(define-method (E node::app-ly caller g)
   (with-access::app-ly node (fun arg)
      (E fun caller (E arg caller g))))
;*---------------------------------------------------------------------*/
;*    E ::funcall ...                                                  */
;*---------------------------------------------------------------------*/
;; Computed call: the operator expression is scanned like any value.
(define-method (E node::funcall caller g)
   (with-access::funcall node (fun args)
      (E fun caller (E* args caller g))))
;*---------------------------------------------------------------------*/
;* E ::extern ... */
;*---------------------------------------------------------------------*/
;; Foreign call: scan the argument expressions.
(define-method (E node::extern caller g)
   (with-access::extern node (expr*)
      (E* expr* caller g)))
;*---------------------------------------------------------------------*/
;*    E ::cast ...                                                     */
;*---------------------------------------------------------------------*/
(define-method (E node::cast caller g)
   (with-access::cast node (arg)
      (E arg caller g)))
;*---------------------------------------------------------------------*/
;*    E ::setq ...                                                     */
;*---------------------------------------------------------------------*/
;; Assignment: only the right-hand side can contain closures.
(define-method (E node::setq caller g)
   (with-access::setq node (value)
      (E value caller g)))
;*---------------------------------------------------------------------*/
;*    E ::conditional ...                                              */
;*---------------------------------------------------------------------*/
(define-method (E node::conditional caller g)
   (with-access::conditional node (test true false)
      (E test caller (E true caller (E false caller g)))))
;*---------------------------------------------------------------------*/
;*    E ::fail ...                                                     */
;*---------------------------------------------------------------------*/
(define-method (E node::fail caller g)
   (with-access::fail node (proc msg obj)
      (E proc caller (E msg caller (E obj caller g)))))
;*---------------------------------------------------------------------*/
;*    E ::switch ...                                                   */
;*---------------------------------------------------------------------*/
;; Switch: scan each clause body, then the tested expression.
(define-method (E node::switch caller g)
   (with-access::switch node (clauses test)
      (let loop ((clauses clauses)
		 (g g))
	 (if (null? clauses)
	     (E test caller g)
	     (loop (cdr clauses) (E (cdr (car clauses)) caller g))))))
;*---------------------------------------------------------------------*/
;* E ::let-fun ... */
;*---------------------------------------------------------------------*/
;; Local function definitions: each local function body is scanned
;; with that local as the caller, then the let body with CALLER.
(define-method (E node::let-fun caller g)
   (with-access::let-fun node (body locals)
      (let loop ((locals locals)
		 (g g))
	 (if (null? locals)
	     (E body caller g)
	     (loop (cdr locals)
		   (E (sfun-body (local-value (car locals)))
		      (car locals)
		      g))))))
;*---------------------------------------------------------------------*/
;*    E ::let-var ...                                                  */
;*---------------------------------------------------------------------*/
;; Local variable bindings: scan every init expression, then the body.
(define-method (E node::let-var caller g)
   (with-access::let-var node (body bindings)
      (let loop ((bindings bindings)
		 (g g))
	 (if (null? bindings)
	     (E body caller g)
	     (loop (cdr bindings)
		   (E (cdr (car bindings)) caller g))))))
;*---------------------------------------------------------------------*/
;* E ::set-ex-it ... */
;*---------------------------------------------------------------------*/
;; Exit handling and box nodes: plain structural traversals.
(define-method (E node::set-ex-it caller g)
   (with-access::set-ex-it node (body onexit)
      (E body caller (E onexit caller g))))
;*---------------------------------------------------------------------*/
;*    E ::jump-ex-it ...                                               */
;*---------------------------------------------------------------------*/
(define-method (E node::jump-ex-it caller g)
   (with-access::jump-ex-it node (exit value)
      (E exit caller (E value caller g))))
;*---------------------------------------------------------------------*/
;*    E ::make-box ...                                                 */
;*---------------------------------------------------------------------*/
(define-method (E node::make-box caller g)
   (with-access::make-box node (value)
      (E value caller g)))
;*---------------------------------------------------------------------*/
;*    E ::box-ref ...                                                  */
;*---------------------------------------------------------------------*/
(define-method (E node::box-ref caller g)
   (with-access::box-ref node (var)
      (E var caller g)))
;*---------------------------------------------------------------------*/
;*    E ::box-set! ...                                                 */
;*---------------------------------------------------------------------*/
(define-method (E node::box-set! caller g)
   (with-access::box-set! node (var value)
      (E var caller (E value caller g))))
;*---------------------------------------------------------------------*/
;* E* ... */
;*---------------------------------------------------------------------*/
(define (E* node* caller g)
   ;; Fold E over NODE* left to right, threading the G accumulator.
   (if (null? node*)
       g
       (E* (cdr node*) caller (E (car node*) caller g))))
;*---------------------------------------------------------------------*/
;* save-app! ... */
;*---------------------------------------------------------------------*/
(define (save-app! caller callee)
   ;; Record the call edge CALLER -> CALLEE in the Ginfo call graph:
   ;; cfrom on the callee, cto on the caller.  Global callees are
   ;; ignored since only locals can be globalized.  Both lists are
   ;; kept duplicate-free via the memq test.
   (if (global? callee)
       'done
       (let ((callee-info (local-value callee)))
	  (if (not (memq caller (sfun/Ginfo-cfrom callee-info)))
	      (begin
		 (sfun/Ginfo-cfrom-set! callee-info
					(cons caller
					      (sfun/Ginfo-cfrom callee-info)))
		 (let ((caller-info (variable-value caller)))
		    (sfun/Ginfo-cto-set! caller-info
					 (cons callee
					       (sfun/Ginfo-cto
						caller-info))))))
	  'done)))
;*---------------------------------------------------------------------*/
;* save-fun! ... */
;*---------------------------------------------------------------------*/
(define (save-fun! caller callee)
   ;; Record that CALLEE is used as a first-class value inside CALLER
   ;; (the A(f,g) relation of the module header); relevant only when
   ;; both are locals.  The efunctions list stays duplicate-free.
   (if (or (global? caller) (global? callee))
       'done
       (let ((caller-info (local-value caller)))
	  (trace (globalize 3) "save-fun!: "
		 (shape caller) " " (shape callee)
		 #\Newline)
	  (if (not (memq callee (sfun/Ginfo-efunctions caller-info)))
	      (sfun/Ginfo-efunctions-set! caller-info
		 (cons callee (sfun/Ginfo-efunctions caller-info))))
	  'done)))
;*---------------------------------------------------------------------*/
;* G-from-cto ... */
;*---------------------------------------------------------------------*/
(define (G-from-cto local)
   ;; Return the callees of LOCAL not yet marked G, marking each as it
   ;; is collected.  This is the one-step closure used by Gn!'s
   ;; worklist loop.
   (let loop ((cto (sfun/Ginfo-cto (local-value local)))
	      (G '()))
      (cond
	 ((null? cto)
	  G)
	 ((sfun/Ginfo-G? (local-value (car cto)))
	  ;; Already globalized: nothing new to propagate.
	  (loop (cdr cto) G))
	 (else
	  (sfun/Ginfo-G?-set! (local-value (car cto)) #t)
	  (loop (cdr cto) (cons (car cto) G))))))
| null | https://raw.githubusercontent.com/manuel-serrano/bigloo/fdeac39af72d5119d01818815b0f395f2907d6da/comptime/Globalize/gn.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* */
* E(f) <=> f is a function used as value */
* */
* */
* # = exists */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* Gn! ... */
* ------------------------------------------------------------- */
* set, the set of all escaping functions. During this tree */
* walk, we compute the call-graph (using the fun-Ginfo */
* structure). */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::atom ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::kwote ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::var ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::closure ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::sequence ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::sync ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::app ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::app-ly ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::funcall ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::extern ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::cast ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::setq ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::conditional ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::fail ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::switch ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::let-fun ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::let-var ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::set-ex-it ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::jump-ex-it ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::make-box ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::box-ref ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E ::box-set! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* E* ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* save-app! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* save-fun! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* G-from-cto ... */
*---------------------------------------------------------------------*/ | * serrano / prgm / project / bigloo / / comptime / Globalize / gn.scm * /
* Author : * /
* Creation : Thu Jan 26 14:54:22 1995 * /
* Last change : We d Jun 16 16:02:00 2021 ( serrano ) * /
* Copyright : 1995 - 2021 , see LICENSE file * /
* We compute the G0 and G1 properties which is defined as follow : * /
* Let A(f , ) < = > f is a free function in g , called by g * /
* then G0(f , ) < = > E(f ) v ( # g , E(f ) ^ A(g , f ) ) * /
* G1(f , ) < = > G0(f ) ^ ! ( E(f ) ) * /
(module globalize_gn
(include "Tools/trace.sch")
(import tools_shape
type_type
ast_var
ast_node
globalize_ginfo
globalize_globalize)
(export (Gn! local* ::node ::variable variable*)))
* In order to compute the E property , we first compute the E * /
(define (Gn! args node caller g)
(set! *E* (E node caller g))
(let loop ((G *E*)
(G1 '()))
(if (null? G)
(begin
(set! *G0* (append *E* G1))
(set! *G1* G1))
(let ((new-G (G-from-cto (car G))))
(loop (append new-G (cdr G))
(append new-G G1))))))
(define-generic (E node::node caller::variable g))
(define-method (E node::atom caller::variable g)
g)
(define-method (E node::kwote caller::variable g)
g)
(define-method (E node::var caller::variable g)
g)
(define-method (E node::closure caller::variable g)
(let ((var (var-variable node)))
(save-fun! caller var)
(if (and (local? var)
du to cfa , ` fun ' may introduce non escaping functions
(local/Ginfo-escape? var)
(not (sfun/Ginfo-G? (local-value var))))
(begin
(sfun/Ginfo-G?-set! (local-value var) #t)
(cons var g))
g)))
(define-method (E node::sequence caller g)
(E* (sequence-nodes node) caller g))
(define-method (E node::sync caller g)
(E (sync-body node) caller
(E (sync-prelock node) caller
(E (sync-mutex node) caller g))))
(define-method (E node::app caller g)
(with-access::app node (fun args)
(save-app! caller (var-variable fun))
(E* args caller g)))
(define-method (E node::app-ly caller g)
(with-access::app-ly node (fun arg)
(E fun caller (E arg caller g))))
(define-method (E node::funcall caller g)
(with-access::funcall node (fun args)
(E fun caller (E* args caller g))))
(define-method (E node::extern caller g)
(with-access::extern node (expr*)
(E* expr* caller g)))
(define-method (E node::cast caller g)
(with-access::cast node (arg)
(E arg caller g)))
(define-method (E node::setq caller g)
(with-access::setq node (value)
(E value caller g)))
(define-method (E node::conditional caller g)
(with-access::conditional node (test true false)
(E test caller (E true caller (E false caller g)))))
(define-method (E node::fail caller g)
(with-access::fail node (proc msg obj)
(E proc caller (E msg caller (E obj caller g)))))
(define-method (E node::switch caller g)
(with-access::switch node (clauses test)
(let loop ((clauses clauses)
(g g))
(if (null? clauses)
(E test caller g)
(loop (cdr clauses) (E (cdr (car clauses)) caller g))))))
(define-method (E node::let-fun caller g)
(with-access::let-fun node (body locals)
(let loop ((locals locals)
(g g))
(if (null? locals)
(E body caller g)
(loop (cdr locals)
(E (sfun-body (local-value (car locals)))
(car locals)
g))))))
(define-method (E node::let-var caller g)
(with-access::let-var node (body bindings)
(let loop ((bindings bindings)
(g g))
(if (null? bindings)
(E body caller g)
(loop (cdr bindings)
(E (cdr (car bindings)) caller g))))))
(define-method (E node::set-ex-it caller g)
(with-access::set-ex-it node (body onexit)
(E body caller (E onexit caller g))))
(define-method (E node::jump-ex-it caller g)
(with-access::jump-ex-it node (exit value)
(E exit caller (E value caller g))))
(define-method (E node::make-box caller g)
(with-access::make-box node (value)
(E value caller g)))
(define-method (E node::box-ref caller g)
(with-access::box-ref node (var)
(E var caller g)))
(define-method (E node::box-set! caller g)
(with-access::box-set! node (var value)
(E var caller (E value caller g))))
(define (E* node* caller g)
(let loop ((node* node*)
(g g))
(if (null? node*)
g
(loop (cdr node*)
(E (car node*) caller g)))))
(define (save-app! caller callee)
(if (global? callee)
'done
(let ((callee-info (local-value callee)))
(if (not (memq caller (sfun/Ginfo-cfrom callee-info)))
(begin
(sfun/Ginfo-cfrom-set! callee-info
(cons caller
(sfun/Ginfo-cfrom callee-info)))
(let ((caller-info (variable-value caller)))
(sfun/Ginfo-cto-set! caller-info
(cons callee
(sfun/Ginfo-cto
caller-info))))))
'done)))
(define (save-fun! caller callee)
(if (or (global? caller) (global? callee))
'done
(let ((caller-info (local-value caller)))
(trace (globalize 3) "save-fun!: "
(shape caller) " " (shape callee)
#\Newline)
(if (not (memq callee (sfun/Ginfo-efunctions caller-info)))
(sfun/Ginfo-efunctions-set! caller-info
(cons callee (sfun/Ginfo-efunctions caller-info))))
'done)))
(define (G-from-cto local)
(let loop ((cto (sfun/Ginfo-cto (local-value local)))
(G '()))
(cond
((null? cto)
G)
((sfun/Ginfo-G? (local-value (car cto)))
(loop (cdr cto) G))
(else
(sfun/Ginfo-G?-set! (local-value (car cto)) #t)
(loop (cdr cto) (cons (car cto) G))))))
|
004675c7b1366ee4c11d7dcf96b73db38eda6a6a5d4238457e9015e980b04e78 | google/mlir-hs | Pass.hs | Copyright 2021 Google LLC
--
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- -2.0
--
-- Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module MLIR.Native.Pass where
import qualified Language.C.Inline as C
import Control.Exception (bracket)
import MLIR.Native.FFI
C.context $ C.baseCtx <> mlirCtx
C.include "mlir-c/IR.h"
C.include "mlir-c/Pass.h"
C.include "mlir-c/Conversion.h"
-- TODO(apaszke): Flesh this out based on the header
--------------------------------------------------------------------------------
-- Pass manager
-- | Allocate a fresh pass manager attached to the given MLIR context.
-- The caller owns it and must release it with 'destroyPassManager'
-- (or use 'withPassManager', which guarantees cleanup).
createPassManager :: Context -> IO PassManager
createPassManager ctx =
[C.exp| MlirPassManager { mlirPassManagerCreate($(MlirContext ctx)) } |]
-- | Release a pass manager obtained from 'createPassManager'.
destroyPassManager :: PassManager -> IO ()
destroyPassManager pm =
[C.exp| void { mlirPassManagerDestroy($(MlirPassManager pm)) } |]
-- | Bracketed helper: create a pass manager, run the action, and
-- destroy the manager even if the action throws.
withPassManager :: Context -> (PassManager -> IO a) -> IO a
withPassManager ctx = bracket (createPassManager ctx) destroyPassManager
-- | Run the manager's pipeline over a module.  Check the returned
-- 'LogicalResult' to see whether the passes succeeded.
runPasses :: PassManager -> Module -> IO LogicalResult
runPasses pm m =
[C.exp| MlirLogicalResult { mlirPassManagerRun($(MlirPassManager pm), $(MlirModule m)) } |]
--------------------------------------------------------------------------------
Transform passes
--------------------------------------------------------------------------------
-- Conversion passes
-- | Append the FinalizeMemRefToLLVM conversion pass to the pipeline.
addConvertMemRefToLLVMPass :: PassManager -> IO ()
addConvertMemRefToLLVMPass pm =
[C.exp| void {
mlirPassManagerAddOwnedPass($(MlirPassManager pm), mlirCreateConversionFinalizeMemRefToLLVMConversionPass())
} |]
-- | Append the ConvertFuncToLLVM conversion pass to the pipeline.
addConvertFuncToLLVMPass :: PassManager -> IO ()
addConvertFuncToLLVMPass pm =
[C.exp| void {
mlirPassManagerAddOwnedPass($(MlirPassManager pm), mlirCreateConversionConvertFuncToLLVM())
} |]
-- | Append the ConvertVectorToLLVM conversion pass to the pipeline.
addConvertVectorToLLVMPass :: PassManager -> IO ()
addConvertVectorToLLVMPass pm =
[C.exp| void {
mlirPassManagerAddOwnedPass($(MlirPassManager pm), mlirCreateConversionConvertVectorToLLVM())
} |]
-- | Append the ReconcileUnrealizedCasts pass; typically scheduled
-- last to clean up casts left behind by the conversions above.
addConvertReconcileUnrealizedCastsPass :: PassManager -> IO ()
addConvertReconcileUnrealizedCastsPass pm =
[C.exp| void {
mlirPassManagerAddOwnedPass($(MlirPassManager pm), mlirCreateConversionReconcileUnrealizedCasts())
} |]
| null | https://raw.githubusercontent.com/google/mlir-hs/30c1c908df3b6676d3f022dc5ff73dabfd55fbe0/src/MLIR/Native/Pass.hs | haskell |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
TODO(apaszke): Flesh this out based on the header
------------------------------------------------------------------------------
Pass manager
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Conversion passes | Copyright 2021 Google LLC
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
module MLIR.Native.Pass where
import qualified Language.C.Inline as C
import Control.Exception (bracket)
import MLIR.Native.FFI
C.context $ C.baseCtx <> mlirCtx
C.include "mlir-c/IR.h"
C.include "mlir-c/Pass.h"
C.include "mlir-c/Conversion.h"
createPassManager :: Context -> IO PassManager
createPassManager ctx =
[C.exp| MlirPassManager { mlirPassManagerCreate($(MlirContext ctx)) } |]
destroyPassManager :: PassManager -> IO ()
destroyPassManager pm =
[C.exp| void { mlirPassManagerDestroy($(MlirPassManager pm)) } |]
withPassManager :: Context -> (PassManager -> IO a) -> IO a
withPassManager ctx = bracket (createPassManager ctx) destroyPassManager
runPasses :: PassManager -> Module -> IO LogicalResult
runPasses pm m =
[C.exp| MlirLogicalResult { mlirPassManagerRun($(MlirPassManager pm), $(MlirModule m)) } |]
Transform passes
addConvertMemRefToLLVMPass :: PassManager -> IO ()
addConvertMemRefToLLVMPass pm =
[C.exp| void {
mlirPassManagerAddOwnedPass($(MlirPassManager pm), mlirCreateConversionFinalizeMemRefToLLVMConversionPass())
} |]
addConvertFuncToLLVMPass :: PassManager -> IO ()
addConvertFuncToLLVMPass pm =
[C.exp| void {
mlirPassManagerAddOwnedPass($(MlirPassManager pm), mlirCreateConversionConvertFuncToLLVM())
} |]
addConvertVectorToLLVMPass :: PassManager -> IO ()
addConvertVectorToLLVMPass pm =
[C.exp| void {
mlirPassManagerAddOwnedPass($(MlirPassManager pm), mlirCreateConversionConvertVectorToLLVM())
} |]
addConvertReconcileUnrealizedCastsPass :: PassManager -> IO ()
addConvertReconcileUnrealizedCastsPass pm =
[C.exp| void {
mlirPassManagerAddOwnedPass($(MlirPassManager pm), mlirCreateConversionReconcileUnrealizedCasts())
} |]
|
ef7275bf7091c3ac617abb75bab622a70ae9d1532363e33195267b60164e9285 | dozzie/toml | toml.erl | %%%---------------------------------------------------------------------------
%%% @doc
TOML parser module .
%%% @end
%%%---------------------------------------------------------------------------
-module(toml).
%% parser wrappers
-export([read_file/1, read_file/2]).
-export([parse/1, parse/2]).
%% explaining errors
-export([format_error/1]).
%% data accessors
-export([get_value/3, get_value/4, exists/2, exists/3]).
-export([keys/2, sections/2, foldk/4, folds/4]).
%-export([to_list/1, to_list/2]).
-export_type([config/0, section/0, key/0, toml_value/0]).
-export_type([toml_array/0, datetime/0]).
-export_type([jsx_object/0, jsx_list/0, jsx_value/0]).
-export_type([validate_fun/0, validate_fun_return/0]).
-export_type([validate_location/0, validate_error/0]).
-export_type([toml_error/0, semantic_error/0]).
-export_type([semerr_redefinition/0, semerr_inline/0]).
-export_type([semerr_data_location/0, semerr_location/0]).
%%%---------------------------------------------------------------------------
%%% data types
%%----------------------------------------------------------
%% main types {{{
-opaque config() :: {toml, term()}.
A tuple with atom ` toml ' being its first element .
-type section() :: [string()].
Name of a section ( " table " in TOML 's terms ) . Root section is denoted by
%% empty list (`[]').
-type key() :: string().
%% Name of a value in a section.
-type toml_value() ::
{string, string()}
| {integer, integer()}
| {float, float()}
| {boolean, boolean()}
| {datetime, datetime()}
| {array, toml_array()}
| {data, term()}.
%% Value stored under {@type key()}, along with its type.
%%
Custom Erlang structure returned by validation function ( { @type
%% validate_fun()}) is denoted by {@type @{data, Data@}}.
%%
Array of values is doubly typed , first as an array , and then with data type
of its content , e.g. ` { array , { string , [ " one " , " two " , " three " ] } } ' . See
%% {@type toml_array()} for details.
%% }}}
%%----------------------------------------------------------
%% auxiliary types {{{
-type toml_array() ::
{empty, []}
| {string, [string(), ...]}
| {integer, [integer(), ...]}
| {float, [float(), ...]}
| {boolean, [boolean(), ...]}
| {datetime, [datetime(), ...]}
| {array, [toml_array(), ...]}
| {object, [jsx_object(), ...]}.
%% Representation of array's content.
-type datetime() ::
{datetime, calendar:datetime(), TZ :: string()}
| {datetime, calendar:datetime()}
| {date, calendar:date()}
| {time, calendar:time()}.
%% RFC 3339 timestamp (with or without timezone), date, or time.
%%
` TZ ' is either a ` " Z " ' ( the same as ` " +00:00 " ' ) or has format
` " [ + -]HH : " ' .
-type jsx_object() :: [{}] | [{binary(), jsx_value()}, ...].
%% Object (inline section/table) representation, jsx-style.
-type jsx_list() :: [jsx_value()].
%% Array representation, jsx-style.
-type jsx_value() :: binary()
| integer()
| float()
| boolean()
| datetime()
| jsx_list()
| jsx_object().
%% Arbitrary value (scalar/array/object), jsx-style. {@type datetime()} is not
%% really jsx-compatible, and there's no `null'.
%% }}}
%%----------------------------------------------------------
%% validation function {{{
-type validate_fun() ::
fun((section(), key(), toml_value() | section, Arg :: term()) ->
validate_fun_return()).
%% Key validation callback. This callback is specified at configuration
%% parsing time and has a chance to further verify validity of a value or even
%% convert it already to its intended form, e.g. listen address
%% `"<host>:<port>"' can be immediately converted to `{Host,Port}' tuple.
%%
< b > NOTE</b > : Array section ( " array of tables " in TOML 's terms ) is passed
%% as an array of objects, i.e.
{ @type @{array , @{object , [ jsx_object ( ) , ... ] @}@ } } .
%%
%% Since it's not allowed to have a section and key of the same name,
%% subsections themselves are also subject to validation. Validation function
%% can return `ok', `{ok,_}', or `ignore' to accept the section name (the
three values have the same result ; any data from ` { ok , Data } ' is ignored )
%% and `{error,_}' to reject the name.
-type validate_fun_return() :: ok | {ok, Data :: term()} | ignore
| {error, validate_error()}.
Expected return values from { @type validate_fun ( ) } .
%%
%% {@type @{ok, Data@}} results in the {@type toml_value()} of `{data, Data}'.
%% See {@link get_value/3}.
%%
%% {@type @{error, Reason :: validate_error()@}} is reported by
%% {@link read_file/2} and {@link parse/2} as
{ @type @{error , @{validate , Where : : validate_location ( ) , Reason@}@ } } .
-type validate_error() :: term().
Error returned by { @type validate_fun ( ) } . See { @type ( ) }
%% for details.
%% }}}
%%----------------------------------------------------------
%% errors {{{
-type toml_error() :: {tokenize, Line :: pos_integer()}
| {parse, Line :: pos_integer()}
| {semantic, semantic_error()}
| {bad_return, validate_location(), Result :: term()}
| {validate, validate_location(), validate_error()}.
Error in processing TOML .
-type validate_location() ::
{Section :: [string()], Key :: string(), Line :: pos_integer()}.
Location information of validation error ( see { @type validate_fun ( ) } ) .
-type semantic_error() :: semerr_redefinition() | semerr_inline().
Data - level error , meaning that data represented by TOML config is forbidden
by TOML specification .
-type semerr_redefinition() ::
{auto_section, key, semerr_location()}
| {section, key | section | array_section, semerr_location()}
| {array_section, key | section | auto_section, semerr_location()}
| {key, key | section | auto_section | array_section, semerr_location()}.
%% Error signifying that a key/section was already defined, either explicitly
%% or implicitly. The structure of the error follows convention of
{ @type @{Type , PreviousType , semerr_location()@ } } .
%%
%% `auto_section' in `Type' means that there already exists a key with the
same name as one of the parent sections of the current section .
%%
` auto_section ' in ` PreviousType ' means that the section was not defined
%% explicitly, but earlier sections restrict how it could look like (i.e.
%% a subsection was already defined).
-type semerr_inline() ::
{duplicate, Key :: string(), semerr_data_location(), semerr_location()}
| {type_mismatch,
{Pos :: pos_integer(), OffendingType :: atom(), ExpectedType :: atom()},
semerr_data_location(), semerr_location()}.
Error signifying that inline object has two keys of the same name or an
%% inline array has elements of different types.
%%
` Pos ' is a 1 - based index in the array , ` ExpectedType ' is data type of the
first array element , and ` ' is the type of the first element
%% that doesn't match.
-type semerr_data_location() ::
[pos_integer() | string()].
%% Location of a semantic error in inline data (arrays and objects). The
%% location is a path specified in terms appropriate for respective data
types : key for objects , 1 - based index for arrays .
-type semerr_location() ::
{Path :: [string(), ...], CurLine :: pos_integer(),
PrevLine :: pos_integer()}.
%% Location information of semantic error. `Path' is name of the offending
%% section and, if applicable, key.
%% }}}
%%----------------------------------------------------------
%%%---------------------------------------------------------------------------
%%% parser wrappers
%%%---------------------------------------------------------------------------
%% @doc Read a TOML file from disk.
-spec read_file(file:filename()) ->
    {ok, config()} | {error, ReadError | toml_error()}
  when ReadError :: file:posix() | badarg | terminated | system_limit.
read_file(Path) ->
    %% Slurp the whole file and hand the bytes to the string parser;
    %% a posix-level read error is passed through untouched.
    case file:read_file(Path) of
        {ok, Content} -> parse(Content);
        {error, _} = Error -> Error
    end.
%% @doc Read a TOML file from disk, validating its keys.
%%
%% Each of the keys in the file is passed through a validation callback that
%% can accept the key, reject it, make it skipped, or further parse its
%% value for later retrieval.
-spec read_file(file:filename(), {validate_fun(), Arg :: term()}) ->
    {ok, config()} | {error, ReadError | toml_error()}
  when ReadError :: file:posix() | badarg | terminated | system_limit.
read_file(Path, {Fun, _Arg} = Validate) when is_function(Fun, 4) ->
    %% Read the raw bytes, then parse with the caller's validation
    %% callback applied to every key in the document.
    case file:read_file(Path) of
        {ok, Content} -> parse(Content, Validate);
        {error, _} = Error -> Error
    end.
%% @doc Parse a TOML config from a string.
-spec parse(string() | binary() | iolist()) ->
    {ok, config()} | {error, toml_error()}.
parse(Source) ->
    %% No user validation requested: use the accept-everything
    %% callback with a dummy argument.
    NoValidation = {fun accept_all/4, []},
    parse(Source, NoValidation).
%% @doc Parse a TOML config from a string, validating its keys.
%%
%% Each of the keys in the config is passed through a validation callback
%% that can accept the key, reject it, make it skipped, or further parse its
%% value for later retrieval.
-spec parse(string() | binary() | iolist(), {validate_fun(), Arg :: term()}) ->
    {ok, config()} | {error, toml_error()}.
parse(Source, {Fun, Arg} = _Validate) when is_function(Fun, 4) ->
    %% NOTE: the grammar assumes the input ends with a newline character.
    case toml_lexer:tokenize(Source) of
        {ok, Tokens, _EndLine} ->
            parse_tokens(Tokens, Fun, Arg);
        {error, {LineNumber, _LexerModule, _Message}, _} ->
            {error, {tokenize, LineNumber}}
    end.

%% @private Run the grammar over the token stream and, on success,
%% fold the AST into a config while validating each key.
parse_tokens(Tokens, Fun, Arg) ->
    case toml_parser:parse(Tokens) of
        {ok, AST} ->
            build_config(AST, Fun, Arg);
        {error, {LineNumber, _ParserModule, _Message}} ->
            {error, {parse, LineNumber}}
    end.
%% @doc Default validation function that accepts all values.
-spec accept_all(section(), key(), toml_value(), term()) ->
ok.
%% Accept every section/key unchanged; used when the caller supplies
%% no validation callback of its own.
accept_all(_Section, _Key, _Value, _Arg) -> ok.
%%----------------------------------------------------------
%% build_config() {{{
%% @doc Convert AST coming from parser to a config representation.
%%
%% Builds a `toml_dict' store from the parsed directives, then folds
%% over it, passing every key through the user's validation callback.
%% The fold callback signals validation failures with `throw', which
%% is converted here into an `{error, _}' tuple annotated with the
%% offending line number (looked up in the store).
-spec build_config([term()], validate_fun(), term()) ->
{ok, config()} | {error, Reason}
when Reason :: {semantic, term()}
| {validate, Where :: validate_location(), validate_error()}
| {bad_return, Where :: validate_location(), term()}.
build_config(Directives, Fun, Arg) ->
case toml_dict:build_store(Directives) of
{ok, Store} ->
%% the root section ([]) always exists, even for an empty document
EmptyConfig = dict:store([], empty_section(), dict:new()),
try toml_dict:fold(fun build_config/4, {Fun, Arg, EmptyConfig}, Store) of
{_, _, Config} -> {ok, {toml, Config}}
catch
%% validation callback returned an unrecognized term
throw:{bad_return, {Section, Key}, Result} ->
Line = toml_dict:find_line(Section, Key, Store),
{error, {bad_return, {Section, Key, Line}, Result}};
%% validation callback explicitly rejected the value
throw:{validate, {Section, Key}, Reason} ->
Line = toml_dict:find_line(Section, Key, Store),
{error, {validate, {Section, Key, Line}, Reason}}
end;
{error, Reason} ->
{error, {semantic, Reason}}
end.
%% @doc Fold workhorse for {@link build_config/1}.
%%
%% Accumulator is `{ValidateFun, Arg, Config}'.  Two cases: a
%% subsection definition (third argument is the atom `section') and
%% a plain key/value pair.  Validation failures are thrown and caught
%% by the caller, which attaches line information.
-spec build_config(section(), key(), section | toml_value(), term()) ->
term().
build_config(Section, Key, section = _Value, {ValidateFun, Arg, Config}) ->
%% A subsection name is validated too, but any {ok, Data} payload is
%% discarded -- sections carry no value of their own.
case ValidateFun(Section, Key, section, Arg) of
ok -> ok;
{ok, _Data} -> ignore;
ignore -> ok;
{error, Reason} -> erlang:throw({validate, {Section, Key}, Reason});
Result -> erlang:throw({bad_return, {Section, Key}, Result})
end,
%% register Key as a subsection of Section...
NewConfig = dict:update(
Section,
fun({Keys, SubSects}) -> {Keys, [Key | SubSects]} end,
Config
),
%% ...and create an empty entry for the new section itself
{ValidateFun, Arg, dict:store(Section ++ [Key], empty_section(), NewConfig)};
build_config(Section, Key, {_T, _V} = Value, {ValidateFun, Arg, Config}) ->
% NOTE: array value from `toml_dict' is compatible with this module
NewConfig = case ValidateFun(Section, Key, Value, Arg) of
%% accepted as-is: store the tagged value under Key
ok ->
dict:update(
Section,
fun({Keys, SubSects}) -> {dict:store(Key, Value, Keys), SubSects} end,
Config
);
%% callback pre-parsed the value: store it as {data, Data}
{ok, Data} ->
Value1 = {data, Data},
dict:update(
Section,
fun({Keys, SubSects}) -> {dict:store(Key, Value1, Keys), SubSects} end,
Config
);
%% skipped: the key is simply absent from the result
ignore ->
Config;
{error, Reason} ->
erlang:throw({validate, {Section, Key}, Reason});
Result ->
erlang:throw({bad_return, {Section, Key}, Result})
end,
{ValidateFun, Arg, NewConfig}.
%% @doc A brand-new, empty section: no keys stored, no subsections.
empty_section() ->
    {dict:new(), []}.
%% }}}
%%----------------------------------------------------------
%%%---------------------------------------------------------------------------
%%% explaining errors
%%%---------------------------------------------------------------------------
%% @doc Prepare a human-readable error message out of an error.
%%
%% Accepts every error shape produced by this module (tokenize/parse/
%% semantic/validate/bad_return), plus POSIX atoms from file reads;
%% anything else falls through to a generic "unrecognized error".
-spec format_error(Reason :: term()) ->
string().
format_error({validate, {_Section, _Key, _Line} = _Where, Reason}) ->
% TODO: use `Where' (error location)
unicode:characters_to_list([
"validation error: ",
io_lib:print(Reason, 1, 16#ffffffff, -1)
]);
format_error({bad_return, {_Section, _Key, _Line} = _Where, Result}) ->
% TODO: use `Where' (error location)
unicode:characters_to_list([
"unexpected value from validation function: ",
io_lib:print(Result, 1, 16#ffffffff, -1)
]);
%% semantic errors are explained by the module that detected them
format_error({semantic, Reason}) ->
toml_dict:format_error(Reason);
format_error({parse, Line}) ->
"syntax error in line " ++ integer_to_list(Line);
format_error({tokenize, Line}) ->
"unexpected character in line " ++ integer_to_list(Line);
%% bare atoms are assumed to be file-system errors (e.g. enoent)
format_error(Reason) when is_atom(Reason) ->
file:format_error(Reason);
format_error(Reason) ->
unicode:characters_to_list([
"unrecognized error: ",
io_lib:print(Reason, 1, 16#ffffffff, -1)
]).
%%%---------------------------------------------------------------------------
%%% data accessors
%%%---------------------------------------------------------------------------
%% @doc Get tagged value from config.
-spec get_value(section(), key(), config()) ->
toml_value() | none | section.
%% Look up Key in Section: a stored value wins; otherwise report
%% whether Key names a subsection (`section') or nothing at all
%% (`none').  An unknown section also yields `none'.
get_value(Section, Key, {toml, Store} = _Config) ->
    case dict:find(Section, Store) of
        error ->
            none;
        {ok, {KeyValues, SubSections}} ->
            case {dict:find(Key, KeyValues), lists:member(Key, SubSections)} of
                {{ok, {_Type, _} = Value}, _} -> Value;
                {error, true} -> section;
                {error, false} -> none
            end
    end.
%% @doc Get tagged value from config.
%% If the key doesn't exist, specified default is returned.
-spec get_value(section(), key(), config(), toml_value()) ->
toml_value() | section.
%% Like get_value/3, but fall back to the caller-supplied default on
%% a true miss; note that `section' is a real answer and is returned
%% as-is, never replaced by the default.
get_value(Section, Key, {toml, _} = Config, Default) ->
    case get_value(Section, Key, Config) of
        none -> Default;
        Value -> Value
    end.
%% @doc Check if the section exists.
%% If there is a key under the specified name, `false' is returned.
-spec exists(section(), config()) ->
boolean().
%% A section exists iff the store has an entry under its full path.
exists(Section, {toml, Store}) ->
    dict:is_key(Section, Store).
%% @doc Check if the key exists.
%% If there is a section under the specified name, `false' is returned.
-spec exists(section(), key(), config()) ->
boolean().
%% A key exists iff its section is known and holds a value under Key;
%% a subsection of the same name does not count.
exists(Section, Key, {toml, Store}) ->
    case dict:find(Section, Store) of
        error -> false;
        {ok, {KeyValues, _SubSections}} -> dict:is_key(Key, KeyValues)
    end.
%% @doc List keys of a section.
%%
%% Only keys that correspond to scalars or arrays are returned. Subsections
%% (which include inline sections) are omitted.
%%
%% `none' is returned when `Section' is neither an explicitly defined
%% section, a section introduced implicitly by defining its subsection, nor
%% an inline section.
-spec keys(section(), config()) ->
[key()] | none.
%% List the value-bearing keys of a section (subsections omitted);
%% `none' when the section itself is unknown.
keys(Section, {toml, Store}) ->
    case dict:find(Section, Store) of
        error -> none;
        {ok, {KeyValues, _SubSections}} -> dict:fetch_keys(KeyValues)
    end.
%% @doc List direct subsections of a section.
%%
%% `none' is returned when `Section' is neither an explicitly defined
%% section, a section introduced implicitly by defining its subsection, nor
%% an inline section.
-spec sections(section(), config()) ->
[key()] | none.
%% List the direct subsections of a section; `none' when the section
%% itself is unknown.
sections(Section, {toml, Store}) ->
    case dict:find(Section, Store) of
        error -> none;
        {ok, {_KeyValues, SubSections}} -> SubSections
    end.
@doc Traverse all the values set in a section .
-spec foldk(section(), Fun, AccIn, config()) ->
AccOut
when Fun :: fun((section(), key(), toml_value(), AccIn) -> AccOut),
AccIn :: term(),
AccOut :: term().
%% Fold the user callback over every key/value pair stored directly
%% in Section.  An unknown section leaves the accumulator untouched.
foldk(Section, Fun, Acc, {toml, Store} = _Config) when is_function(Fun, 4) ->
    case dict:find(Section, Store) of
        {ok, {KeyValues, _SubSections}} ->
            %% adapt the 4-argument user callback to dict:fold/3 by
            %% closing over the section name
            Wrap = fun(Key, Value, A) -> Fun(Section, Key, Value, A) end,
            dict:fold(Wrap, Acc, KeyValues);
        error ->
            Acc
    end.
%% @doc Workhorse for {@link foldk/4}: adapts the 4-argument user
%% callback to `dict:fold/3' by carrying the section name and the
%% user's accumulator inside the fold accumulator tuple.
traverse_keys(Key, Value, {Section, Fun, FunAcc}) ->
NewFunAcc = Fun(Section, Key, Value, FunAcc),
{Section, Fun, NewFunAcc}.
@doc Traverse the direct subsections of a section .
-spec folds(section(), Fun, AccIn, Config :: config()) ->
AccOut
when Fun :: fun((config(), section(), AccIn) -> AccOut),
AccIn :: term(),
AccOut :: term().
%% Fold the user callback over the direct subsections of Section, in
%% stored order; each callback sees the subsection's full path.  An
%% unknown section leaves the accumulator untouched.
folds(Section, Fun, Acc, {toml, Store} = Config) when is_function(Fun, 3) ->
    case dict:find(Section, Store) of
        error ->
            Acc;
        {ok, {_KeyValues, SubSections}} ->
            lists:foldl(fun(Name, A) -> Fun(Config, Section ++ [Name], A) end,
                        Acc, SubSections)
    end.
%% @doc Workhorse for {@link folds/4}: walk the subsection names in
%% order, calling the user callback with each subsection's full path
%% (parent path ++ [Name]) and threading the accumulator.
traverse_sections([] = _SubSections, _Section, _Config, _Fun, Acc) ->
Acc;
traverse_sections([Name | Rest] = _SubSections, Section, Config, Fun, Acc) ->
NewAcc = Fun(Config, Section ++ [Name], Acc),
traverse_sections(Rest, Section, Config, Fun, NewAcc).
%to_list({toml, Store} = _Config) ->
' TODO ' .
%to_list(Section, {toml, Store} = _Config) ->
' TODO ' .
%%%---------------------------------------------------------------------------
vim : : = marker
| null | https://raw.githubusercontent.com/dozzie/toml/e380dbfac46bd395e3a056ba3d91a513395e0c68/src/toml.erl | erlang | ---------------------------------------------------------------------------
@doc
@end
---------------------------------------------------------------------------
parser wrappers
explaining errors
data accessors
-export([to_list/1, to_list/2]).
---------------------------------------------------------------------------
data types
----------------------------------------------------------
main types {{{
empty list (`[]').
Name of a value in a section.
Value stored under {@type key()}, along with its type.
validate_fun()}) is denoted by {@type @{data, Data@}}.
{@type toml_array()} for details.
}}}
----------------------------------------------------------
auxiliary types {{{
Representation of array's content.
RFC 3339 timestamp (with or without timezone), date, or time.
Object (inline section/table) representation, jsx-style.
Array representation, jsx-style.
Arbitrary value (scalar/array/object), jsx-style. {@type datetime()} is not
really jsx-compatible, and there's no `null'.
}}}
----------------------------------------------------------
validation function {{{
Key validation callback. This callback is specified at configuration
parsing time and has a chance to further verify validity of a value or even
convert it already to its intended form, e.g. listen address
`"<host>:<port>"' can be immediately converted to `{Host,Port}' tuple.
as an array of objects, i.e.
Since it's not allowed to have a section and key of the same name,
subsections themselves are also subject to validation. Validation function
can return `ok', `{ok,_}', or `ignore' to accept the section name (the
and `{error,_}' to reject the name.
{@type @{ok, Data@}} results in the {@type toml_value()} of `{data, Data}'.
See {@link get_value/3}.
{@type @{error, Reason :: validate_error()@}} is reported by
{@link read_file/2} and {@link parse/2} as
for details.
}}}
----------------------------------------------------------
errors {{{
Error signifying that a key/section was already defined, either explicitly
or implicitly. The structure of the error follows convention of
`auto_section' in `Type' means that there already exists a key with the
explicitly, but earlier sections restrict how it could look like (i.e.
a subsection was already defined).
inline array has elements of different types.
that doesn't match.
Location of a semantic error in inline data (arrays and objects). The
location is a path specified in terms appropriate for respective data
Location information of semantic error. `Path' is name of the offending
section and, if applicable, key.
}}}
----------------------------------------------------------
---------------------------------------------------------------------------
parser wrappers
---------------------------------------------------------------------------
Each of the keys in the file is passed through a validation callback that
can accept the key, reject it, make it skipped, or further parse its
value for later retrieval.
Each of the keys in the config is passed through a validation callback
that can accept the key, reject it, make it skipped, or further parse its
value for later retrieval.
the grammar assumes that the input ends with newline character
@doc Default validation function that accepts all values.
----------------------------------------------------------
build_config() {{{
@doc Convert AST coming from parser to a config representation.
NOTE: array value from `toml_dict' is compatible with this module
@doc Create a value for an empty section.
}}}
----------------------------------------------------------
---------------------------------------------------------------------------
explaining errors
---------------------------------------------------------------------------
@doc Prepare a human-readable error message out of an error.
TODO: use `Where' (error location)
TODO: use `Where' (error location)
---------------------------------------------------------------------------
data accessors
---------------------------------------------------------------------------
@doc Get tagged value from config.
@doc Get tagged value from config.
If the key doesn't exist, specified default is returned.
@doc Check if the section exists.
If there is a key under the specified name, `false' is returned.
@doc Check if the key exists.
If there is a section under the specified name, `false' is returned.
@doc List keys of a section.
Only keys that correspond to scalars or arrays are returned. Subsections
(which include inline sections) are omitted.
`none' is returned when `Section' is neither an explicitly defined
section, a section introduced implicitly by defining its subsection, nor
an inline section.
@doc List direct subsections of a section.
`none' is returned when `Section' is neither an explicitly defined
section, a section introduced implicitly by defining its subsection, nor
an inline section.
@doc Workhorse for {@link foldk/4}.
@doc Workhorse for {@link folds/4}.
to_list({toml, Store} = _Config) ->
to_list(Section, {toml, Store} = _Config) ->
--------------------------------------------------------------------------- | TOML parser module .
-module(toml).
-export([read_file/1, read_file/2]).
-export([parse/1, parse/2]).
-export([format_error/1]).
-export([get_value/3, get_value/4, exists/2, exists/3]).
-export([keys/2, sections/2, foldk/4, folds/4]).
-export_type([config/0, section/0, key/0, toml_value/0]).
-export_type([toml_array/0, datetime/0]).
-export_type([jsx_object/0, jsx_list/0, jsx_value/0]).
-export_type([validate_fun/0, validate_fun_return/0]).
-export_type([validate_location/0, validate_error/0]).
-export_type([toml_error/0, semantic_error/0]).
-export_type([semerr_redefinition/0, semerr_inline/0]).
-export_type([semerr_data_location/0, semerr_location/0]).
-opaque config() :: {toml, term()}.
A tuple with atom ` toml ' being its first element .
-type section() :: [string()].
Name of a section ( " table " in TOML 's terms ) . Root section is denoted by
-type key() :: string().
-type toml_value() ::
{string, string()}
| {integer, integer()}
| {float, float()}
| {boolean, boolean()}
| {datetime, datetime()}
| {array, toml_array()}
| {data, term()}.
Custom Erlang structure returned by validation function ( { @type
Array of values is doubly typed , first as an array , and then with data type
of its content , e.g. ` { array , { string , [ " one " , " two " , " three " ] } } ' . See
-type toml_array() ::
{empty, []}
| {string, [string(), ...]}
| {integer, [integer(), ...]}
| {float, [float(), ...]}
| {boolean, [boolean(), ...]}
| {datetime, [datetime(), ...]}
| {array, [toml_array(), ...]}
| {object, [jsx_object(), ...]}.
-type datetime() ::
{datetime, calendar:datetime(), TZ :: string()}
| {datetime, calendar:datetime()}
| {date, calendar:date()}
| {time, calendar:time()}.
` TZ ' is either a ` " Z " ' ( the same as ` " +00:00 " ' ) or has format
` " [ + -]HH : " ' .
-type jsx_object() :: [{}] | [{binary(), jsx_value()}, ...].
-type jsx_list() :: [jsx_value()].
-type jsx_value() :: binary()
| integer()
| float()
| boolean()
| datetime()
| jsx_list()
| jsx_object().
-type validate_fun() ::
fun((section(), key(), toml_value() | section, Arg :: term()) ->
validate_fun_return()).
< b > NOTE</b > : Array section ( " array of tables " in TOML 's terms ) is passed
{ @type @{array , @{object , [ jsx_object ( ) , ... ] @}@ } } .
three values have the same result ; any data from ` { ok , Data } ' is ignored )
-type validate_fun_return() :: ok | {ok, Data :: term()} | ignore
| {error, validate_error()}.
Expected return values from { @type validate_fun ( ) } .
{ @type @{error , @{validate , Where : : validate_location ( ) , Reason@}@ } } .
-type validate_error() :: term().
Error returned by { @type validate_fun ( ) } . See { @type ( ) }
-type toml_error() :: {tokenize, Line :: pos_integer()}
| {parse, Line :: pos_integer()}
| {semantic, semantic_error()}
| {bad_return, validate_location(), Result :: term()}
| {validate, validate_location(), validate_error()}.
Error in processing TOML .
-type validate_location() ::
{Section :: [string()], Key :: string(), Line :: pos_integer()}.
Location information of validation error ( see { @type validate_fun ( ) } ) .
-type semantic_error() :: semerr_redefinition() | semerr_inline().
Data - level error , meaning that data represented by TOML config is forbidden
by TOML specification .
-type semerr_redefinition() ::
{auto_section, key, semerr_location()}
| {section, key | section | array_section, semerr_location()}
| {array_section, key | section | auto_section, semerr_location()}
| {key, key | section | auto_section | array_section, semerr_location()}.
{ @type @{Type , PreviousType , semerr_location()@ } } .
same name as one of the parent sections of the current section .
` auto_section ' in ` PreviousType ' means that the section was not defined
-type semerr_inline() ::
{duplicate, Key :: string(), semerr_data_location(), semerr_location()}
| {type_mismatch,
{Pos :: pos_integer(), OffendingType :: atom(), ExpectedType :: atom()},
semerr_data_location(), semerr_location()}.
Error signifying that inline object has two keys of the same name or an
` Pos ' is a 1 - based index in the array , ` ExpectedType ' is data type of the
first array element , and ` ' is the type of the first element
-type semerr_data_location() ::
[pos_integer() | string()].
types : key for objects , 1 - based index for arrays .
-type semerr_location() ::
{Path :: [string(), ...], CurLine :: pos_integer(),
PrevLine :: pos_integer()}.
@doc a TOML file on disk .
-spec read_file(file:filename()) ->
{ok, config()} | {error, ReadError | toml_error()}
when ReadError :: file:posix() | badarg | terminated | system_limit.
read_file(File) ->
case file:read_file(File) of
{ok, Content} -> parse(Content);
{error, Reason} -> {error, Reason}
end.
@doc a TOML file on disk .
-spec read_file(file:filename(), {validate_fun(), Arg :: term()}) ->
{ok, config()} | {error, ReadError | toml_error()}
when ReadError :: file:posix() | badarg | terminated | system_limit.
read_file(File, {Fun, _Arg} = Validate) when is_function(Fun, 4) ->
case file:read_file(File) of
{ok, Content} -> parse(Content, Validate);
{error, Reason} -> {error, Reason}
end.
@doc a TOML config from a string .
-spec parse(string() | binary() | iolist()) ->
{ok, config()} | {error, toml_error()}.
parse(String) ->
parse(String, {fun accept_all/4, []}).
@doc a TOML config from a string .
-spec parse(string() | binary() | iolist(), {validate_fun(), Arg :: term()}) ->
{ok, config()} | {error, toml_error()}.
parse(String, {Fun, Arg} = _Validate) when is_function(Fun, 4) ->
case toml_lexer:tokenize(String) of
{ok, Tokens, _EndLine} ->
case toml_parser:parse(Tokens) of
{ok, Result} ->
build_config(Result, Fun, Arg);
{error, {LineNumber, _ParserModule, _Message}} ->
{error, {parse, LineNumber}}
end;
{error, {LineNumber, _LexerModule, _Message}, _} ->
{error, {tokenize, LineNumber}}
end.
-spec accept_all(section(), key(), toml_value(), term()) ->
ok.
accept_all(_Section, _Key, _Value, _Arg) ->
ok.
-spec build_config([term()], validate_fun(), term()) ->
{ok, config()} | {error, Reason}
when Reason :: {semantic, term()}
| {validate, Where :: validate_location(), validate_error()}
| {bad_return, Where :: validate_location(), term()}.
build_config(Directives, Fun, Arg) ->
case toml_dict:build_store(Directives) of
{ok, Store} ->
EmptyConfig = dict:store([], empty_section(), dict:new()),
try toml_dict:fold(fun build_config/4, {Fun, Arg, EmptyConfig}, Store) of
{_, _, Config} -> {ok, {toml, Config}}
catch
throw:{bad_return, {Section, Key}, Result} ->
Line = toml_dict:find_line(Section, Key, Store),
{error, {bad_return, {Section, Key, Line}, Result}};
throw:{validate, {Section, Key}, Reason} ->
Line = toml_dict:find_line(Section, Key, Store),
{error, {validate, {Section, Key, Line}, Reason}}
end;
{error, Reason} ->
{error, {semantic, Reason}}
end.
@doc Fold workhorse for { @link build_config/1 } .
-spec build_config(section(), key(), section | toml_value(), term()) ->
term().
build_config(Section, Key, section = _Value, {ValidateFun, Arg, Config}) ->
case ValidateFun(Section, Key, section, Arg) of
ok -> ok;
{ok, _Data} -> ignore;
ignore -> ok;
{error, Reason} -> erlang:throw({validate, {Section, Key}, Reason});
Result -> erlang:throw({bad_return, {Section, Key}, Result})
end,
NewConfig = dict:update(
Section,
fun({Keys, SubSects}) -> {Keys, [Key | SubSects]} end,
Config
),
{ValidateFun, Arg, dict:store(Section ++ [Key], empty_section(), NewConfig)};
build_config(Section, Key, {_T, _V} = Value, {ValidateFun, Arg, Config}) ->
NewConfig = case ValidateFun(Section, Key, Value, Arg) of
ok ->
dict:update(
Section,
fun({Keys, SubSects}) -> {dict:store(Key, Value, Keys), SubSects} end,
Config
);
{ok, Data} ->
Value1 = {data, Data},
dict:update(
Section,
fun({Keys, SubSects}) -> {dict:store(Key, Value1, Keys), SubSects} end,
Config
);
ignore ->
Config;
{error, Reason} ->
erlang:throw({validate, {Section, Key}, Reason});
Result ->
erlang:throw({bad_return, {Section, Key}, Result})
end,
{ValidateFun, Arg, NewConfig}.
empty_section() ->
KeyValues = dict:new(),
SubSections = [],
{KeyValues, SubSections}.
-spec format_error(Reason :: term()) ->
string().
format_error({validate, {_Section, _Key, _Line} = _Where, Reason}) ->
unicode:characters_to_list([
"validation error: ",
io_lib:print(Reason, 1, 16#ffffffff, -1)
]);
format_error({bad_return, {_Section, _Key, _Line} = _Where, Result}) ->
unicode:characters_to_list([
"unexpected value from validation function: ",
io_lib:print(Result, 1, 16#ffffffff, -1)
]);
format_error({semantic, Reason}) ->
toml_dict:format_error(Reason);
format_error({parse, Line}) ->
"syntax error in line " ++ integer_to_list(Line);
format_error({tokenize, Line}) ->
"unexpected character in line " ++ integer_to_list(Line);
format_error(Reason) when is_atom(Reason) ->
file:format_error(Reason);
format_error(Reason) ->
unicode:characters_to_list([
"unrecognized error: ",
io_lib:print(Reason, 1, 16#ffffffff, -1)
]).
-spec get_value(section(), key(), config()) ->
toml_value() | none | section.
get_value(Section, Key, {toml, Store} = _Config) ->
case dict:find(Section, Store) of
{ok, {KeyValues, SubSections}} ->
case dict:find(Key, KeyValues) of
{ok, {_T,_V} = Value} ->
Value;
error ->
case lists:member(Key, SubSections) of
true -> section;
false -> none
end
end;
error ->
none
end.
-spec get_value(section(), key(), config(), toml_value()) ->
toml_value() | section.
get_value(Section, Key, {toml, _} = Config, Default) ->
case get_value(Section, Key, Config) of
none -> Default;
Any -> Any
end.
-spec exists(section(), config()) ->
boolean().
exists(Section, {toml, Store} = _Config) ->
dict:is_key(Section, Store).
-spec exists(section(), key(), config()) ->
boolean().
exists(Section, Key, {toml, Store} = _Config) ->
case dict:find(Section, Store) of
{ok, {KeyValues, _SubSections}} -> dict:is_key(Key, KeyValues);
error -> false
end.
-spec keys(section(), config()) ->
[key()] | none.
keys(Section, {toml, Store} = _Config) ->
case dict:find(Section, Store) of
{ok, {KeyValues, _SubSections}} -> dict:fetch_keys(KeyValues);
error -> none
end.
-spec sections(section(), config()) ->
[key()] | none.
sections(Section, {toml, Store} = _Config) ->
case dict:find(Section, Store) of
{ok, {_KeyValues, SubSections}} -> SubSections;
error -> none
end.
@doc Traverse all the values set in a section .
-spec foldk(section(), Fun, AccIn, config()) ->
AccOut
when Fun :: fun((section(), key(), toml_value(), AccIn) -> AccOut),
AccIn :: term(),
AccOut :: term().
foldk(Section, Fun, Acc, {toml, Store} = _Config) when is_function(Fun, 4) ->
case dict:find(Section, Store) of
{ok, {KeyValues, _SubSections}} ->
TravAcc = {Section, Fun, Acc},
{_, _, Result} = dict:fold(fun traverse_keys/3, TravAcc, KeyValues),
Result;
error ->
Acc
end.
traverse_keys(Key, Value, {Section, Fun, FunAcc}) ->
NewFunAcc = Fun(Section, Key, Value, FunAcc),
{Section, Fun, NewFunAcc}.
@doc Traverse the direct subsections of a section .
-spec folds(section(), Fun, AccIn, Config :: config()) ->
AccOut
when Fun :: fun((config(), section(), AccIn) -> AccOut),
AccIn :: term(),
AccOut :: term().
folds(Section, Fun, Acc, {toml, Store} = Config) when is_function(Fun, 3) ->
case dict:find(Section, Store) of
{ok, {_KeyValues, SubSections}} ->
traverse_sections(SubSections, Section, Config, Fun, Acc);
error ->
Acc
end.
traverse_sections([] = _SubSections, _Section, _Config, _Fun, Acc) ->
Acc;
traverse_sections([Name | Rest] = _SubSections, Section, Config, Fun, Acc) ->
NewAcc = Fun(Config, Section ++ [Name], Acc),
traverse_sections(Rest, Section, Config, Fun, NewAcc).
' TODO ' .
' TODO ' .
vim : : = marker
|
6d797ed1f4e54656202b3e26f800d591924b794a897cb17163835728fa61f9fc | jaredloomis/andromeda | Mesh.hs | {-# LANGUAGE GADTs #-}
{-# LANGUAGE TupleSections #-}
module Andromeda.Simple.Render.Mesh where
import Foreign.Ptr (Ptr, nullPtr)
import Foreign.Storable (Storable(..))
import qualified Data.Vector.Storable as V
import qualified Graphics.Rendering.OpenGL.GL as GL
import Andromeda.Simple.Type
import Andromeda.Simple.Render.VertexBuffer
data Mesh = Mesh [(String, MeshAttribute)] !Primitive
data MeshAttribute where
MeshAttribute :: (Typed a, Storable a) =>
!(V.Vector a) -> MeshAttribute
MeshUniform :: Uniform -> MeshAttribute
data CompiledMesh = CompiledMesh
[(String, VertexBuffer)]
[(String, Uniform)]
!Primitive
data Uniform where
Uniform :: (Storable a, GL.Uniform a) =>
a -> Uniform
compileMesh :: Mesh -> IO CompiledMesh
compileMesh (Mesh attrs prim) = do
bufs <- compileAttrs attrs
return $ CompiledMesh bufs (compileUnifs attrs) prim
where
compileAttrs ((name, attr@MeshAttribute{}) : xs) = do
attr' <- (name,) <$> toBuffer attr
(attr' :) <$> compileAttrs xs
compileAttrs (_ : xs) = compileAttrs xs
compileAttrs [] = return []
compileUnifs ((name, MeshUniform unif) : xs) =
(name, unif) : compileUnifs xs
compileUnifs (_ : xs) = compileUnifs xs
compileUnifs [] = []
toBuffer :: MeshAttribute -> IO VertexBuffer
toBuffer (MeshAttribute xs) =
let len = fromIntegral $ V.length xs
nullPtr' = helpInference xs nullPtr
descriptor = GL.VertexArrayDescriptor len GL.Float 0 nullPtr'
in do
buffer <- makeBuffer GL.ArrayBuffer xs
return $ VertexBuffer buffer descriptor
where
helpInference :: V.Vector a -> Ptr a -> Ptr a
helpInference _ ptr = ptr
toBuffer MeshUniform{} = error "toBuffer recieved MeshUniform"
replaceBuffer :: MeshAttribute -> VertexBuffer -> IO VertexBuffer
replaceBuffer (MeshAttribute xs)
buf@(VertexBuffer buffer (GL.VertexArrayDescriptor len _ _ _)) = do
GL.bindBuffer GL.ArrayBuffer GL.$= Just buffer
replaceBuffer' GL.ArrayBuffer xs (fromIntegral len)
return buf
replaceBuffer MeshUniform{} _ = error "replaceBuffer recieved MeshUniform"
| null | https://raw.githubusercontent.com/jaredloomis/andromeda/502dfa6703eca9717c7cde0c93959fa0a83e77ed/src/Andromeda/Simple/Render/Mesh.hs | haskell | # LANGUAGE GADTs # | # LANGUAGE TupleSections #
module Andromeda.Simple.Render.Mesh where
import Foreign.Ptr (Ptr, nullPtr)
import Foreign.Storable (Storable(..))
import qualified Data.Vector.Storable as V
import qualified Graphics.Rendering.OpenGL.GL as GL
import Andromeda.Simple.Type
import Andromeda.Simple.Render.VertexBuffer
data Mesh = Mesh [(String, MeshAttribute)] !Primitive
data MeshAttribute where
MeshAttribute :: (Typed a, Storable a) =>
!(V.Vector a) -> MeshAttribute
MeshUniform :: Uniform -> MeshAttribute
data CompiledMesh = CompiledMesh
[(String, VertexBuffer)]
[(String, Uniform)]
!Primitive
data Uniform where
Uniform :: (Storable a, GL.Uniform a) =>
a -> Uniform
compileMesh :: Mesh -> IO CompiledMesh
compileMesh (Mesh attrs prim) = do
bufs <- compileAttrs attrs
return $ CompiledMesh bufs (compileUnifs attrs) prim
where
compileAttrs ((name, attr@MeshAttribute{}) : xs) = do
attr' <- (name,) <$> toBuffer attr
(attr' :) <$> compileAttrs xs
compileAttrs (_ : xs) = compileAttrs xs
compileAttrs [] = return []
compileUnifs ((name, MeshUniform unif) : xs) =
(name, unif) : compileUnifs xs
compileUnifs (_ : xs) = compileUnifs xs
compileUnifs [] = []
toBuffer :: MeshAttribute -> IO VertexBuffer
toBuffer (MeshAttribute xs) =
let len = fromIntegral $ V.length xs
nullPtr' = helpInference xs nullPtr
descriptor = GL.VertexArrayDescriptor len GL.Float 0 nullPtr'
in do
buffer <- makeBuffer GL.ArrayBuffer xs
return $ VertexBuffer buffer descriptor
where
helpInference :: V.Vector a -> Ptr a -> Ptr a
helpInference _ ptr = ptr
toBuffer MeshUniform{} = error "toBuffer recieved MeshUniform"
replaceBuffer :: MeshAttribute -> VertexBuffer -> IO VertexBuffer
replaceBuffer (MeshAttribute xs)
buf@(VertexBuffer buffer (GL.VertexArrayDescriptor len _ _ _)) = do
GL.bindBuffer GL.ArrayBuffer GL.$= Just buffer
replaceBuffer' GL.ArrayBuffer xs (fromIntegral len)
return buf
replaceBuffer MeshUniform{} _ = error "replaceBuffer recieved MeshUniform"
|
2be52e37a2da909dd71c677bad626a52e1859466fd55fc43705c7d5918c6f211 | henrik42/deeto | project.clj | re - enables http repository support in Leiningen 2.8
(require 'cemerick.pomegranate.aether)
(cemerick.pomegranate.aether/register-wagon-factory!
"http" #(org.apache.maven.wagon.providers.http.HttpWagon.))
(defproject deeto "0.1.1-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:java-source-paths ["java/test" "java/src"]
:target-path "target/%s"
:aot [deeto.java-api
deeto.core]
:dependencies [[org.clojure/clojure "1.8.0"]]
:plugins [[lein-swank "1.4.5"]]
:aliases {"deploy" ["do" "clean," "deploy"]}
:release-tasks [["test"]
["vcs" "assert-committed"]
["change" "version"
"leiningen.release/bump-version" "release"]
["vcs" "commit"]
["vcs" "tag" "--no-sign"]
#_ ["deploy"]
["change" "version" "leiningen.release/bump-version"]
["vcs" "commit"]
#_ ["deploy"]
#_ ["vcs" "push"]]
run a local Nexus in a docker container with :
docker run -d -p 8081:8081 --name nexus sonatype / nexus :
;;
Then you can deploy a SNAPSHOT or release via
;; lein with-profile +local deploy
:local {:repositories [["snapshots" {:url ":8081/nexus/content/repositories/snapshots/"
:sign-releases false :username "admin" :password "admin123"}]
["releases" {:url ":8081/nexus/content/repositories/releases/"
:sign-releases false :username "admin" :password "admin123"}]]}})
| null | https://raw.githubusercontent.com/henrik42/deeto/575e8ec8e309b575f16b7640fc92240279520b45/project.clj | clojure |
lein with-profile +local deploy
| re - enables http repository support in Leiningen 2.8
(require 'cemerick.pomegranate.aether)
(cemerick.pomegranate.aether/register-wagon-factory!
"http" #(org.apache.maven.wagon.providers.http.HttpWagon.))
(defproject deeto "0.1.1-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:java-source-paths ["java/test" "java/src"]
:target-path "target/%s"
:aot [deeto.java-api
deeto.core]
:dependencies [[org.clojure/clojure "1.8.0"]]
:plugins [[lein-swank "1.4.5"]]
:aliases {"deploy" ["do" "clean," "deploy"]}
:release-tasks [["test"]
["vcs" "assert-committed"]
["change" "version"
"leiningen.release/bump-version" "release"]
["vcs" "commit"]
["vcs" "tag" "--no-sign"]
#_ ["deploy"]
["change" "version" "leiningen.release/bump-version"]
["vcs" "commit"]
#_ ["deploy"]
#_ ["vcs" "push"]]
run a local Nexus in a docker container with :
docker run -d -p 8081:8081 --name nexus sonatype / nexus :
Then you can deploy a SNAPSHOT or release via
:local {:repositories [["snapshots" {:url ":8081/nexus/content/repositories/snapshots/"
:sign-releases false :username "admin" :password "admin123"}]
["releases" {:url ":8081/nexus/content/repositories/releases/"
:sign-releases false :username "admin" :password "admin123"}]]}})
|
d297562d6a122252583e392d4d6adc276161ead17e55fd05653856839fd6ee65 | nickzuber/lets-make-a-compiler | mapping.ml | open Ast
open Ast.Assembly
open Polyfill
exception Debugging of string
module Bfs = Interference_graph.Bfs
let naive_interference_ts = ref 0.
let graph_interference_ts = ref 0.
module rec Liveness_mapping : sig
(* An instruction mapped to a set of variables that are live at that instruction. *)
type t = (Select.instruction, Liveness.t) Hashtbl.t
end = Liveness_mapping
and Liveness : sig
(* A set of variables which are considered live. *)
type t = Select.arg Set.t
end = Liveness
(* Mapping of int to general purpose registers. *)
let int_to_valid_register = Hashtbl.create 8
let _ = List.iter (fun (i, reg) -> Hashtbl.add int_to_valid_register i reg)
[ (0, REGISTER "r8")
; (1, REGISTER "r9")
; (2, REGISTER "r10")
; (3, REGISTER "rdx")
; (4, REGISTER "rcx") ]
(** Caller-save registers should be pushed onto the stack BEFORE a function is called,
 * and restored AFTER it's done. When you do this, you need to make sure the stack is aligned
 * before you make the call instruction. *)
(* The list as extracted was corrupted: five entries had their code replaced by
 * bare comment text. Restored per the System V AMD64 calling convention, which
 * passes the first six integer arguments in rdi, rsi, rdx, rcx, r8, r9; rax,
 * r10 and r11 are also caller-saved. *)
let caller_save_registers =
  [ REGISTER "rdi" (* 1st function argument *)
  ; REGISTER "rsi" (* 2nd function argument *)
  ; REGISTER "rdx" (* 3rd function argument *)
  ; REGISTER "rcx" (* 4th function argument *)
  ; REGISTER "r8"  (* 5th function argument *)
  ; REGISTER "r9"  (* 6th function argument *)
  ; REGISTER "rax"
  ; REGISTER "r10"
  ; REGISTER "r11" ]
(** Callee-save registers should be pushed onto the stack AFTER a function is called,
 * like when you're inside of the function block, and restored BEFORE it's done. *)
let callee_save_registers = (** unused *)
[ REGISTER "rsp"
; REGISTER "rbp"
; REGISTER "rbx"
; REGISTER "r12"
; REGISTER "r13"
; REGISTER "r14"
; REGISTER "r15" ]
(* Arguments an instruction writes to, whether or not they are variables. *)
let get_write_args (instr : Select.instruction) : Select.arg list =
  match instr with
  | Select.SUB (_src, dest)
  | Select.ADD (_src, dest)
  | Select.CMP (_src, dest)
  | Select.MOV (_src, dest)
  | Select.LEAQ (_src, dest) -> [dest]
  | Select.NEG arg -> [arg]
  | _ -> []
(* Arguments an instruction reads from, whether or not they are variables. *)
let get_read_args (instr : Select.instruction) : Select.arg list =
  match instr with
  | Select.SUB (src, dest)
  | Select.ADD (src, dest)
  | Select.CMP (src, dest) -> [src; dest]
  | Select.MOV (src, _dest)
  | Select.LEAQ (src, _dest) -> [src]
  | Select.NEG arg -> [arg]
  | _ -> []
(* Of the arguments written by [instr], keep only the variables, as a set. *)
let get_write_variables (instr : Select.instruction) : Liveness.t =
  let is_variable = function
    | Select.VARIABLE _ -> true
    | _ -> false
  in
  Set.set_of_list (List.filter is_variable (get_write_args instr))
(* Of the arguments read by [instr], keep only the variables, as a set. *)
let get_read_variables (instr : Select.instruction) : Liveness.t =
  let is_variable = function
    | Select.VARIABLE _ -> true
    | _ -> false
  in
  Set.set_of_list (List.filter is_variable (get_read_args instr))
(* Compute the liveness at some isntruction and add it to the given mapping. *)
let rec compute_liveness (instr : Select.instruction) (mapping : (Select.instruction, Liveness.t) Immutable_hashtbl.t) (previous_liveness : Liveness.t) =
match instr with
| Select.IF_STATEMENT (t, c_instrs, a_instrs) ->
let (c_mapping, c_previous_liveness) = build_liveness_mapping c_instrs previous_liveness in
let (a_mapping, a_previous_liveness) = build_liveness_mapping a_instrs previous_liveness in
let liveness = Set.union c_previous_liveness a_previous_liveness in
let new_mapping = Immutable_hashtbl.combine c_mapping a_mapping in
let new_mapping_with_original = Immutable_hashtbl.combine new_mapping mapping in
let new_mapping_with_original' = Immutable_hashtbl.add new_mapping_with_original instr liveness in
(new_mapping_with_original', liveness)
| _ ->
let write_variables = get_write_variables instr in
let read_variables = get_read_variables instr in
(* Take previous liveness, subtract the stuff we write, add the stuff we read *)
let liveness = Set.union (Set.difference previous_liveness write_variables) read_variables in
let mapping' = Immutable_hashtbl.add mapping instr liveness in
(mapping', liveness)
and build_liveness_mapping instructions previous_liveness =
let reversed_instructions = List.rev instructions in
let size = List.length instructions in
let mapping = Immutable_hashtbl.create size in
(* Iterate through instructions, return the final previous_liveness at the end. *)
let rec assign instrs mapping previous_liveness =
match instrs with
| [] -> (mapping, previous_liveness)
| instr :: rest ->
Printf.printf " = > > % s\n<<= % d\n\n " ( Pprint_ast.string_of_instruction instr ) ( Immutable_hashtbl.length mapping ) ;
let (mapping', previous_liveness') = compute_liveness instr mapping previous_liveness in
assign rest mapping' previous_liveness' in
let (mapping', final_liveness) = assign reversed_instructions mapping previous_liveness in
(mapping', final_liveness)
let register_of_variable (var : Select.arg) rsp_offset coloring : Assembly.arg =
let i = Hashtbl.find coloring var in
try
(* Use a register. *)
Hashtbl.find int_to_valid_register i
with
Not_found ->
(* Use main memory. *)
let starting_point = i - (Hashtbl.length int_to_valid_register) in
At the point of a ` call ` , the % rsp base pointer register must be divisibly by 16 .
-x-x64-stack-not-16-byte-aligned-error#comment73772561_43354658
-x-x64-stack-not-16-byte-aligned-error#comment73772561_43354658 *)
let offset = ((starting_point + rsp_offset) * -8) in
let rsp_register_with_offset = REFERENCE ("rbp", offset) in
rsp_register_with_offset
(* Create a mapping from variables to registers, then return the leftover variables and the mapping. *)
let build_variable_to_register_mapping (vars : string list) (coloring : (Select.arg, int) Hashtbl.t) : (string, Assembly.arg) Hashtbl.t * int =
let variable_size = List.length vars in
let mapping = Hashtbl.create variable_size in
(* How many colors point to main memory. *)
let spill_size = max ((count_unique coloring) - (Hashtbl.length int_to_valid_register)) 0 in
let rsp_offset = if spill_size mod 2 = 0 then 1 else 2 in
(* Assign registers to variables and return the list of unassigned variables. *)
List.iter (fun name ->
let v = Select.VARIABLE name in
let reg = register_of_variable v rsp_offset coloring in
Hashtbl.add mapping name reg) vars;
(mapping, spill_size)
let build_liveness_matrix (vars : string list) (mapping : Liveness_mapping.t) =
let start = Unix.gettimeofday () in
let n = List.length vars in
let vars_array = Array.of_list vars in
(* Create a matrix with unique references as elements. *)
let matrix = Array.init n (fun i -> Array.init n (fun i -> ref 0)) in
let rec loop u v =
(* Check for interference. *)
Hashtbl.iter (fun _instr liveness ->
let u_key = Select.VARIABLE vars_array.(u) in
let v_key = Select.VARIABLE vars_array.(v) in
if Set.exists liveness u_key && Set.exists liveness v_key && u <> v then
matrix.(u).(v) := !(matrix.(u).(v)) + 1) mapping;
if (v + 1) < n then
loop (u) (v + 1)
else if (u + 1) < n then
loop (u + 1) (0)
in loop 0 0;
naive_interference_ts := ((Unix.gettimeofday ()) -. start);
matrix
(* Record interference edges in [graph] between destination [d] and every
 * currently-live variable, except variables appearing in [args] (e.g. the
 * source of a move, which may safely share a register with its destination).
 * [vt] maps Select args to their graph vertices; only variable destinations
 * are considered, since only variables compete for registers. *)
let attempt_to_add_edge liveness args d graph vt =
  match d with
  | Select.VARIABLE _ ->
    Set.for_each liveness (fun v ->
        (* Interfere only when the live variable is none of the exempted args. *)
        let should_add_edge = List.for_all (fun arg -> arg <> v) args in
        if should_add_edge then
          (let v' = Hashtbl.find vt v and d' = Hashtbl.find vt d in
           Interference_graph.G.add_edge graph d' v'))
  | _ -> () (* if dest isn't a variable, we don't consider for reg mapping *)
let build_liveness_graph (vars : string list) (mapping : Liveness_mapping.t) : Interference_graph.G.t =
let start = Unix.gettimeofday () in
let vars' = List.map (fun var -> Select.VARIABLE var) vars in
let (graph, vt) = Interference_graph.init vars' in
(* print_liveness_mapping mapping; *)
Hashtbl.iter (fun instr liveness ->
match instr with
| Select.PUSH d -> attempt_to_add_edge liveness [d] d graph vt
| Select.POP s -> attempt_to_add_edge liveness [s] s graph vt
| Select.MOV (s, d) -> attempt_to_add_edge liveness [s; d] d graph vt
| Select.ADD (_s, d) -> attempt_to_add_edge liveness [d] d graph vt
| Select.SUB (_s, d) -> attempt_to_add_edge liveness [d] d graph vt
| Select.NEG d -> attempt_to_add_edge liveness [d] d graph vt
| Select.XOR (s, d) -> ()
| Select.CMP (s, d) -> attempt_to_add_edge liveness [d] d graph vt
| Select.SET (_cc, d) -> ()
| Select.MOVZB (s, d) -> ()
| _ -> ()) mapping;
graph_interference_ts := ((Unix.gettimeofday ()) -. start);
graph
(* From a hashmap of variables to saturation sets, find the variable with the highest saturation. *)
let get_variable_with_max_saturation (var_to_sat_and_adj : (Select.arg, int Set.t * Liveness.t) Hashtbl.t) : Select.arg * (int Set.t * Liveness.t) =
let maybe_max = Hashtbl.fold (fun k v prev ->
match prev with
| Some (k', (sat', adj')) ->
(let (sat, adj) = v in
let size_v' = Set.size sat' in
let size_v = Set.size sat in
if size_v > size_v' then Some (k, v) else Some (k', (sat', adj')))
| None -> Some (k, v)) var_to_sat_and_adj None in
match maybe_max with
| Some (k, v) -> (k, v)
| None -> (raise Not_found)
(* Smallest non-negative integer that is not an element of [set]
 * (the search starts at 0). *)
let find_lowest_num_not_in_set set : int =
  let rec search candidate =
    if Set.exists set candidate then search (candidate + 1) else candidate
  in
  search 0
(* Given an interference graph, return a mapping of ints to variables. *)
let saturate (graph : Interference_graph.G.t) : (Select.arg, int) Hashtbl.t =
let var_to_sat_and_adj = Hashtbl.create 53 in
let coloring = Hashtbl.create 53 in
(* Load up the map with variables mapped to empty satuation sets. *)
let rec loop it =
let v = Bfs.get it in
let var = Interference_graph.G.V.label v in
let sat = Set.create 53 in
let v_adj_list = Interference_graph.G.pred graph v in
let adj_list = List.map (fun v -> Interference_graph.G.V.label v) v_adj_list in
let adj = Set.set_of_list adj_list in
Hashtbl.add var_to_sat_and_adj var (sat, adj);
loop (Bfs.step it)
in
(try loop (Bfs.start graph) with Exit -> ());
(* Create a copy of our var to sat/adj map so we can remove elements we've seen without
* actually damaging the integrity of the original mapping. *)
let vertices = Hashtbl.copy var_to_sat_and_adj in
(* Saturation algorithm *)
while Hashtbl.length vertices > 0 do
let (var, (sat, adj)) = get_variable_with_max_saturation vertices in
let color = find_lowest_num_not_in_set sat in
Hashtbl.add coloring var color;
(* Adjust the saturation of the adjacent vertices. *)
Set.for_each adj (fun v ->
let (sat, _adj) = Hashtbl.find var_to_sat_and_adj v in
Set.add sat color);
Hashtbl.remove vertices var
done;
coloring
Create a mapping between variable names ( strings ) to registers ( memory offsets are included ) .
* This is used by assign to turn our Select program into an Assembly program .
* This is used by assign to turn our Select program into an Assembly program. *)
let create ?(quiet=false) (vars : (string, Ast.t) Hashtbl.t) (instructions : Select.instruction list) : (string, Assembly.arg) Hashtbl.t * int =
(* @TEMP after we changed vars from list to map *)
let vars = Hashtbl.fold (fun k v acc -> k :: acc) vars [] in
(* We want to iterate backwards to compute liveness. *)
let empty_liveness = Set.create 0 in
let (liveness_mapping, _final_liveness) = build_liveness_mapping instructions empty_liveness in
let liveness_graph = build_liveness_graph vars liveness_mapping in
let coloring = saturate liveness_graph in
(* Map the rest of the variables to memory *)
let mapping, spill_size = build_variable_to_register_mapping vars coloring in
(* [DEBUG] Used just for debugging. *)
if Settings.compute_liveness_matrix then
(let liveness_matrix = build_liveness_matrix vars liveness_mapping in
Pprint_ast.print_matrix liveness_matrix !naive_interference_ts);
if Settings.debug_mode = false || quiet = true then () else
Pprint_ast.print_graph liveness_graph coloring (List.length vars) !graph_interference_ts;
(mapping, spill_size)
| null | https://raw.githubusercontent.com/nickzuber/lets-make-a-compiler/ee2337b116f07c1357f7b70bd09f1503e39fdc88/src/transformers/mapping.ml | ocaml | An instruction mapped to a set of variables that are live at that instruction.
A set of variables which are considered live.
Mapping of int to general purpose registers.
3st function argument
* unused
Get the arguments that are considered for writes, regardless of being a variable or not.
Get the arguments that are considered for reads, regardless of being a variable or not.
Get the arguments that are considered for writes that are variables.
Get the arguments that are considered for reads that are variables.
Compute the liveness at some isntruction and add it to the given mapping.
Take previous liveness, subtract the stuff we write, add the stuff we read
Iterate through instructions, return the final previous_liveness at the end.
Use a register.
Use main memory.
Create a mapping from variables to registers, then return the leftover variables and the mapping.
How many colors point to main memory.
Assign registers to variables and return the list of unassigned variables.
Create a matrix with unique references as elements.
Check for interference.
if dest isn't a variable, we don't consider for reg mapping
print_liveness_mapping mapping;
From a hashmap of variables to saturation sets, find the variable with the highest saturation.
Given a set, find the lowest positive integer that is not an element of the set.
Given an interference graph, return a mapping of ints to variables.
Load up the map with variables mapped to empty satuation sets.
Create a copy of our var to sat/adj map so we can remove elements we've seen without
* actually damaging the integrity of the original mapping.
Saturation algorithm
Adjust the saturation of the adjacent vertices.
@TEMP after we changed vars from list to map
We want to iterate backwards to compute liveness.
Map the rest of the variables to memory
[DEBUG] Used just for debugging. | open Ast
open Ast.Assembly
open Polyfill
exception Debugging of string
module Bfs = Interference_graph.Bfs
let naive_interference_ts = ref 0.
let graph_interference_ts = ref 0.
module rec Liveness_mapping : sig
type t = (Select.instruction, Liveness.t) Hashtbl.t
end = Liveness_mapping
and Liveness : sig
type t = Select.arg Set.t
end = Liveness
let int_to_valid_register = Hashtbl.create 8
let _ = List.iter (fun (i, reg) -> Hashtbl.add int_to_valid_register i reg)
[ (0, REGISTER "r8")
; (1, REGISTER "r9")
; (2, REGISTER "r10")
; (3, REGISTER "rdx")
; (4, REGISTER "rcx") ]
Caller - save registers should be pushed onto the stack BEFORE a function is called ,
* and restored AFTER it 's done . When you do this , you need to make sure the stack is aligned
* before you make the call instruction .
* and restored AFTER it's done. When you do this, you need to make sure the stack is aligned
* before you make the call instruction. *)
let caller_save_registers =
1st function argument
2st function argument
4st function argument
5th function argument
6st function argument
; REGISTER "rax"
; REGISTER "r10"
; REGISTER "r11" ]
- save registers should be pushed onto the stack AFTER a function is called ,
* like when you 're inside of the function block , and restored BEFORE it 's done .
* like when you're inside of the function block, and restored BEFORE it's done. *)
[ REGISTER "rsp"
; REGISTER "rbp"
; REGISTER "rbx"
; REGISTER "r12"
; REGISTER "r13"
; REGISTER "r14"
; REGISTER "r15" ]
let get_write_args (instr : Select.instruction) : Select.arg list =
match instr with
| Select.SUB (src, dest) -> [dest]
| Select.ADD (src, dest) -> [dest]
| Select.CMP (src, dest) -> [dest]
| Select.MOV (src, dest) -> [dest]
| Select.LEAQ (src, dest) -> [dest]
| Select.NEG (arg) -> [arg]
| _ -> []
let get_read_args (instr : Select.instruction) : Select.arg list =
match instr with
| Select.SUB (src, dest) -> [src; dest]
| Select.ADD (src, dest) -> [src; dest]
| Select.CMP (src, dest) -> [src; dest]
| Select.MOV (src, dest) -> [src]
| Select.LEAQ (src, dest) -> [src]
| Select.NEG (arg) -> [arg]
| _ -> []
let get_write_variables (instr : Select.instruction) : Liveness.t =
instr |> get_write_args |> List.filter (fun arg ->
match arg with
| Select.VARIABLE _ -> true
| _ -> false) |> Set.set_of_list
let get_read_variables (instr : Select.instruction) : Liveness.t =
instr |> get_read_args |> List.filter (fun arg ->
match arg with
| Select.VARIABLE _ -> true
| _ -> false) |> Set.set_of_list
let rec compute_liveness (instr : Select.instruction) (mapping : (Select.instruction, Liveness.t) Immutable_hashtbl.t) (previous_liveness : Liveness.t) =
match instr with
| Select.IF_STATEMENT (t, c_instrs, a_instrs) ->
let (c_mapping, c_previous_liveness) = build_liveness_mapping c_instrs previous_liveness in
let (a_mapping, a_previous_liveness) = build_liveness_mapping a_instrs previous_liveness in
let liveness = Set.union c_previous_liveness a_previous_liveness in
let new_mapping = Immutable_hashtbl.combine c_mapping a_mapping in
let new_mapping_with_original = Immutable_hashtbl.combine new_mapping mapping in
let new_mapping_with_original' = Immutable_hashtbl.add new_mapping_with_original instr liveness in
(new_mapping_with_original', liveness)
| _ ->
let write_variables = get_write_variables instr in
let read_variables = get_read_variables instr in
let liveness = Set.union (Set.difference previous_liveness write_variables) read_variables in
let mapping' = Immutable_hashtbl.add mapping instr liveness in
(mapping', liveness)
and build_liveness_mapping instructions previous_liveness =
let reversed_instructions = List.rev instructions in
let size = List.length instructions in
let mapping = Immutable_hashtbl.create size in
let rec assign instrs mapping previous_liveness =
match instrs with
| [] -> (mapping, previous_liveness)
| instr :: rest ->
Printf.printf " = > > % s\n<<= % d\n\n " ( Pprint_ast.string_of_instruction instr ) ( Immutable_hashtbl.length mapping ) ;
let (mapping', previous_liveness') = compute_liveness instr mapping previous_liveness in
assign rest mapping' previous_liveness' in
let (mapping', final_liveness) = assign reversed_instructions mapping previous_liveness in
(mapping', final_liveness)
let register_of_variable (var : Select.arg) rsp_offset coloring : Assembly.arg =
let i = Hashtbl.find coloring var in
try
Hashtbl.find int_to_valid_register i
with
Not_found ->
let starting_point = i - (Hashtbl.length int_to_valid_register) in
At the point of a ` call ` , the % rsp base pointer register must be divisibly by 16 .
-x-x64-stack-not-16-byte-aligned-error#comment73772561_43354658
-x-x64-stack-not-16-byte-aligned-error#comment73772561_43354658 *)
let offset = ((starting_point + rsp_offset) * -8) in
let rsp_register_with_offset = REFERENCE ("rbp", offset) in
rsp_register_with_offset
let build_variable_to_register_mapping (vars : string list) (coloring : (Select.arg, int) Hashtbl.t) : (string, Assembly.arg) Hashtbl.t * int =
let variable_size = List.length vars in
let mapping = Hashtbl.create variable_size in
let spill_size = max ((count_unique coloring) - (Hashtbl.length int_to_valid_register)) 0 in
let rsp_offset = if spill_size mod 2 = 0 then 1 else 2 in
List.iter (fun name ->
let v = Select.VARIABLE name in
let reg = register_of_variable v rsp_offset coloring in
Hashtbl.add mapping name reg) vars;
(mapping, spill_size)
let build_liveness_matrix (vars : string list) (mapping : Liveness_mapping.t) =
let start = Unix.gettimeofday () in
let n = List.length vars in
let vars_array = Array.of_list vars in
let matrix = Array.init n (fun i -> Array.init n (fun i -> ref 0)) in
let rec loop u v =
Hashtbl.iter (fun _instr liveness ->
let u_key = Select.VARIABLE vars_array.(u) in
let v_key = Select.VARIABLE vars_array.(v) in
if Set.exists liveness u_key && Set.exists liveness v_key && u <> v then
matrix.(u).(v) := !(matrix.(u).(v)) + 1) mapping;
if (v + 1) < n then
loop (u) (v + 1)
else if (u + 1) < n then
loop (u + 1) (0)
in loop 0 0;
naive_interference_ts := ((Unix.gettimeofday ()) -. start);
matrix
let attempt_to_add_edge liveness args d graph vt =
match d with
| Select.VARIABLE _ ->
Set.for_each liveness (fun v ->
let should_add_edge = List.for_all (fun arg -> arg <> v) args in
if should_add_edge then
(let v' = Hashtbl.find vt v and d' = Hashtbl.find vt d in
Interference_graph.G.add_edge graph d' v'))
let build_liveness_graph (vars : string list) (mapping : Liveness_mapping.t) : Interference_graph.G.t =
let start = Unix.gettimeofday () in
let vars' = List.map (fun var -> Select.VARIABLE var) vars in
let (graph, vt) = Interference_graph.init vars' in
Hashtbl.iter (fun instr liveness ->
match instr with
| Select.PUSH d -> attempt_to_add_edge liveness [d] d graph vt
| Select.POP s -> attempt_to_add_edge liveness [s] s graph vt
| Select.MOV (s, d) -> attempt_to_add_edge liveness [s; d] d graph vt
| Select.ADD (_s, d) -> attempt_to_add_edge liveness [d] d graph vt
| Select.SUB (_s, d) -> attempt_to_add_edge liveness [d] d graph vt
| Select.NEG d -> attempt_to_add_edge liveness [d] d graph vt
| Select.XOR (s, d) -> ()
| Select.CMP (s, d) -> attempt_to_add_edge liveness [d] d graph vt
| Select.SET (_cc, d) -> ()
| Select.MOVZB (s, d) -> ()
| _ -> ()) mapping;
graph_interference_ts := ((Unix.gettimeofday ()) -. start);
graph
let get_variable_with_max_saturation (var_to_sat_and_adj : (Select.arg, int Set.t * Liveness.t) Hashtbl.t) : Select.arg * (int Set.t * Liveness.t) =
let maybe_max = Hashtbl.fold (fun k v prev ->
match prev with
| Some (k', (sat', adj')) ->
(let (sat, adj) = v in
let size_v' = Set.size sat' in
let size_v = Set.size sat in
if size_v > size_v' then Some (k, v) else Some (k', (sat', adj')))
| None -> Some (k, v)) var_to_sat_and_adj None in
match maybe_max with
| Some (k, v) -> (k, v)
| None -> (raise Not_found)
let find_lowest_num_not_in_set set : int =
let rec loop i = if Set.exists set i then loop (i + 1) else i in
loop 0
let saturate (graph : Interference_graph.G.t) : (Select.arg, int) Hashtbl.t =
let var_to_sat_and_adj = Hashtbl.create 53 in
let coloring = Hashtbl.create 53 in
let rec loop it =
let v = Bfs.get it in
let var = Interference_graph.G.V.label v in
let sat = Set.create 53 in
let v_adj_list = Interference_graph.G.pred graph v in
let adj_list = List.map (fun v -> Interference_graph.G.V.label v) v_adj_list in
let adj = Set.set_of_list adj_list in
Hashtbl.add var_to_sat_and_adj var (sat, adj);
loop (Bfs.step it)
in
(try loop (Bfs.start graph) with Exit -> ());
let vertices = Hashtbl.copy var_to_sat_and_adj in
while Hashtbl.length vertices > 0 do
let (var, (sat, adj)) = get_variable_with_max_saturation vertices in
let color = find_lowest_num_not_in_set sat in
Hashtbl.add coloring var color;
Set.for_each adj (fun v ->
let (sat, _adj) = Hashtbl.find var_to_sat_and_adj v in
Set.add sat color);
Hashtbl.remove vertices var
done;
coloring
Create a mapping between variable names ( strings ) to registers ( memory offsets are included ) .
* This is used by assign to turn our Select program into an Assembly program .
* This is used by assign to turn our Select program into an Assembly program. *)
let create ?(quiet=false) (vars : (string, Ast.t) Hashtbl.t) (instructions : Select.instruction list) : (string, Assembly.arg) Hashtbl.t * int =
let vars = Hashtbl.fold (fun k v acc -> k :: acc) vars [] in
let empty_liveness = Set.create 0 in
let (liveness_mapping, _final_liveness) = build_liveness_mapping instructions empty_liveness in
let liveness_graph = build_liveness_graph vars liveness_mapping in
let coloring = saturate liveness_graph in
let mapping, spill_size = build_variable_to_register_mapping vars coloring in
if Settings.compute_liveness_matrix then
(let liveness_matrix = build_liveness_matrix vars liveness_mapping in
Pprint_ast.print_matrix liveness_matrix !naive_interference_ts);
if Settings.debug_mode = false || quiet = true then () else
Pprint_ast.print_graph liveness_graph coloring (List.length vars) !graph_interference_ts;
(mapping, spill_size)
|
b8508fa487e5cbf9febe9e8bfe87245dae5309e38004e37ad96b3ac1189abf3f | mfp/ocsiblog | test_simple_markup.ml | Copyright ( C ) 2009 < >
open OUnit
open Simple_markup
open Printf
open ExtString
let wrap f x =
"\n" ^ f x ^ "\n"
let aeq_pars ?msg expected actual =
assert_equal ?msg ~printer:(wrap string_of_paragraphs) expected actual
let check expected input =
aeq_pars ~msg:(sprintf "With input:\n%s\n" (String.strip input))
expected (parse_text input)
let test_read_list () =
check
[Ulist ([Normal [Text "foo "; Bold "bar"]], [[Normal [Text "baz"]]])]
"* foo\n*bar*\n* baz";
check
[Ulist ([Normal [Text "foo bar baz"]], [[Normal [Text "baz"]]])]
"* foo\nbar \n baz\n* baz";
check
[Ulist ([Normal [Text "foo"]; Normal [Text "bar"]], [[Normal [Text "baz"]]])]
"* foo\n\n bar\n* baz";
check
[Ulist ([Normal [Text "foo"]], [])]
"* foo";
check
[Ulist ([Normal [Text "foo"]], [[Normal [Text "bar"]]])]
"* foo\n* bar";
check
[Ulist ([Normal [Text "foo"]], [[Normal [Text "bar"]]])]
"* foo\n\n* bar";
check
[Ulist ([Normal [Text "foo"]; Ulist ([Normal [Text "bar"]], [])],
[])]
"* foo\n\n * bar";
check
[Ulist ([Normal [Text "foo"]; Ulist ([Normal [Text "bar"]], []);
Olist ([Normal [Text "1"]], [[Normal [Text "2"]]])],
[]);
Olist ([Normal [Text "3"]], [])]
"* foo\n\n * bar\n # 1\n # 2\n# 3";
check
[Ulist ([Normal [Text "foo"]; Ulist ([Normal [Text "bar"]], []);
Olist ([Normal [Text "1"]], [[Normal [Text "2 #3"]]])],
[])]
"* foo\n\n * bar\n # 1\n # 2\n#3";
check
[Ulist
([Normal [Text "some paragraph"]; Normal [Text "And another one."]],
[[Normal [Text "two"]]; [Normal [Text "three"]]])]
"
* some
paragraph
And another one.
* two
* three
";
check
[Ulist ([Normal [Text "foo "; Bold "bar baz"]; Normal [Text "xxx"]],
[[Normal [Text "baz"]]])]
"*\tfoo\n*bar\n baz*\n\n xxx\n\n* baz";
check
[Normal [Text "foo"]; Ulist ([Normal [Text "bar"]], [])]
"foo\n*\tbar";
check
[Olist ([Normal [Text "one"]],
[[Normal [Text "two"]]; [Normal [Text "three"]]])]
"
#\tone
#\ttwo
#\tthree"
let test_read_normal () =
check [Normal [Text "foo "; Struck [Text " bar baz "]; Text " foobar"]]
"foo == bar\nbaz == foobar";
check
[Normal
[Text "foo "; Bold "bar"; Text " "; Bold "baz"; Text " ";
Emph "foobar"; Text " _foobar_";
Link { href_target = "target"; href_desc = "desc"};
Image { img_src = "image"; img_alt = "alt"};
Text "."]]
"foo *bar* *baz* __foobar__ _foobar_[desc](target).";
check
[Normal [Bold "foo"; Text " "; Struck [Bold "foo"; Emph "bar"; Text "_baz_"]]]
"*foo* ==*foo*__bar___baz_==";
check
[Normal
[Link { href_target = ""; href_desc = "" }]]
"[]()";
check [Normal [Text ""]] "[]()";
check
[Normal
[Text "foo "; Anchor "internal-link"; Text ". ";
Link { href_target = "#internal-link"; href_desc = "back" }]]
"foo [](#internal-link). [back](#internal-link)"
let test_read_normal_unmatched () =
check [Normal [Text "foo * bar"]] "foo * bar";
check [Normal [Text "foo _ bar"]] "foo _ bar";
check [Normal [Text "foo __ bar"]] "foo __ bar";
check [Normal [Text "foo == bar"]] "foo == bar";
check [Normal [Text "foo == bar"]; Normal [Text "baz =="]] "foo == bar\n\nbaz =="
let test_read_pre () =
check
[Normal [Text "foo * bar"];
Pre("a\n b\n c\n", None);
Pre("a\\0\\1\\2\n b\n c\n", Some "whatever")]
"foo * bar\n{{\na\n b\n c\n}}\n\n{{whatever\na\\0\\1\\2\n b\n c\n}}\n ";
check
[Pre("a\n b\n c\n", Some "foobar")]
"{{foobar
a
b
c
}}";
check
[Pre("a\n b\n c\n", Some "foo")]
" {{foo
a
b
c
}}";
check
[Pre("a\n }}\n \\}}\n }}}\n", None)]
"{{
a
\\}}
\\\\}}
}}}
}}"
let test_heading () =
for i = 1 to 6 do
check
[Heading (i, [Text "foo "; Link { href_target = "dst"; href_desc = "foo" }])]
(String.make i '!' ^ "foo [foo](dst)")
done
let test_quote () =
check [Quote [Normal [Text "xxx"]]] "> xxx";
check [Quote [Normal [Text "xxx"]]] "> \n> xxx\n> ";
check [Normal [Text "foo says:"];
Quote [Normal [Text "xxx:"];
Ulist ([Normal [Text "xxx yyy"]],
[[Normal [Emph "2"]]; [Normal [Text "_2_"]]; [Normal [Bold "3"]]]);
Quote [Normal [Text "yyy"]; Quote [Normal [Text "zzz"]];
Normal [Text "aaa"]]]]
"foo says:\n\
\n\
> xxx:\n\
> * xxx\n\
> yyy\n\
> * __2__\n\
> * _2_\n\
> * *3*\n\
> > yyy\n\
> > > zzz\n\
> > aaa\n\
\n\
";
check [Quote [Ulist ([Normal [Text "one"]; Normal [Text "xxx"]],
[[Normal [Text "two"]]])]]
"> * one\n\
>\n\
> xxx\n\
> * two\n\
\n"
let tests = "Simple_markup unit" >:::
[
"Normal" >:: test_read_normal;
"Normal, unmatched delimiters" >:: test_read_normal_unmatched;
"Ulist and Olist" >:: test_read_list;
"Pre" >:: test_read_pre;
"Heading" >:: test_heading;
"Quote" >:: test_quote;
]
| null | https://raw.githubusercontent.com/mfp/ocsiblog/e5048a971f3e4289855214418338ac032f11ea4c/test_simple_markup.ml | ocaml | Copyright ( C ) 2009 < >
open OUnit
open Simple_markup
open Printf
open ExtString
let wrap f x =
"\n" ^ f x ^ "\n"
let aeq_pars ?msg expected actual =
assert_equal ?msg ~printer:(wrap string_of_paragraphs) expected actual
let check expected input =
aeq_pars ~msg:(sprintf "With input:\n%s\n" (String.strip input))
expected (parse_text input)
let test_read_list () =
check
[Ulist ([Normal [Text "foo "; Bold "bar"]], [[Normal [Text "baz"]]])]
"* foo\n*bar*\n* baz";
check
[Ulist ([Normal [Text "foo bar baz"]], [[Normal [Text "baz"]]])]
"* foo\nbar \n baz\n* baz";
check
[Ulist ([Normal [Text "foo"]; Normal [Text "bar"]], [[Normal [Text "baz"]]])]
"* foo\n\n bar\n* baz";
check
[Ulist ([Normal [Text "foo"]], [])]
"* foo";
check
[Ulist ([Normal [Text "foo"]], [[Normal [Text "bar"]]])]
"* foo\n* bar";
check
[Ulist ([Normal [Text "foo"]], [[Normal [Text "bar"]]])]
"* foo\n\n* bar";
check
[Ulist ([Normal [Text "foo"]; Ulist ([Normal [Text "bar"]], [])],
[])]
"* foo\n\n * bar";
check
[Ulist ([Normal [Text "foo"]; Ulist ([Normal [Text "bar"]], []);
Olist ([Normal [Text "1"]], [[Normal [Text "2"]]])],
[]);
Olist ([Normal [Text "3"]], [])]
"* foo\n\n * bar\n # 1\n # 2\n# 3";
check
[Ulist ([Normal [Text "foo"]; Ulist ([Normal [Text "bar"]], []);
Olist ([Normal [Text "1"]], [[Normal [Text "2 #3"]]])],
[])]
"* foo\n\n * bar\n # 1\n # 2\n#3";
check
[Ulist
([Normal [Text "some paragraph"]; Normal [Text "And another one."]],
[[Normal [Text "two"]]; [Normal [Text "three"]]])]
"
* some
paragraph
And another one.
* two
* three
";
check
[Ulist ([Normal [Text "foo "; Bold "bar baz"]; Normal [Text "xxx"]],
[[Normal [Text "baz"]]])]
"*\tfoo\n*bar\n baz*\n\n xxx\n\n* baz";
check
[Normal [Text "foo"]; Ulist ([Normal [Text "bar"]], [])]
"foo\n*\tbar";
check
[Olist ([Normal [Text "one"]],
[[Normal [Text "two"]]; [Normal [Text "three"]]])]
"
#\tone
#\ttwo
#\tthree"
let test_read_normal () =
check [Normal [Text "foo "; Struck [Text " bar baz "]; Text " foobar"]]
"foo == bar\nbaz == foobar";
check
[Normal
[Text "foo "; Bold "bar"; Text " "; Bold "baz"; Text " ";
Emph "foobar"; Text " _foobar_";
Link { href_target = "target"; href_desc = "desc"};
Image { img_src = "image"; img_alt = "alt"};
Text "."]]
"foo *bar* *baz* __foobar__ _foobar_[desc](target).";
check
[Normal [Bold "foo"; Text " "; Struck [Bold "foo"; Emph "bar"; Text "_baz_"]]]
"*foo* ==*foo*__bar___baz_==";
check
[Normal
[Link { href_target = ""; href_desc = "" }]]
"[]()";
check [Normal [Text ""]] "[]()";
check
[Normal
[Text "foo "; Anchor "internal-link"; Text ". ";
Link { href_target = "#internal-link"; href_desc = "back" }]]
"foo [](#internal-link). [back](#internal-link)"
let test_read_normal_unmatched () =
check [Normal [Text "foo * bar"]] "foo * bar";
check [Normal [Text "foo _ bar"]] "foo _ bar";
check [Normal [Text "foo __ bar"]] "foo __ bar";
check [Normal [Text "foo == bar"]] "foo == bar";
check [Normal [Text "foo == bar"]; Normal [Text "baz =="]] "foo == bar\n\nbaz =="
let test_read_pre () =
check
[Normal [Text "foo * bar"];
Pre("a\n b\n c\n", None);
Pre("a\\0\\1\\2\n b\n c\n", Some "whatever")]
"foo * bar\n{{\na\n b\n c\n}}\n\n{{whatever\na\\0\\1\\2\n b\n c\n}}\n ";
check
[Pre("a\n b\n c\n", Some "foobar")]
"{{foobar
a
b
c
}}";
check
[Pre("a\n b\n c\n", Some "foo")]
" {{foo
a
b
c
}}";
check
[Pre("a\n }}\n \\}}\n }}}\n", None)]
"{{
a
\\}}
\\\\}}
}}}
}}"
let test_heading () =
for i = 1 to 6 do
check
[Heading (i, [Text "foo "; Link { href_target = "dst"; href_desc = "foo" }])]
(String.make i '!' ^ "foo [foo](dst)")
done
let test_quote () =
check [Quote [Normal [Text "xxx"]]] "> xxx";
check [Quote [Normal [Text "xxx"]]] "> \n> xxx\n> ";
check [Normal [Text "foo says:"];
Quote [Normal [Text "xxx:"];
Ulist ([Normal [Text "xxx yyy"]],
[[Normal [Emph "2"]]; [Normal [Text "_2_"]]; [Normal [Bold "3"]]]);
Quote [Normal [Text "yyy"]; Quote [Normal [Text "zzz"]];
Normal [Text "aaa"]]]]
"foo says:\n\
\n\
> xxx:\n\
> * xxx\n\
> yyy\n\
> * __2__\n\
> * _2_\n\
> * *3*\n\
> > yyy\n\
> > > zzz\n\
> > aaa\n\
\n\
";
check [Quote [Ulist ([Normal [Text "one"]; Normal [Text "xxx"]],
[[Normal [Text "two"]]])]]
"> * one\n\
>\n\
> xxx\n\
> * two\n\
\n"
let tests = "Simple_markup unit" >:::
[
"Normal" >:: test_read_normal;
"Normal, unmatched delimiters" >:: test_read_normal_unmatched;
"Ulist and Olist" >:: test_read_list;
"Pre" >:: test_read_pre;
"Heading" >:: test_heading;
"Quote" >:: test_quote;
]
|
|
fbb4c940e366b4fead4967870dd3994c7f076ad02275cf70a36e98fa750c5c4d | ghc/nofib | Chl_routs.hs |
The first part of Choleski decomposition .
Contains a matrix reodering function .
The generalized envelope method is implemented here .
XZ , 24/10/91
The first part of Choleski decomposition.
Contains a matrix reodering function.
The generalized envelope method is implemented here.
XZ, 24/10/91
-}
Modified to adopt S_arrays .
More efficient algorithms have been adopted .
They include :
a ) minimum degree ordering ( in module Min_degree.hs ) ;
b ) K matrix assembly .
Also , the output format has been changed .
XZ , 19/2/92
Modified to adopt S_arrays.
More efficient algorithms have been adopted.
They include:
a) minimum degree ordering (in module Min_degree.hs);
b) K matrix assembly.
Also, the output format has been changed.
XZ, 19/2/92
-}
module Chl_routs ( orded_mat ) where
import Defs
import S_Array -- not needed w/ proper module handling
import Norm -- ditto
import Min_degree
import Data.Ix
infix 1 =:
(=:) a b = (a,b)
-----------------------------------------------------------
-- Liu's generalized envelope method adopted here. --
Reordering the system matric by firstly applying --
-- minimum degree ordering ( to minimize fill-ins ) and --
-- secondly applying postordering ( to optimize matrix --
-- structure ). The system matrix structure is found --
-- using the elimination tree. Used at the data setup --
-- stage. --
-----------------------------------------------------------
orded_mat
:: Int
-> (My_Array Int (Frac_type,((Frac_type,Frac_type,Frac_type),
(Frac_type,Frac_type,Frac_type))))
-> (My_Array Int [Int])
-> [Int]
-> (My_Array Int (My_Array Int Frac_type,My_Array Int (Int,[Frac_type])),My_Array Int Int)
orded_mat p_total el_det_fac p_steer fixed =
(init_L,o_to_n)
where
old Haskell 1.0 " let " , essentially
also old Haskell 1.0 thing
n_bnds = (1,p_total)
n_bnds' = (0,p_total)
-- the inverse of an 1-D Int array.
inv_map = \a ->
s_array n_bnds' (map (\(i,j)->j=:i) (s_assocs a))
-- find the column indecies of nonzero entries in a row
get_js old_i map_f =
filter (\j->j<=i) (map ((!^) map_f) (old_rows!^old_i))
where i = map_f!^old_i
-- children of individual elimination tree nodes
chldrn = \e_tree ->
s_accumArray (++) [] n_bnds'
(map (\(i,j)->j=:[i]) (s_assocs e_tree))
-- the entry map from the input matrix to the output matrix
-- ( combination of o_to_min and min_to_n )
o_to_n :: (My_Array Int Int)
o_to_n = s_amap ((!^) min_to_n) o_to_min
n_to_o = inv_map o_to_n
-- the entry map of the minimum degree ordering
o_to_min = inv_map min_to_o
min_to_o = s_listArray n_bnds' (0:min_degree old_rows)
-- the entry map of postordering
-- switch off ordering
min_to_n :: My_Array Int Int
-- min_to_n = s_listArray n_bnds' (range n_bnds')
-- min_to_o = min_to_n
min_to_n =
s_array n_bnds' ((0=:0):(fst (recur ([],1) (chn!^0))))
where
chn = chldrn min_e_tree
-- recursive postordering
recur =
foldl
(
-- pattern before entering a loop
\ res r ->
-- current result of post-reordering
(recur res (chn!^r)) `bindTo` ( \ (new_reord,label) ->
((r=:label):new_reord,label+1) )
)
-- the elimination tree of the reordered matrix
new_e_tree =
s_array n_bnds
( map (\(i,j)-> (min_to_n!^i =: min_to_n!^j))
( s_assocs min_e_tree ))
-- elimination tree of the matrix after minimum degree
-- ordering
min_e_tree =
s_def_array n_bnds (0::Int)
(all_rs (1::Int) init_arr [])
where
init_arr = s_def_array n_bnds (0::Int) []
-- implementation of an elimination tree construction
-- algorithm
all_rs i ance pare =
if ( i>p_total )
then pare
else all_rs (i+1) new_ance pare++rss
where
root old@(k,old_anc) =
if ( (new_k==0) || (new_k==i) )
then old
else root (new_k,old_anc//^[k=:i])
where new_k = old_anc!^k
-- finding new parents and ancestors
(rss,new_ance) =
-- looping over connetions of current node in
-- the matrix graph
foldl
(
-- pattern before entering a loop
\ (rs,anc) k1 ->
-- appending a new parent
(root (k1,anc)) `bindTo` ( \ (r,new_anc) ->
(r=:i) `bindTo` ( \ new_r ->
if new_anc!^r /= 0
then (rs, new_anc)
else (new_r:rs, new_anc //^ [new_r]) ))
)
([],ance) (remove i (get_js (min_to_o!^i) o_to_min))
initial L
init_L =
s_listArray (1,length block_ends)
[
(
s_listArray bn [get_v i i|i<-range bn],
(filter (\ (_,j)->j<=u)
[ (i, find_first bn (find_non0 i))
| i <- range (l+1,p_total)
]) `bindTo` ( \ non_emp_set ->
s_def_array (l+1,p_total) (u+1,[])
[ i=:(j',[get_v i j | j<- range (j',min u (i-1))])
| (i,j') <- non_emp_set
] )
)
| bn@(l,u) <- block_bnds
]
where
get_v i j =
if ( i'<j' )
then (old_mat!^j')!^i'
else (old_mat!^i')!^j'
where
i' = n_to_o!^i
j' = n_to_o!^j
find_non0 i =
foldl ( \ar j -> all_non0s j ar )
(s_def_array (1,i) False [])
(get_js (n_to_o!^i) o_to_n)
where
all_non0s j arr =
if ( j>i || j==0 || arr!^j )
then arr
else all_non0s (new_e_tree!^j) (arr//^[j=:True])
finding the first non - zero entry between l and u of the ith line
find_first :: (Int,Int) -> (My_Array Int Bool) -> Int
find_first (j1,u) non0_line = f' j1
where
f' j =
if (j>u) || non0_line!^j
then j
else f' (j+1)
-- reordered matrix in a new sparse form
block_ends =
[ i | (i,j)<-s_assocs new_e_tree, j/=(i+1) ]
block_bnds = zip (1:(map ((+) 1) (init block_ends))) block_ends
-- descendants of nodes of elimination tree
decnd :: My_Array Int [Int]
decnd =
s_listArray n_bnds
[ chn_n ++ concat [ decnd!^i | i <- chn_n ]
| chn_n <- s_elems (chldrn new_e_tree)
]
-- rows of the K matrix (before ordering)
old_rows =
s_accumArray (++) [] n_bnds
( concat
[
[j|(j,_)<-sparse_assocs (old_mat!^i)] `bindTo` ( \ j_set ->
(i=:j_set):[j'=:[i]|j'<-j_set,i/=j'] )
| i <- range n_bnds
]
)
-- Value and index pairs of the original matrix.
-- This is found by assembling system K.
-- Fixed entries are multiplied by a large number
old_mat :: My_Array Int (My_Array Int Frac_type)
old_mat =
arr //^
[ (arr!^i) `bindTo` ( \ ar ->
i =: ar //^ [i=:(ar!^i)*large_scalor] )
| i <- fixed
]
where
arr =
s_listArray n_bnds
[
s_accumArray (+) (0::Frac_type) (1,i) (temp!^i)
| i<-range n_bnds
]
temp :: My_Array Int [(Int,Frac_type)]
temp =
s_accumArray (++) [] n_bnds
( concat
[
(el_det_fac!^e) `bindTo` ( \ d_f ->
(zip (range (1,p_nodel)) (p_steer!^e)) `bindTo` ( \ pairs ->
concat
[
(dd_mat!^ii) `bindTo` ( \ dd_m ->
[ i =: [j =: (dd_m!^jj) d_f]
| (jj,j) <- pairs, j<=i
] )
| (ii,i) <- pairs
] ))
| e <- s_indices el_det_fac
]
)
-- element contribution matrix
dd_mat =
s_listArray (1,p_nodel) [
s_listArray (1,p_nodel) [f11,f12,f13],
s_listArray (1,p_nodel) [f12,f22,f23],
s_listArray (1,p_nodel) [f13,f23,f33]
]
where
f = \x y u v d -> (x*y+u*v)*d
s1 = \(x,_,_) -> x
s2 = \(_,y,_) -> y
s3 = \(_,_,z) -> z
f11 (det,(x,y)) = f c1 c1 c2 c2 det
where
c1 = s1 x
c2 = s1 y
f12 = \(det,(x,y)) -> f (s1 x) (s2 x) (s1 y) (s2 y) det
f13 = \(det,(x,y)) -> f (s1 x) (s3 x) (s1 y) (s3 y) det
f22 (det,(x,y)) = f c1 c1 c2 c2 det
where
c1 = s2 x
c2 = s2 y
f23 = \(det,(x,y)) -> f (s2 x) (s3 x) (s2 y) (s3 y) det
f33 (det,(x,y)) = f c1 c1 c2 c2 det
where
c1 = s3 x
c2 = s3 y
| null | https://raw.githubusercontent.com/ghc/nofib/f34b90b5a6ce46284693119a06d1133908b11856/real/fluid/Chl_routs.hs | haskell | not needed w/ proper module handling
ditto
---------------------------------------------------------
Liu's generalized envelope method adopted here. --
minimum degree ordering ( to minimize fill-ins ) and --
secondly applying postordering ( to optimize matrix --
structure ). The system matrix structure is found --
using the elimination tree. Used at the data setup --
stage. --
---------------------------------------------------------
the inverse of an 1-D Int array.
find the column indecies of nonzero entries in a row
children of individual elimination tree nodes
the entry map from the input matrix to the output matrix
( combination of o_to_min and min_to_n )
the entry map of the minimum degree ordering
the entry map of postordering
switch off ordering
min_to_n = s_listArray n_bnds' (range n_bnds')
min_to_o = min_to_n
recursive postordering
pattern before entering a loop
current result of post-reordering
the elimination tree of the reordered matrix
elimination tree of the matrix after minimum degree
ordering
implementation of an elimination tree construction
algorithm
finding new parents and ancestors
looping over connetions of current node in
the matrix graph
pattern before entering a loop
appending a new parent
reordered matrix in a new sparse form
descendants of nodes of elimination tree
rows of the K matrix (before ordering)
Value and index pairs of the original matrix.
This is found by assembling system K.
Fixed entries are multiplied by a large number
element contribution matrix |
The first part of Choleski decomposition .
Contains a matrix reodering function .
The generalized envelope method is implemented here .
XZ , 24/10/91
The first part of Choleski decomposition.
Contains a matrix reodering function.
The generalized envelope method is implemented here.
XZ, 24/10/91
-}
Modified to adopt S_arrays .
More efficient algorithms have been adopted .
They include :
a ) minimum degree ordering ( in module Min_degree.hs ) ;
b ) K matrix assembly .
Also , the output format has been changed .
XZ , 19/2/92
Modified to adopt S_arrays.
More efficient algorithms have been adopted.
They include:
a) minimum degree ordering (in module Min_degree.hs);
b) K matrix assembly.
Also, the output format has been changed.
XZ, 19/2/92
-}
module Chl_routs ( orded_mat ) where
import Defs
import Min_degree
import Data.Ix
infix 1 =:
(=:) a b = (a,b)
orded_mat
:: Int
-> (My_Array Int (Frac_type,((Frac_type,Frac_type,Frac_type),
(Frac_type,Frac_type,Frac_type))))
-> (My_Array Int [Int])
-> [Int]
-> (My_Array Int (My_Array Int Frac_type,My_Array Int (Int,[Frac_type])),My_Array Int Int)
orded_mat p_total el_det_fac p_steer fixed =
(init_L,o_to_n)
where
old Haskell 1.0 " let " , essentially
also old Haskell 1.0 thing
n_bnds = (1,p_total)
n_bnds' = (0,p_total)
inv_map = \a ->
s_array n_bnds' (map (\(i,j)->j=:i) (s_assocs a))
get_js old_i map_f =
filter (\j->j<=i) (map ((!^) map_f) (old_rows!^old_i))
where i = map_f!^old_i
chldrn = \e_tree ->
s_accumArray (++) [] n_bnds'
(map (\(i,j)->j=:[i]) (s_assocs e_tree))
o_to_n :: (My_Array Int Int)
o_to_n = s_amap ((!^) min_to_n) o_to_min
n_to_o = inv_map o_to_n
o_to_min = inv_map min_to_o
min_to_o = s_listArray n_bnds' (0:min_degree old_rows)
min_to_n :: My_Array Int Int
min_to_n =
s_array n_bnds' ((0=:0):(fst (recur ([],1) (chn!^0))))
where
chn = chldrn min_e_tree
recur =
foldl
(
\ res r ->
(recur res (chn!^r)) `bindTo` ( \ (new_reord,label) ->
((r=:label):new_reord,label+1) )
)
new_e_tree =
s_array n_bnds
( map (\(i,j)-> (min_to_n!^i =: min_to_n!^j))
( s_assocs min_e_tree ))
min_e_tree =
s_def_array n_bnds (0::Int)
(all_rs (1::Int) init_arr [])
where
init_arr = s_def_array n_bnds (0::Int) []
all_rs i ance pare =
if ( i>p_total )
then pare
else all_rs (i+1) new_ance pare++rss
where
root old@(k,old_anc) =
if ( (new_k==0) || (new_k==i) )
then old
else root (new_k,old_anc//^[k=:i])
where new_k = old_anc!^k
(rss,new_ance) =
foldl
(
\ (rs,anc) k1 ->
(root (k1,anc)) `bindTo` ( \ (r,new_anc) ->
(r=:i) `bindTo` ( \ new_r ->
if new_anc!^r /= 0
then (rs, new_anc)
else (new_r:rs, new_anc //^ [new_r]) ))
)
([],ance) (remove i (get_js (min_to_o!^i) o_to_min))
initial L
init_L =
s_listArray (1,length block_ends)
[
(
s_listArray bn [get_v i i|i<-range bn],
(filter (\ (_,j)->j<=u)
[ (i, find_first bn (find_non0 i))
| i <- range (l+1,p_total)
]) `bindTo` ( \ non_emp_set ->
s_def_array (l+1,p_total) (u+1,[])
[ i=:(j',[get_v i j | j<- range (j',min u (i-1))])
| (i,j') <- non_emp_set
] )
)
| bn@(l,u) <- block_bnds
]
where
get_v i j =
if ( i'<j' )
then (old_mat!^j')!^i'
else (old_mat!^i')!^j'
where
i' = n_to_o!^i
j' = n_to_o!^j
find_non0 i =
foldl ( \ar j -> all_non0s j ar )
(s_def_array (1,i) False [])
(get_js (n_to_o!^i) o_to_n)
where
all_non0s j arr =
if ( j>i || j==0 || arr!^j )
then arr
else all_non0s (new_e_tree!^j) (arr//^[j=:True])
finding the first non - zero entry between l and u of the ith line
find_first :: (Int,Int) -> (My_Array Int Bool) -> Int
find_first (j1,u) non0_line = f' j1
where
f' j =
if (j>u) || non0_line!^j
then j
else f' (j+1)
block_ends =
[ i | (i,j)<-s_assocs new_e_tree, j/=(i+1) ]
block_bnds = zip (1:(map ((+) 1) (init block_ends))) block_ends
decnd :: My_Array Int [Int]
decnd =
s_listArray n_bnds
[ chn_n ++ concat [ decnd!^i | i <- chn_n ]
| chn_n <- s_elems (chldrn new_e_tree)
]
old_rows =
s_accumArray (++) [] n_bnds
( concat
[
[j|(j,_)<-sparse_assocs (old_mat!^i)] `bindTo` ( \ j_set ->
(i=:j_set):[j'=:[i]|j'<-j_set,i/=j'] )
| i <- range n_bnds
]
)
old_mat :: My_Array Int (My_Array Int Frac_type)
old_mat =
arr //^
[ (arr!^i) `bindTo` ( \ ar ->
i =: ar //^ [i=:(ar!^i)*large_scalor] )
| i <- fixed
]
where
arr =
s_listArray n_bnds
[
s_accumArray (+) (0::Frac_type) (1,i) (temp!^i)
| i<-range n_bnds
]
temp :: My_Array Int [(Int,Frac_type)]
temp =
s_accumArray (++) [] n_bnds
( concat
[
(el_det_fac!^e) `bindTo` ( \ d_f ->
(zip (range (1,p_nodel)) (p_steer!^e)) `bindTo` ( \ pairs ->
concat
[
(dd_mat!^ii) `bindTo` ( \ dd_m ->
[ i =: [j =: (dd_m!^jj) d_f]
| (jj,j) <- pairs, j<=i
] )
| (ii,i) <- pairs
] ))
| e <- s_indices el_det_fac
]
)
dd_mat =
s_listArray (1,p_nodel) [
s_listArray (1,p_nodel) [f11,f12,f13],
s_listArray (1,p_nodel) [f12,f22,f23],
s_listArray (1,p_nodel) [f13,f23,f33]
]
where
f = \x y u v d -> (x*y+u*v)*d
s1 = \(x,_,_) -> x
s2 = \(_,y,_) -> y
s3 = \(_,_,z) -> z
f11 (det,(x,y)) = f c1 c1 c2 c2 det
where
c1 = s1 x
c2 = s1 y
f12 = \(det,(x,y)) -> f (s1 x) (s2 x) (s1 y) (s2 y) det
f13 = \(det,(x,y)) -> f (s1 x) (s3 x) (s1 y) (s3 y) det
f22 (det,(x,y)) = f c1 c1 c2 c2 det
where
c1 = s2 x
c2 = s2 y
f23 = \(det,(x,y)) -> f (s2 x) (s3 x) (s2 y) (s3 y) det
f33 (det,(x,y)) = f c1 c1 c2 c2 det
where
c1 = s3 x
c2 = s3 y
|
b37c2962300c35bd1e95d0e8583e41ef6ff195f6b909026e76b8943eb0486765 | yannham/mechaml | reddit_login_bind_operators.ml | (* This file is in the public domain *)
(** Connect to reddit then fill and submit the login form then download the
resulting page *)
* This is the same program as reddit_login , but where we used the new OCaml
binding operators introduced in 4.08 and supported in through the
module . Agent . Syntax
binding operators introduced in 4.08 and supported in Mechaml through the
module Mechaml.Agent.Syntax *)
open Mechaml
module M = Agent.Monad
open M.Syntax
let require msg = function
| Some a -> a
| None -> failwith msg
let action_login =
let* response = Agent.get "" in
let form =
response
|> Agent.HttpResponse.page
|> Page.form_with "[id=login_login-main]"
|> require "Can't find the login form !"
|> Page.Form.set "user" "mynick"
|> Page.Form.set "passwd" "@xlz43" in
let* response = Agent.submit form in
response
|> Agent.HttpResponse.content
|> M.save_content "reddit-login.html"
let _ =
M.run (Agent.init ()) action_login
| null | https://raw.githubusercontent.com/yannham/mechaml/1efeb59e21ed72bc36e2421a96a52e807065b3d6/examples/reddit_login_bind_operators.ml | ocaml | This file is in the public domain
* Connect to reddit then fill and submit the login form then download the
resulting page |
* This is the same program as reddit_login , but where we used the new OCaml
binding operators introduced in 4.08 and supported in through the
module . Agent . Syntax
binding operators introduced in 4.08 and supported in Mechaml through the
module Mechaml.Agent.Syntax *)
open Mechaml
module M = Agent.Monad
open M.Syntax
let require msg = function
| Some a -> a
| None -> failwith msg
let action_login =
let* response = Agent.get "" in
let form =
response
|> Agent.HttpResponse.page
|> Page.form_with "[id=login_login-main]"
|> require "Can't find the login form !"
|> Page.Form.set "user" "mynick"
|> Page.Form.set "passwd" "@xlz43" in
let* response = Agent.submit form in
response
|> Agent.HttpResponse.content
|> M.save_content "reddit-login.html"
let _ =
M.run (Agent.init ()) action_login
|
d0b2319785fb145a8212b3ef053c8dd0263cd62c43c17c6d73d70d8997648120 | facebook/flow | parsing_service_js.mli |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open Utils_js
open Docblock_parser
type types_mode =
| TypesAllowed
| TypesForbiddenByDefault
(* result of individual parse *)
type result =
| Parse_ok of {
ast: (Loc.t, Loc.t) Flow_ast.Program.t;
file_sig: File_sig.With_Loc.t;
locs: Parsing_heaps.locs_tbl;
type_sig: Parsing_heaps.type_sig;
tolerable_errors: File_sig.With_Loc.tolerable_error list;
exports: Exports.t;
imports: Imports.t;
cas_digest: Cas_digest.t option;
}
| Parse_recovered of {
ast: (Loc.t, Loc.t) Flow_ast.Program.t;
file_sig: File_sig.With_Loc.t;
tolerable_errors: File_sig.With_Loc.tolerable_error list;
parse_errors: parse_error Nel.t;
}
| Parse_exn of Exception.t
| Parse_skip of parse_skip_reason
and parse_skip_reason =
| Skip_resource_file
| Skip_non_flow_file
| Skip_package_json of (Package_json.t, parse_error) Result.t
and parse_error = Loc.t * Parse_error.t
and parse_failure =
| Uncaught_exception of Exception.t
| Docblock_errors of docblock_error list
| Parse_error of parse_error
(* results of parse job, returned by parse and reparse *)
type results = {
(* successfully parsed files *)
parsed: FilenameSet.t;
(* list of skipped files *)
unparsed: FilenameSet.t;
(* list of files skipped due to an out of date hash *)
changed: FilenameSet.t;
(* list of failed files *)
failed: File_key.t list * parse_failure list;
(* set of unchanged files *)
unchanged: FilenameSet.t;
(* set of files that were not found on disk *)
not_found: FilenameSet.t;
(* package.json files parsed *)
package_json: File_key.t list * parse_error option list;
(* set of modules that need to be committed *)
dirty_modules: Modulename.Set.t;
}
type parse_options = {
parse_types_mode: types_mode;
parse_use_strict: bool;
parse_prevent_munge: bool;
parse_module_ref_prefix: string option;
parse_module_ref_prefix_LEGACY_INTEROP: string option;
parse_facebook_fbt: string option;
parse_suppress_types: SSet.t;
parse_max_literal_len: int;
parse_exact_by_default: bool;
parse_enable_enums: bool;
parse_enable_relay_integration: bool;
parse_relay_integration_excludes: Str.regexp list;
parse_relay_integration_module_prefix: string option;
parse_relay_integration_module_prefix_includes: Str.regexp list;
parse_node_main_fields: string list;
parse_distributed: bool;
}
val make_parse_options :
?types_mode:types_mode -> ?use_strict:bool -> Docblock.t -> Options.t -> parse_options
(* Use default values for the various settings that parse takes. Each one can be overridden
individually *)
val parse_with_defaults :
?types_mode:types_mode ->
?use_strict:bool ->
reader:Mutator_state_reader.t ->
Options.t ->
MultiWorkerLwt.worker list option ->
File_key.t list Bucket.next ->
results Lwt.t
val reparse_with_defaults :
transaction:Transaction.t ->
reader:Mutator_state_reader.t ->
?types_mode:types_mode ->
?use_strict:bool ->
?with_progress:bool ->
workers:MultiWorkerLwt.worker list option ->
modified:FilenameSet.t ->
Options.t ->
results Lwt.t
val ensure_parsed :
reader:Mutator_state_reader.t ->
Options.t ->
MultiWorkerLwt.worker list option ->
FilenameSet.t ->
FilenameSet.t Lwt.t
val parse_package_json_file :
node_main_fields:string list -> string -> File_key.t -> (Package_json.t, parse_error) Result.t
(* parse contents of a file *)
val do_parse :
parse_options:parse_options ->
info:Docblock.t ->
string ->
(* contents of the file *)
File_key.t ->
(* filename *)
result
(* Utility to create the `next` parameter that `parse` requires *)
val next_of_filename_set :
?with_progress:bool ->
MultiWorkerLwt.worker list option ->
FilenameSet.t ->
File_key.t list Bucket.next
val does_content_match_file_hash : reader:Abstract_state_reader.t -> File_key.t -> string -> bool
| null | https://raw.githubusercontent.com/facebook/flow/f7d50bb772462888b27b5dbf9acf7d079eb1ff5f/src/parsing/parsing_service_js.mli | ocaml | result of individual parse
results of parse job, returned by parse and reparse
successfully parsed files
list of skipped files
list of files skipped due to an out of date hash
list of failed files
set of unchanged files
set of files that were not found on disk
package.json files parsed
set of modules that need to be committed
Use default values for the various settings that parse takes. Each one can be overridden
individually
parse contents of a file
contents of the file
filename
Utility to create the `next` parameter that `parse` requires |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open Utils_js
open Docblock_parser
type types_mode =
| TypesAllowed
| TypesForbiddenByDefault
type result =
| Parse_ok of {
ast: (Loc.t, Loc.t) Flow_ast.Program.t;
file_sig: File_sig.With_Loc.t;
locs: Parsing_heaps.locs_tbl;
type_sig: Parsing_heaps.type_sig;
tolerable_errors: File_sig.With_Loc.tolerable_error list;
exports: Exports.t;
imports: Imports.t;
cas_digest: Cas_digest.t option;
}
| Parse_recovered of {
ast: (Loc.t, Loc.t) Flow_ast.Program.t;
file_sig: File_sig.With_Loc.t;
tolerable_errors: File_sig.With_Loc.tolerable_error list;
parse_errors: parse_error Nel.t;
}
| Parse_exn of Exception.t
| Parse_skip of parse_skip_reason
and parse_skip_reason =
| Skip_resource_file
| Skip_non_flow_file
| Skip_package_json of (Package_json.t, parse_error) Result.t
and parse_error = Loc.t * Parse_error.t
and parse_failure =
| Uncaught_exception of Exception.t
| Docblock_errors of docblock_error list
| Parse_error of parse_error
type results = {
parsed: FilenameSet.t;
unparsed: FilenameSet.t;
changed: FilenameSet.t;
failed: File_key.t list * parse_failure list;
unchanged: FilenameSet.t;
not_found: FilenameSet.t;
package_json: File_key.t list * parse_error option list;
dirty_modules: Modulename.Set.t;
}
type parse_options = {
parse_types_mode: types_mode;
parse_use_strict: bool;
parse_prevent_munge: bool;
parse_module_ref_prefix: string option;
parse_module_ref_prefix_LEGACY_INTEROP: string option;
parse_facebook_fbt: string option;
parse_suppress_types: SSet.t;
parse_max_literal_len: int;
parse_exact_by_default: bool;
parse_enable_enums: bool;
parse_enable_relay_integration: bool;
parse_relay_integration_excludes: Str.regexp list;
parse_relay_integration_module_prefix: string option;
parse_relay_integration_module_prefix_includes: Str.regexp list;
parse_node_main_fields: string list;
parse_distributed: bool;
}
val make_parse_options :
?types_mode:types_mode -> ?use_strict:bool -> Docblock.t -> Options.t -> parse_options
val parse_with_defaults :
?types_mode:types_mode ->
?use_strict:bool ->
reader:Mutator_state_reader.t ->
Options.t ->
MultiWorkerLwt.worker list option ->
File_key.t list Bucket.next ->
results Lwt.t
val reparse_with_defaults :
transaction:Transaction.t ->
reader:Mutator_state_reader.t ->
?types_mode:types_mode ->
?use_strict:bool ->
?with_progress:bool ->
workers:MultiWorkerLwt.worker list option ->
modified:FilenameSet.t ->
Options.t ->
results Lwt.t
val ensure_parsed :
reader:Mutator_state_reader.t ->
Options.t ->
MultiWorkerLwt.worker list option ->
FilenameSet.t ->
FilenameSet.t Lwt.t
val parse_package_json_file :
node_main_fields:string list -> string -> File_key.t -> (Package_json.t, parse_error) Result.t
val do_parse :
parse_options:parse_options ->
info:Docblock.t ->
string ->
File_key.t ->
result
val next_of_filename_set :
?with_progress:bool ->
MultiWorkerLwt.worker list option ->
FilenameSet.t ->
File_key.t list Bucket.next
val does_content_match_file_hash : reader:Abstract_state_reader.t -> File_key.t -> string -> bool
|
3aebf3971617a82188762c0ea43f538aa300bb0c6cd931b39deb5d4f365fb134 | bmeurer/ocamljit2 | includeclass.mli | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1997 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ Id$
(* Inclusion checks for the class language *)
open Types
open Typedtree
open Ctype
open Format
val class_types:
Env.t -> class_type -> class_type -> class_match_failure list
val class_type_declarations:
Env.t -> cltype_declaration -> cltype_declaration ->
class_match_failure list
val class_declarations:
Env.t -> class_declaration -> class_declaration ->
class_match_failure list
val report_error: formatter -> class_match_failure list -> unit
| null | https://raw.githubusercontent.com/bmeurer/ocamljit2/ef06db5c688c1160acc1de1f63c29473bcd0055c/typing/includeclass.mli | ocaml | *********************************************************************
Objective Caml
*********************************************************************
Inclusion checks for the class language | , projet Cristal , INRIA Rocquencourt
Copyright 1997 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ Id$
open Types
open Typedtree
open Ctype
open Format
val class_types:
Env.t -> class_type -> class_type -> class_match_failure list
val class_type_declarations:
Env.t -> cltype_declaration -> cltype_declaration ->
class_match_failure list
val class_declarations:
Env.t -> class_declaration -> class_declaration ->
class_match_failure list
val report_error: formatter -> class_match_failure list -> unit
|
0d8303d4f3b89ab81a14f73513875f88375830f4a2106de3dd4dd178ce4d8715 | toddaaro/advanced-dan | mk.scm | (library
(mk)
(export
var var? rhs lhs lambdag@ walk walk* mzerog unitg
choiceg lambdaf@ : take empty-f conde conda ifa
condu ifu fresh project onceo succeed fail prt)
(import (rnrs) (only (chezscheme) pretty-print))
(define var
(lambda (x)
(vector x)))
(define var?
(lambda (x)
(vector? x)))
(define rhs
(lambda (x)
(cdr x)))
(define lhs
(lambda (x)
(car x)))
(define-syntax :
(lambda (x)
(syntax-violation 'mk "misplaced aux keyword" x)))
(define-syntax lambdag@
(syntax-rules (:)
((_ (a : s c) e)
(lambda (a) (let ((s (car a)) (c (cdr a))) e)))
((_ (a) e) (lambda (a) e))))
(define-syntax lambdaf@
(syntax-rules ()
((_ () e) (lambda () e))))
(define walk
(lambda (v s)
(cond
((var? v)
(let ((a (assq v s)))
(cond
(a (walk (rhs a) s))
(else v))))
(else v))))
(define walk*
(lambda (w s)
(let ((v (walk w s)))
(cond
((var? v) v)
((pair? v)
(cons
(walk* (car v) s)
(walk* (cdr v) s)))
(else v)))))
(define mzerog (lambda () #f))
(define unitg (lambdag@ (a) a))
(define choiceg (lambda (a f) (cons a f)))
(define succeed (lambdag@ (a) a))
(define fail (lambdag@ (a) (mzerog)))
(define prt (lambdag@ (a) (begin (pretty-print a) (unitg a))))
(define-syntax inc
(syntax-rules ()
((_ e) (lambdaf@ () e))))
(define-syntax case-inf
(syntax-rules ()
((_ e (() e0) ((f^) e1) ((a^) e2) ((a f) e3))
(let ((a-inf e))
(cond
((not a-inf) e0)
((procedure? a-inf) (let ((f^ a-inf)) e1))
((not (and (pair? a-inf)
(procedure? (cdr a-inf))))
(let ((a^ a-inf)) e2))
(else (let ((a (car a-inf)) (f (cdr a-inf)))
e3)))))))
(define empty-f (lambdaf@ () (mzerog)))
(define take
(lambda (n f)
(cond
((and n (zero? n)) '())
(else (case-inf (f)
(() '())
((f) (take n f))
((a) (cons a '()))
((a f) (cons a (take (and n (- n 1)) f))))))))
(define-syntax bindg*
(syntax-rules ()
((_ e) e)
((_ e g0 g ...) (bindg* (bindg e g0) g ...))))
(define bindg
(lambda (a-inf g)
(case-inf a-inf
(() (mzerog))
((f) (inc (bindg (f) g)))
((a) (g a))
((a f) (mplusg (g a) (lambdaf@ () (bindg (f) g)))))))
(define-syntax conde
(syntax-rules ()
((_ (g0 g ...) (g1 g^ ...) ...)
(lambdag@ (a)
(inc
(mplusg*
(bindg* (g0 a) g ...)
(bindg* (g1 a) g^ ...) ...))))))
(define-syntax mplusg*
(syntax-rules ()
((_ e) e)
((_ e0 e ...)
(mplusg e0
(lambdaf@ () (mplusg* e ...))))))
(define mplusg
(lambda (a-inf f)
(case-inf a-inf
(() (f))
((f^) (inc (mplusg (f) f^)))
((a) (choiceg a f))
((a f^) (choiceg a (lambdaf@ () (mplusg (f) f^)))))))
(define-syntax conda
(syntax-rules ()
((_ (g0 g ...) (g1 g^ ...) ...)
(lambdag@ (a)
(inc
(ifa ((g0 a) g ...)
((g1 a) g^ ...) ...))))))
(define-syntax ifa
(syntax-rules ()
((_) (mzerog))
((_ (e g ...) b ...)
(let loop ((a-inf e))
(case-inf a-inf
(() (ifa b ...))
((f) (inc (loop (f))))
((a) (bindg* a-inf g ...))
((a f) (bindg* a-inf g ...)))))))
(define-syntax condu
(syntax-rules ()
((_ (g0 g ...) (g1 g^ ...) ...)
(lambdag@ (a)
(inc
(ifu ((g0 a) g ...)
((g1 a) g^ ...) ...))))))
(define-syntax ifu
(syntax-rules ()
((_) (mzerog))
((_ (e g ...) b ...)
(let loop ((a-inf e))
(case-inf a-inf
(() (ifu b ...))
((f) (inc (loop (f))))
((a) (bindg* a-inf g ...))
((a f) (bindg* (unitg a) g ...)))))))
(define-syntax fresh
(syntax-rules ()
((_ (x ...) g0 g ...)
(lambdag@ (a)
(inc
(let ((x (var 'x)) ...)
(bindg* (g0 a) g ...)))))))
(define-syntax project
(syntax-rules ()
((_ (x ...) g g* ...)
(lambdag@ (a : s c)
(let ((x (walk* x s)) ...)
((fresh () g g* ...) a))))))
(define onceo (lambda (g) (condu (g))))
)
(import (mk))
| null | https://raw.githubusercontent.com/toddaaro/advanced-dan/5d6c0762d998aa37774e0414a0f37404e804b536/valo/mk.scm | scheme | (library
(mk)
(export
var var? rhs lhs lambdag@ walk walk* mzerog unitg
choiceg lambdaf@ : take empty-f conde conda ifa
condu ifu fresh project onceo succeed fail prt)
(import (rnrs) (only (chezscheme) pretty-print))
(define var
(lambda (x)
(vector x)))
(define var?
(lambda (x)
(vector? x)))
(define rhs
(lambda (x)
(cdr x)))
(define lhs
(lambda (x)
(car x)))
(define-syntax :
(lambda (x)
(syntax-violation 'mk "misplaced aux keyword" x)))
(define-syntax lambdag@
(syntax-rules (:)
((_ (a : s c) e)
(lambda (a) (let ((s (car a)) (c (cdr a))) e)))
((_ (a) e) (lambda (a) e))))
(define-syntax lambdaf@
(syntax-rules ()
((_ () e) (lambda () e))))
(define walk
(lambda (v s)
(cond
((var? v)
(let ((a (assq v s)))
(cond
(a (walk (rhs a) s))
(else v))))
(else v))))
(define walk*
(lambda (w s)
(let ((v (walk w s)))
(cond
((var? v) v)
((pair? v)
(cons
(walk* (car v) s)
(walk* (cdr v) s)))
(else v)))))
(define mzerog (lambda () #f))
(define unitg (lambdag@ (a) a))
(define choiceg (lambda (a f) (cons a f)))
(define succeed (lambdag@ (a) a))
(define fail (lambdag@ (a) (mzerog)))
(define prt (lambdag@ (a) (begin (pretty-print a) (unitg a))))
(define-syntax inc
(syntax-rules ()
((_ e) (lambdaf@ () e))))
(define-syntax case-inf
(syntax-rules ()
((_ e (() e0) ((f^) e1) ((a^) e2) ((a f) e3))
(let ((a-inf e))
(cond
((not a-inf) e0)
((procedure? a-inf) (let ((f^ a-inf)) e1))
((not (and (pair? a-inf)
(procedure? (cdr a-inf))))
(let ((a^ a-inf)) e2))
(else (let ((a (car a-inf)) (f (cdr a-inf)))
e3)))))))
(define empty-f (lambdaf@ () (mzerog)))
(define take
(lambda (n f)
(cond
((and n (zero? n)) '())
(else (case-inf (f)
(() '())
((f) (take n f))
((a) (cons a '()))
((a f) (cons a (take (and n (- n 1)) f))))))))
(define-syntax bindg*
(syntax-rules ()
((_ e) e)
((_ e g0 g ...) (bindg* (bindg e g0) g ...))))
(define bindg
(lambda (a-inf g)
(case-inf a-inf
(() (mzerog))
((f) (inc (bindg (f) g)))
((a) (g a))
((a f) (mplusg (g a) (lambdaf@ () (bindg (f) g)))))))
(define-syntax conde
(syntax-rules ()
((_ (g0 g ...) (g1 g^ ...) ...)
(lambdag@ (a)
(inc
(mplusg*
(bindg* (g0 a) g ...)
(bindg* (g1 a) g^ ...) ...))))))
(define-syntax mplusg*
(syntax-rules ()
((_ e) e)
((_ e0 e ...)
(mplusg e0
(lambdaf@ () (mplusg* e ...))))))
(define mplusg
(lambda (a-inf f)
(case-inf a-inf
(() (f))
((f^) (inc (mplusg (f) f^)))
((a) (choiceg a f))
((a f^) (choiceg a (lambdaf@ () (mplusg (f) f^)))))))
(define-syntax conda
(syntax-rules ()
((_ (g0 g ...) (g1 g^ ...) ...)
(lambdag@ (a)
(inc
(ifa ((g0 a) g ...)
((g1 a) g^ ...) ...))))))
(define-syntax ifa
(syntax-rules ()
((_) (mzerog))
((_ (e g ...) b ...)
(let loop ((a-inf e))
(case-inf a-inf
(() (ifa b ...))
((f) (inc (loop (f))))
((a) (bindg* a-inf g ...))
((a f) (bindg* a-inf g ...)))))))
(define-syntax condu
(syntax-rules ()
((_ (g0 g ...) (g1 g^ ...) ...)
(lambdag@ (a)
(inc
(ifu ((g0 a) g ...)
((g1 a) g^ ...) ...))))))
(define-syntax ifu
(syntax-rules ()
((_) (mzerog))
((_ (e g ...) b ...)
(let loop ((a-inf e))
(case-inf a-inf
(() (ifu b ...))
((f) (inc (loop (f))))
((a) (bindg* a-inf g ...))
((a f) (bindg* (unitg a) g ...)))))))
(define-syntax fresh
(syntax-rules ()
((_ (x ...) g0 g ...)
(lambdag@ (a)
(inc
(let ((x (var 'x)) ...)
(bindg* (g0 a) g ...)))))))
(define-syntax project
(syntax-rules ()
((_ (x ...) g g* ...)
(lambdag@ (a : s c)
(let ((x (walk* x s)) ...)
((fresh () g g* ...) a))))))
(define onceo (lambda (g) (condu (g))))
)
(import (mk))
|
|
5bda057e4a8353fd1bb7cfc11f1c8d18769eacf04f81e1e20f6e1eea14759c82 | expipiplus1/vulkan | VK_KHR_fragment_shading_rate.hs | {-# language CPP #-}
-- | = Name
--
VK_KHR_fragment_shading_rate - device extension
--
-- == VK_KHR_fragment_shading_rate
--
-- [__Name String__]
@VK_KHR_fragment_shading_rate@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
227
--
-- [__Revision__]
2
--
-- [__Extension and Version Dependencies__]
--
- Requires support for Vulkan 1.0
--
-- - Requires @VK_KHR_create_renderpass2@ to be enabled for any
-- device-level functionality
--
-- - Requires @VK_KHR_get_physical_device_properties2@ to be enabled
-- for any device-level functionality
--
-- [__Contact__]
--
-
< -Docs/issues/new?body=[VK_KHR_fragment_shading_rate ] @tobski%0A*Here describe the issue or question you have about the VK_KHR_fragment_shading_rate extension * >
--
-- [__Extension Proposal__]
-- <-Docs/tree/main/proposals/VK_KHR_fragment_shading_rate.adoc VK_KHR_fragment_shading_rate>
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
2021 - 09 - 30
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <-Registry/blob/master/extensions/KHR/SPV_KHR_fragment_shading_rate.html SPV_KHR_fragment_shading_rate>.
--
-- - This extension provides API support for
-- < GL_EXT_fragment_shading_rate>
--
-- [__Contributors__]
--
- , AMD
--
- , AMD
--
- , AMD
--
- , Nvidia
--
- , Qualcomm
--
- , Intel
--
- Jan - , Arm
--
- , Nvidia
--
- , Roblox
--
-- - Contributors to the VK_NV_shading_rate_image specification
--
- Contributors to the specification
--
-- == Description
--
-- This extension adds the ability to change the rate at which fragments
-- are shaded. Rather than the usual single fragment invocation for each
-- pixel covered by a primitive, multiple pixels can be shaded by a single
-- fragment shader invocation.
--
Up to three methods are available to the application to change the
-- fragment shading rate:
--
-- - <-extensions/html/vkspec.html#primsrast-fragment-shading-rate-pipeline>,
-- which allows the specification of a rate per-draw.
--
-- - <-extensions/html/vkspec.html#primsrast-fragment-shading-rate-primitive>,
-- which allows the specification of a rate per primitive, specified
-- during shading.
--
-- - <-extensions/html/vkspec.html#primsrast-fragment-shading-rate-attachment>,
-- which allows the specification of a rate per-region of the
-- framebuffer, specified in a specialized image attachment.
--
-- Additionally, these rates can all be specified and combined in order to
-- adjust the overall detail in the image at each point.
--
-- This functionality can be used to focus shading efforts where higher
-- levels of detail are needed in some parts of a scene compared to others.
This can be particularly useful in high resolution rendering , or for XR
-- contexts.
--
This extension also adds support for the @SPV_KHR_fragment_shading_rate@
-- extension which enables setting the
-- <-extensions/html/vkspec.html#primsrast-fragment-shading-rate-primitive primitive fragment shading rate>,
-- and allows querying the final shading rate from a fragment shader.
--
-- == New Commands
--
-- - 'cmdSetFragmentShadingRateKHR'
--
-- - 'getPhysicalDeviceFragmentShadingRatesKHR'
--
-- == New Structures
--
-- - 'PhysicalDeviceFragmentShadingRateKHR'
--
- Extending ' Vulkan . Core10.Pipeline . ' :
--
-- - 'PipelineFragmentShadingRateStateCreateInfoKHR'
--
-- - Extending
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2 ' ,
' Vulkan . Core10.Device . DeviceCreateInfo ' :
--
- ' '
--
-- - Extending
' Vulkan . ' :
--
- ' PhysicalDeviceFragmentShadingRatePropertiesKHR '
--
-- - Extending
' Vulkan . Core12.Promoted_From_VK_KHR_create_renderpass2.SubpassDescription2 ' :
--
-- - 'FragmentShadingRateAttachmentInfoKHR'
--
-- == New Enums
--
-- - 'FragmentShadingRateCombinerOpKHR'
--
-- == New Enum Constants
--
-- - 'KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME'
--
-- - 'KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION'
--
- Extending ' Vulkan . Core10.Enums . AccessFlagBits . AccessFlagBits ' :
--
- ' Vulkan . Core10.Enums . AccessFlagBits . ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR '
--
- Extending ' Vulkan . Core10.Enums . DynamicState . DynamicState ' :
--
- ' Vulkan . Core10.Enums . DynamicState . '
--
-- - Extending
' Vulkan . Core10.Enums . FormatFeatureFlagBits . FormatFeatureFlagBits ' :
--
- ' Vulkan . Core10.Enums . FormatFeatureFlagBits . FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR '
--
- Extending ' Vulkan . Core10.Enums . ImageLayout . ImageLayout ' :
--
- ' Vulkan . Core10.Enums . ImageLayout . IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR '
--
-- - Extending
' Vulkan . Core10.Enums . ImageUsageFlagBits . ImageUsageFlagBits ' :
--
- ' Vulkan . Core10.Enums . ImageUsageFlagBits . IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR '
--
-- - Extending
' Vulkan . Core10.Enums . . ' :
--
- ' Vulkan . Core10.Enums . . '
--
- Extending ' Vulkan . Core10.Enums . StructureType . StructureType ' :
--
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR '
--
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR '
--
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR '
--
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR '
--
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR '
--
-- If
-- <-extensions/html/vkspec.html#VK_KHR_format_feature_flags2 VK_KHR_format_feature_flags2>
-- is supported:
--
-- - Extending
' Vulkan . Core13.Enums . ' :
--
- ' Vulkan . Core13.Enums . FormatFeatureFlags2.FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR '
--
-- == Version History
--
- Revision 1 , 2020 - 05 - 06 ( )
--
-- - Initial revision
--
- Revision 2 , 2021 - 09 - 30 ( )
--
-- - Add interaction with @VK_KHR_format_feature_flags2@ to @vk.xml@
--
-- == See Also
--
-- 'FragmentShadingRateAttachmentInfoKHR',
-- 'FragmentShadingRateCombinerOpKHR',
' ' ,
-- 'PhysicalDeviceFragmentShadingRateKHR',
' PhysicalDeviceFragmentShadingRatePropertiesKHR ' ,
' PipelineFragmentShadingRateStateCreateInfoKHR ' ,
-- 'cmdSetFragmentShadingRateKHR',
-- 'getPhysicalDeviceFragmentShadingRatesKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <-extensions/html/vkspec.html#VK_KHR_fragment_shading_rate Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_fragment_shading_rate ( cmdSetFragmentShadingRateKHR
, getPhysicalDeviceFragmentShadingRatesKHR
, FragmentShadingRateAttachmentInfoKHR(..)
, PipelineFragmentShadingRateStateCreateInfoKHR(..)
, PhysicalDeviceFragmentShadingRateFeaturesKHR(..)
, PhysicalDeviceFragmentShadingRatePropertiesKHR(..)
, PhysicalDeviceFragmentShadingRateKHR(..)
, FragmentShadingRateCombinerOpKHR( FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR
, FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR
, FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR
, FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR
, FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR
, ..
)
, KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION
, pattern KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION
, KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME
, pattern KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME
) where
import Vulkan.CStruct.Utils (FixedArray)
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import Foreign.Marshal.Utils (maybePeek)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (castPtr)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import GHC.Show (showsPrec)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Data.Vector (generateM)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero)
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Data.Int (Int32)
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Data.Word (Word32)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.CStruct.Utils (advancePtrBytes)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.CStruct.Extends (forgetExtensions)
import Vulkan.CStruct.Utils (lowerArrayPtr)
import Vulkan.CStruct.Extends (peekSomeCStruct)
import Vulkan.CStruct.Extends (withSomeCStruct)
import Vulkan.NamedType ((:::))
import Vulkan.Core12.Promoted_From_VK_KHR_create_renderpass2 (AttachmentReference2)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Handles (CommandBuffer)
import Vulkan.Core10.Handles (CommandBuffer(..))
import Vulkan.Core10.Handles (CommandBuffer(CommandBuffer))
import Vulkan.Core10.Handles (CommandBuffer_T)
import Vulkan.Dynamic (DeviceCmds(pVkCmdSetFragmentShadingRateKHR))
import Vulkan.Core10.FundamentalTypes (Extent2D)
import Vulkan.Dynamic (InstanceCmds(pVkGetPhysicalDeviceFragmentShadingRatesKHR))
import Vulkan.Core10.Handles (PhysicalDevice)
import Vulkan.Core10.Handles (PhysicalDevice(..))
import Vulkan.Core10.Handles (PhysicalDevice(PhysicalDevice))
import Vulkan.Core10.Handles (PhysicalDevice_T)
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.Core10.Enums.SampleCountFlagBits (SampleCountFlagBits)
import Vulkan.Core10.Enums.SampleCountFlagBits (SampleCountFlags)
import Vulkan.CStruct.Extends (SomeStruct)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkCmdSetFragmentShadingRateKHR
:: FunPtr (Ptr CommandBuffer_T -> Ptr Extent2D -> Ptr (FixedArray 2 FragmentShadingRateCombinerOpKHR) -> IO ()) -> Ptr CommandBuffer_T -> Ptr Extent2D -> Ptr (FixedArray 2 FragmentShadingRateCombinerOpKHR) -> IO ()
-- | vkCmdSetFragmentShadingRateKHR - Set pipeline fragment shading rate and
-- combiner operation dynamically for a command buffer
--
-- = Description
--
-- This command sets the pipeline fragment shading rate and combiner
-- operation for subsequent drawing commands when the graphics pipeline is
-- created with
' Vulkan . Core10.Enums . DynamicState . '
-- set in
' Vulkan . Core10.Pipeline . PipelineDynamicStateCreateInfo'::@pDynamicStates@.
-- Otherwise, this state is specified by the
-- 'PipelineFragmentShadingRateStateCreateInfoKHR' values used to create
-- the currently active pipeline.
--
-- == Valid Usage
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-pipelineFragmentShadingRate-04507#
-- If
-- <-extensions/html/vkspec.html#features-pipelineFragmentShadingRate pipelineFragmentShadingRate>
is not enabled , @pFragmentSize->width@ /must/ be @1@
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-pipelineFragmentShadingRate-04508#
-- If
-- <-extensions/html/vkspec.html#features-pipelineFragmentShadingRate pipelineFragmentShadingRate>
is not enabled , @pFragmentSize->height@ /must/ be @1@
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-pipelineFragmentShadingRate-04509#
One of
-- <-extensions/html/vkspec.html#features-pipelineFragmentShadingRate pipelineFragmentShadingRate>,
-- <-extensions/html/vkspec.html#features-primitiveFragmentShadingRate primitiveFragmentShadingRate>,
-- or
-- <-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
-- /must/ be enabled
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-primitiveFragmentShadingRate-04510#
-- If the
-- <-extensions/html/vkspec.html#features-primitiveFragmentShadingRate primitiveFragmentShadingRate>
-- feature is not enabled, @combinerOps@[0] /must/ be
-- 'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR'
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-attachmentFragmentShadingRate-04511#
-- If the
-- <-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
-- feature is not enabled, @combinerOps@[1] /must/ be
-- 'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR'
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-fragmentSizeNonTrivialCombinerOps-04512#
-- If the
-- <-extensions/html/vkspec.html#limits-fragmentShadingRateNonTrivialCombinerOps fragmentSizeNonTrivialCombinerOps>
limit is not supported , elements of @combinerOps@ /must/ be either
-- 'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR' or
' '
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04513#
-- @pFragmentSize->width@ /must/ be greater than or equal to @1@
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04514#
-- @pFragmentSize->height@ /must/ be greater than or equal to @1@
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04515#
@pFragmentSize->width@ /must/ be a power - of - two value
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04516#
@pFragmentSize->height@ /must/ be a power - of - two value
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04517#
-- @pFragmentSize->width@ /must/ be less than or equal to @4@
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04518#
-- @pFragmentSize->height@ /must/ be less than or equal to @4@
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-commandBuffer-parameter#
--     @commandBuffer@ /must/ be a valid
--     'Vulkan.Core10.Handles.CommandBuffer' handle
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-parameter#
--     @pFragmentSize@ /must/ be a valid pointer to a valid
--     'Vulkan.Core10.FundamentalTypes.Extent2D' structure
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-combinerOps-parameter# Any
--     given element of @combinerOps@ /must/ be a valid
--     'FragmentShadingRateCombinerOpKHR' value
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-commandBuffer-recording#
--     @commandBuffer@ /must/ be in the
--     <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-commandBuffer-cmdpool# The
--     'Vulkan.Core10.Handles.CommandPool' that @commandBuffer@ was
--     allocated from /must/ support graphics operations
--
-- - #VUID-vkCmdSetFragmentShadingRateKHR-videocoding# This command
-- /must/ only be called outside of a video coding scope
--
-- == Host Synchronization
--
-- - Host access to @commandBuffer@ /must/ be externally synchronized
--
-- - Host access to the 'Vulkan.Core10.Handles.CommandPool' that
--   @commandBuffer@ was allocated from /must/ be externally synchronized
--
-- == Command Properties
--
-- \'
--
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
-- | <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels> | <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope> | <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#vkCmdBeginVideoCodingKHR Video Coding Scope> | <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types> | <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#fundamentals-queueoperation-command-types Command Type> |
-- +============================================================================================================================+========================================================================================================================+=============================================================================================================================+=======================================================================================================================+========================================================================================================================================+
-- | Primary | Both | Outside | Graphics | State |
-- | Secondary | | | | |
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
--
-- = See Also
--
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate>,
-- 'Vulkan.Core10.Handles.CommandBuffer',
-- 'Vulkan.Core10.FundamentalTypes.Extent2D',
-- 'FragmentShadingRateCombinerOpKHR'
cmdSetFragmentShadingRateKHR :: forall io
                              . (MonadIO io)
                             => -- | @commandBuffer@ is the command buffer into which the command will be
                                -- recorded.
                                CommandBuffer
                             -> -- | @pFragmentSize@ specifies the pipeline fragment shading rate for
                                -- subsequent drawing commands.
                                ("fragmentSize" ::: Extent2D)
                             -> -- | @combinerOps@ specifies a 'FragmentShadingRateCombinerOpKHR' determining
                                -- how the pipeline, primitive, and attachment shading rates are combined
                                -- for fragments generated by subsequent drawing commands.
                                ("combinerOps" ::: (FragmentShadingRateCombinerOpKHR, FragmentShadingRateCombinerOpKHR))
                             -> io ()
cmdSetFragmentShadingRateKHR commandBuffer
                               fragmentSize
                               combinerOps = liftIO . evalContT $ do
  -- Resolve the dynamically loaded extension command from the device's
  -- dispatch table; it is NULL when the extension was not enabled.
  let vkCmdSetFragmentShadingRateKHRPtr = pVkCmdSetFragmentShadingRateKHR (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
  lift $ unless (vkCmdSetFragmentShadingRateKHRPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdSetFragmentShadingRateKHR is null" Nothing Nothing
  let vkCmdSetFragmentShadingRateKHR' = mkVkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHRPtr
  -- Marshal the Extent2D into a temporary C struct kept alive for the call.
  pFragmentSize <- ContT $ withCStruct (fragmentSize)
  -- The two combiner ops are passed to C as a fixed array of two 4-byte
  -- enum values (8 bytes total).
  pCombinerOps <- ContT $ allocaBytes @(FixedArray 2 FragmentShadingRateCombinerOpKHR) 8
  let pCombinerOps' = lowerArrayPtr pCombinerOps
  lift $ case (combinerOps) of
    (e0, e1) -> do
      poke (pCombinerOps' :: Ptr FragmentShadingRateCombinerOpKHR) (e0)
      poke (pCombinerOps' `plusPtr` 4 :: Ptr FragmentShadingRateCombinerOpKHR) (e1)
  lift $ traceAroundEvent "vkCmdSetFragmentShadingRateKHR" (vkCmdSetFragmentShadingRateKHR'
                                                              (commandBufferHandle (commandBuffer))
                                                              pFragmentSize
                                                              (pCombinerOps))
  pure $ ()
-- Convert the dynamically loaded @vkGetPhysicalDeviceFragmentShadingRatesKHR@
-- function pointer into a callable Haskell function. The call is marked
-- @unsafe@ unless SAFE_FOREIGN_CALLS is defined at build time.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkGetPhysicalDeviceFragmentShadingRatesKHR
  :: FunPtr (Ptr PhysicalDevice_T -> Ptr Word32 -> Ptr PhysicalDeviceFragmentShadingRateKHR -> IO Result) -> Ptr PhysicalDevice_T -> Ptr Word32 -> Ptr PhysicalDeviceFragmentShadingRateKHR -> IO Result
-- | vkGetPhysicalDeviceFragmentShadingRatesKHR - Get available shading rates
-- for a physical device
--
-- = Description
--
-- If @pFragmentShadingRates@ is @NULL@, then the number of fragment
-- shading rates available is returned in @pFragmentShadingRateCount@.
-- Otherwise, @pFragmentShadingRateCount@ /must/ point to a variable set by
-- the user to the number of elements in the @pFragmentShadingRates@ array,
-- and on return the variable is overwritten with the number of structures
-- actually written to @pFragmentShadingRates@. If
-- @pFragmentShadingRateCount@ is less than the number of fragment shading
-- rates available, at most @pFragmentShadingRateCount@ structures will be
-- written, and 'Vulkan.Core10.Enums.Result.INCOMPLETE' will be returned
-- instead of 'Vulkan.Core10.Enums.Result.SUCCESS', to indicate that not
-- all the available fragment shading rates were returned.
--
-- The returned array of fragment shading rates /must/ be ordered from
-- largest @fragmentSize.width@ value to smallest, and each set of fragment
-- shading rates with the same @fragmentSize.width@ value /must/ be ordered
-- from largest @fragmentSize.height@ to smallest. Any two entries in the
-- array /must/ not have the same @fragmentSize@ values.
--
-- For any entry in the array, the following rules also apply:
--
-- - The value of @fragmentSize.width@ /must/ be less than or equal to
--   <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-maxFragmentSize maxFragmentSize.width>.
--
-- - The value of @fragmentSize.width@ /must/ be greater than or equal to
--   @1@.
--
-- - The value of @fragmentSize.width@ /must/ be a power-of-two.
--
-- - The value of @fragmentSize.height@ /must/ be less than or equal to
--   <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-maxFragmentSize maxFragmentSize.height>.
--
-- - The value of @fragmentSize.height@ /must/ be greater than or equal
--   to @1@.
--
-- - The value of @fragmentSize.height@ /must/ be a power-of-two.
--
-- - The highest sample count in @sampleCounts@ /must/ be less than or
--   equal to
--   <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-maxFragmentShadingRateRasterizationSamples maxFragmentShadingRateRasterizationSamples>.
--
-- - The product of @fragmentSize.width@, @fragmentSize.height@, and the
--   highest sample count in @sampleCounts@ /must/ be less than or equal
--   to
--   <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-maxFragmentShadingRateCoverageSamples maxFragmentShadingRateCoverageSamples>.
--
-- Implementations /must/ support at least the following shading rates:
--
-- +--------------------------------------------------------------+-----------------------------------+
-- | @sampleCounts@ | @fragmentSize@ |
-- +==============================================================+===================================+
| ' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_1_BIT ' | { 2,2 } |
-- | | | |
| ' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_4_BIT ' | |
-- +--------------------------------------------------------------+-----------------------------------+
| ' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_1_BIT ' | { 2,1 } |
-- | | | |
| ' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_4_BIT ' | |
-- +--------------------------------------------------------------+-----------------------------------+
| ~0 | { 1,1 } |
-- +--------------------------------------------------------------+-----------------------------------+
--
-- If
< framebufferColorSampleCounts > ,
includes ' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_2_BIT ' ,
-- the required rates /must/ also include
' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_2_BIT ' .
--
-- Note
--
-- Including the {1,1} fragment size is done for completeness; it has no
-- actual effect on the support of rendering without setting the fragment
-- size. All sample counts and render pass transforms are supported for
-- this rate.
--
-- The returned set of fragment shading rates /must/ be returned in the
-- native (rotated) coordinate system. For rasterization using render pass
-- @transform@ not equal to
-- 'Vulkan.Extensions.VK_KHR_surface.SURFACE_TRANSFORM_IDENTITY_BIT_KHR',
-- the application /must/ transform the returned fragment shading rates
-- into the current (unrotated) coordinate system to get the supported
-- rates for that transform.
--
-- Note
--
-- For example, consider an implementation returning support for 4x2, but
-- not 2x4 in the set of supported fragment shading rates. This means that
-- for transforms
-- 'Vulkan.Extensions.VK_KHR_surface.SURFACE_TRANSFORM_ROTATE_90_BIT_KHR'
-- and
-- 'Vulkan.Extensions.VK_KHR_surface.SURFACE_TRANSFORM_ROTATE_270_BIT_KHR',
-- 2x4 is a supported rate, but 4x2 is an unsupported rate.
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkGetPhysicalDeviceFragmentShadingRatesKHR-physicalDevice-parameter#
--     @physicalDevice@ /must/ be a valid
--     'Vulkan.Core10.Handles.PhysicalDevice' handle
--
-- - #VUID-vkGetPhysicalDeviceFragmentShadingRatesKHR-pFragmentShadingRateCount-parameter#
--     @pFragmentShadingRateCount@ /must/ be a valid pointer to a
--     @uint32_t@ value
--
-- - #VUID-vkGetPhysicalDeviceFragmentShadingRatesKHR-pFragmentShadingRates-parameter#
--     If the value referenced by @pFragmentShadingRateCount@ is not @0@,
--     and @pFragmentShadingRates@ is not @NULL@, @pFragmentShadingRates@
--     /must/ be a valid pointer to an array of @pFragmentShadingRateCount@
--     'PhysicalDeviceFragmentShadingRateKHR' structures
--
-- == Return Codes
--
-- [<https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
--     - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
--     - 'Vulkan.Core10.Enums.Result.INCOMPLETE'
--
-- [<https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
--     - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- = See Also
--
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate>,
-- 'Vulkan.Core10.Handles.PhysicalDevice',
-- 'PhysicalDeviceFragmentShadingRateKHR'
getPhysicalDeviceFragmentShadingRatesKHR :: forall io
                                          . (MonadIO io)
                                         => -- | @physicalDevice@ is the handle to the physical device whose properties
                                            -- will be queried.
                                            PhysicalDevice
                                         -> io (Result, ("fragmentShadingRates" ::: Vector PhysicalDeviceFragmentShadingRateKHR))
getPhysicalDeviceFragmentShadingRatesKHR physicalDevice = liftIO . evalContT $ do
  -- Resolve the dynamically loaded extension command from the instance's
  -- dispatch table; it is NULL when the extension was not enabled.
  let vkGetPhysicalDeviceFragmentShadingRatesKHRPtr = pVkGetPhysicalDeviceFragmentShadingRatesKHR (case physicalDevice of PhysicalDevice{instanceCmds} -> instanceCmds)
  lift $ unless (vkGetPhysicalDeviceFragmentShadingRatesKHRPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetPhysicalDeviceFragmentShadingRatesKHR is null" Nothing Nothing
  let vkGetPhysicalDeviceFragmentShadingRatesKHR' = mkVkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHRPtr
  let physicalDevice' = physicalDeviceHandle (physicalDevice)
  -- Standard Vulkan two-call enumeration: first call with a NULL array
  -- pointer to query the element count ...
  pPFragmentShadingRateCount <- ContT $ bracket (callocBytes @Word32 4) free
  r <- lift $ traceAroundEvent "vkGetPhysicalDeviceFragmentShadingRatesKHR" (vkGetPhysicalDeviceFragmentShadingRatesKHR'
                                                                               physicalDevice'
                                                                               (pPFragmentShadingRateCount)
                                                                               (nullPtr))
  lift $ when (r < SUCCESS) (throwIO (VulkanException r))
  pFragmentShadingRateCount <- lift $ peek @Word32 pPFragmentShadingRateCount
  -- ... then allocate the output array (each struct occupies 32 bytes),
  -- zero-initialise every element, and call again to fetch the data.
  pPFragmentShadingRates <- ContT $ bracket (callocBytes @PhysicalDeviceFragmentShadingRateKHR ((fromIntegral (pFragmentShadingRateCount)) * 32)) free
  _ <- traverse (\i -> ContT $ pokeZeroCStruct (pPFragmentShadingRates `advancePtrBytes` (i * 32) :: Ptr PhysicalDeviceFragmentShadingRateKHR) . ($ ())) [0..(fromIntegral (pFragmentShadingRateCount)) - 1]
  r' <- lift $ traceAroundEvent "vkGetPhysicalDeviceFragmentShadingRatesKHR" (vkGetPhysicalDeviceFragmentShadingRatesKHR'
                                                                                physicalDevice'
                                                                                (pPFragmentShadingRateCount)
                                                                                ((pPFragmentShadingRates)))
  lift $ when (r' < SUCCESS) (throwIO (VulkanException r'))
  -- Re-read the count: the second call may report fewer elements than the
  -- first (the driver may legally shrink the set between calls).
  pFragmentShadingRateCount' <- lift $ peek @Word32 pPFragmentShadingRateCount
  pFragmentShadingRates' <- lift $ generateM (fromIntegral (pFragmentShadingRateCount')) (\i -> peekCStruct @PhysicalDeviceFragmentShadingRateKHR (((pPFragmentShadingRates) `advancePtrBytes` (32 * (i)) :: Ptr PhysicalDeviceFragmentShadingRateKHR)))
  pure $ ((r'), pFragmentShadingRates')
-- | VkFragmentShadingRateAttachmentInfoKHR - Structure specifying a fragment
-- shading rate attachment for a subpass
--
-- = Description
--
-- If no shading rate attachment is specified, or if this structure is not
-- specified, the implementation behaves as if a valid shading rate
-- attachment was specified with all texels specifying a single pixel per
-- fragment.
--
-- == Valid Usage
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04524#
--     If @pFragmentShadingRateAttachment@ is not @NULL@ and its
--     @attachment@ member is not
--     'Vulkan.Core10.APIConstants.ATTACHMENT_UNUSED', its @layout@ member
--     /must/ be equal to
--     'Vulkan.Core10.Enums.ImageLayout.IMAGE_LAYOUT_GENERAL' or
--     'Vulkan.Core10.Enums.ImageLayout.IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR'
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04525#
--     If @pFragmentShadingRateAttachment@ is not @NULL@ and its
--     @attachment@ member is not
--     'Vulkan.Core10.APIConstants.ATTACHMENT_UNUSED',
--     @shadingRateAttachmentTexelSize.width@ /must/ be a power of two
--     value
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04526#
--     If @pFragmentShadingRateAttachment@ is not @NULL@ and its
--     @attachment@ member is not
--     'Vulkan.Core10.APIConstants.ATTACHMENT_UNUSED',
--     @shadingRateAttachmentTexelSize.width@ /must/ be less than or equal
--     to
--     <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-maxFragmentShadingRateAttachmentTexelSize maxFragmentShadingRateAttachmentTexelSize.width>
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04527#
--     If @pFragmentShadingRateAttachment@ is not @NULL@ and its
--     @attachment@ member is not
--     'Vulkan.Core10.APIConstants.ATTACHMENT_UNUSED',
--     @shadingRateAttachmentTexelSize.width@ /must/ be greater than or
--     equal to
--     <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-minFragmentShadingRateAttachmentTexelSize minFragmentShadingRateAttachmentTexelSize.width>
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04528#
--     If @pFragmentShadingRateAttachment@ is not @NULL@ and its
--     @attachment@ member is not
--     'Vulkan.Core10.APIConstants.ATTACHMENT_UNUSED',
--     @shadingRateAttachmentTexelSize.height@ /must/ be a power of two
--     value
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04529#
--     If @pFragmentShadingRateAttachment@ is not @NULL@ and its
--     @attachment@ member is not
--     'Vulkan.Core10.APIConstants.ATTACHMENT_UNUSED',
--     @shadingRateAttachmentTexelSize.height@ /must/ be less than or equal
--     to
--     <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-maxFragmentShadingRateAttachmentTexelSize maxFragmentShadingRateAttachmentTexelSize.height>
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04530#
--     If @pFragmentShadingRateAttachment@ is not @NULL@ and its
--     @attachment@ member is not
--     'Vulkan.Core10.APIConstants.ATTACHMENT_UNUSED',
--     @shadingRateAttachmentTexelSize.height@ /must/ be greater than or
--     equal to
--     <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-minFragmentShadingRateAttachmentTexelSize minFragmentShadingRateAttachmentTexelSize.height>
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04531#
--     If @pFragmentShadingRateAttachment@ is not @NULL@ and its
--     @attachment@ member is not
--     'Vulkan.Core10.APIConstants.ATTACHMENT_UNUSED', the quotient of
--     @shadingRateAttachmentTexelSize.width@ and
--     @shadingRateAttachmentTexelSize.height@ /must/ be less than or equal
--     to
--     <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-maxFragmentShadingRateAttachmentTexelSizeAspectRatio maxFragmentShadingRateAttachmentTexelSizeAspectRatio>
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04532#
--     If @pFragmentShadingRateAttachment@ is not @NULL@ and its
--     @attachment@ member is not
--     'Vulkan.Core10.APIConstants.ATTACHMENT_UNUSED', the quotient of
--     @shadingRateAttachmentTexelSize.height@ and
--     @shadingRateAttachmentTexelSize.width@ /must/ be less than or equal
--     to
--     <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-maxFragmentShadingRateAttachmentTexelSizeAspectRatio maxFragmentShadingRateAttachmentTexelSizeAspectRatio>
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-sType-sType# @sType@
--     /must/ be
--     'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR'
--
-- - #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-parameter#
--     If @pFragmentShadingRateAttachment@ is not @NULL@,
--     @pFragmentShadingRateAttachment@ /must/ be a valid pointer to a
--     valid
--     'Vulkan.Core12.Promoted_From_VK_KHR_create_renderpass2.AttachmentReference2'
--     structure
--
-- = See Also
--
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate>,
-- 'Vulkan.Core12.Promoted_From_VK_KHR_create_renderpass2.AttachmentReference2',
-- 'Vulkan.Core10.FundamentalTypes.Extent2D',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data FragmentShadingRateAttachmentInfoKHR = FragmentShadingRateAttachmentInfoKHR
  { -- | @pFragmentShadingRateAttachment@ is @NULL@ or a pointer to a
    -- 'Vulkan.Core12.Promoted_From_VK_KHR_create_renderpass2.AttachmentReference2'
    -- structure defining the fragment shading rate attachment for this
    -- subpass.
    fragmentShadingRateAttachment :: Maybe (SomeStruct AttachmentReference2)
  , -- | @shadingRateAttachmentTexelSize@ specifies the size of the portion of
    -- the framebuffer corresponding to each texel in
    -- @pFragmentShadingRateAttachment@.
    shadingRateAttachmentTexelSize :: Extent2D
  }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (FragmentShadingRateAttachmentInfoKHR)
#endif
deriving instance Show FragmentShadingRateAttachmentInfoKHR
-- C layout: sType (offset 0), pNext (8), pFragmentShadingRateAttachment (16),
-- shadingRateAttachmentTexelSize (24); 32 bytes total, 8-byte aligned.
instance ToCStruct FragmentShadingRateAttachmentInfoKHR where
  withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p FragmentShadingRateAttachmentInfoKHR{..} f = evalContT $ do
    -- sType and pNext are fixed for this structure.
    lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR)
    lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    -- The optional attachment reference marshals to NULL when absent; the
    -- ContT continuation keeps the nested struct alive while 'f' runs.
    pFragmentShadingRateAttachment'' <- case (fragmentShadingRateAttachment) of
      Nothing -> pure nullPtr
      Just j -> ContT @_ @_ @(Ptr (AttachmentReference2 '[])) $ \cont -> withSomeCStruct @AttachmentReference2 (j) (cont . castPtr)
    lift $ poke ((p `plusPtr` 16 :: Ptr (Ptr (AttachmentReference2 _)))) pFragmentShadingRateAttachment''
    lift $ poke ((p `plusPtr` 24 :: Ptr Extent2D)) (shadingRateAttachmentTexelSize)
    lift $ f
  cStructSize = 32
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    -- Only mandatory fields are poked; the optional attachment pointer is
    -- left as-is (callers zero the memory first, so it reads as NULL).
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 24 :: Ptr Extent2D)) (zero)
    f
instance FromCStruct FragmentShadingRateAttachmentInfoKHR where
  peekCStruct p = do
    -- A NULL pointer maps back to 'Nothing'; otherwise the nested
    -- AttachmentReference2 (with any extension chain) is peeked in full.
    pFragmentShadingRateAttachment <- peek @(Ptr (AttachmentReference2 _)) ((p `plusPtr` 16 :: Ptr (Ptr (AttachmentReference2 _))))
    pFragmentShadingRateAttachment' <- maybePeek (\j -> peekSomeCStruct (forgetExtensions (j))) pFragmentShadingRateAttachment
    shadingRateAttachmentTexelSize <- peekCStruct @Extent2D ((p `plusPtr` 24 :: Ptr Extent2D))
    pure $ FragmentShadingRateAttachmentInfoKHR
             pFragmentShadingRateAttachment' shadingRateAttachmentTexelSize
-- | The all-zero value: no shading rate attachment and a zero texel size.
instance Zero FragmentShadingRateAttachmentInfoKHR where
  zero = FragmentShadingRateAttachmentInfoKHR
           { fragmentShadingRateAttachment  = Nothing
           , shadingRateAttachmentTexelSize = zero
           }
-- | VkPipelineFragmentShadingRateStateCreateInfoKHR - Structure specifying
-- parameters controlling the fragment shading rate
--
-- = Description
--
-- If the @pNext@ chain of
-- 'Vulkan.Core10.Pipeline.GraphicsPipelineCreateInfo' includes a
-- 'PipelineFragmentShadingRateStateCreateInfoKHR' structure, then that
-- structure includes parameters controlling the pipeline fragment shading
-- rate.
--
-- If this structure is not present, @fragmentSize@ is considered to be
-- equal to (1,1), and both elements of @combinerOps@ are considered to be
-- equal to 'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR'.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate>,
-- 'Vulkan.Core10.FundamentalTypes.Extent2D',
-- 'FragmentShadingRateCombinerOpKHR',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PipelineFragmentShadingRateStateCreateInfoKHR = PipelineFragmentShadingRateStateCreateInfoKHR
  { -- | @fragmentSize@ specifies a 'Vulkan.Core10.FundamentalTypes.Extent2D'
    -- structure containing the fragment size used to define the pipeline
    -- fragment shading rate for drawing commands using this pipeline.
    fragmentSize :: Extent2D
  , -- | @combinerOps@ specifies a 'FragmentShadingRateCombinerOpKHR' value
    -- determining how the pipeline, primitive, and attachment shading rates
    -- are combined for fragments generated by drawing commands using the
    -- created pipeline.
    combinerOps :: (FragmentShadingRateCombinerOpKHR, FragmentShadingRateCombinerOpKHR)
  }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PipelineFragmentShadingRateStateCreateInfoKHR)
#endif
deriving instance Show PipelineFragmentShadingRateStateCreateInfoKHR
-- C layout: sType (offset 0), pNext (8), fragmentSize (16),
-- combinerOps[2] (24); 32 bytes total, 8-byte aligned.
instance ToCStruct PipelineFragmentShadingRateStateCreateInfoKHR where
  withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p PipelineFragmentShadingRateStateCreateInfoKHR{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Extent2D)) (fragmentSize)
    -- The pair is laid out as a fixed C array of two 4-byte enum values.
    let pCombinerOps' = lowerArrayPtr ((p `plusPtr` 24 :: Ptr (FixedArray 2 FragmentShadingRateCombinerOpKHR)))
    case (combinerOps) of
      (e0, e1) -> do
        poke (pCombinerOps' :: Ptr FragmentShadingRateCombinerOpKHR) (e0)
        poke (pCombinerOps' `plusPtr` 4 :: Ptr FragmentShadingRateCombinerOpKHR) (e1)
    f
  cStructSize = 32
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Extent2D)) (zero)
    -- Zero both combiner-op array elements explicitly.
    let pCombinerOps' = lowerArrayPtr ((p `plusPtr` 24 :: Ptr (FixedArray 2 FragmentShadingRateCombinerOpKHR)))
    case ((zero, zero)) of
      (e0, e1) -> do
        poke (pCombinerOps' :: Ptr FragmentShadingRateCombinerOpKHR) (e0)
        poke (pCombinerOps' `plusPtr` 4 :: Ptr FragmentShadingRateCombinerOpKHR) (e1)
    f
instance FromCStruct PipelineFragmentShadingRateStateCreateInfoKHR where
  peekCStruct p = do
    fragmentSize <- peekCStruct @Extent2D ((p `plusPtr` 16 :: Ptr Extent2D))
    -- Read the two fixed-array combiner-op elements back into a pair.
    let pcombinerOps = lowerArrayPtr @FragmentShadingRateCombinerOpKHR ((p `plusPtr` 24 :: Ptr (FixedArray 2 FragmentShadingRateCombinerOpKHR)))
    combinerOps0 <- peek @FragmentShadingRateCombinerOpKHR ((pcombinerOps `advancePtrBytes` 0 :: Ptr FragmentShadingRateCombinerOpKHR))
    combinerOps1 <- peek @FragmentShadingRateCombinerOpKHR ((pcombinerOps `advancePtrBytes` 4 :: Ptr FragmentShadingRateCombinerOpKHR))
    pure $ PipelineFragmentShadingRateStateCreateInfoKHR
             fragmentSize ((combinerOps0, combinerOps1))
-- | Fixed-size marshalling delegated to the 'ToCStruct'/'FromCStruct' machinery.
instance Storable PipelineFragmentShadingRateStateCreateInfoKHR where
  sizeOf    ~_ = 32
  alignment ~_ = 8
  peek ptr = peekCStruct ptr
  poke ptr x = pokeCStruct ptr x (pure ())
-- | The all-zero value: zero fragment size and zero combiner ops.
instance Zero PipelineFragmentShadingRateStateCreateInfoKHR where
  zero = PipelineFragmentShadingRateStateCreateInfoKHR
           { fragmentSize = zero
           , combinerOps  = (zero, zero)
           }
-- | VkPhysicalDeviceFragmentShadingRateFeaturesKHR - Structure indicating
-- support for variable rate fragment shading
--
-- = Members
--
-- This structure describes the following features:
--
-- = Description
--
-- If the 'PhysicalDeviceFragmentShadingRateFeaturesKHR' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2',
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDeviceFragmentShadingRateFeaturesKHR' /can/ also be
-- used in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo' to
-- selectively enable these features.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDeviceFragmentShadingRateFeaturesKHR = PhysicalDeviceFragmentShadingRateFeaturesKHR
  { -- | #features-pipelineFragmentShadingRate# @pipelineFragmentShadingRate@
    -- indicates that the implementation supports the pipeline fragment
    -- shading rate (set per pipeline or via 'cmdSetFragmentShadingRateKHR').
    pipelineFragmentShadingRate :: Bool
  , -- | #features-primitiveFragmentShadingRate# @primitiveFragmentShadingRate@
    -- indicates that the implementation supports the primitive fragment
    -- shading rate (set per primitive from the last vertex stage).
    primitiveFragmentShadingRate :: Bool
  , -- | #features-attachmentFragmentShadingRate# @attachmentFragmentShadingRate@
    -- indicates that the implementation supports the attachment fragment
    -- shading rate (read from a shading rate attachment image).
    attachmentFragmentShadingRate :: Bool
  }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceFragmentShadingRateFeaturesKHR)
#endif
deriving instance Show PhysicalDeviceFragmentShadingRateFeaturesKHR
-- C layout: sType (offset 0), pNext (8), then three VkBool32 fields at
-- offsets 16, 20 and 24; 32 bytes total, 8-byte aligned.
instance ToCStruct PhysicalDeviceFragmentShadingRateFeaturesKHR where
  withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p PhysicalDeviceFragmentShadingRateFeaturesKHR{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    -- Haskell 'Bool's are widened to 32-bit VkBool32 values.
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (pipelineFragmentShadingRate))
    poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (primitiveFragmentShadingRate))
    poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (attachmentFragmentShadingRate))
    f
  cStructSize = 32
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (zero))
    f
instance FromCStruct PhysicalDeviceFragmentShadingRateFeaturesKHR where
  peekCStruct p = do
    -- Read the three VkBool32 feature flags and narrow them to 'Bool'.
    pipelineFragmentShadingRate <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
    primitiveFragmentShadingRate <- peek @Bool32 ((p `plusPtr` 20 :: Ptr Bool32))
    attachmentFragmentShadingRate <- peek @Bool32 ((p `plusPtr` 24 :: Ptr Bool32))
    pure $ PhysicalDeviceFragmentShadingRateFeaturesKHR
             (bool32ToBool pipelineFragmentShadingRate)
             (bool32ToBool primitiveFragmentShadingRate)
             (bool32ToBool attachmentFragmentShadingRate)
-- | Fixed-size marshalling delegated to the 'ToCStruct'/'FromCStruct' machinery.
instance Storable PhysicalDeviceFragmentShadingRateFeaturesKHR where
  sizeOf    ~_ = 32
  alignment ~_ = 8
  peek ptr = peekCStruct ptr
  poke ptr x = pokeCStruct ptr x (pure ())
-- | The all-zero value: every feature flag disabled.
instance Zero PhysicalDeviceFragmentShadingRateFeaturesKHR where
  zero = PhysicalDeviceFragmentShadingRateFeaturesKHR
           { pipelineFragmentShadingRate   = zero
           , primitiveFragmentShadingRate  = zero
           , attachmentFragmentShadingRate = zero
           }
-- | VkPhysicalDeviceFragmentShadingRatePropertiesKHR - Structure describing
-- variable fragment shading rate limits that can be supported by an
-- implementation
--
-- = Description
--
-- Note
--
-- Multiplication of the combiner rates using the fragment width\/height in
-- linear space is equivalent to an addition of those values in log2 space.
-- Some implementations inadvertently implemented an addition in linear
-- space due to unclear requirements originating outside of this
-- specification. This resulted in
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#limits-fragmentShadingRateStrictMultiplyCombiner fragmentShadingRateStrictMultiplyCombiner>
-- being added. Fortunately, this only affects situations where a rate of 1
-- in either dimension is combined with another rate of 1. All other
-- combinations result in the exact same result as if multiplication was
-- performed in linear space due to the clamping logic, and the fact that
-- both the sum and product of 2 and 2 are equal. In many cases, this limit
-- will not affect the correct operation of applications.
--
-- If the 'PhysicalDeviceFragmentShadingRatePropertiesKHR' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceProperties2',
-- it is filled in with each corresponding implementation-dependent
-- property.
--
-- These properties are related to
-- <-extensions/html/vkspec.html#primsrast-fragment-shading-rate fragment shading rates>.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.FundamentalTypes.Extent2D',
-- 'Vulkan.Core10.Enums.SampleCountFlagBits.SampleCountFlagBits',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDeviceFragmentShadingRatePropertiesKHR = PhysicalDeviceFragmentShadingRatePropertiesKHR
{ -- | #limits-minFragmentShadingRateAttachmentTexelSize#
-- @minFragmentShadingRateAttachmentTexelSize@ indicates minimum supported
-- width and height of the portion of the framebuffer corresponding to each
-- texel in a fragment shading rate attachment. Each value /must/ be less
-- than or equal to the values in
-- @maxFragmentShadingRateAttachmentTexelSize@. Each value /must/ be a
power - of - two . It /must/ be ( 0,0 ) if the
-- <-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
-- feature is not supported.
minFragmentShadingRateAttachmentTexelSize :: Extent2D
, -- | #limits-maxFragmentShadingRateAttachmentTexelSize#
-- @maxFragmentShadingRateAttachmentTexelSize@ indicates maximum supported
-- width and height of the portion of the framebuffer corresponding to each
-- texel in a fragment shading rate attachment. Each value /must/ be
-- greater than or equal to the values in
-- @minFragmentShadingRateAttachmentTexelSize@. Each value /must/ be a
power - of - two . It /must/ be ( 0,0 ) if the
-- <-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
-- feature is not supported.
maxFragmentShadingRateAttachmentTexelSize :: Extent2D
, -- | #limits-maxFragmentShadingRateAttachmentTexelSizeAspectRatio#
-- @maxFragmentShadingRateAttachmentTexelSizeAspectRatio@ indicates the
-- maximum ratio between the width and height of the portion of the
-- framebuffer corresponding to each texel in a fragment shading rate
-- attachment. @maxFragmentShadingRateAttachmentTexelSizeAspectRatio@
/must/ be a power - of - two value , and /must/ be less than or equal to
max(@maxFragmentShadingRateAttachmentTexelSize.width@ \/
-- @minFragmentShadingRateAttachmentTexelSize.height@,
-- @maxFragmentShadingRateAttachmentTexelSize.height@ \/
) . It /must/ be 0 if
-- the
-- <-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
-- feature is not supported.
maxFragmentShadingRateAttachmentTexelSizeAspectRatio :: Word32
, -- | #limits-primitiveFragmentShadingRateWithMultipleViewports#
-- @primitiveFragmentShadingRateWithMultipleViewports@ specifies whether
-- the
-- <-extensions/html/vkspec.html#primsrast-fragment-shading-rate-primitive primitive fragment shading rate>
-- /can/ be used when multiple viewports are used. If this value is
' Vulkan . Core10.FundamentalTypes . FALSE ' , only a single viewport /must/ be
-- used, and applications /must/ not write to the @ViewportMaskNV@ or
-- @ViewportIndex@ built-in when setting @PrimitiveShadingRateKHR@. It
/must/ be ' Vulkan . Core10.FundamentalTypes . FALSE ' if the
< >
-- feature, the @VK_EXT_shader_viewport_index_layer@ extension, or the
< -extensions/html/vkspec.html#features-geometryShader >
-- feature is not supported, or if the
-- <-extensions/html/vkspec.html#features-primitiveFragmentShadingRate primitiveFragmentShadingRate>
-- feature is not supported.
primitiveFragmentShadingRateWithMultipleViewports :: Bool
, -- | #limits-layeredShadingRateAttachments# @layeredShadingRateAttachments@
specifies whether a shading rate attachment image view be created
-- with multiple layers. If this value is
' Vulkan . Core10.FundamentalTypes . FALSE ' , when creating an image view with
-- a @usage@ that includes
' Vulkan . Core10.Enums . ImageUsageFlagBits . IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR ' ,
@layerCount@ /must/ be @1@. It /must/ be
' Vulkan . Core10.FundamentalTypes . FALSE ' if the
-- <-extensions/html/vkspec.html#features-multiview multiview>
-- feature, the
< >
-- feature, the @VK_EXT_shader_viewport_index_layer@ extension, or the
< -extensions/html/vkspec.html#features-geometryShader >
-- feature is not supported, or if the
-- <-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
-- feature is not supported.
layeredShadingRateAttachments :: Bool
, -- | #limits-fragmentShadingRateNonTrivialCombinerOps#
@fragmentShadingRateNonTrivialCombinerOps@ specifies whether
-- 'FragmentShadingRateCombinerOpKHR' enums other than
-- 'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR' or
' ' be used . It /must/
be ' Vulkan . Core10.FundamentalTypes . FALSE ' unless either the
-- <-extensions/html/vkspec.html#features-primitiveFragmentShadingRate primitiveFragmentShadingRate>
-- or
-- <-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
-- feature is supported.
fragmentShadingRateNonTrivialCombinerOps :: Bool
, -- | #limits-maxFragmentSize# @maxFragmentSize@ indicates the maximum
supported width and height of a fragment . Its @width@ and @height@
members /must/ both be power - of - two values . This limit is purely
-- informational, and is not validated.
maxFragmentSize :: Extent2D
, -- | #limits-maxFragmentSizeAspectRatio# @maxFragmentSizeAspectRatio@
-- indicates the maximum ratio between the width and height of a fragment.
@maxFragmentSizeAspectRatio@ /must/ be a power - of - two value , and /must/
be less than or equal to the maximum of the @width@ and @height@ members
-- of @maxFragmentSize@. This limit is purely informational, and is not
-- validated.
maxFragmentSizeAspectRatio :: Word32
, -- | #limits-maxFragmentShadingRateCoverageSamples#
-- @maxFragmentShadingRateCoverageSamples@ specifies the maximum number of
-- coverage samples supported in a single fragment.
-- @maxFragmentShadingRateCoverageSamples@ /must/ be less than or equal to
the product of the @width@ and @height@ members of @maxFragmentSize@ ,
-- and the sample count reported by
-- @maxFragmentShadingRateRasterizationSamples@.
-- @maxFragmentShadingRateCoverageSamples@ /must/ be less than or equal to
@maxSampleMaskWords@ × 32 if @fragmentShadingRateWithShaderSampleMask@
-- is supported. This limit is purely informational, and is not validated.
maxFragmentShadingRateCoverageSamples :: Word32
, -- | #limits-maxFragmentShadingRateRasterizationSamples#
-- @maxFragmentShadingRateRasterizationSamples@ is a
' Vulkan . Core10.Enums . SampleCountFlagBits . SampleCountFlagBits ' value
-- specifying the maximum sample rate supported when a fragment covers
-- multiple pixels. This limit is purely informational, and is not
-- validated.
maxFragmentShadingRateRasterizationSamples :: SampleCountFlagBits
, -- | #limits-fragmentShadingRateWithShaderDepthStencilWrites#
-- @fragmentShadingRateWithShaderDepthStencilWrites@ specifies whether the
implementation supports writing @FragDepth@ or @FragStencilRefEXT@ from
-- a fragment shader for multi-pixel fragments. If this value is
' Vulkan . Core10.FundamentalTypes . FALSE ' , writing to those built - ins will
clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithShaderDepthStencilWrites :: Bool
, -- | #limits-fragmentShadingRateWithSampleMask#
-- @fragmentShadingRateWithSampleMask@ specifies whether the
-- implementation supports setting valid bits of
' Vulkan . Core10.Pipeline . PipelineMultisampleStateCreateInfo'::@pSampleMask@
to @0@ for multi - pixel fragments . If this value is
' Vulkan . Core10.FundamentalTypes . FALSE ' , zeroing valid bits in the sample
mask will clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithSampleMask :: Bool
, -- | #limits-fragmentShadingRateWithShaderSampleMask#
-- @fragmentShadingRateWithShaderSampleMask@ specifies whether the
-- implementation supports reading or writing
' Vulkan . Core10.FundamentalTypes . ' for multi - pixel fragments .
If this value is ' Vulkan . Core10.FundamentalTypes . FALSE ' , using that
built - in will clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithShaderSampleMask :: Bool
, -- | #limits-fragmentShadingRateWithConservativeRasterization#
@fragmentShadingRateWithConservativeRasterization@ specifies whether
-- <-extensions/html/vkspec.html#primsrast-conservativeraster conservative rasterization>
-- is supported for multi-pixel fragments. It /must/ be
' Vulkan . Core10.FundamentalTypes . FALSE ' if
@VK_EXT_conservative_rasterization@ is not supported . If this value is
' Vulkan . Core10.FundamentalTypes . FALSE ' , using
-- <-extensions/html/vkspec.html#primsrast-conservativeraster conservative rasterization>
will clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithConservativeRasterization :: Bool
, -- | #limits-fragmentShadingRateWithFragmentShaderInterlock#
-- @fragmentShadingRateWithFragmentShaderInterlock@ specifies whether
-- <-extensions/html/vkspec.html#fragops-shader-interlock fragment shader interlock>
-- is supported for multi-pixel fragments. It /must/ be
' Vulkan . Core10.FundamentalTypes . FALSE ' if
-- @VK_EXT_fragment_shader_interlock@ is not supported. If this value is
' Vulkan . Core10.FundamentalTypes . FALSE ' , using
-- <-extensions/html/vkspec.html#fragops-shader-interlock fragment shader interlock>
will clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithFragmentShaderInterlock :: Bool
, -- | #limits-fragmentShadingRateWithCustomSampleLocations#
@fragmentShadingRateWithCustomSampleLocations@ specifies whether
-- <-extensions/html/vkspec.html#primsrast-samplelocations custom sample locations>
-- are supported for multi-pixel fragments. It /must/ be
' Vulkan . Core10.FundamentalTypes . FALSE ' if @VK_EXT_sample_locations@ is
not supported . If this value is ' Vulkan . Core10.FundamentalTypes . FALSE ' ,
-- using
-- <-extensions/html/vkspec.html#primsrast-samplelocations custom sample locations>
will clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithCustomSampleLocations :: Bool
, -- | #limits-fragmentShadingRateStrictMultiplyCombiner#
-- @fragmentShadingRateStrictMultiplyCombiner@ specifies whether
-- 'FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR' accurately performs a
-- multiplication or not. Implementations where this value is
' Vulkan . Core10.FundamentalTypes . FALSE ' will instead combine rates with
an addition . If @fragmentShadingRateNonTrivialCombinerOps@ is
' Vulkan . Core10.FundamentalTypes . FALSE ' , implementations /must/ report
this as ' Vulkan . Core10.FundamentalTypes . FALSE ' . If
@fragmentShadingRateNonTrivialCombinerOps@ is
' Vulkan . Core10.FundamentalTypes . TRUE ' , implementations /should/ report
this as ' Vulkan . Core10.FundamentalTypes . TRUE ' .
fragmentShadingRateStrictMultiplyCombiner :: Bool
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceFragmentShadingRatePropertiesKHR)
#endif
-- Standalone-derived 'Show' for debugging and inspection of queried limits.
deriving instance Show PhysicalDeviceFragmentShadingRatePropertiesKHR
-- Marshalling of 'PhysicalDeviceFragmentShadingRatePropertiesKHR' into its C
-- representation (VkPhysicalDeviceFragmentShadingRatePropertiesKHR: 96 bytes,
-- 8-byte alignment).  The byte offsets below follow the C struct layout:
-- sType at 0, pNext at 8, then the fields in declaration order; 'Bool'
-- fields are widened to 32-bit VkBool32 values via 'boolToBool32'.
instance ToCStruct PhysicalDeviceFragmentShadingRatePropertiesKHR where
  -- Allocate a transient 96-byte buffer, fill it, and pass the pointer to
  -- the continuation; the buffer is only valid inside the continuation.
  withCStruct x f = allocaBytes 96 $ \p -> pokeCStruct p x (f p)
  -- Write every field at its fixed offset, then run the trailing action @f@.
  pokeCStruct p PhysicalDeviceFragmentShadingRatePropertiesKHR{..} f = do
    -- sType / pNext header common to extensible Vulkan structs
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Extent2D)) (minFragmentShadingRateAttachmentTexelSize)
    poke ((p `plusPtr` 24 :: Ptr Extent2D)) (maxFragmentShadingRateAttachmentTexelSize)
    poke ((p `plusPtr` 32 :: Ptr Word32)) (maxFragmentShadingRateAttachmentTexelSizeAspectRatio)
    poke ((p `plusPtr` 36 :: Ptr Bool32)) (boolToBool32 (primitiveFragmentShadingRateWithMultipleViewports))
    poke ((p `plusPtr` 40 :: Ptr Bool32)) (boolToBool32 (layeredShadingRateAttachments))
    poke ((p `plusPtr` 44 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateNonTrivialCombinerOps))
    poke ((p `plusPtr` 48 :: Ptr Extent2D)) (maxFragmentSize)
    poke ((p `plusPtr` 56 :: Ptr Word32)) (maxFragmentSizeAspectRatio)
    poke ((p `plusPtr` 60 :: Ptr Word32)) (maxFragmentShadingRateCoverageSamples)
    poke ((p `plusPtr` 64 :: Ptr SampleCountFlagBits)) (maxFragmentShadingRateRasterizationSamples)
    poke ((p `plusPtr` 68 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithShaderDepthStencilWrites))
    poke ((p `plusPtr` 72 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithSampleMask))
    poke ((p `plusPtr` 76 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithShaderSampleMask))
    poke ((p `plusPtr` 80 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithConservativeRasterization))
    poke ((p `plusPtr` 84 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithFragmentShaderInterlock))
    poke ((p `plusPtr` 88 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithCustomSampleLocations))
    poke ((p `plusPtr` 92 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateStrictMultiplyCombiner))
    f
  cStructSize = 96
  cStructAlignment = 8
  -- Zero-initialise the struct: only sType and pNext carry real values,
  -- every payload field is written as its 'Zero' default.
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Extent2D)) (zero)
    poke ((p `plusPtr` 24 :: Ptr Extent2D)) (zero)
    poke ((p `plusPtr` 32 :: Ptr Word32)) (zero)
    poke ((p `plusPtr` 36 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 40 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 44 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 48 :: Ptr Extent2D)) (zero)
    poke ((p `plusPtr` 56 :: Ptr Word32)) (zero)
    poke ((p `plusPtr` 60 :: Ptr Word32)) (zero)
    poke ((p `plusPtr` 64 :: Ptr SampleCountFlagBits)) (zero)
    poke ((p `plusPtr` 68 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 72 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 76 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 80 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 84 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 88 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 92 :: Ptr Bool32)) (boolToBool32 (zero))
    f
-- Reading 'PhysicalDeviceFragmentShadingRatePropertiesKHR' back from its C
-- representation.  Offsets match the layout written by the ToCStruct
-- instance above; sType/pNext (offsets 0 and 8) are skipped on read.
instance FromCStruct PhysicalDeviceFragmentShadingRatePropertiesKHR where
  peekCStruct p = do
    minFragmentShadingRateAttachmentTexelSize <- peekCStruct @Extent2D ((p `plusPtr` 16 :: Ptr Extent2D))
    maxFragmentShadingRateAttachmentTexelSize <- peekCStruct @Extent2D ((p `plusPtr` 24 :: Ptr Extent2D))
    maxFragmentShadingRateAttachmentTexelSizeAspectRatio <- peek @Word32 ((p `plusPtr` 32 :: Ptr Word32))
    primitiveFragmentShadingRateWithMultipleViewports <- peek @Bool32 ((p `plusPtr` 36 :: Ptr Bool32))
    layeredShadingRateAttachments <- peek @Bool32 ((p `plusPtr` 40 :: Ptr Bool32))
    fragmentShadingRateNonTrivialCombinerOps <- peek @Bool32 ((p `plusPtr` 44 :: Ptr Bool32))
    maxFragmentSize <- peekCStruct @Extent2D ((p `plusPtr` 48 :: Ptr Extent2D))
    maxFragmentSizeAspectRatio <- peek @Word32 ((p `plusPtr` 56 :: Ptr Word32))
    maxFragmentShadingRateCoverageSamples <- peek @Word32 ((p `plusPtr` 60 :: Ptr Word32))
    maxFragmentShadingRateRasterizationSamples <- peek @SampleCountFlagBits ((p `plusPtr` 64 :: Ptr SampleCountFlagBits))
    fragmentShadingRateWithShaderDepthStencilWrites <- peek @Bool32 ((p `plusPtr` 68 :: Ptr Bool32))
    fragmentShadingRateWithSampleMask <- peek @Bool32 ((p `plusPtr` 72 :: Ptr Bool32))
    fragmentShadingRateWithShaderSampleMask <- peek @Bool32 ((p `plusPtr` 76 :: Ptr Bool32))
    fragmentShadingRateWithConservativeRasterization <- peek @Bool32 ((p `plusPtr` 80 :: Ptr Bool32))
    fragmentShadingRateWithFragmentShaderInterlock <- peek @Bool32 ((p `plusPtr` 84 :: Ptr Bool32))
    fragmentShadingRateWithCustomSampleLocations <- peek @Bool32 ((p `plusPtr` 88 :: Ptr Bool32))
    fragmentShadingRateStrictMultiplyCombiner <- peek @Bool32 ((p `plusPtr` 92 :: Ptr Bool32))
    -- Reassemble the record.  Argument order must match the field
    -- declaration order; VkBool32 fields are narrowed back to 'Bool'.
    pure $ PhysicalDeviceFragmentShadingRatePropertiesKHR
             minFragmentShadingRateAttachmentTexelSize
             maxFragmentShadingRateAttachmentTexelSize
             maxFragmentShadingRateAttachmentTexelSizeAspectRatio
             (bool32ToBool primitiveFragmentShadingRateWithMultipleViewports)
             (bool32ToBool layeredShadingRateAttachments)
             (bool32ToBool fragmentShadingRateNonTrivialCombinerOps)
             maxFragmentSize
             maxFragmentSizeAspectRatio
             maxFragmentShadingRateCoverageSamples
             maxFragmentShadingRateRasterizationSamples
             (bool32ToBool fragmentShadingRateWithShaderDepthStencilWrites)
             (bool32ToBool fragmentShadingRateWithSampleMask)
             (bool32ToBool fragmentShadingRateWithShaderSampleMask)
             (bool32ToBool fragmentShadingRateWithConservativeRasterization)
             (bool32ToBool fragmentShadingRateWithFragmentShaderInterlock)
             (bool32ToBool fragmentShadingRateWithCustomSampleLocations)
             (bool32ToBool fragmentShadingRateStrictMultiplyCombiner)
-- 'Storable' defined in terms of the CStruct marshalling: a fixed 96-byte
-- footprint with 8-byte alignment; 'poke' writes the struct and then runs
-- no further continuation.
instance Storable PhysicalDeviceFragmentShadingRatePropertiesKHR where
  sizeOf ~_ = 96
  alignment ~_ = 8
  peek src = peekCStruct src
  poke dst val = pokeCStruct dst val (pure ())
-- Default value: all seventeen fields take their 'Zero' defaults
-- (zero extents/counts, 'False' capability flags).
instance Zero PhysicalDeviceFragmentShadingRatePropertiesKHR where
  zero = PhysicalDeviceFragmentShadingRatePropertiesKHR
           zero zero zero zero zero zero
           zero zero zero zero zero zero
           zero zero zero zero zero
| VkPhysicalDeviceFragmentShadingRateKHR - Structure returning information
-- about sample count specific additional multisampling capabilities
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
< -extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate > ,
' Vulkan . Core10.FundamentalTypes . Extent2D ' ,
' Vulkan . Core10.Enums . SampleCountFlagBits . SampleCountFlags ' ,
' Vulkan . Core10.Enums . StructureType . StructureType ' ,
-- 'getPhysicalDeviceFragmentShadingRatesKHR'
data PhysicalDeviceFragmentShadingRateKHR = PhysicalDeviceFragmentShadingRateKHR
  { -- | @sampleCounts@ is a bitmask of sample counts for which the shading rate
    -- described by @fragmentSize@ is supported.
    sampleCounts :: SampleCountFlags
  , -- | @fragmentSize@ is a 'Vulkan.Core10.FundamentalTypes.Extent2D' describing
    -- the width and height of a supported shading rate.
    fragmentSize :: Extent2D
  }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceFragmentShadingRateKHR)
#endif
-- Standalone-derived 'Show' for debugging and inspection.
deriving instance Show PhysicalDeviceFragmentShadingRateKHR
-- Marshalling to VkPhysicalDeviceFragmentShadingRateKHR (32 bytes, 8-byte
-- aligned): sType at offset 0, pNext at 8, sampleCounts at 16,
-- fragmentSize at 20.
instance ToCStruct PhysicalDeviceFragmentShadingRateKHR where
  withCStruct s cont = allocaBytes 32 $ \buf -> pokeCStruct buf s (cont buf)
  pokeCStruct buf (PhysicalDeviceFragmentShadingRateKHR counts fragSize) cont = do
    poke ((buf `plusPtr` 0 :: Ptr StructureType)) STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR
    poke ((buf `plusPtr` 8 :: Ptr (Ptr ()))) nullPtr
    poke ((buf `plusPtr` 16 :: Ptr SampleCountFlags)) counts
    poke ((buf `plusPtr` 20 :: Ptr Extent2D)) fragSize
    cont
  cStructSize = 32
  cStructAlignment = 8
  -- Zero-initialised variant: header fields real, payload fields defaulted.
  pokeZeroCStruct buf cont = do
    poke ((buf `plusPtr` 0 :: Ptr StructureType)) STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR
    poke ((buf `plusPtr` 8 :: Ptr (Ptr ()))) nullPtr
    poke ((buf `plusPtr` 16 :: Ptr SampleCountFlags)) zero
    poke ((buf `plusPtr` 20 :: Ptr Extent2D)) zero
    cont
-- Read the two payload fields back from the C struct, in declaration
-- order, skipping the sType/pNext header.
instance FromCStruct PhysicalDeviceFragmentShadingRateKHR where
  peekCStruct buf =
    PhysicalDeviceFragmentShadingRateKHR
      <$> peek @SampleCountFlags ((buf `plusPtr` 16 :: Ptr SampleCountFlags))
      <*> peekCStruct @Extent2D ((buf `plusPtr` 20 :: Ptr Extent2D))
-- 'Storable' routed through the CStruct marshalling: fixed 32-byte size,
-- 8-byte alignment.
instance Storable PhysicalDeviceFragmentShadingRateKHR where
  sizeOf ~_ = 32
  alignment ~_ = 8
  peek src = peekCStruct src
  poke dst val = pokeCStruct dst val (pure ())
-- Default value: empty sample-count mask and a zero extent.
instance Zero PhysicalDeviceFragmentShadingRateKHR where
  zero = PhysicalDeviceFragmentShadingRateKHR zero zero
-- | VkFragmentShadingRateCombinerOpKHR - Control how fragment shading rates
-- are combined
--
-- = Description
--
-- where combine(Axy,Bxy) is the combine operation, and Axy and Bxy are the
-- inputs to the operation.
--
-- If
-- <-extensions/html/vkspec.html#limits-fragmentShadingRateStrictMultiplyCombiner fragmentShadingRateStrictMultiplyCombiner>
is ' Vulkan . Core10.FundamentalTypes . FALSE ' , using
' FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR ' with values of 1 for both A
and B in the same dimension results in the value 2 being produced for
-- that dimension. See the definition of
-- <-extensions/html/vkspec.html#limits-fragmentShadingRateStrictMultiplyCombiner fragmentShadingRateStrictMultiplyCombiner>
-- for more information.
--
-- These operations are performed in a component-wise fashion.
--
-- = See Also
--
< -extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate > ,
' Vulkan . Extensions . VK_NV_fragment_shading_rate_enums . PipelineFragmentShadingRateEnumStateCreateInfoNV ' ,
' PipelineFragmentShadingRateStateCreateInfoKHR ' ,
' Vulkan . Extensions . VK_NV_fragment_shading_rate_enums.cmdSetFragmentShadingRateEnumNV ' ,
-- 'cmdSetFragmentShadingRateKHR'
-- Representation is the raw C enum value (Int32); instances are derived
-- newtype so they act on that underlying integer.
newtype FragmentShadingRateCombinerOpKHR = FragmentShadingRateCombinerOpKHR Int32
  deriving newtype (Eq, Ord, Storable, Zero)
-- | 'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR' specifies a combiner
-- operation of combine(Axy,Bxy) = Axy.
pattern FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = FragmentShadingRateCombinerOpKHR 0
-- | 'FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR' specifies a combiner
-- operation of combine(Axy,Bxy) = Bxy.
pattern FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = FragmentShadingRateCombinerOpKHR 1
-- | 'FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR' specifies a combiner
-- operation of combine(Axy,Bxy) = min(Axy,Bxy).
pattern FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = FragmentShadingRateCombinerOpKHR 2
-- | 'FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR' specifies a combiner
-- operation of combine(Axy,Bxy) = max(Axy,Bxy).
pattern FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = FragmentShadingRateCombinerOpKHR 3
-- | 'FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR' specifies a combiner
-- operation of combine(Axy,Bxy) = Axy*Bxy.
pattern FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = FragmentShadingRateCombinerOpKHR 4
-- Tell GHC's pattern-match coverage checker that the five pattern synonyms
-- together cover every 'FragmentShadingRateCombinerOpKHR' value, so a
-- case over them is treated as exhaustive.  (The original pragma text was
-- corrupted/duplicated; this is the single canonical form.)
{-# COMPLETE
  FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR
  , FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR
  , FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR
  , FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR
  , FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR ::
  FragmentShadingRateCombinerOpKHR
  #-}
-- Constructor name used by the Show/Read fallback for values outside the
-- known-pattern table.
conNameFragmentShadingRateCombinerOpKHR :: String
conNameFragmentShadingRateCombinerOpKHR = "FragmentShadingRateCombinerOpKHR"
-- Common prefix shared by all pattern names; stripped in the show table
-- below and re-attached when rendering/parsing.
enumPrefixFragmentShadingRateCombinerOpKHR :: String
enumPrefixFragmentShadingRateCombinerOpKHR = "FRAGMENT_SHADING_RATE_COMBINER_OP_"
-- Lookup table pairing each combiner-op value with its name suffix (the
-- part after the shared "FRAGMENT_SHADING_RATE_COMBINER_OP_" prefix),
-- used by both the 'Show' and 'Read' instances.
showTableFragmentShadingRateCombinerOpKHR :: [(FragmentShadingRateCombinerOpKHR, String)]
showTableFragmentShadingRateCombinerOpKHR =
  [ (FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR, "KEEP_KHR")
  , (FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR, "REPLACE_KHR")
  , (FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR, "MIN_KHR")
  , (FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR, "MAX_KHR")
  , (FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR, "MUL_KHR")
  ]
-- Render known values via the table as e.g.
-- @FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR@; unknown numeric values fall
-- back to @FragmentShadingRateCombinerOpKHR n@ using the constructor name.
instance Show FragmentShadingRateCombinerOpKHR where
  showsPrec =
    enumShowsPrec
      enumPrefixFragmentShadingRateCombinerOpKHR
      showTableFragmentShadingRateCombinerOpKHR
      conNameFragmentShadingRateCombinerOpKHR
      (\(FragmentShadingRateCombinerOpKHR x) -> x)
      (showsPrec 11)
-- Inverse of 'Show': accepts both the prefixed pattern names from the
-- table and the raw-constructor fallback form.
instance Read FragmentShadingRateCombinerOpKHR where
  readPrec =
    enumReadPrec
      enumPrefixFragmentShadingRateCombinerOpKHR
      showTableFragmentShadingRateCombinerOpKHR
      conNameFragmentShadingRateCombinerOpKHR
      FragmentShadingRateCombinerOpKHR
type KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION = 2
-- | Revision (spec version) of the @VK_KHR_fragment_shading_rate@ extension.
pattern KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION = 2
type KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME = "VK_KHR_fragment_shading_rate"
-- | Name string of the @VK_KHR_fragment_shading_rate@ extension, as passed
-- to device-extension enumeration/enablement.
pattern KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME = "VK_KHR_fragment_shading_rate"
| null | https://raw.githubusercontent.com/expipiplus1/vulkan/ebc0dde0bcd9cf251f18538de6524eb4f2ab3e9d/src/Vulkan/Extensions/VK_KHR_fragment_shading_rate.hs | haskell | # language CPP #
| = Name
== VK_KHR_fragment_shading_rate
[__Name String__]
[__Extension Type__]
Device extension
[__Registered Extension Number__]
[__Revision__]
[__Extension and Version Dependencies__]
- Requires @VK_KHR_create_renderpass2@ to be enabled for any
device-level functionality
- Requires @VK_KHR_get_physical_device_properties2@ to be enabled
for any device-level functionality
[__Contact__]
[__Extension Proposal__]
<-Docs/tree/main/proposals/VK_KHR_fragment_shading_rate.adoc VK_KHR_fragment_shading_rate>
== Other Extension Metadata
[__Last Modified Date__]
[__Interactions and External Dependencies__]
- This extension requires
<-Registry/blob/master/extensions/KHR/SPV_KHR_fragment_shading_rate.html SPV_KHR_fragment_shading_rate>.
- This extension provides API support for
< GL_EXT_fragment_shading_rate>
[__Contributors__]
- Contributors to the VK_NV_shading_rate_image specification
== Description
This extension adds the ability to change the rate at which fragments
are shaded. Rather than the usual single fragment invocation for each
pixel covered by a primitive, multiple pixels can be shaded by a single
fragment shader invocation.
Up to three methods can be used to control the fragment shading rate:
- <-extensions/html/vkspec.html#primsrast-fragment-shading-rate-pipeline>,
which allows the specification of a rate per-draw.
- <-extensions/html/vkspec.html#primsrast-fragment-shading-rate-primitive>,
which allows the specification of a rate per primitive, specified
during shading.
- <-extensions/html/vkspec.html#primsrast-fragment-shading-rate-attachment>,
which allows the specification of a rate per-region of the
framebuffer, specified in a specialized image attachment.
Additionally, these rates can all be specified and combined in order to
adjust the overall detail in the image at each point.
This functionality can be used to focus shading efforts where higher
levels of detail are needed in some parts of a scene compared to others.
contexts.
extension which enables setting the
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-primitive primitive fragment shading rate>,
and allows querying the final shading rate from a fragment shader.
== New Commands
- 'cmdSetFragmentShadingRateKHR'
- 'getPhysicalDeviceFragmentShadingRatesKHR'
== New Structures
- 'PhysicalDeviceFragmentShadingRateKHR'
- 'PipelineFragmentShadingRateStateCreateInfoKHR'
- Extending
- Extending
- Extending
- 'FragmentShadingRateAttachmentInfoKHR'
== New Enums
- 'FragmentShadingRateCombinerOpKHR'
== New Enum Constants
- 'KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME'
- 'KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION'
- Extending
- Extending
- Extending
If
<-extensions/html/vkspec.html#VK_KHR_format_feature_flags2 VK_KHR_format_feature_flags2>
is supported:
- Extending
== Version History
- Initial revision
- Add interaction with @VK_KHR_format_feature_flags2@ to @vk.xml@
== See Also
'FragmentShadingRateAttachmentInfoKHR',
'FragmentShadingRateCombinerOpKHR',
'PhysicalDeviceFragmentShadingRateKHR',
'cmdSetFragmentShadingRateKHR',
'getPhysicalDeviceFragmentShadingRatesKHR'
== Document Notes
For more information, see the
<-extensions/html/vkspec.html#VK_KHR_fragment_shading_rate Vulkan Specification>
This page is a generated document. Fixes and changes should be made to
the generator scripts, not directly.
| vkCmdSetFragmentShadingRateKHR - Set pipeline fragment shading rate and
combiner operation dynamically for a command buffer
= Description
This command sets the pipeline fragment shading rate and combiner
operation for subsequent drawing commands when the graphics pipeline is
created with
set in
Otherwise, this state is specified by the
'PipelineFragmentShadingRateStateCreateInfoKHR' values used to create
the currently active pipeline.
== Valid Usage
- #VUID-vkCmdSetFragmentShadingRateKHR-pipelineFragmentShadingRate-04507#
If
<-extensions/html/vkspec.html#features-pipelineFragmentShadingRate pipelineFragmentShadingRate>
- #VUID-vkCmdSetFragmentShadingRateKHR-pipelineFragmentShadingRate-04508#
If
<-extensions/html/vkspec.html#features-pipelineFragmentShadingRate pipelineFragmentShadingRate>
- #VUID-vkCmdSetFragmentShadingRateKHR-pipelineFragmentShadingRate-04509#
<-extensions/html/vkspec.html#features-pipelineFragmentShadingRate pipelineFragmentShadingRate>,
<-extensions/html/vkspec.html#features-primitiveFragmentShadingRate primitiveFragmentShadingRate>,
or
<-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
/must/ be enabled
- #VUID-vkCmdSetFragmentShadingRateKHR-primitiveFragmentShadingRate-04510#
If the
<-extensions/html/vkspec.html#features-primitiveFragmentShadingRate primitiveFragmentShadingRate>
feature is not enabled, @combinerOps@[0] /must/ be
'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR'
- #VUID-vkCmdSetFragmentShadingRateKHR-attachmentFragmentShadingRate-04511#
If the
<-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
feature is not enabled, @combinerOps@[1] /must/ be
'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR'
- #VUID-vkCmdSetFragmentShadingRateKHR-fragmentSizeNonTrivialCombinerOps-04512#
If the
<-extensions/html/vkspec.html#limits-fragmentShadingRateNonTrivialCombinerOps fragmentSizeNonTrivialCombinerOps>
'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR' or
- #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04513#
@pFragmentSize->width@ /must/ be greater than or equal to @1@
- #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04514#
@pFragmentSize->height@ /must/ be greater than or equal to @1@
- #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04515#
- #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04516#
- #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04517#
@pFragmentSize->width@ /must/ be less than or equal to @4@
- #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-04518#
@pFragmentSize->height@ /must/ be less than or equal to @4@
== Valid Usage (Implicit)
- #VUID-vkCmdSetFragmentShadingRateKHR-commandBuffer-parameter#
@commandBuffer@ /must/ be a valid
- #VUID-vkCmdSetFragmentShadingRateKHR-pFragmentSize-parameter#
- #VUID-vkCmdSetFragmentShadingRateKHR-combinerOps-parameter# Any
'FragmentShadingRateCombinerOpKHR' value
- #VUID-vkCmdSetFragmentShadingRateKHR-commandBuffer-recording#
@commandBuffer@ /must/ be in the
<-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
- #VUID-vkCmdSetFragmentShadingRateKHR-commandBuffer-cmdpool# The
allocated from /must/ support graphics operations
- #VUID-vkCmdSetFragmentShadingRateKHR-videocoding# This command
/must/ only be called outside of a video coding scope
== Host Synchronization
- Host access to @commandBuffer@ /must/ be externally synchronized
@commandBuffer@ was allocated from /must/ be externally synchronized
== Command Properties
\'
+----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
+============================================================================================================================+========================================================================================================================+=============================================================================================================================+=======================================================================================================================+========================================================================================================================================+
| Primary | Both | Outside | Graphics | State |
| Secondary | | | | |
+----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------+
= See Also
'FragmentShadingRateCombinerOpKHR'
| @commandBuffer@ is the command buffer into which the command will be
recorded.
| @pFragmentSize@ specifies the pipeline fragment shading rate for
subsequent drawing commands.
| @combinerOps@ specifies a 'FragmentShadingRateCombinerOpKHR' determining
how the
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-pipeline pipeline>,
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-primitive primitive>,
and
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-attachment attachment shading rates>
are
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-combining combined>
for fragments generated by subsequent drawing commands.
| vkGetPhysicalDeviceFragmentShadingRatesKHR - Get available shading rates
for a physical device
= Description
If @pFragmentShadingRates@ is @NULL@, then the number of fragment
shading rates available is returned in @pFragmentShadingRateCount@.
Otherwise, @pFragmentShadingRateCount@ /must/ point to a variable set by
the user to the number of elements in the @pFragmentShadingRates@ array,
and on return the variable is overwritten with the number of structures
actually written to @pFragmentShadingRates@. If
@pFragmentShadingRateCount@ is less than the number of fragment shading
rates available, at most @pFragmentShadingRateCount@ structures will be
written, and @VK_INCOMPLETE@ will be returned instead of @VK_SUCCESS@, to
indicate that not all the available fragment shading rates were returned.
The returned array of fragment shading rates /must/ be ordered from
array /must/ not have the same @fragmentSize@ values.
For any entry in the array, the following rules also apply:
<-extensions/html/vkspec.html#limits-maxFragmentSize maxFragmentSize.width>.
- The value of @fragmentSize.height@ /must/ be less than or equal to
<-extensions/html/vkspec.html#limits-maxFragmentSize maxFragmentSize.height>.
- The value of @fragmentSize.height@ /must/ be greater than or equal
- The highest sample count in @sampleCounts@ /must/ be less than or
equal to
<-extensions/html/vkspec.html#limits-maxFragmentShadingRateRasterizationSamples maxFragmentShadingRateRasterizationSamples>.
highest sample count in @sampleCounts@ /must/ be less than or equal
to
Implementations /must/ support at least the following shading rates:
+--------------------------------------------------------------+-----------------------------------+
| @sampleCounts@ | @fragmentSize@ |
+==============================================================+===================================+
| | | |
+--------------------------------------------------------------+-----------------------------------+
| | | |
+--------------------------------------------------------------+-----------------------------------+
+--------------------------------------------------------------+-----------------------------------+
If
the required rates /must/ also include
Note
actual effect on the support of rendering without setting the fragment
size. All sample counts and render pass transforms are supported for
this rate.
The returned set of fragment shading rates /must/ be returned in the
native (rotated) coordinate system. For rasterization using render pass
the application /must/ transform the returned fragment shading rates
into the current (unrotated) coordinate system to get the supported
rates for that transform.
Note
for transforms
and
== Valid Usage (Implicit)
- #VUID-vkGetPhysicalDeviceFragmentShadingRatesKHR-physicalDevice-parameter#
@physicalDevice@ /must/ be a valid
- #VUID-vkGetPhysicalDeviceFragmentShadingRatesKHR-pFragmentShadingRateCount-parameter#
@pFragmentShadingRateCount@ /must/ be a valid pointer to a
- #VUID-vkGetPhysicalDeviceFragmentShadingRatesKHR-pFragmentShadingRates-parameter#
/must/ be a valid pointer to an array of @pFragmentShadingRateCount@
'PhysicalDeviceFragmentShadingRateKHR' structures
== Return Codes
[<-extensions/html/vkspec.html#fundamentals-successcodes Success>]
[<-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
= See Also
'PhysicalDeviceFragmentShadingRateKHR'
| @physicalDevice@ is the handle to the physical device whose properties
will be queried.
| VkFragmentShadingRateAttachmentInfoKHR - Structure specifying a fragment
= Description
If no shading rate attachment is specified, or if this structure is not
specified, the implementation behaves as if a valid shading rate
attachment was specified with all texels specifying a single pixel per
fragment.
== Valid Usage
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04524#
/must/ be equal to
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04525#
value
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04526#
to
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04527#
equal to
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04528#
value
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04529#
to
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04530#
equal to
<-extensions/html/vkspec.html#limits-minFragmentShadingRateAttachmentTexelSize minFragmentShadingRateAttachmentTexelSize.height>
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04531#
to
<-extensions/html/vkspec.html#limits-maxFragmentShadingRateAttachmentTexelSizeAspectRatio maxFragmentShadingRateAttachmentTexelSizeAspectRatio>
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-04532#
to
<-extensions/html/vkspec.html#limits-maxFragmentShadingRateAttachmentTexelSizeAspectRatio maxFragmentShadingRateAttachmentTexelSizeAspectRatio>
== Valid Usage (Implicit)
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-sType-sType# @sType@
/must/ be
- #VUID-VkFragmentShadingRateAttachmentInfoKHR-pFragmentShadingRateAttachment-parameter#
valid
structure
= See Also
structure defining the fragment shading rate attachment for this
| @shadingRateAttachmentTexelSize@ specifies the size of the portion of
the framebuffer corresponding to each texel in
@pFragmentShadingRateAttachment@.
| VkPipelineFragmentShadingRateStateCreateInfoKHR - Structure specifying
parameters controlling the fragment shading rate
= Description
If the @pNext@ chain of
'PipelineFragmentShadingRateStateCreateInfoKHR' structure, then that
structure includes parameters controlling the pipeline fragment shading
rate.
If this structure is not present, @fragmentSize@ is considered to be
equal to (1,1), and both elements of @combinerOps@ are considered to be
equal to 'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR'.
== Valid Usage (Implicit)
= See Also
'FragmentShadingRateCombinerOpKHR',
structure containing the fragment size used to define the pipeline
fragment shading rate for drawing commands using this pipeline.
| @combinerOps@ specifies a 'FragmentShadingRateCombinerOpKHR' value
determining how the
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-pipeline pipeline>,
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-primitive primitive>,
and
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-attachment attachment shading rates>
are
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-combining combined>
for fragments generated by drawing commands using the created pipeline.
| VkPhysicalDeviceFragmentShadingRateFeaturesKHR - Structure indicating
support for variable rate fragment shading
= Members
This structure describes the following features:
= Description
included in the @pNext@ chain of the
structure passed to
it is filled in to indicate whether each corresponding feature is
selectively enable these features.
== Valid Usage (Implicit)
= See Also
| #features-pipelineFragmentShadingRate# @pipelineFragmentShadingRate@
indicates that the implementation supports the
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-pipeline pipeline fragment shading rate>.
| #features-primitiveFragmentShadingRate# @primitiveFragmentShadingRate@
indicates that the implementation supports the
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-primitive primitive fragment shading rate>.
| #features-attachmentFragmentShadingRate# @attachmentFragmentShadingRate@
indicates that the implementation supports the
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-attachment attachment fragment shading rate>.
| VkPhysicalDeviceFragmentShadingRatePropertiesKHR - Structure describing
variable fragment shading rate limits that can be supported by an
implementation
= Description
Note
Multiplication of the combiner rates using the fragment width\/height in
linear space is equivalent to an addition of those values in log2 space.
Some implementations inadvertently implemented an addition in linear
space due to unclear requirements originating outside of this
specification. This resulted in
<-extensions/html/vkspec.html#limits-fragmentShadingRateStrictMultiplyCombiner fragmentShadingRateStrictMultiplyCombiner>
combinations result in the exact same result as if multiplication was
performed in linear space due to the clamping logic, and the fact that
will not affect the correct operation of applications.
included in the @pNext@ chain of the
structure passed to
it is filled in with each corresponding implementation-dependent
property.
These properties are related to
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate fragment shading rates>.
== Valid Usage (Implicit)
= See Also
| #limits-minFragmentShadingRateAttachmentTexelSize#
@minFragmentShadingRateAttachmentTexelSize@ indicates minimum supported
width and height of the portion of the framebuffer corresponding to each
texel in a fragment shading rate attachment. Each value /must/ be less
than or equal to the values in
@maxFragmentShadingRateAttachmentTexelSize@. Each value /must/ be a
<-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
feature is not supported.
| #limits-maxFragmentShadingRateAttachmentTexelSize#
@maxFragmentShadingRateAttachmentTexelSize@ indicates maximum supported
width and height of the portion of the framebuffer corresponding to each
texel in a fragment shading rate attachment. Each value /must/ be
greater than or equal to the values in
@minFragmentShadingRateAttachmentTexelSize@. Each value /must/ be a
<-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
feature is not supported.
| #limits-maxFragmentShadingRateAttachmentTexelSizeAspectRatio#
@maxFragmentShadingRateAttachmentTexelSizeAspectRatio@ indicates the
maximum ratio between the width and height of the portion of the
framebuffer corresponding to each texel in a fragment shading rate
attachment. @maxFragmentShadingRateAttachmentTexelSizeAspectRatio@
@minFragmentShadingRateAttachmentTexelSize.height@,
@maxFragmentShadingRateAttachmentTexelSize.height@ \/
the
<-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
feature is not supported.
| #limits-primitiveFragmentShadingRateWithMultipleViewports#
@primitiveFragmentShadingRateWithMultipleViewports@ specifies whether
the
<-extensions/html/vkspec.html#primsrast-fragment-shading-rate-primitive primitive fragment shading rate>
/can/ be used when multiple viewports are used. If this value is
used, and applications /must/ not write to the @ViewportMaskNV@ or
@ViewportIndex@ built-in when setting @PrimitiveShadingRateKHR@. It
feature, the @VK_EXT_shader_viewport_index_layer@ extension, or the
feature is not supported, or if the
<-extensions/html/vkspec.html#features-primitiveFragmentShadingRate primitiveFragmentShadingRate>
feature is not supported.
| #limits-layeredShadingRateAttachments# @layeredShadingRateAttachments@
with multiple layers. If this value is
a @usage@ that includes
<-extensions/html/vkspec.html#features-multiview multiview>
feature, the
feature, the @VK_EXT_shader_viewport_index_layer@ extension, or the
feature is not supported, or if the
<-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
feature is not supported.
| #limits-fragmentShadingRateNonTrivialCombinerOps#
'FragmentShadingRateCombinerOpKHR' enums other than
'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR' or
<-extensions/html/vkspec.html#features-primitiveFragmentShadingRate primitiveFragmentShadingRate>
or
<-extensions/html/vkspec.html#features-attachmentFragmentShadingRate attachmentFragmentShadingRate>
feature is supported.
| #limits-maxFragmentSize# @maxFragmentSize@ indicates the maximum
informational, and is not validated.
| #limits-maxFragmentSizeAspectRatio# @maxFragmentSizeAspectRatio@
indicates the maximum ratio between the width and height of a fragment.
of @maxFragmentSize@. This limit is purely informational, and is not
validated.
| #limits-maxFragmentShadingRateCoverageSamples#
@maxFragmentShadingRateCoverageSamples@ specifies the maximum number of
coverage samples supported in a single fragment.
@maxFragmentShadingRateCoverageSamples@ /must/ be less than or equal to
and the sample count reported by
@maxFragmentShadingRateRasterizationSamples@.
@maxFragmentShadingRateCoverageSamples@ /must/ be less than or equal to
is supported. This limit is purely informational, and is not validated.
| #limits-maxFragmentShadingRateRasterizationSamples#
@maxFragmentShadingRateRasterizationSamples@ is a
specifying the maximum sample rate supported when a fragment covers
multiple pixels. This limit is purely informational, and is not
validated.
| #limits-fragmentShadingRateWithShaderDepthStencilWrites#
@fragmentShadingRateWithShaderDepthStencilWrites@ specifies whether the
a fragment shader for multi-pixel fragments. If this value is
| #limits-fragmentShadingRateWithSampleMask#
implementation supports setting valid bits of
| #limits-fragmentShadingRateWithShaderSampleMask#
@fragmentShadingRateWithShaderSampleMask@ specifies whether the
implementation supports reading or writing
| #limits-fragmentShadingRateWithConservativeRasterization#
<-extensions/html/vkspec.html#primsrast-conservativeraster conservative rasterization>
is supported for multi-pixel fragments. It /must/ be
<-extensions/html/vkspec.html#primsrast-conservativeraster conservative rasterization>
| #limits-fragmentShadingRateWithFragmentShaderInterlock#
@fragmentShadingRateWithFragmentShaderInterlock@ specifies whether
<-extensions/html/vkspec.html#fragops-shader-interlock fragment shader interlock>
is supported for multi-pixel fragments. It /must/ be
@VK_EXT_fragment_shader_interlock@ is not supported. If this value is
<-extensions/html/vkspec.html#fragops-shader-interlock fragment shader interlock>
| #limits-fragmentShadingRateWithCustomSampleLocations#
<-extensions/html/vkspec.html#primsrast-samplelocations custom sample locations>
are supported for multi-pixel fragments. It /must/ be
using
<-extensions/html/vkspec.html#primsrast-samplelocations custom sample locations>
| #limits-fragmentShadingRateStrictMultiplyCombiner#
@fragmentShadingRateStrictMultiplyCombiner@ specifies whether
'FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR' accurately performs a
multiplication or not. Implementations where this value is
about sample count specific additional multisampling capabilities
== Valid Usage (Implicit)
= See Also
'getPhysicalDeviceFragmentShadingRatesKHR'
| @sampleCounts@ is a bitmask of sample counts for which the shading rate
described by @fragmentSize@ is supported.
the width and height of a supported shading rate.
| VkFragmentShadingRateCombinerOpKHR - Control how fragment shading rates
are combined
= Description
inputs to the operation.
If
<-extensions/html/vkspec.html#limits-fragmentShadingRateStrictMultiplyCombiner fragmentShadingRateStrictMultiplyCombiner>
that dimension. See the definition of
<-extensions/html/vkspec.html#limits-fragmentShadingRateStrictMultiplyCombiner fragmentShadingRateStrictMultiplyCombiner>
for more information.
These operations are performed in a component-wise fashion.
= See Also
'cmdSetFragmentShadingRateKHR'
| 'FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR' specifies a combiner
| 'FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR' specifies a combiner
| 'FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR' specifies a combiner
| 'FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR' specifies a combiner | VK_KHR_fragment_shading_rate - device extension
@VK_KHR_fragment_shading_rate@
227
2
- Requires support for Vulkan 1.0
-
- Tobias Hector @tobski (file questions about the
  @VK_KHR_fragment_shading_rate@ extension on the Khronos Vulkan-Docs
  issue tracker)
2021 - 09 - 30
- , AMD
- , AMD
- , AMD
- , Nvidia
- , Qualcomm
- , Intel
- Jan - , Arm
- , Nvidia
- , Roblox
- Contributors to the specification
Up to three methods are available to the application to change the
This can be particularly useful in high resolution rendering , or for XR
This extension also adds support for the @SPV_KHR_fragment_shading_rate@
- Extending ' Vulkan . Core10.Pipeline . ' :
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2 ' ,
' Vulkan . Core10.Device . DeviceCreateInfo ' :
- ' '
' Vulkan . ' :
- ' PhysicalDeviceFragmentShadingRatePropertiesKHR '
' Vulkan . Core12.Promoted_From_VK_KHR_create_renderpass2.SubpassDescription2 ' :
- Extending ' Vulkan . Core10.Enums . AccessFlagBits . AccessFlagBits ' :
- ' Vulkan . Core10.Enums . AccessFlagBits . ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR '
- Extending ' Vulkan . Core10.Enums . DynamicState . DynamicState ' :
- ' Vulkan . Core10.Enums . DynamicState . '
' Vulkan . Core10.Enums . FormatFeatureFlagBits . FormatFeatureFlagBits ' :
- ' Vulkan . Core10.Enums . FormatFeatureFlagBits . FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR '
- Extending ' Vulkan . Core10.Enums . ImageLayout . ImageLayout ' :
- ' Vulkan . Core10.Enums . ImageLayout . IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR '
' Vulkan . Core10.Enums . ImageUsageFlagBits . ImageUsageFlagBits ' :
- ' Vulkan . Core10.Enums . ImageUsageFlagBits . IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR '
' Vulkan . Core10.Enums . . ' :
- ' Vulkan . Core10.Enums . . '
- Extending ' Vulkan . Core10.Enums . StructureType . StructureType ' :
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR '
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR '
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR '
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR '
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR '
' Vulkan . Core13.Enums . ' :
- ' Vulkan . Core13.Enums . FormatFeatureFlags2.FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR '
- Revision 1 , 2020 - 05 - 06 ( )
- Revision 2 , 2021 - 09 - 30 ( )
' ' ,
' PhysicalDeviceFragmentShadingRatePropertiesKHR ' ,
' PipelineFragmentShadingRateStateCreateInfoKHR ' ,
-- | Bindings for the @VK_KHR_fragment_shading_rate@ device extension,
-- which allows the fragment shading rate to be controlled per pipeline,
-- per primitive, and per render-pass attachment.
module Vulkan.Extensions.VK_KHR_fragment_shading_rate  ( cmdSetFragmentShadingRateKHR
                                                       , getPhysicalDeviceFragmentShadingRatesKHR
                                                       , FragmentShadingRateAttachmentInfoKHR(..)
                                                       , PipelineFragmentShadingRateStateCreateInfoKHR(..)
                                                       , PhysicalDeviceFragmentShadingRateFeaturesKHR(..)
                                                       , PhysicalDeviceFragmentShadingRatePropertiesKHR(..)
                                                       , PhysicalDeviceFragmentShadingRateKHR(..)
                                                       , FragmentShadingRateCombinerOpKHR( FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR
                                                                                         , FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR
                                                                                         , FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR
                                                                                         , FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR
                                                                                         , FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR
                                                                                         , ..
                                                                                         )
                                                       , KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION
                                                       , pattern KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION
                                                       , KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME
                                                       , pattern KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME
                                                       ) where
import Vulkan.CStruct.Utils (FixedArray)
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import Foreign.Marshal.Utils (maybePeek)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (castPtr)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import GHC.Show (showsPrec)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Data.Vector (generateM)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero)
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Data.Int (Int32)
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Data.Word (Word32)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.CStruct.Utils (advancePtrBytes)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.CStruct.Extends (forgetExtensions)
import Vulkan.CStruct.Utils (lowerArrayPtr)
import Vulkan.CStruct.Extends (peekSomeCStruct)
import Vulkan.CStruct.Extends (withSomeCStruct)
import Vulkan.NamedType ((:::))
import Vulkan.Core12.Promoted_From_VK_KHR_create_renderpass2 (AttachmentReference2)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Handles (CommandBuffer)
import Vulkan.Core10.Handles (CommandBuffer(..))
import Vulkan.Core10.Handles (CommandBuffer(CommandBuffer))
import Vulkan.Core10.Handles (CommandBuffer_T)
import Vulkan.Dynamic (DeviceCmds(pVkCmdSetFragmentShadingRateKHR))
import Vulkan.Core10.FundamentalTypes (Extent2D)
import Vulkan.Dynamic (InstanceCmds(pVkGetPhysicalDeviceFragmentShadingRatesKHR))
import Vulkan.Core10.Handles (PhysicalDevice)
import Vulkan.Core10.Handles (PhysicalDevice(..))
import Vulkan.Core10.Handles (PhysicalDevice(PhysicalDevice))
import Vulkan.Core10.Handles (PhysicalDevice_T)
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.Core10.Enums.SampleCountFlagBits (SampleCountFlagBits)
import Vulkan.Core10.Enums.SampleCountFlagBits (SampleCountFlags)
import Vulkan.CStruct.Extends (SomeStruct)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
-- Turn the dynamically loaded @vkCmdSetFragmentShadingRateKHR@ function
-- pointer into a callable Haskell function.  The call is marked @unsafe@
-- unless SAFE_FOREIGN_CALLS is defined at build time.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkCmdSetFragmentShadingRateKHR
  :: FunPtr (Ptr CommandBuffer_T -> Ptr Extent2D -> Ptr (FixedArray 2 FragmentShadingRateCombinerOpKHR) -> IO ()) -> Ptr CommandBuffer_T -> Ptr Extent2D -> Ptr (FixedArray 2 FragmentShadingRateCombinerOpKHR) -> IO ()
' Vulkan . Core10.Enums . DynamicState . '
' Vulkan . Core10.Pipeline . PipelineDynamicStateCreateInfo'::@pDynamicStates@.
is not enabled , @pFragmentSize->width@ /must/ be @1@
is not enabled , @pFragmentSize->height@ /must/ be @1@
One of
limit is not supported , elements of @combinerOps@ /must/ be either
' '
@pFragmentSize->width@ /must/ be a power - of - two value
@pFragmentSize->height@ /must/ be a power - of - two value
' Vulkan . Core10.Handles . CommandBuffer ' handle
@pFragmentSize@ /must/ be a valid pointer to a valid
' Vulkan . Core10.FundamentalTypes . Extent2D ' structure
given element of @combinerOps@ /must/ be a valid
' Vulkan . Core10.Handles . ' that @commandBuffer@ was
- Host access to the ' Vulkan . Core10.Handles . ' that
| < -extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels > | < -extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope > | < -extensions/html/vkspec.html#vkCmdBeginVideoCodingKHR Video Coding Scope > | < -extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types > | < -extensions/html/vkspec.html#fundamentals-queueoperation-command-types Command Type > |
< -extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate > ,
' Vulkan . Core10.Handles . CommandBuffer ' ,
' Vulkan . Core10.FundamentalTypes . Extent2D ' ,
-- | vkCmdSetFragmentShadingRateKHR - Set pipeline fragment shading rate
-- and combiner operation dynamically for a command buffer.
--
-- Throws an 'IOError' if the @vkCmdSetFragmentShadingRateKHR@ function
-- pointer has not been loaded for this command buffer's device (e.g. the
-- extension was not enabled).
cmdSetFragmentShadingRateKHR :: forall io
                              . (MonadIO io)
                             => -- | @commandBuffer@ is the command buffer into which the command will
                                -- be recorded.
                                CommandBuffer
                             -> -- | @pFragmentSize@ specifies the pipeline fragment shading rate for
                                -- subsequent drawing commands.
                                ("fragmentSize" ::: Extent2D)
                             -> -- | @combinerOps@ specifies how the pipeline, primitive, and
                                -- attachment shading rates are combined for fragments generated by
                                -- subsequent drawing commands.
                                ("combinerOps" ::: (FragmentShadingRateCombinerOpKHR, FragmentShadingRateCombinerOpKHR))
                             -> io ()
cmdSetFragmentShadingRateKHR commandBuffer fragmentSize combinerOps = liftIO . evalContT $ do
  let vkCmdSetFragmentShadingRateKHRPtr = pVkCmdSetFragmentShadingRateKHR (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
  -- Fail early with a descriptive error if the function pointer is null.
  lift $ unless (vkCmdSetFragmentShadingRateKHRPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdSetFragmentShadingRateKHR is null" Nothing Nothing
  let vkCmdSetFragmentShadingRateKHR' = mkVkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHRPtr
  -- Marshal the Extent2D to a C struct for the duration of the call.
  pFragmentSize <- ContT $ withCStruct (fragmentSize)
  -- The C API takes a fixed array of two 4-byte combiner ops (8 bytes).
  pCombinerOps <- ContT $ allocaBytes @(FixedArray 2 FragmentShadingRateCombinerOpKHR) 8
  let pCombinerOps' = lowerArrayPtr pCombinerOps
  lift $ case (combinerOps) of
    (e0, e1) -> do
      poke (pCombinerOps' :: Ptr FragmentShadingRateCombinerOpKHR) (e0)
      poke (pCombinerOps' `plusPtr` 4 :: Ptr FragmentShadingRateCombinerOpKHR) (e1)
  lift $ traceAroundEvent "vkCmdSetFragmentShadingRateKHR" (vkCmdSetFragmentShadingRateKHR'
                                                             (commandBufferHandle (commandBuffer))
                                                             pFragmentSize
                                                             (pCombinerOps))
  pure $ ()
-- Turn the dynamically loaded @vkGetPhysicalDeviceFragmentShadingRatesKHR@
-- function pointer into a callable Haskell function.  The call is marked
-- @unsafe@ unless SAFE_FOREIGN_CALLS is defined at build time.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkGetPhysicalDeviceFragmentShadingRatesKHR
  :: FunPtr (Ptr PhysicalDevice_T -> Ptr Word32 -> Ptr PhysicalDeviceFragmentShadingRateKHR -> IO Result) -> Ptr PhysicalDevice_T -> Ptr Word32 -> Ptr PhysicalDeviceFragmentShadingRateKHR -> IO Result
If @pFragmentShadingRates@ is @NULL@ , then the number of fragment
the user to the number of elements in the @pFragmentShadingRates@ array ,
written , and ' Vulkan . Core10.Enums . Result . INCOMPLETE ' will be returned
instead of ' Vulkan . Core10.Enums . Result . SUCCESS ' , to indicate that not
largest @fragmentSize.width@ value to smallest , and each set of fragment
shading rates with the same @fragmentSize.width@ value /must/ be ordered
from largest @fragmentSize.height@ to smallest . Any two entries in the
- The value of @fragmentSize.width@ /must/ be less than or equal to
- The value of @fragmentSize.width@ /must/ be greater than or equal to
@1@.
- The value of @fragmentSize.width@ /must/ be a power - of - two .
to @1@.
- The value of @fragmentSize.height@ /must/ be a power - of - two .
- The product of @fragmentSize.width@ , @fragmentSize.height@ , and the
< > .
| ' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_1_BIT ' | { 2,2 } |
| ' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_4_BIT ' | |
| ' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_1_BIT ' | { 2,1 } |
| ' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_4_BIT ' | |
| ~0 | { 1,1 } |
< framebufferColorSampleCounts > ,
includes ' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_2_BIT ' ,
' Vulkan . Core10.Enums . SampleCountFlagBits . SAMPLE_COUNT_2_BIT ' .
Including the { 1,1 } fragment size is done for completeness ; it has no
@transform@ not equal to
' Vulkan . Extensions . VK_KHR_surface . SURFACE_TRANSFORM_IDENTITY_BIT_KHR ' ,
For example , consider an implementation returning support for 4x2 , but
not 2x4 in the set of supported fragment shading rates . This means that
' Vulkan . Extensions . VK_KHR_surface . '
' Vulkan . Extensions . VK_KHR_surface . SURFACE_TRANSFORM_ROTATE_270_BIT_KHR ' ,
2x4 is a supported rate , but 4x2 is an unsupported rate .
' Vulkan . Core10.Handles . PhysicalDevice ' handle
@uint32_t@ value
If the value referenced by @pFragmentShadingRateCount@ is not @0@ ,
and is not @NULL@ ,
- ' Vulkan . Core10.Enums . Result . SUCCESS '
- ' Vulkan . Core10.Enums . Result . INCOMPLETE '
- ' Vulkan . Core10.Enums . Result . ERROR_OUT_OF_HOST_MEMORY '
< -extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate > ,
' Vulkan . Core10.Handles . PhysicalDevice ' ,
-- | vkGetPhysicalDeviceFragmentShadingRatesKHR - Get available shading
-- rates for a physical device.
--
-- This follows the standard Vulkan enumeration pattern: the command is
-- first called with a @NULL@ array to obtain the element count, the array
-- is then allocated, and the command is called a second time to fill it.
--
-- Throws a 'VulkanException' if either call returns a failure code, and an
-- 'IOError' if the function pointer has not been loaded.
getPhysicalDeviceFragmentShadingRatesKHR :: forall io
                                          . (MonadIO io)
                                         => -- | @physicalDevice@ is the handle to the physical device whose
                                            -- properties will be queried.
                                            PhysicalDevice
                                         -> io (Result, ("fragmentShadingRates" ::: Vector PhysicalDeviceFragmentShadingRateKHR))
getPhysicalDeviceFragmentShadingRatesKHR physicalDevice = liftIO . evalContT $ do
  let vkGetPhysicalDeviceFragmentShadingRatesKHRPtr = pVkGetPhysicalDeviceFragmentShadingRatesKHR (case physicalDevice of PhysicalDevice{instanceCmds} -> instanceCmds)
  lift $ unless (vkGetPhysicalDeviceFragmentShadingRatesKHRPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetPhysicalDeviceFragmentShadingRatesKHR is null" Nothing Nothing
  let vkGetPhysicalDeviceFragmentShadingRatesKHR' = mkVkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHRPtr
  let physicalDevice' = physicalDeviceHandle (physicalDevice)
  -- First call with a NULL array pointer: query only the element count.
  pPFragmentShadingRateCount <- ContT $ bracket (callocBytes @Word32 4) free
  r <- lift $ traceAroundEvent "vkGetPhysicalDeviceFragmentShadingRatesKHR" (vkGetPhysicalDeviceFragmentShadingRatesKHR'
                                                                               physicalDevice'
                                                                               (pPFragmentShadingRateCount)
                                                                               (nullPtr))
  lift $ when (r < SUCCESS) (throwIO (VulkanException r))
  pFragmentShadingRateCount <- lift $ peek @Word32 pPFragmentShadingRateCount
  -- Allocate the output array (each struct occupies 32 bytes) and
  -- zero-initialise every element before handing it to the driver.
  pPFragmentShadingRates <- ContT $ bracket (callocBytes @PhysicalDeviceFragmentShadingRateKHR ((fromIntegral (pFragmentShadingRateCount)) * 32)) free
  _ <- traverse (\i -> ContT $ pokeZeroCStruct (pPFragmentShadingRates `advancePtrBytes` (i * 32) :: Ptr PhysicalDeviceFragmentShadingRateKHR) . ($ ())) [0..(fromIntegral (pFragmentShadingRateCount)) - 1]
  -- Second call: fill the allocated array.
  r' <- lift $ traceAroundEvent "vkGetPhysicalDeviceFragmentShadingRatesKHR" (vkGetPhysicalDeviceFragmentShadingRatesKHR'
                                                                                physicalDevice'
                                                                                (pPFragmentShadingRateCount)
                                                                                ((pPFragmentShadingRates)))
  lift $ when (r' < SUCCESS) (throwIO (VulkanException r'))
  -- Re-read the count: the implementation may have written fewer elements
  -- than first reported (in which case r' is INCOMPLETE).
  pFragmentShadingRateCount' <- lift $ peek @Word32 pPFragmentShadingRateCount
  pFragmentShadingRates' <- lift $ generateM (fromIntegral (pFragmentShadingRateCount')) (\i -> peekCStruct @PhysicalDeviceFragmentShadingRateKHR (((pPFragmentShadingRates) `advancePtrBytes` (32 * (i)) :: Ptr PhysicalDeviceFragmentShadingRateKHR)))
  pure $ ((r'), pFragmentShadingRates')
shading rate attachment for a subpass
If @pFragmentShadingRateAttachment@ is not @NULL@ and its
@attachment@ member is not
' Vulkan . Core10.APIConstants . ATTACHMENT_UNUSED ' , its @layout@ member
' Vulkan . Core10.Enums . ImageLayout . IMAGE_LAYOUT_GENERAL ' or
' Vulkan . Core10.Enums . ImageLayout . IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR '
If @pFragmentShadingRateAttachment@ is not @NULL@ and its
@attachment@ member is not
' Vulkan . Core10.APIConstants . ATTACHMENT_UNUSED ' ,
@shadingRateAttachmentTexelSize.width@ /must/ be a power of two
If @pFragmentShadingRateAttachment@ is not @NULL@ and its
@attachment@ member is not
' Vulkan . Core10.APIConstants . ATTACHMENT_UNUSED ' ,
@shadingRateAttachmentTexelSize.width@ /must/ be less than or equal
< -extensions/html/vkspec.html#limits-maxFragmentShadingRateAttachmentTexelSize maxFragmentShadingRateAttachmentTexelSize.width >
If @pFragmentShadingRateAttachment@ is not @NULL@ and its
@attachment@ member is not
' Vulkan . Core10.APIConstants . ATTACHMENT_UNUSED ' ,
@shadingRateAttachmentTexelSize.width@ /must/ be greater than or
< -extensions/html/vkspec.html#limits-minFragmentShadingRateAttachmentTexelSize minFragmentShadingRateAttachmentTexelSize.width >
If @pFragmentShadingRateAttachment@ is not @NULL@ and its
@attachment@ member is not
' Vulkan . Core10.APIConstants . ATTACHMENT_UNUSED ' ,
@shadingRateAttachmentTexelSize.height@ /must/ be a power of two
If @pFragmentShadingRateAttachment@ is not @NULL@ and its
@attachment@ member is not
' Vulkan . Core10.APIConstants . ATTACHMENT_UNUSED ' ,
@shadingRateAttachmentTexelSize.height@ /must/ be less than or equal
< -extensions/html/vkspec.html#limits-maxFragmentShadingRateAttachmentTexelSize >
If @pFragmentShadingRateAttachment@ is not @NULL@ and its
@attachment@ member is not
' Vulkan . Core10.APIConstants . ATTACHMENT_UNUSED ' ,
@shadingRateAttachmentTexelSize.height@ /must/ be greater than or
If @pFragmentShadingRateAttachment@ is not @NULL@ and its
@attachment@ member is not
' Vulkan . Core10.APIConstants . ATTACHMENT_UNUSED ' , the quotient of
@shadingRateAttachmentTexelSize.width@ and
@shadingRateAttachmentTexelSize.height@ /must/ be less than or equal
If @pFragmentShadingRateAttachment@ is not @NULL@ and its
@attachment@ member is not
' Vulkan . Core10.APIConstants . ATTACHMENT_UNUSED ' , the quotient of
@shadingRateAttachmentTexelSize.height@ and
@shadingRateAttachmentTexelSize.width@ /must/ be less than or equal
' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR '
If @pFragmentShadingRateAttachment@ is not @NULL@ ,
@pFragmentShadingRateAttachment@ /must/ be a valid pointer to a
' Vulkan . Core12.Promoted_From_VK_KHR_create_renderpass2.AttachmentReference2 '
< -extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate > ,
' Vulkan . Core12.Promoted_From_VK_KHR_create_renderpass2.AttachmentReference2 ' ,
' Vulkan . Core10.FundamentalTypes . Extent2D ' ,
' Vulkan . Core10.Enums . StructureType . StructureType '
-- | VkFragmentShadingRateAttachmentInfoKHR - Structure specifying a
-- fragment shading rate attachment for a subpass.
data FragmentShadingRateAttachmentInfoKHR = FragmentShadingRateAttachmentInfoKHR
  { -- | @pFragmentShadingRateAttachment@ is @NULL@ or a pointer to an
    -- 'AttachmentReference2' structure defining the fragment shading rate
    -- attachment for this subpass.
    fragmentShadingRateAttachment :: Maybe (SomeStruct AttachmentReference2)
  , -- | @shadingRateAttachmentTexelSize@ specifies the size of the portion
    -- of the framebuffer corresponding to each texel in
    -- @pFragmentShadingRateAttachment@.
    shadingRateAttachmentTexelSize :: Extent2D
  }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (FragmentShadingRateAttachmentInfoKHR)
#endif
deriving instance Show FragmentShadingRateAttachmentInfoKHR
instance ToCStruct FragmentShadingRateAttachmentInfoKHR where
withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
pokeCStruct p FragmentShadingRateAttachmentInfoKHR{..} f = evalContT $ do
lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR)
lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
pFragmentShadingRateAttachment'' <- case (fragmentShadingRateAttachment) of
Nothing -> pure nullPtr
Just j -> ContT @_ @_ @(Ptr (AttachmentReference2 '[])) $ \cont -> withSomeCStruct @AttachmentReference2 (j) (cont . castPtr)
lift $ poke ((p `plusPtr` 16 :: Ptr (Ptr (AttachmentReference2 _)))) pFragmentShadingRateAttachment''
lift $ poke ((p `plusPtr` 24 :: Ptr Extent2D)) (shadingRateAttachmentTexelSize)
lift $ f
cStructSize = 32
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 24 :: Ptr Extent2D)) (zero)
f
instance FromCStruct FragmentShadingRateAttachmentInfoKHR where
peekCStruct p = do
pFragmentShadingRateAttachment <- peek @(Ptr (AttachmentReference2 _)) ((p `plusPtr` 16 :: Ptr (Ptr (AttachmentReference2 _))))
pFragmentShadingRateAttachment' <- maybePeek (\j -> peekSomeCStruct (forgetExtensions (j))) pFragmentShadingRateAttachment
shadingRateAttachmentTexelSize <- peekCStruct @Extent2D ((p `plusPtr` 24 :: Ptr Extent2D))
pure $ FragmentShadingRateAttachmentInfoKHR
pFragmentShadingRateAttachment' shadingRateAttachmentTexelSize
instance Zero FragmentShadingRateAttachmentInfoKHR where
zero = FragmentShadingRateAttachmentInfoKHR
Nothing
zero
' Vulkan . Core10.Pipeline . ' includes a
equal to ( 1,1 ) , and both elements of @combinerOps@ are considered to be
< -extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate > ,
' Vulkan . Core10.FundamentalTypes . Extent2D ' ,
' Vulkan . Core10.Enums . StructureType . StructureType '
data PipelineFragmentShadingRateStateCreateInfoKHR = PipelineFragmentShadingRateStateCreateInfoKHR
| @fragmentSize@ specifies a ' Vulkan . Core10.FundamentalTypes . Extent2D '
fragmentSize :: Extent2D
combinerOps :: (FragmentShadingRateCombinerOpKHR, FragmentShadingRateCombinerOpKHR)
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PipelineFragmentShadingRateStateCreateInfoKHR)
#endif
deriving instance Show PipelineFragmentShadingRateStateCreateInfoKHR
instance ToCStruct PipelineFragmentShadingRateStateCreateInfoKHR where
withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PipelineFragmentShadingRateStateCreateInfoKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Extent2D)) (fragmentSize)
let pCombinerOps' = lowerArrayPtr ((p `plusPtr` 24 :: Ptr (FixedArray 2 FragmentShadingRateCombinerOpKHR)))
case (combinerOps) of
(e0, e1) -> do
poke (pCombinerOps' :: Ptr FragmentShadingRateCombinerOpKHR) (e0)
poke (pCombinerOps' `plusPtr` 4 :: Ptr FragmentShadingRateCombinerOpKHR) (e1)
f
cStructSize = 32
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Extent2D)) (zero)
let pCombinerOps' = lowerArrayPtr ((p `plusPtr` 24 :: Ptr (FixedArray 2 FragmentShadingRateCombinerOpKHR)))
case ((zero, zero)) of
(e0, e1) -> do
poke (pCombinerOps' :: Ptr FragmentShadingRateCombinerOpKHR) (e0)
poke (pCombinerOps' `plusPtr` 4 :: Ptr FragmentShadingRateCombinerOpKHR) (e1)
f
instance FromCStruct PipelineFragmentShadingRateStateCreateInfoKHR where
peekCStruct p = do
fragmentSize <- peekCStruct @Extent2D ((p `plusPtr` 16 :: Ptr Extent2D))
let pcombinerOps = lowerArrayPtr @FragmentShadingRateCombinerOpKHR ((p `plusPtr` 24 :: Ptr (FixedArray 2 FragmentShadingRateCombinerOpKHR)))
combinerOps0 <- peek @FragmentShadingRateCombinerOpKHR ((pcombinerOps `advancePtrBytes` 0 :: Ptr FragmentShadingRateCombinerOpKHR))
combinerOps1 <- peek @FragmentShadingRateCombinerOpKHR ((pcombinerOps `advancePtrBytes` 4 :: Ptr FragmentShadingRateCombinerOpKHR))
pure $ PipelineFragmentShadingRateStateCreateInfoKHR
fragmentSize ((combinerOps0, combinerOps1))
instance Storable PipelineFragmentShadingRateStateCreateInfoKHR where
sizeOf ~_ = 32
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PipelineFragmentShadingRateStateCreateInfoKHR where
zero = PipelineFragmentShadingRateStateCreateInfoKHR
zero
(zero, zero)
If the ' ' structure is
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2 '
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2 ' ,
supported . ' ' also be
used in the @pNext@ chain of ' Vulkan . Core10.Device . DeviceCreateInfo ' to
< -extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate > ,
' Vulkan . Core10.FundamentalTypes . Bool32 ' ,
' Vulkan . Core10.Enums . StructureType . StructureType '
data PhysicalDeviceFragmentShadingRateFeaturesKHR = PhysicalDeviceFragmentShadingRateFeaturesKHR
pipelineFragmentShadingRate :: Bool
primitiveFragmentShadingRate :: Bool
attachmentFragmentShadingRate :: Bool
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceFragmentShadingRateFeaturesKHR)
#endif
deriving instance Show PhysicalDeviceFragmentShadingRateFeaturesKHR
instance ToCStruct PhysicalDeviceFragmentShadingRateFeaturesKHR where
withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceFragmentShadingRateFeaturesKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (pipelineFragmentShadingRate))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (primitiveFragmentShadingRate))
poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (attachmentFragmentShadingRate))
f
cStructSize = 32
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceFragmentShadingRateFeaturesKHR where
peekCStruct p = do
pipelineFragmentShadingRate <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
primitiveFragmentShadingRate <- peek @Bool32 ((p `plusPtr` 20 :: Ptr Bool32))
attachmentFragmentShadingRate <- peek @Bool32 ((p `plusPtr` 24 :: Ptr Bool32))
pure $ PhysicalDeviceFragmentShadingRateFeaturesKHR
(bool32ToBool pipelineFragmentShadingRate)
(bool32ToBool primitiveFragmentShadingRate)
(bool32ToBool attachmentFragmentShadingRate)
instance Storable PhysicalDeviceFragmentShadingRateFeaturesKHR where
sizeOf ~_ = 32
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceFragmentShadingRateFeaturesKHR where
zero = PhysicalDeviceFragmentShadingRateFeaturesKHR
zero
zero
zero
Multiplication of the combiner rates using the fragment width\/height in
being added . Fortunately , this only affects situations where a rate of 1
in either dimension is combined with another rate of 1 . All other
both the sum and product of 2 and 2 are equal . In many cases , this limit
If the ' PhysicalDeviceFragmentShadingRatePropertiesKHR ' structure is
' Vulkan . '
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceProperties2 ' ,
< -extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate > ,
' Vulkan . Core10.FundamentalTypes . Bool32 ' ,
' Vulkan . Core10.FundamentalTypes . Extent2D ' ,
' Vulkan . Core10.Enums . SampleCountFlagBits . SampleCountFlagBits ' ,
' Vulkan . Core10.Enums . StructureType . StructureType '
data PhysicalDeviceFragmentShadingRatePropertiesKHR = PhysicalDeviceFragmentShadingRatePropertiesKHR
power - of - two . It /must/ be ( 0,0 ) if the
minFragmentShadingRateAttachmentTexelSize :: Extent2D
power - of - two . It /must/ be ( 0,0 ) if the
maxFragmentShadingRateAttachmentTexelSize :: Extent2D
/must/ be a power - of - two value , and /must/ be less than or equal to
max(@maxFragmentShadingRateAttachmentTexelSize.width@ \/
) . It /must/ be 0 if
maxFragmentShadingRateAttachmentTexelSizeAspectRatio :: Word32
' Vulkan . Core10.FundamentalTypes . FALSE ' , only a single viewport /must/ be
/must/ be ' Vulkan . Core10.FundamentalTypes . FALSE ' if the
< >
< -extensions/html/vkspec.html#features-geometryShader >
primitiveFragmentShadingRateWithMultipleViewports :: Bool
specifies whether a shading rate attachment image view be created
' Vulkan . Core10.FundamentalTypes . FALSE ' , when creating an image view with
' Vulkan . Core10.Enums . ImageUsageFlagBits . IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR ' ,
@layerCount@ /must/ be @1@. It /must/ be
' Vulkan . Core10.FundamentalTypes . FALSE ' if the
< >
< -extensions/html/vkspec.html#features-geometryShader >
layeredShadingRateAttachments :: Bool
@fragmentShadingRateNonTrivialCombinerOps@ specifies whether
' ' be used . It /must/
be ' Vulkan . Core10.FundamentalTypes . FALSE ' unless either the
fragmentShadingRateNonTrivialCombinerOps :: Bool
supported width and height of a fragment . Its @width@ and @height@
members /must/ both be power - of - two values . This limit is purely
maxFragmentSize :: Extent2D
@maxFragmentSizeAspectRatio@ /must/ be a power - of - two value , and /must/
be less than or equal to the maximum of the @width@ and @height@ members
maxFragmentSizeAspectRatio :: Word32
the product of the @width@ and @height@ members of @maxFragmentSize@ ,
@maxSampleMaskWords@ × 32 if @fragmentShadingRateWithShaderSampleMask@
maxFragmentShadingRateCoverageSamples :: Word32
' Vulkan . Core10.Enums . SampleCountFlagBits . SampleCountFlagBits ' value
maxFragmentShadingRateRasterizationSamples :: SampleCountFlagBits
implementation supports writing @FragDepth@ or @FragStencilRefEXT@ from
' Vulkan . Core10.FundamentalTypes . FALSE ' , writing to those built - ins will
clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithShaderDepthStencilWrites :: Bool
@fragmentShadingRateWithSampleMask@ specifies whether the the
' Vulkan . Core10.Pipeline . PipelineMultisampleStateCreateInfo'::@pSampleMask@
to @0@ for multi - pixel fragments . If this value is
' Vulkan . Core10.FundamentalTypes . FALSE ' , zeroing valid bits in the sample
mask will clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithSampleMask :: Bool
' Vulkan . Core10.FundamentalTypes . ' for multi - pixel fragments .
If this value is ' Vulkan . Core10.FundamentalTypes . FALSE ' , using that
built - in will clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithShaderSampleMask :: Bool
@fragmentShadingRateWithConservativeRasterization@ specifies whether
' Vulkan . Core10.FundamentalTypes . FALSE ' if
@VK_EXT_conservative_rasterization@ is not supported . If this value is
' Vulkan . Core10.FundamentalTypes . FALSE ' , using
will clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithConservativeRasterization :: Bool
' Vulkan . Core10.FundamentalTypes . FALSE ' if
' Vulkan . Core10.FundamentalTypes . FALSE ' , using
will clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithFragmentShaderInterlock :: Bool
@fragmentShadingRateWithCustomSampleLocations@ specifies whether
' Vulkan . Core10.FundamentalTypes . FALSE ' if @VK_EXT_sample_locations@ is
not supported . If this value is ' Vulkan . Core10.FundamentalTypes . FALSE ' ,
will clamp the fragment shading rate to ( 1,1 ) .
fragmentShadingRateWithCustomSampleLocations :: Bool
' Vulkan . Core10.FundamentalTypes . FALSE ' will instead combine rates with
an addition . If @fragmentShadingRateNonTrivialCombinerOps@ is
' Vulkan . Core10.FundamentalTypes . FALSE ' , implementations /must/ report
this as ' Vulkan . Core10.FundamentalTypes . FALSE ' . If
@fragmentShadingRateNonTrivialCombinerOps@ is
' Vulkan . Core10.FundamentalTypes . TRUE ' , implementations /should/ report
this as ' Vulkan . Core10.FundamentalTypes . TRUE ' .
fragmentShadingRateStrictMultiplyCombiner :: Bool
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceFragmentShadingRatePropertiesKHR)
#endif
deriving instance Show PhysicalDeviceFragmentShadingRatePropertiesKHR
instance ToCStruct PhysicalDeviceFragmentShadingRatePropertiesKHR where
withCStruct x f = allocaBytes 96 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceFragmentShadingRatePropertiesKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Extent2D)) (minFragmentShadingRateAttachmentTexelSize)
poke ((p `plusPtr` 24 :: Ptr Extent2D)) (maxFragmentShadingRateAttachmentTexelSize)
poke ((p `plusPtr` 32 :: Ptr Word32)) (maxFragmentShadingRateAttachmentTexelSizeAspectRatio)
poke ((p `plusPtr` 36 :: Ptr Bool32)) (boolToBool32 (primitiveFragmentShadingRateWithMultipleViewports))
poke ((p `plusPtr` 40 :: Ptr Bool32)) (boolToBool32 (layeredShadingRateAttachments))
poke ((p `plusPtr` 44 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateNonTrivialCombinerOps))
poke ((p `plusPtr` 48 :: Ptr Extent2D)) (maxFragmentSize)
poke ((p `plusPtr` 56 :: Ptr Word32)) (maxFragmentSizeAspectRatio)
poke ((p `plusPtr` 60 :: Ptr Word32)) (maxFragmentShadingRateCoverageSamples)
poke ((p `plusPtr` 64 :: Ptr SampleCountFlagBits)) (maxFragmentShadingRateRasterizationSamples)
poke ((p `plusPtr` 68 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithShaderDepthStencilWrites))
poke ((p `plusPtr` 72 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithSampleMask))
poke ((p `plusPtr` 76 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithShaderSampleMask))
poke ((p `plusPtr` 80 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithConservativeRasterization))
poke ((p `plusPtr` 84 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithFragmentShaderInterlock))
poke ((p `plusPtr` 88 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateWithCustomSampleLocations))
poke ((p `plusPtr` 92 :: Ptr Bool32)) (boolToBool32 (fragmentShadingRateStrictMultiplyCombiner))
f
cStructSize = 96
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Extent2D)) (zero)
poke ((p `plusPtr` 24 :: Ptr Extent2D)) (zero)
poke ((p `plusPtr` 32 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 36 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 40 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 44 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 48 :: Ptr Extent2D)) (zero)
poke ((p `plusPtr` 56 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 60 :: Ptr Word32)) (zero)
poke ((p `plusPtr` 64 :: Ptr SampleCountFlagBits)) (zero)
poke ((p `plusPtr` 68 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 72 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 76 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 80 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 84 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 88 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 92 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceFragmentShadingRatePropertiesKHR where
peekCStruct p = do
minFragmentShadingRateAttachmentTexelSize <- peekCStruct @Extent2D ((p `plusPtr` 16 :: Ptr Extent2D))
maxFragmentShadingRateAttachmentTexelSize <- peekCStruct @Extent2D ((p `plusPtr` 24 :: Ptr Extent2D))
maxFragmentShadingRateAttachmentTexelSizeAspectRatio <- peek @Word32 ((p `plusPtr` 32 :: Ptr Word32))
primitiveFragmentShadingRateWithMultipleViewports <- peek @Bool32 ((p `plusPtr` 36 :: Ptr Bool32))
layeredShadingRateAttachments <- peek @Bool32 ((p `plusPtr` 40 :: Ptr Bool32))
fragmentShadingRateNonTrivialCombinerOps <- peek @Bool32 ((p `plusPtr` 44 :: Ptr Bool32))
maxFragmentSize <- peekCStruct @Extent2D ((p `plusPtr` 48 :: Ptr Extent2D))
maxFragmentSizeAspectRatio <- peek @Word32 ((p `plusPtr` 56 :: Ptr Word32))
maxFragmentShadingRateCoverageSamples <- peek @Word32 ((p `plusPtr` 60 :: Ptr Word32))
maxFragmentShadingRateRasterizationSamples <- peek @SampleCountFlagBits ((p `plusPtr` 64 :: Ptr SampleCountFlagBits))
fragmentShadingRateWithShaderDepthStencilWrites <- peek @Bool32 ((p `plusPtr` 68 :: Ptr Bool32))
fragmentShadingRateWithSampleMask <- peek @Bool32 ((p `plusPtr` 72 :: Ptr Bool32))
fragmentShadingRateWithShaderSampleMask <- peek @Bool32 ((p `plusPtr` 76 :: Ptr Bool32))
fragmentShadingRateWithConservativeRasterization <- peek @Bool32 ((p `plusPtr` 80 :: Ptr Bool32))
fragmentShadingRateWithFragmentShaderInterlock <- peek @Bool32 ((p `plusPtr` 84 :: Ptr Bool32))
fragmentShadingRateWithCustomSampleLocations <- peek @Bool32 ((p `plusPtr` 88 :: Ptr Bool32))
fragmentShadingRateStrictMultiplyCombiner <- peek @Bool32 ((p `plusPtr` 92 :: Ptr Bool32))
pure $ PhysicalDeviceFragmentShadingRatePropertiesKHR
minFragmentShadingRateAttachmentTexelSize
maxFragmentShadingRateAttachmentTexelSize
maxFragmentShadingRateAttachmentTexelSizeAspectRatio
(bool32ToBool primitiveFragmentShadingRateWithMultipleViewports)
(bool32ToBool layeredShadingRateAttachments)
(bool32ToBool fragmentShadingRateNonTrivialCombinerOps)
maxFragmentSize
maxFragmentSizeAspectRatio
maxFragmentShadingRateCoverageSamples
maxFragmentShadingRateRasterizationSamples
(bool32ToBool fragmentShadingRateWithShaderDepthStencilWrites)
(bool32ToBool fragmentShadingRateWithSampleMask)
(bool32ToBool fragmentShadingRateWithShaderSampleMask)
(bool32ToBool fragmentShadingRateWithConservativeRasterization)
(bool32ToBool fragmentShadingRateWithFragmentShaderInterlock)
(bool32ToBool fragmentShadingRateWithCustomSampleLocations)
(bool32ToBool fragmentShadingRateStrictMultiplyCombiner)
instance Storable PhysicalDeviceFragmentShadingRatePropertiesKHR where
sizeOf ~_ = 96
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceFragmentShadingRatePropertiesKHR where
zero = PhysicalDeviceFragmentShadingRatePropertiesKHR
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
zero
| VkPhysicalDeviceFragmentShadingRateKHR - Structure returning information
< -extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate > ,
' Vulkan . Core10.FundamentalTypes . Extent2D ' ,
' Vulkan . Core10.Enums . SampleCountFlagBits . SampleCountFlags ' ,
' Vulkan . Core10.Enums . StructureType . StructureType ' ,
data PhysicalDeviceFragmentShadingRateKHR = PhysicalDeviceFragmentShadingRateKHR
sampleCounts :: SampleCountFlags
| @fragmentSize@ is a ' Vulkan . Core10.FundamentalTypes . Extent2D ' describing
fragmentSize :: Extent2D
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceFragmentShadingRateKHR)
#endif
deriving instance Show PhysicalDeviceFragmentShadingRateKHR
instance ToCStruct PhysicalDeviceFragmentShadingRateKHR where
withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceFragmentShadingRateKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr SampleCountFlags)) (sampleCounts)
poke ((p `plusPtr` 20 :: Ptr Extent2D)) (fragmentSize)
f
cStructSize = 32
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr SampleCountFlags)) (zero)
poke ((p `plusPtr` 20 :: Ptr Extent2D)) (zero)
f
instance FromCStruct PhysicalDeviceFragmentShadingRateKHR where
peekCStruct p = do
sampleCounts <- peek @SampleCountFlags ((p `plusPtr` 16 :: Ptr SampleCountFlags))
fragmentSize <- peekCStruct @Extent2D ((p `plusPtr` 20 :: Ptr Extent2D))
pure $ PhysicalDeviceFragmentShadingRateKHR
sampleCounts fragmentSize
instance Storable PhysicalDeviceFragmentShadingRateKHR where
sizeOf ~_ = 32
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceFragmentShadingRateKHR where
zero = PhysicalDeviceFragmentShadingRateKHR
zero
zero
where combine(Axy , ) is the combine operation , and Axy and are the
is ' Vulkan . Core10.FundamentalTypes . FALSE ' , using
' FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR ' with values of 1 for both A
and B in the same dimension results in the value 2 being produced for
< -extensions/html/vkspec.html#VK_KHR_fragment_shading_rate VK_KHR_fragment_shading_rate > ,
' Vulkan . Extensions . VK_NV_fragment_shading_rate_enums . PipelineFragmentShadingRateEnumStateCreateInfoNV ' ,
' PipelineFragmentShadingRateStateCreateInfoKHR ' ,
' Vulkan . Extensions . VK_NV_fragment_shading_rate_enums.cmdSetFragmentShadingRateEnumNV ' ,
newtype FragmentShadingRateCombinerOpKHR = FragmentShadingRateCombinerOpKHR Int32
deriving newtype (Eq, Ord, Storable, Zero)
operation of combine(Axy , ) = Axy .
pattern FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = FragmentShadingRateCombinerOpKHR 0
| ' ' specifies a combiner
operation of combine(Axy , ) = Bxy .
pattern FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = FragmentShadingRateCombinerOpKHR 1
operation of combine(Axy , ) = min(Axy , ) .
pattern FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = FragmentShadingRateCombinerOpKHR 2
operation of combine(Axy , ) = max(Axy , ) .
pattern FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = FragmentShadingRateCombinerOpKHR 3
operation of combine(Axy , ) = Axy*Bxy .
pattern FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = FragmentShadingRateCombinerOpKHR 4
# COMPLETE
FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR
, , FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR
, FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR
, FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR : :
FragmentShadingRateCombinerOpKHR
#
FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR
, FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR
, FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR
, FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR
, FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR ::
FragmentShadingRateCombinerOpKHR
#-}
conNameFragmentShadingRateCombinerOpKHR :: String
conNameFragmentShadingRateCombinerOpKHR = "FragmentShadingRateCombinerOpKHR"
enumPrefixFragmentShadingRateCombinerOpKHR :: String
enumPrefixFragmentShadingRateCombinerOpKHR = "FRAGMENT_SHADING_RATE_COMBINER_OP_"
showTableFragmentShadingRateCombinerOpKHR :: [(FragmentShadingRateCombinerOpKHR, String)]
showTableFragmentShadingRateCombinerOpKHR =
[
( FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR
, "KEEP_KHR"
)
,
( FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR
, "REPLACE_KHR"
)
,
( FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR
, "MIN_KHR"
)
,
( FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR
, "MAX_KHR"
)
,
( FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR
, "MUL_KHR"
)
]
instance Show FragmentShadingRateCombinerOpKHR where
showsPrec =
enumShowsPrec
enumPrefixFragmentShadingRateCombinerOpKHR
showTableFragmentShadingRateCombinerOpKHR
conNameFragmentShadingRateCombinerOpKHR
(\(FragmentShadingRateCombinerOpKHR x) -> x)
(showsPrec 11)
instance Read FragmentShadingRateCombinerOpKHR where
readPrec =
enumReadPrec
enumPrefixFragmentShadingRateCombinerOpKHR
showTableFragmentShadingRateCombinerOpKHR
conNameFragmentShadingRateCombinerOpKHR
FragmentShadingRateCombinerOpKHR
type KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION = 2
No documentation found for TopLevel " VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION "
pattern KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION = 2
type KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME = "VK_KHR_fragment_shading_rate"
No documentation found for TopLevel " VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "
pattern KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME = "VK_KHR_fragment_shading_rate"
|
f96a45f1f71e5853faa80c9eebf5f09685b224e31b5a40d0083d5075bc3bfc13 | liquidz/misaki | _config.clj | {
;; directory setting
:public-dir "public/"
:template-dir "template/"
:layout-dir "layouts/"
;; posts and tags url setting
;; default value: "/"
;; ex)
;; "/" => "/YYYY-MM/POST.html"
;; "/foo" => "/foo/YYYY-MM/POST.html"
:url-base "/"
;; dev server port
default value : 8080
:port 8080
;; site language
;; default value: "en"
:lang "en"
;; default site data
:site {:charset "utf-8"
:css ["+Sans"
"/css/common.css"
"/css/main.css"]}
;; highlight setting
:code-highlight {:CLJ "lang-clj", :CLOJURE "lang-clj"}
;; compiler setting
;; default value: "default"
:compiler "default"
}
| null | https://raw.githubusercontent.com/liquidz/misaki/b8104e632058e3b3da4487513d10e666e5914ec9/samples/impress/_config.clj | clojure | directory setting
posts and tags url setting
default value: "/"
ex)
"/" => "/YYYY-MM/POST.html"
"/foo" => "/foo/YYYY-MM/POST.html"
dev server port
site language
default value: "en"
default site data
highlight setting
compiler setting
default value: "default" | {
:public-dir "public/"
:template-dir "template/"
:layout-dir "layouts/"
:url-base "/"
default value : 8080
:port 8080
:lang "en"
:site {:charset "utf-8"
:css ["+Sans"
"/css/common.css"
"/css/main.css"]}
:code-highlight {:CLJ "lang-clj", :CLOJURE "lang-clj"}
:compiler "default"
}
|
fef403d7a0b3c6565f5f4970bac59b174272c70ec7c6c0e7d2f12c07d4ae039d | chapmanb/bcbio.variation | reorder.clj | (ns bcbio.align.reorder
"Reorder BAM alignment files to a reference dictionary, potentially swapping naming.
Handles Human hg19 to GRCh37 naming conversions."
(:import [htsjdk.samtools SAMFileReader SAMFileWriterFactory SAMReadGroupRecord
SAMTag SAMFileReader$ValidationStringency])
(:use [clojure.java.io]
[bcbio.align.ref :only [get-seq-dict]]
[bcbio.run.broad :only [index-bam]]
[bcbio.variation.normalize :only [prep-rename-map]])
(:require [bcbio.run.fsp :as fsp]
[bcbio.run.itx :as itx]))
(defn- updated-bam-header
"Add updated sequence dictionary and run group information to header."
[in-bam ref-file call exp]
(letfn [(update-rgs [rgs]
(if-not (empty? rgs) rgs
[(doto (SAMReadGroupRecord. "1")
(.setLibrary (:sample exp))
(.setPlatform (get call :platform "illumina"))
(.setSample (:sample exp))
(.setPlatformUnit (:sample exp)))]))]
(let [read-groups (update-rgs (-> in-bam .getFileHeader .getReadGroups))]
(doto (-> in-bam .getFileHeader .clone)
(.setSequenceDictionary (-> ref-file get-seq-dict))
(.setReadGroups read-groups)))))
(defn get-new-chr-order
"Retrieve order of chromosomes to fetch and mapping to new index."
[bam-names ref-names ref-file]
(letfn [(get-bam-name-map [bam-names orig-ref-names]
(let [ref-names (set orig-ref-names)
name-remap (prep-rename-map :GRCh37 ref-file)]
(reduce (fn [coll x]
(assoc coll (cond
(contains? ref-names x) x
(contains? name-remap x) (get name-remap x)
:else (throw (Exception. (str "Could not map " x))))
x))
{} bam-names)))
(get-index-map [name-map]
(let [bam-name-map (reduce (fn [coll [x y]] (assoc coll y x))
{} name-map)]
(reduce (fn [coll [i x]]
(assoc coll i (.indexOf ref-names (get bam-name-map x))))
{} (map-indexed vector bam-names))))]
(when-not (every? #(apply = %) (partition 2 (interleave ref-names bam-names)))
(let [name-map (get-bam-name-map bam-names ref-names)]
{:names (remove nil? (map #(get name-map %) ref-names))
:indexes (get-index-map name-map)}))))
(defn bam-read-seq
"Lazy sequence for BAM reads from a Picard iterator."
[iter]
(lazy-seq
(when (.hasNext iter)
(cons (.next iter) (bam-read-seq iter)))))
(defn- write-reorder-bam
"Write reordered BAM file in specified chromosome order."
[in-bam out-bam chr-order header]
(let [default-rg-id (-> header .getReadGroups first .getId)]
(letfn [(update-read [read]
(let [new-rg-id (if-let [x (.getAttribute read (.name SAMTag/RG))] x
default-rg-id)]
(doto read
(.setHeader header)
(.setReferenceIndex (get (:indexes chr-order)
(.getReferenceIndex read) -1))
(.setMateReferenceIndex (get (:indexes chr-order)
(.getMateReferenceIndex read) -1))
(.setAttribute (.name SAMTag/RG) new-rg-id))))]
(doseq [cur-chr (:names chr-order)]
(with-open [iter (.query in-bam cur-chr 0 0 false)]
(doseq [read (bam-read-seq iter)]
(.addAlignment out-bam (update-read read)))))
(with-open [iter (.queryUnmapped in-bam)]
(doseq [read (bam-read-seq iter)]
(.addAlignment out-bam (update-read read)))))))
(defn reorder-bam
"Reorder and remap BAM file to match supplied reference file."
[bam-file ref-file call exp & {:keys [out-dir]}]
(let [out-file (fsp/add-file-part bam-file "reorder" out-dir)]
(when (itx/needs-run? out-file)
(index-bam bam-file)
(SAMFileReader/setDefaultValidationStringency SAMFileReader$ValidationStringency/LENIENT)
(with-open [in-bam (SAMFileReader. (file bam-file))]
(let [ref-names (map #(.getSequenceName %) (-> ref-file get-seq-dict .getSequences))
bam-names (map #(.getSequenceName %) (-> in-bam .getFileHeader .getSequenceDictionary
.getSequences))
header (updated-bam-header in-bam ref-file call exp)]
(if-let [chr-order (get-new-chr-order bam-names ref-names ref-file)]
(do
(with-open [out-bam (.makeSAMOrBAMWriter (SAMFileWriterFactory.)
header true (file out-file))]
(write-reorder-bam in-bam out-bam chr-order header))
out-file)
bam-file))))))
(defn -main [bam-file ref-file sample-name]
(reorder-bam bam-file ref-file {} {:sample sample-name}))
| null | https://raw.githubusercontent.com/chapmanb/bcbio.variation/c48834a6819e63dcccb5bc51540c7e19b212a019/src/bcbio/align/reorder.clj | clojure | (ns bcbio.align.reorder
"Reorder BAM alignment files to a reference dictionary, potentially swapping naming.
Handles Human hg19 to GRCh37 naming conversions."
(:import [htsjdk.samtools SAMFileReader SAMFileWriterFactory SAMReadGroupRecord
SAMTag SAMFileReader$ValidationStringency])
(:use [clojure.java.io]
[bcbio.align.ref :only [get-seq-dict]]
[bcbio.run.broad :only [index-bam]]
[bcbio.variation.normalize :only [prep-rename-map]])
(:require [bcbio.run.fsp :as fsp]
[bcbio.run.itx :as itx]))
(defn- updated-bam-header
"Add updated sequence dictionary and run group information to header."
[in-bam ref-file call exp]
(letfn [(update-rgs [rgs]
(if-not (empty? rgs) rgs
[(doto (SAMReadGroupRecord. "1")
(.setLibrary (:sample exp))
(.setPlatform (get call :platform "illumina"))
(.setSample (:sample exp))
(.setPlatformUnit (:sample exp)))]))]
(let [read-groups (update-rgs (-> in-bam .getFileHeader .getReadGroups))]
(doto (-> in-bam .getFileHeader .clone)
(.setSequenceDictionary (-> ref-file get-seq-dict))
(.setReadGroups read-groups)))))
(defn get-new-chr-order
"Retrieve order of chromosomes to fetch and mapping to new index."
[bam-names ref-names ref-file]
(letfn [(get-bam-name-map [bam-names orig-ref-names]
(let [ref-names (set orig-ref-names)
name-remap (prep-rename-map :GRCh37 ref-file)]
(reduce (fn [coll x]
(assoc coll (cond
(contains? ref-names x) x
(contains? name-remap x) (get name-remap x)
:else (throw (Exception. (str "Could not map " x))))
x))
{} bam-names)))
(get-index-map [name-map]
(let [bam-name-map (reduce (fn [coll [x y]] (assoc coll y x))
{} name-map)]
(reduce (fn [coll [i x]]
(assoc coll i (.indexOf ref-names (get bam-name-map x))))
{} (map-indexed vector bam-names))))]
(when-not (every? #(apply = %) (partition 2 (interleave ref-names bam-names)))
(let [name-map (get-bam-name-map bam-names ref-names)]
{:names (remove nil? (map #(get name-map %) ref-names))
:indexes (get-index-map name-map)}))))
(defn bam-read-seq
"Lazy sequence for BAM reads from a Picard iterator."
[iter]
(lazy-seq
(when (.hasNext iter)
(cons (.next iter) (bam-read-seq iter)))))
(defn- write-reorder-bam
"Write reordered BAM file in specified chromosome order."
[in-bam out-bam chr-order header]
(let [default-rg-id (-> header .getReadGroups first .getId)]
(letfn [(update-read [read]
(let [new-rg-id (if-let [x (.getAttribute read (.name SAMTag/RG))] x
default-rg-id)]
(doto read
(.setHeader header)
(.setReferenceIndex (get (:indexes chr-order)
(.getReferenceIndex read) -1))
(.setMateReferenceIndex (get (:indexes chr-order)
(.getMateReferenceIndex read) -1))
(.setAttribute (.name SAMTag/RG) new-rg-id))))]
(doseq [cur-chr (:names chr-order)]
(with-open [iter (.query in-bam cur-chr 0 0 false)]
(doseq [read (bam-read-seq iter)]
(.addAlignment out-bam (update-read read)))))
(with-open [iter (.queryUnmapped in-bam)]
(doseq [read (bam-read-seq iter)]
(.addAlignment out-bam (update-read read)))))))
(defn reorder-bam
"Reorder and remap BAM file to match supplied reference file."
[bam-file ref-file call exp & {:keys [out-dir]}]
(let [out-file (fsp/add-file-part bam-file "reorder" out-dir)]
(when (itx/needs-run? out-file)
(index-bam bam-file)
(SAMFileReader/setDefaultValidationStringency SAMFileReader$ValidationStringency/LENIENT)
(with-open [in-bam (SAMFileReader. (file bam-file))]
(let [ref-names (map #(.getSequenceName %) (-> ref-file get-seq-dict .getSequences))
bam-names (map #(.getSequenceName %) (-> in-bam .getFileHeader .getSequenceDictionary
.getSequences))
header (updated-bam-header in-bam ref-file call exp)]
(if-let [chr-order (get-new-chr-order bam-names ref-names ref-file)]
(do
(with-open [out-bam (.makeSAMOrBAMWriter (SAMFileWriterFactory.)
header true (file out-file))]
(write-reorder-bam in-bam out-bam chr-order header))
out-file)
bam-file))))))
(defn -main [bam-file ref-file sample-name]
(reorder-bam bam-file ref-file {} {:sample sample-name}))
|
|
385d1e79985a7eafb13a3259490a77244c882675b97628f343d050fde3421782 | rpasta42/ChessKell | Helpers.hs | module Helpers
( posToCoord, coordToPos, moveToStr
, mkBoard, mkBoardFromPair
, mkPiece, mkPieceW, mkPieceB, mkPieceNoMoves
, flipBoardColor, setPieceMoves
, flipColor
, coordEq
, getPieceCoord
, pieceMovesTo2
, isCoordOnBoard
, elem'
, strToMove
) where
import qualified Data.Matrix as M
import qualified Data.Char as C
import qualified Data.List as L
import Types
posToCoord / coordToPos / moveToStr
posToCoord :: Position -> Coord
posToCoord (cX, y) = (numX, y)
where numX = C.ord cX - C.ord 'A' + 1
coordToPos :: Coord -> Position
coordToPos (x, y) = (cX, y)
where cX = C.chr (C.ord 'A' + x - 1)
numToChr x = C.chr $ C.ord '0' + x
moveToStr $ Move ( ( ' a ' , 1 ) , ( ' b ' , 2 ) )
moveToStr (Move ((x1, y1), (x2,y2))) =
[C.toLower x1] ++ [numToChr y1] ++ [C.toLower x2] ++ [numToChr y2]
---start make functions
mkBoard whitePieces blackPieces lastMove nextToMove =
Board { getWhitePieces=whitePieces
, getBlackPieces=blackPieces
, getLastMove=lastMove
, getNextPlayer=nextToMove
}
mkBoardFromPair lastMove nextPlayer
(wPieces, bPieces) =
mkBoard wPieces bPieces lastMove nextPlayer
mkPiece color piece pos moved moves =
BoardPiece { getColor = color
, getPiece = piece
, getPosition = pos
, getHaveMoved = moved
, getMoves = moves
}
mkPieceW = mkPiece White
mkPieceB = mkPiece Black
mkPieceNoMoves color piece pos moved =
mkPiece color piece pos moved Nothing
--end make functions
--small funcs
setPieceMoves :: BoardPiece -> PieceMoves2 -> BoardPiece
setPieceMoves (BoardPiece { getPiece = piece
, getColor = color
, getPosition = pos
, getHaveMoved = haveMoved
})
moves =
mkPiece color piece pos haveMoved $ Just moves
flipBoardColor :: Board -> Board
flipBoardColor (Board { getWhitePieces = wPieces
, getBlackPieces = bPieces
, getNextPlayer = color
, getLastMove = lastMove
}) =
mkBoard wPieces bPieces lastMove (flipColor color)
flipColor :: Color -> Color
flipColor White = Black
flipColor Black = White
coordEq a b = a == b
getPieceCoord :: BoardPiece -> Coord
getPieceCoord p = posToCoord $ getPosition p
pieceMovesTo2 :: PieceMoves -> PieceMoves2
pieceMovesTo2 (_, caps, moves) = (caps, moves)
isCoordOnBoard :: Coord -> Bool
isCoordOnBoard (x,y) = x >= 1 && x <= 8 && y >= 1 && y <= 8
elem' :: (Eq a, Foldable t) => t a -> a -> Bool
elem' = flip L.elem
" a1,b2 "
strToMove :: String -> ChessRet Move
strToMove s =
let splitted@(part1, part2) = splitAt 2 s
(fromStr, toStr) = (part1, tail part2)
in Right $ Move (strToPos fromStr, strToPos toStr)
where
strToPos s =
let (x, y) = splitAt 1 s
pos = (C.toUpper $ head x, C.digitToInt $ head y)
in pos
| null | https://raw.githubusercontent.com/rpasta42/ChessKell/1b79ebac26bffcc8d18953f1ede862adec23fee1/Helpers.hs | haskell | -start make functions
end make functions
small funcs | module Helpers
( posToCoord, coordToPos, moveToStr
, mkBoard, mkBoardFromPair
, mkPiece, mkPieceW, mkPieceB, mkPieceNoMoves
, flipBoardColor, setPieceMoves
, flipColor
, coordEq
, getPieceCoord
, pieceMovesTo2
, isCoordOnBoard
, elem'
, strToMove
) where
import qualified Data.Matrix as M
import qualified Data.Char as C
import qualified Data.List as L
import Types
posToCoord / coordToPos / moveToStr
posToCoord :: Position -> Coord
posToCoord (cX, y) = (numX, y)
where numX = C.ord cX - C.ord 'A' + 1
coordToPos :: Coord -> Position
coordToPos (x, y) = (cX, y)
where cX = C.chr (C.ord 'A' + x - 1)
numToChr x = C.chr $ C.ord '0' + x
moveToStr $ Move ( ( ' a ' , 1 ) , ( ' b ' , 2 ) )
moveToStr (Move ((x1, y1), (x2,y2))) =
[C.toLower x1] ++ [numToChr y1] ++ [C.toLower x2] ++ [numToChr y2]
mkBoard whitePieces blackPieces lastMove nextToMove =
Board { getWhitePieces=whitePieces
, getBlackPieces=blackPieces
, getLastMove=lastMove
, getNextPlayer=nextToMove
}
mkBoardFromPair lastMove nextPlayer
(wPieces, bPieces) =
mkBoard wPieces bPieces lastMove nextPlayer
mkPiece color piece pos moved moves =
BoardPiece { getColor = color
, getPiece = piece
, getPosition = pos
, getHaveMoved = moved
, getMoves = moves
}
mkPieceW = mkPiece White
mkPieceB = mkPiece Black
mkPieceNoMoves color piece pos moved =
mkPiece color piece pos moved Nothing
setPieceMoves :: BoardPiece -> PieceMoves2 -> BoardPiece
setPieceMoves (BoardPiece { getPiece = piece
, getColor = color
, getPosition = pos
, getHaveMoved = haveMoved
})
moves =
mkPiece color piece pos haveMoved $ Just moves
flipBoardColor :: Board -> Board
flipBoardColor (Board { getWhitePieces = wPieces
, getBlackPieces = bPieces
, getNextPlayer = color
, getLastMove = lastMove
}) =
mkBoard wPieces bPieces lastMove (flipColor color)
flipColor :: Color -> Color
flipColor White = Black
flipColor Black = White
coordEq a b = a == b
getPieceCoord :: BoardPiece -> Coord
getPieceCoord p = posToCoord $ getPosition p
pieceMovesTo2 :: PieceMoves -> PieceMoves2
pieceMovesTo2 (_, caps, moves) = (caps, moves)
isCoordOnBoard :: Coord -> Bool
isCoordOnBoard (x,y) = x >= 1 && x <= 8 && y >= 1 && y <= 8
elem' :: (Eq a, Foldable t) => t a -> a -> Bool
elem' = flip L.elem
" a1,b2 "
strToMove :: String -> ChessRet Move
strToMove s =
let splitted@(part1, part2) = splitAt 2 s
(fromStr, toStr) = (part1, tail part2)
in Right $ Move (strToPos fromStr, strToPos toStr)
where
strToPos s =
let (x, y) = splitAt 1 s
pos = (C.toUpper $ head x, C.digitToInt $ head y)
in pos
|
43e2fb647d82cf690d2ac12dabc6c6ece39a5b6da81b1b3cf8d2911e43adbcdc | zeniuseducation/poly-euler | five.clj | (ns alfa.under200.five
(:require
[clojure.set :refer [union difference intersection subset?]]
[clojure.core.reducers :as r]
[clojure.string :refer [split-lines]]
[alfa.common :refer :all]
[clojure.string :as cs]))
(defn ^long sol3
[^long tar]
(loop [i (int 3) n tar]
(cond
(== i n) i
(== 0 (rem n i)) (if (odd-prime? i)
(let [tmp (loop [m n]
(if (== 0 (rem m i))
(recur (quot m i))
m))]
(recur (+ i 2) tmp))
(recur (+ i 2) n))
:else (recur (+ i 2) n))))
(defn ^long sol5
[^long lim]
(let [faks (int-array (range (+ lim 1)))]
(loop [i (int 2) res (int 1)]
(if (> i lim)
res
(let [p (aget faks i)]
(do (loop [j (int (* i 2))]
(when (<= j lim)
(aset faks j (quot (aget faks j) p))
(recur (+ j i))))
(recur (+ i 1) (* res p))))))))
(defn ^long sol10
[^long lim]
(let [llim (int (Math/sqrt lim))
hlim (if (even? llim) (+ llim 1) (+ llim 2))
primes (boolean-array (+ lim 1) true)
res (loop [i (int 3) res (int 2)]
(if (> i llim)
res
(if (aget primes i)
(do (loop [j (int (* i i))]
(when (<= j lim)
(aset primes j false)
(recur (+ j i i))))
(recur (+ i 2)
(+ res i)))
(recur (+ i 2) res))))]
(loop [i (int hlim) resi res]
(if (> i lim)
resi
(if (aget primes i)
(recur (+ i 2)
(+ i resi))
(recur (+ i 2) resi))))))
(defn ^long sol14b
[^long lim]
(let [refs (ref {1 1})
colat (fn colat [n]
(if-let [tmp (@refs n nil)]
tmp
(let [tmpi (if (even? n)
(+ 1 (colat (quot n 2)))
(+ 1 (colat (+ 1 (* 3 n)))))]
(dosync (alter refs assoc n tmpi))
tmpi)))]
(->> [(range 1 200001)
(range 200001 400001)
(range 400001 600001)
(range 600001 800001)
(range 800001 (+ lim 1))]
(pmap #(apply max-key colat %))
(apply max-key colat))))
(defn ^long sol14
[^long lim]
(let [refs (int-array (+ lim 1) 0)
collat (fn collat [^long n]
(if (<= n lim)
(let [tmp (aget refs n)]
(if (== 0 tmp)
(let [tmpi (+ 1 (if (even? n)
(collat (quot n 2))
(collat (+ 1 (* 3 n)))))]
(aset refs n tmpi)
tmpi)
tmp))
(if (even? n)
(+ 1 (collat (quot n 2)))
(+ 1 (collat (+ 1 (* 3 n)))))))]
(aset refs 1 1)
(loop [i (int 1) cur (int 1) maxi (int 1)]
(if (> i lim)
[cur maxi]
(let [tmp (collat i)]
(if (> tmp maxi)
(recur (+ i 1) i tmp)
(recur (+ i 1) cur maxi)))))))
(defn ^long sol173
[^long lim]
(loop [i (int 1) res (int 0)]
(let [isqr (* i i)
t (- (* (+ i 2) (+ i 2)) isqr)]
(if (> t lim)
res
(recur (+ i 1)
(+ res (loop [j (int (+ i 2)) resj (int 0)]
(let [jsqr (* j j)
t (- jsqr isqr)]
(if (> t lim)
resj
(recur (+ j 2) (+ resj 1)))))))))))
(defn ^long sol174
[^long lim]
(let [refs (int-array (+ lim 1) 0)]
(do (loop [i (int 1)]
(let [isqr (* i i)
t (- (* (+ i 2) (+ i 2)) isqr)]
(when (<= t lim)
(loop [j (int (+ i 2))]
(let [jsqr (* j j)
t (- jsqr isqr)]
(when (<= t lim)
(aset refs t (+ (aget refs t) 1))
(recur (+ j 2)))))
(recur (+ i 1)))))
(loop [i (int 4) res (int 0)]
(if (> i lim)
res
(if (<= 1 (aget refs i) 10)
(recur (+ i 4) (+ res 1))
(recur (+ i 4) res)))))))
(defn jumlah-faktor
[^long lim]
(let [faks (int-array (+ lim 1) 1)
llim (int (Math/sqrt lim))]
(do (doseq [i (range 2 (+ llim 1))]
(let [isqr (* i i)]
(doseq [j (range (* 2 i) (+ lim 1) i)
:when (<= isqr j)]
(if (== (* i i) j)
(aset faks j (+ i (aget faks j)))
(aset faks j (+ i (quot j i) (aget faks j)))))))
(sequence
(comp (map #(vector % (aget faks %)))
(filter #(let [[a b] %] (> b a)))
(map first))
(range 2 (+ lim 1))))))
runs in 2.4ms
(defn ^long sol21
[^long n]
(let [lim (int (* 3 n))
llim (int (Math/sqrt lim))
faks (int-array (+ lim 1) 1)]
(loop [i (int 2)]
(when (<= i llim)
(let [isqr (* i i)]
(aset faks isqr (+ (aget faks isqr) i))
(loop [j (+ isqr i)]
(when (<= j lim)
(aset faks j (+ (aget faks j) i (quot j i)))
(recur (+ j i))))
(recur (+ i 1)))))
(loop [i (int 2) res (int 0)]
(if (< i n)
(let [itmp (aget faks i)]
(if (and (not= i itmp) (== i (aget faks itmp)))
(recur (+ i 1) (+ res i))
(recur (+ i 1) res)))
res))))
(defn sol23
[^long lim]
(let [tmp (int-array (jumlah-faktor lim))
cnt (count tmp)
abuns (boolean-array (+ lim 1) false)]
(do (doseq [i (range cnt)
:let [itmp (aget tmp i)]]
(doseq [j (range i cnt)
:let [jtmp (aget tmp j)
stmp (+ itmp jtmp)]
:while (<= stmp lim)]
(aset abuns stmp true)))
(->> (range 12 (+ lim 1))
(filter #(aget abuns %))
(reduce +)
(- (reduce + (range (+ lim 1))))))))
| null | https://raw.githubusercontent.com/zeniuseducation/poly-euler/734fdcf1ddd096a8730600b684bf7398d071d499/Alfa/src/alfa/under200/five.clj | clojure | (ns alfa.under200.five
(:require
[clojure.set :refer [union difference intersection subset?]]
[clojure.core.reducers :as r]
[clojure.string :refer [split-lines]]
[alfa.common :refer :all]
[clojure.string :as cs]))
(defn ^long sol3
[^long tar]
(loop [i (int 3) n tar]
(cond
(== i n) i
(== 0 (rem n i)) (if (odd-prime? i)
(let [tmp (loop [m n]
(if (== 0 (rem m i))
(recur (quot m i))
m))]
(recur (+ i 2) tmp))
(recur (+ i 2) n))
:else (recur (+ i 2) n))))
(defn ^long sol5
[^long lim]
(let [faks (int-array (range (+ lim 1)))]
(loop [i (int 2) res (int 1)]
(if (> i lim)
res
(let [p (aget faks i)]
(do (loop [j (int (* i 2))]
(when (<= j lim)
(aset faks j (quot (aget faks j) p))
(recur (+ j i))))
(recur (+ i 1) (* res p))))))))
(defn ^long sol10
[^long lim]
(let [llim (int (Math/sqrt lim))
hlim (if (even? llim) (+ llim 1) (+ llim 2))
primes (boolean-array (+ lim 1) true)
res (loop [i (int 3) res (int 2)]
(if (> i llim)
res
(if (aget primes i)
(do (loop [j (int (* i i))]
(when (<= j lim)
(aset primes j false)
(recur (+ j i i))))
(recur (+ i 2)
(+ res i)))
(recur (+ i 2) res))))]
(loop [i (int hlim) resi res]
(if (> i lim)
resi
(if (aget primes i)
(recur (+ i 2)
(+ i resi))
(recur (+ i 2) resi))))))
(defn ^long sol14b
[^long lim]
(let [refs (ref {1 1})
colat (fn colat [n]
(if-let [tmp (@refs n nil)]
tmp
(let [tmpi (if (even? n)
(+ 1 (colat (quot n 2)))
(+ 1 (colat (+ 1 (* 3 n)))))]
(dosync (alter refs assoc n tmpi))
tmpi)))]
(->> [(range 1 200001)
(range 200001 400001)
(range 400001 600001)
(range 600001 800001)
(range 800001 (+ lim 1))]
(pmap #(apply max-key colat %))
(apply max-key colat))))
(defn ^long sol14
[^long lim]
(let [refs (int-array (+ lim 1) 0)
collat (fn collat [^long n]
(if (<= n lim)
(let [tmp (aget refs n)]
(if (== 0 tmp)
(let [tmpi (+ 1 (if (even? n)
(collat (quot n 2))
(collat (+ 1 (* 3 n)))))]
(aset refs n tmpi)
tmpi)
tmp))
(if (even? n)
(+ 1 (collat (quot n 2)))
(+ 1 (collat (+ 1 (* 3 n)))))))]
(aset refs 1 1)
(loop [i (int 1) cur (int 1) maxi (int 1)]
(if (> i lim)
[cur maxi]
(let [tmp (collat i)]
(if (> tmp maxi)
(recur (+ i 1) i tmp)
(recur (+ i 1) cur maxi)))))))
(defn ^long sol173
[^long lim]
(loop [i (int 1) res (int 0)]
(let [isqr (* i i)
t (- (* (+ i 2) (+ i 2)) isqr)]
(if (> t lim)
res
(recur (+ i 1)
(+ res (loop [j (int (+ i 2)) resj (int 0)]
(let [jsqr (* j j)
t (- jsqr isqr)]
(if (> t lim)
resj
(recur (+ j 2) (+ resj 1)))))))))))
(defn ^long sol174
[^long lim]
(let [refs (int-array (+ lim 1) 0)]
(do (loop [i (int 1)]
(let [isqr (* i i)
t (- (* (+ i 2) (+ i 2)) isqr)]
(when (<= t lim)
(loop [j (int (+ i 2))]
(let [jsqr (* j j)
t (- jsqr isqr)]
(when (<= t lim)
(aset refs t (+ (aget refs t) 1))
(recur (+ j 2)))))
(recur (+ i 1)))))
(loop [i (int 4) res (int 0)]
(if (> i lim)
res
(if (<= 1 (aget refs i) 10)
(recur (+ i 4) (+ res 1))
(recur (+ i 4) res)))))))
(defn jumlah-faktor
[^long lim]
(let [faks (int-array (+ lim 1) 1)
llim (int (Math/sqrt lim))]
(do (doseq [i (range 2 (+ llim 1))]
(let [isqr (* i i)]
(doseq [j (range (* 2 i) (+ lim 1) i)
:when (<= isqr j)]
(if (== (* i i) j)
(aset faks j (+ i (aget faks j)))
(aset faks j (+ i (quot j i) (aget faks j)))))))
(sequence
(comp (map #(vector % (aget faks %)))
(filter #(let [[a b] %] (> b a)))
(map first))
(range 2 (+ lim 1))))))
runs in 2.4ms
(defn ^long sol21
[^long n]
(let [lim (int (* 3 n))
llim (int (Math/sqrt lim))
faks (int-array (+ lim 1) 1)]
(loop [i (int 2)]
(when (<= i llim)
(let [isqr (* i i)]
(aset faks isqr (+ (aget faks isqr) i))
(loop [j (+ isqr i)]
(when (<= j lim)
(aset faks j (+ (aget faks j) i (quot j i)))
(recur (+ j i))))
(recur (+ i 1)))))
(loop [i (int 2) res (int 0)]
(if (< i n)
(let [itmp (aget faks i)]
(if (and (not= i itmp) (== i (aget faks itmp)))
(recur (+ i 1) (+ res i))
(recur (+ i 1) res)))
res))))
(defn sol23
[^long lim]
(let [tmp (int-array (jumlah-faktor lim))
cnt (count tmp)
abuns (boolean-array (+ lim 1) false)]
(do (doseq [i (range cnt)
:let [itmp (aget tmp i)]]
(doseq [j (range i cnt)
:let [jtmp (aget tmp j)
stmp (+ itmp jtmp)]
:while (<= stmp lim)]
(aset abuns stmp true)))
(->> (range 12 (+ lim 1))
(filter #(aget abuns %))
(reduce +)
(- (reduce + (range (+ lim 1))))))))
|
|
1b26f901db68b527fcebdd44f8833bc06d2dc3d07ee84282dedf487785ae9acb | GrammaticalFramework/gf-core | CommandInfo.hs | module GF.Command.CommandInfo where
import GF.Command.Abstract(Option,Expr,Term)
import GF.Text.Pretty(render)
import GF.Grammar.Printer() -- instance Pretty Term
import GF.Grammar.Macros(string2term)
import qualified PGF as H(showExpr)
import qualified PGF.Internal as H(Literal(LStr),Expr(ELit)) ----
data CommandInfo m = CommandInfo {
exec :: [Option] -> CommandArguments -> m CommandOutput,
synopsis :: String,
syntax :: String,
explanation :: String,
longname :: String,
options :: [(String,String)],
flags :: [(String,String)],
examples :: [(String,String)],
needsTypeCheck :: Bool
}
mapCommandExec f c = c { exec = \ opts ts -> f (exec c opts ts) }
emptyCommandInfo : : CommandInfo env
emptyCommandInfo = CommandInfo {
exec = error "command not implemented",
synopsis = "",
syntax = "",
explanation = "",
longname = "",
options = [],
flags = [],
examples = [],
needsTypeCheck = True
}
--------------------------------------------------------------------------------
class Monad m => TypeCheckArg m where typeCheckArg :: Expr -> m Expr
--------------------------------------------------------------------------------
data CommandArguments = Exprs [Expr] | Strings [String] | Term Term
newtype CommandOutput = Piped (CommandArguments,String) ---- errors, etc
-- ** Converting command output
fromStrings ss = Piped (Strings ss, unlines ss)
fromExprs es = Piped (Exprs es,unlines (map (H.showExpr []) es))
fromString s = Piped (Strings [s], s)
pipeWithMessage es msg = Piped (Exprs es,msg)
pipeMessage msg = Piped (Exprs [],msg)
pipeExprs es = Piped (Exprs es,[]) -- only used in emptyCommandInfo
void = Piped (Exprs [],"")
stringAsExpr = H.ELit . H.LStr -- should be a pattern macro
-- ** Converting command input
toStrings args =
case args of
Strings ss -> ss
Exprs es -> zipWith showAsString (True:repeat False) es
Term t -> [render t]
where
showAsString first t =
case t of
H.ELit (H.LStr s) -> s
_ -> ['\n'|not first] ++
-newline needed in other cases than the first
toExprs args =
case args of
Exprs es -> es
Strings ss -> map stringAsExpr ss
Term t -> [stringAsExpr (render t)]
toTerm args =
case args of
Term t -> t
Strings ss -> string2term $ unwords ss -- hmm
Exprs es -> string2term $ unwords $ map (H.showExpr []) es -- hmm
-- ** Creating documentation
mkEx s = let (command,expl) = break (=="--") (words s) in (unwords command, unwords (drop 1 expl))
| null | https://raw.githubusercontent.com/GrammaticalFramework/gf-core/9b4f2dd18b64b770aaebfa1885085e8e3447f119/src/compiler/GF/Command/CommandInfo.hs | haskell | instance Pretty Term
--
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- errors, etc
** Converting command output
only used in emptyCommandInfo
should be a pattern macro
** Converting command input
hmm
hmm
** Creating documentation | module GF.Command.CommandInfo where
import GF.Command.Abstract(Option,Expr,Term)
import GF.Text.Pretty(render)
import GF.Grammar.Macros(string2term)
import qualified PGF as H(showExpr)
data CommandInfo m = CommandInfo {
exec :: [Option] -> CommandArguments -> m CommandOutput,
synopsis :: String,
syntax :: String,
explanation :: String,
longname :: String,
options :: [(String,String)],
flags :: [(String,String)],
examples :: [(String,String)],
needsTypeCheck :: Bool
}
mapCommandExec f c = c { exec = \ opts ts -> f (exec c opts ts) }
emptyCommandInfo : : CommandInfo env
emptyCommandInfo = CommandInfo {
exec = error "command not implemented",
synopsis = "",
syntax = "",
explanation = "",
longname = "",
options = [],
flags = [],
examples = [],
needsTypeCheck = True
}
class Monad m => TypeCheckArg m where typeCheckArg :: Expr -> m Expr
data CommandArguments = Exprs [Expr] | Strings [String] | Term Term
fromStrings ss = Piped (Strings ss, unlines ss)
fromExprs es = Piped (Exprs es,unlines (map (H.showExpr []) es))
fromString s = Piped (Strings [s], s)
pipeWithMessage es msg = Piped (Exprs es,msg)
pipeMessage msg = Piped (Exprs [],msg)
void = Piped (Exprs [],"")
toStrings args =
case args of
Strings ss -> ss
Exprs es -> zipWith showAsString (True:repeat False) es
Term t -> [render t]
where
showAsString first t =
case t of
H.ELit (H.LStr s) -> s
_ -> ['\n'|not first] ++
-newline needed in other cases than the first
toExprs args =
case args of
Exprs es -> es
Strings ss -> map stringAsExpr ss
Term t -> [stringAsExpr (render t)]
toTerm args =
case args of
Term t -> t
mkEx s = let (command,expl) = break (=="--") (words s) in (unwords command, unwords (drop 1 expl))
|
b4cf300e26b9a6cf117f14bd1e360900693965cb2adf769a34a915923a5b0951 | dbuenzli/lit | image.mli | ----------------------------------------------------------------------------
Copyright ( c ) 2007 , . All rights reserved .
Distributed under a BSD license , see .. /LICENSE .
----------------------------------------------------------------------------
Copyright (c) 2007, Daniel C. Bünzli. All rights reserved.
Distributed under a BSD license, see ../LICENSE.
----------------------------------------------------------------------------*)
* Image buffers ( F ) .
Image buffers store pixel information in a buffer . Images can have
from one to three dimensions . The first pixel of the buffer
is the image 's lower left frontmost pixel .
Image buffers store pixel information in a buffer. Images can have
from one to three dimensions. The first pixel of the buffer
is the image's lower left frontmost pixel. *)
open Gg;;
* { 2 Format }
type dim = [ `D1 | `D2 | `D3 ]
(** Image dimension. *)
type comps = [ `A | `R | `G | `B | `RGB | `ARGB | `RGBA | `LA | `L | `D ]
(** Pixel components. *)
val comps_count : comps -> int
(** Number of components. *)
type storage = [ Buf.uint_kind | `Float32]
type 'a pf = [
| `A of 'a
| `R of 'a
| `G of 'a
| `B of 'a
| `L of 'a
| `LA of 'a
| `RGB of 'a * [`C | `P332 | `P565 | `DXT1 ]
| `RGBA of 'a * [`C | `P4444 | `P5551 | `P8888 | `P1010102 |
`DXT1 | `DXT3 | `DXT5]
| `ARGB of 'a * [`P4444 | `P1555 | `P8888 | `P2101010 ]]
constraint 'a = [< storage ]
* Pixel formats .
Pixel formats [ ` A ] , [ ` R ] , [ ` G ] , [ ` B ] , [ ` L ] , [ ` LA ] , [ ` RGB ] and
[ ` RGBA ] with [ ` C ] store each component in a single
buffer element with the leftmost component first . For example
[ ` RGB ( ` Float32 , ` C ) ] stores each pixel in a sequence of three
[ ` Float32 ] elements , red first . [ ` C_rev ]
Pixel formats [ ` RGB ] , [ ` RGBA ] and [ ` ARGB ] with explicit bit
layouts store the whole pixel in a single buffer element . The
leftmost component is stored in the most significant bit . The
storage kind must be an integer whose size fits the bits .
For example [ ` RGB ( ` Uint8 , ` P332 ) ] stores a pixel in a
byte as [ 0bRRRGGGBB ] .
Pixel formats [ ` RGB ] and [ ` RGBA ] with [ ` DXT1 ] , [ ` DXT2 ] and
[ ` ] store pixel data according to the
{ { : }S3TC
format } . The storage kind must be equal to [ ` Uint8 ] .
For fast integer and floating point data transfers
between the cpu and the gpu , [ ` ARGB ( ` Uint32 , ` P8888 ) ] and
[ ` RGBA ( ` Float32 , ` C ) ] seem to be good multiplatform choices .
Pixel formats [`A], [`R], [`G], [`B], [`L], [`LA], [`RGB] and
[`RGBA] with [`C] store each component in a single
buffer element with the leftmost component first. For example
[`RGB (`Float32, `C)] stores each pixel in a sequence of three
[`Float32] elements, red first. [`C_rev]
Pixel formats [`RGB], [`RGBA] and [`ARGB] with explicit bit
layouts store the whole pixel in a single buffer element. The
leftmost component is stored in the most significant bit. The
storage kind must be an integer whose size fits the bits.
For example [`RGB (`Uint8, `P332)] stores a pixel in a
byte as [0bRRRGGGBB].
Pixel formats [`RGB] and [`RGBA] with [`DXT1], [`DXT2] and
[`DXT3] store pixel data according to the
{{:}S3TC
format}. The storage kind must be equal to [`Uint8].
For fast integer and floating point data transfers
between the cpu and the gpu, [`ARGB (`Uint32, `P8888)] and
[`RGBA (`Float32, `C)] seem to be good multiplatform choices. *)
val pf_compressed : 'a pf -> bool
(** True if the pixel format is compressed. *)
val pf_storage : 'a pf -> 'a
(** Storage of the pixel format. *)
val pf_cast : ([< storage] as 'a) -> 'b pf -> 'a pf
(** Specializes the pixel format's type variable. Raises [Invalid_argument]
if the given storage is not equal to {!Image.storage}. *)
type 'a format constraint 'a = [< storage ]
(** The type for image formats. *)
val format : ?first:int -> ?w_skip:int -> ?h_skip:int -> w:int ->
?h:int -> ?d:int -> 'a pf -> dim -> 'a format
* Creates a new format for an image with the given pixel format
and dimension . Arguments irrelevant for a dimension are ignored .
{ ul
{ - [ first ] , buffer element where the data of the bottom , left , frontmost
pixel [ ( 0,0,0 ) ] starts . }
{ - [ w_skip ] , number of { e pixels } to skip between two
consecutive lines , defaults to [ 0 ] . }
{ - [ h_skip ] , number of { e lines } to skip between two
consecutive images , defaults to [ 0 ] . }
{ - [ w ] , [ h ] , [ d ] , image width , height and depth in pixels ,
[ h ] and [ d ] default to [ 1 ] . }
}
{ b Note . } [ Invalid_argument ] is raised if the bit layout of
a pixel format does n't fits its storage and for compressed pixel formats :
if the storage is not [ ` Uint8 ] or the skip values are different from
[ 0 ] or the dimension in not [ ` D2 ] .
and dimension. Arguments irrelevant for a dimension are ignored.
{ul
{- [first], buffer element where the data of the bottom, left, frontmost
pixel [(0,0,0)] starts.}
{- [w_skip], number of {e pixels} to skip between two
consecutive lines, defaults to [0].}
{- [h_skip], number of {e lines} to skip between two
consecutive images, defaults to [0].}
{- [w], [h], [d], image width, height and depth in pixels,
[h] and [d] default to [1].}
}
{b Note.} [Invalid_argument] is raised if the bit layout of
a pixel format doesn't fits its storage and for compressed pixel formats :
if the storage is not [`Uint8] or the skip values are different from
[0] or the dimension in not [`D2]. *)
val first : 'a format -> int
val width_skip : 'a format -> int
val height_skip : 'a format -> int
val width : 'a format -> int
val height : 'a format -> int
val depth : 'a format -> int
val pf : 'a format -> 'a pf
val dim : 'a format -> dim
val storage : 'a format -> 'a
val comps : 'a format -> comps
val extent : 'a format -> v3
(** Width, height, depth. *)
val extent2 : 'a format -> v2
(** Width, height. *)
val size : 'a format -> int
(** Number of buffer elements needed to hold the image. *)
val sub_format : 'a format -> ?x:int -> ?y:int -> ?z:int -> ?w:int ->
?h:int -> ?d:int -> ?dim:dim -> unit ->
'a format
* Format for a subimage of the given format .
{ ul
{ - [ x],[y],[z ] , new origin of the image in pixels , defaults to [ 0,0,0 ] . }
{ - [ w],[h],[d ] , new size of the image , defaults to the original size
minus the new origin . }
{ - [ dim ] , new dimension of the image , must be smaller than the original one ,
defaults to the original dimension . } }
{ b Note . } Raises [ Invalid_argument ] on compressed formats .
{ul
{- [x],[y],[z], new origin of the image in pixels, defaults to [0,0,0].}
{- [w],[h],[d], new size of the image, defaults to the original size
minus the new origin.}
{- [dim], new dimension of the image, must be smaller than the original one,
defaults to the original dimension.}}
{b Note.} Raises [Invalid_argument] on compressed formats.
*)
val print_format : Format.formatter -> 'a format -> unit
(** {2 Image} *)
type 'a t = 'a Buf.t * 'a format constraint 'a = [< Buf.uint_kind | `Float32 ]
(** The type for image buffers. *)
val pos : 'a t -> x:int -> y:int -> int
(** Position of the given pixel in the buffer (assumes [z = 0]).
Irrelevant for compressed formats.
*)
val pos3 : 'a t -> x:int -> y:int -> z:int -> int
(** Position of the given pixel in the buffer. Irrelevant
for compressed formats. *)
val pixel_size : 'a t -> int
(** Buffer elements per pixel. Irrelevant for compressed formats. *)
val sub : 'a t -> ?x:int -> ?y:int -> ?z:int -> ?w:int ->
?h:int -> ?d:int -> ?dim:dim -> unit -> 'a t
(** Applies {!Image.sub_format} to the image's format. *)
val cast : ([< storage] as 'a) -> 'b t -> 'a t
(** Specializes the image's type variable. Raises [Invalid_argument]
if the given storage is not equal to {!Image.storage}. *)
val print : Format.formatter -> ?x:int -> ?y:int -> ?z:int -> ?w:int -> ?h:int -> ?d:int -> 'a t -> unit
* Prints the given pixel range , higher lines first .
(**/**)
val _gl_set_pixel_pack : Gl.Ptr.ft -> 'a format -> unit
val _gl_set_pixel_unpack : Gl.Ptr.ft -> 'a format -> unit
val _gl_pformat : 'a format -> Gl.Enum.pformat
val _gl_pstorage : 'a format -> Gl.Enum.pstorage
val _string_of_dim : dim -> string
| null | https://raw.githubusercontent.com/dbuenzli/lit/4058b8a133cd51d3bf756c66b9ab620e39e1d2c4/attic/image.mli | ocaml | * Image dimension.
* Pixel components.
* Number of components.
* True if the pixel format is compressed.
* Storage of the pixel format.
* Specializes the pixel format's type variable. Raises [Invalid_argument]
if the given storage is not equal to {!Image.storage}.
* The type for image formats.
* Width, height, depth.
* Width, height.
* Number of buffer elements needed to hold the image.
* {2 Image}
* The type for image buffers.
* Position of the given pixel in the buffer (assumes [z = 0]).
Irrelevant for compressed formats.
* Position of the given pixel in the buffer. Irrelevant
for compressed formats.
* Buffer elements per pixel. Irrelevant for compressed formats.
* Applies {!Image.sub_format} to the image's format.
* Specializes the image's type variable. Raises [Invalid_argument]
if the given storage is not equal to {!Image.storage}.
*/* | ----------------------------------------------------------------------------
Copyright ( c ) 2007 , . All rights reserved .
Distributed under a BSD license , see .. /LICENSE .
----------------------------------------------------------------------------
Copyright (c) 2007, Daniel C. Bünzli. All rights reserved.
Distributed under a BSD license, see ../LICENSE.
----------------------------------------------------------------------------*)
* Image buffers ( F ) .
Image buffers store pixel information in a buffer . Images can have
from one to three dimensions . The first pixel of the buffer
is the image 's lower left frontmost pixel .
Image buffers store pixel information in a buffer. Images can have
from one to three dimensions. The first pixel of the buffer
is the image's lower left frontmost pixel. *)
open Gg;;
* { 2 Format }
type dim = [ `D1 | `D2 | `D3 ]
type comps = [ `A | `R | `G | `B | `RGB | `ARGB | `RGBA | `LA | `L | `D ]
val comps_count : comps -> int
type storage = [ Buf.uint_kind | `Float32]
type 'a pf = [
| `A of 'a
| `R of 'a
| `G of 'a
| `B of 'a
| `L of 'a
| `LA of 'a
| `RGB of 'a * [`C | `P332 | `P565 | `DXT1 ]
| `RGBA of 'a * [`C | `P4444 | `P5551 | `P8888 | `P1010102 |
`DXT1 | `DXT3 | `DXT5]
| `ARGB of 'a * [`P4444 | `P1555 | `P8888 | `P2101010 ]]
constraint 'a = [< storage ]
* Pixel formats .
Pixel formats [ ` A ] , [ ` R ] , [ ` G ] , [ ` B ] , [ ` L ] , [ ` LA ] , [ ` RGB ] and
[ ` RGBA ] with [ ` C ] store each component in a single
buffer element with the leftmost component first . For example
[ ` RGB ( ` Float32 , ` C ) ] stores each pixel in a sequence of three
[ ` Float32 ] elements , red first . [ ` C_rev ]
Pixel formats [ ` RGB ] , [ ` RGBA ] and [ ` ARGB ] with explicit bit
layouts store the whole pixel in a single buffer element . The
leftmost component is stored in the most significant bit . The
storage kind must be an integer whose size fits the bits .
For example [ ` RGB ( ` Uint8 , ` P332 ) ] stores a pixel in a
byte as [ 0bRRRGGGBB ] .
Pixel formats [ ` RGB ] and [ ` RGBA ] with [ ` DXT1 ] , [ ` DXT2 ] and
[ ` ] store pixel data according to the
{ { : }S3TC
format } . The storage kind must be equal to [ ` Uint8 ] .
For fast integer and floating point data transfers
between the cpu and the gpu , [ ` ARGB ( ` Uint32 , ` P8888 ) ] and
[ ` RGBA ( ` Float32 , ` C ) ] seem to be good multiplatform choices .
Pixel formats [`A], [`R], [`G], [`B], [`L], [`LA], [`RGB] and
[`RGBA] with [`C] store each component in a single
buffer element with the leftmost component first. For example
[`RGB (`Float32, `C)] stores each pixel in a sequence of three
[`Float32] elements, red first. [`C_rev]
Pixel formats [`RGB], [`RGBA] and [`ARGB] with explicit bit
layouts store the whole pixel in a single buffer element. The
leftmost component is stored in the most significant bit. The
storage kind must be an integer whose size fits the bits.
For example [`RGB (`Uint8, `P332)] stores a pixel in a
byte as [0bRRRGGGBB].
Pixel formats [`RGB] and [`RGBA] with [`DXT1], [`DXT2] and
[`DXT3] store pixel data according to the
{{:}S3TC
format}. The storage kind must be equal to [`Uint8].
For fast integer and floating point data transfers
between the cpu and the gpu, [`ARGB (`Uint32, `P8888)] and
[`RGBA (`Float32, `C)] seem to be good multiplatform choices. *)
val pf_compressed : 'a pf -> bool
val pf_storage : 'a pf -> 'a
val pf_cast : ([< storage] as 'a) -> 'b pf -> 'a pf
type 'a format constraint 'a = [< storage ]
val format : ?first:int -> ?w_skip:int -> ?h_skip:int -> w:int ->
?h:int -> ?d:int -> 'a pf -> dim -> 'a format
* Creates a new format for an image with the given pixel format
and dimension . Arguments irrelevant for a dimension are ignored .
{ ul
{ - [ first ] , buffer element where the data of the bottom , left , frontmost
pixel [ ( 0,0,0 ) ] starts . }
{ - [ w_skip ] , number of { e pixels } to skip between two
consecutive lines , defaults to [ 0 ] . }
{ - [ h_skip ] , number of { e lines } to skip between two
consecutive images , defaults to [ 0 ] . }
{ - [ w ] , [ h ] , [ d ] , image width , height and depth in pixels ,
[ h ] and [ d ] default to [ 1 ] . }
}
{ b Note . } [ Invalid_argument ] is raised if the bit layout of
a pixel format does n't fits its storage and for compressed pixel formats :
if the storage is not [ ` Uint8 ] or the skip values are different from
[ 0 ] or the dimension in not [ ` D2 ] .
and dimension. Arguments irrelevant for a dimension are ignored.
{ul
{- [first], buffer element where the data of the bottom, left, frontmost
pixel [(0,0,0)] starts.}
{- [w_skip], number of {e pixels} to skip between two
consecutive lines, defaults to [0].}
{- [h_skip], number of {e lines} to skip between two
consecutive images, defaults to [0].}
{- [w], [h], [d], image width, height and depth in pixels,
[h] and [d] default to [1].}
}
{b Note.} [Invalid_argument] is raised if the bit layout of
a pixel format doesn't fits its storage and for compressed pixel formats :
if the storage is not [`Uint8] or the skip values are different from
[0] or the dimension in not [`D2]. *)
val first : 'a format -> int
val width_skip : 'a format -> int
val height_skip : 'a format -> int
val width : 'a format -> int
val height : 'a format -> int
val depth : 'a format -> int
val pf : 'a format -> 'a pf
val dim : 'a format -> dim
val storage : 'a format -> 'a
val comps : 'a format -> comps
val extent : 'a format -> v3
val extent2 : 'a format -> v2
val size : 'a format -> int
val sub_format : 'a format -> ?x:int -> ?y:int -> ?z:int -> ?w:int ->
?h:int -> ?d:int -> ?dim:dim -> unit ->
'a format
* Format for a subimage of the given format .
{ ul
{ - [ x],[y],[z ] , new origin of the image in pixels , defaults to [ 0,0,0 ] . }
{ - [ w],[h],[d ] , new size of the image , defaults to the original size
minus the new origin . }
{ - [ dim ] , new dimension of the image , must be smaller than the original one ,
defaults to the original dimension . } }
{ b Note . } Raises [ Invalid_argument ] on compressed formats .
{ul
{- [x],[y],[z], new origin of the image in pixels, defaults to [0,0,0].}
{- [w],[h],[d], new size of the image, defaults to the original size
minus the new origin.}
{- [dim], new dimension of the image, must be smaller than the original one,
defaults to the original dimension.}}
{b Note.} Raises [Invalid_argument] on compressed formats.
*)
val print_format : Format.formatter -> 'a format -> unit
type 'a t = 'a Buf.t * 'a format constraint 'a = [< Buf.uint_kind | `Float32 ]
val pos : 'a t -> x:int -> y:int -> int
val pos3 : 'a t -> x:int -> y:int -> z:int -> int
val pixel_size : 'a t -> int
val sub : 'a t -> ?x:int -> ?y:int -> ?z:int -> ?w:int ->
?h:int -> ?d:int -> ?dim:dim -> unit -> 'a t
val cast : ([< storage] as 'a) -> 'b t -> 'a t
val print : Format.formatter -> ?x:int -> ?y:int -> ?z:int -> ?w:int -> ?h:int -> ?d:int -> 'a t -> unit
* Prints the given pixel range , higher lines first .
val _gl_set_pixel_pack : Gl.Ptr.ft -> 'a format -> unit
val _gl_set_pixel_unpack : Gl.Ptr.ft -> 'a format -> unit
val _gl_pformat : 'a format -> Gl.Enum.pformat
val _gl_pstorage : 'a format -> Gl.Enum.pstorage
val _string_of_dim : dim -> string
|
519d894dcb63a9b67899f1c8e5b17cdcf89d7433f0ff570cb200be1d7e6be0bb | mpickering/eventlog2html | HtmlTemplate.hs | {-# LANGUAGE OverloadedStrings #-}
module Eventlog.HtmlTemplate where
import Data.Aeson (Value, encode)
import Data.Aeson.Text (encodeToLazyText)
import Data.String
import Data.Text (Text, append)
import qualified Data.Text as T
import qualified Data.Text.Lazy.Encoding as T
import qualified Data.Text.Lazy as TL
--import Text.Blaze.Html
import Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes as A
import Text.Blaze.Html.Renderer.String
import Eventlog.Javascript
import Eventlog.Args
import Eventlog.Types (Header(..), HeapProfBreakdown(..))
import Eventlog.VegaTemplate
import Eventlog.AssetVersions
import Paths_eventlog2html
import Data.Version
import Control.Monad
import Data.Maybe
type VizID = Int
insertJsonData :: Value -> Html
insertJsonData dat = preEscapedToHtml $ T.unlines [
"data_json= " `append` dat' `append` ";"
, "console.log(data_json);" ]
where
dat' = TL.toStrict (T.decodeUtf8 (encode dat))
insertJsonDesc :: Value -> Html
insertJsonDesc dat = preEscapedToHtml $ T.unlines [
"desc_json= " `append` dat' `append` ";"
, "console.log(desc_json);" ]
where
dat' = TL.toStrict (T.decodeUtf8 (encode dat))
-- Dynamically bound in ccs tree
insertColourScheme :: Text -> Html
insertColourScheme scheme = preEscapedToHtml $ T.unlines [
"colour_scheme= \"" `append` scheme `append` "\";"
, "console.log(colour_scheme);" ]
data_sets :: [Text] -> [Text]
data_sets itd = Prelude.map line itd
where
line t = "res.view.insert(\"data_json_" <> t <>"\", data_json."<> t <>");"
data IncludeTraceData = TraceData | NoTraceData
encloseScript :: [Text] -> VizID -> Text -> Html
encloseScript = encloseScriptX
encloseRawVegaScript :: VizID -> Text -> Html
encloseRawVegaScript = encloseScriptX []
encloseScriptX :: [Text] -> VizID -> Text -> Html
encloseScriptX insert_data_sets vid vegaspec = preEscapedToHtml $ T.unlines ([
"var yourVlSpec" `append` vidt `append`"= " `append` vegaspec `append` ";"
, "vegaEmbed('#vis" `append` vidt `append` "', yourVlSpec" `append` vidt `append` ")"
, ".then((res) => { " ]
For the 4 vega lite charts we dynamically insert the data after the
-- chart is created to avoid duplicating it. For the vega chart, this
-- causes a harmless error so we just don't do it.
++ (data_sets insert_data_sets) ++
[ "; res.view.resize()"
, "; res.view.runAsync()"
, "})" ])
where
vidt = T.pack $ show vid
jsScript :: String -> Html
jsScript url = script ! src (fromString $ url) $ ""
css :: AttributeValue -> Html
css url = link ! rel "stylesheet" ! href url
htmlHeader :: Value -> Maybe Value -> Args -> Html
htmlHeader dat desc as =
H.head $ do
H.title "eventlog2html - Heap Profile"
meta ! charset "UTF-8"
script $ insertJsonData dat
maybe (return ()) (script . insertJsonDesc) desc
script $ insertColourScheme (userColourScheme as)
if not (noIncludejs as)
then do
script $ preEscapedToHtml vegaLite
script $ preEscapedToHtml vega
script $ preEscapedToHtml vegaEmbed
script $ preEscapedToHtml jquery
H.style $ preEscapedToHtml bootstrapCSS
script $ preEscapedToHtml bootstrap
script $ preEscapedToHtml fancytable
script $ preEscapedToHtml sparkline
else do
jsScript vegaURL
jsScript vegaLiteURL
jsScript vegaEmbedURL
jsScript jqueryURL
css (preEscapedStringValue bootstrapCSSURL)
jsScript bootstrapURL
css "//fonts.googleapis.com/css?family=Roboto:300,300italic,700,700italic"
jsScript fancyTableURL
jsScript sparklinesURL
-- Include this last to overwrite some milligram styling
H.style $ preEscapedToHtml stylesheet
template :: Header -> Value -> Maybe Value -> Maybe Html -> Args -> Html
template header' dat cc_descs closure_descs as = docTypeHtml $ do
H.stringComment $ "Generated with eventlog2html-" <> showVersion version
htmlHeader dat cc_descs as
body $ H.div ! class_ "container" $ do
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
h1 $ a ! href "" $ "eventlog2html"
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
"Options: "
code $ toHtml $ hJob header'
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
"Created at: "
code $ toHtml $ hDate header'
forM_ (hHeapProfileType header') $ \prof_type -> do
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
"Type of profile: "
code $ toHtml $ ppHeapProfileType prof_type
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
"Sampling rate in seconds: "
code $ toHtml $ hSamplingRate header'
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
button ! class_ "tablink button-black" ! onclick "changeTab('areachart', this)" ! A.id "defaultOpen" $ "Area Chart"
button ! class_ "tablink button-black" ! onclick "changeTab('normalizedchart', this)" $ "Normalized"
button ! class_ "tablink button-black" ! onclick "changeTab('streamgraph', this)" $ "Streamgraph"
button ! class_ "tablink button-black" ! onclick "changeTab('linechart', this)" $ "Linechart"
button ! class_ "tablink button-black" ! onclick "changeTab('heapchart', this)" $ "Heap"
when (isJust cc_descs) $ do
button ! class_ "tablink button-black" ! onclick "changeTab('cost-centres', this)" $ "Cost Centres"
when (isJust closure_descs) $ do
button ! class_ "tablink button-black" ! onclick "changeTab('closures', this)" $ "Detailed"
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
let itd = if (noTraces as) then NoTraceData else TraceData
mapM_ (\(vid, chartname, conf) ->
H.div ! A.id chartname ! class_ "tabviz" $ do
renderChart itd conf True vid
(TL.toStrict (encodeToLazyText (vegaJson (htmlConf as conf)))))
[(1, "areachart", AreaChart Stacked)
,(2, "normalizedchart", AreaChart Normalized)
,(3, "streamgraph", AreaChart StreamGraph)
,(4, "linechart", LineChart)
,(5, "heapchart", HeapChart) ]
when (isJust cc_descs) $ do
H.div ! A.id "cost-centres" ! class_ "tabviz" $ do
renderChart itd LineChart False 6 treevega
forM_ closure_descs $ \v -> do
H.div ! A.id "closures" ! class_ "tabviz" $ do
v
script $ preEscapedToHtml tablogic
select_data :: IncludeTraceData -> ChartType -> [Text]
select_data itd c =
case c of
AreaChart {} -> prof_data
LineChart {} -> prof_data
HeapChart {} -> ["heap"] ++ ["traces" | TraceData <- [itd]]
where
prof_data = ["samples"] ++ ["traces" | TraceData <- [itd]]
htmlConf :: Args -> ChartType -> ChartConfig
htmlConf as ct = ChartConfig 1200 1000 (not (noTraces as)) (userColourScheme as) "set1" ct (fromIntegral <$> (fixedYAxis as))
renderChart :: IncludeTraceData -> ChartType -> Bool -> VizID -> Text -> Html
renderChart itd ct vega_lite vid vegaSpec = do
let fields = select_data itd ct
H.div ! A.id (fromString $ "vis" ++ show vid) ! class_ "chart" $ ""
script ! type_ "text/javascript" $ do
if vega_lite
then encloseScript fields vid vegaSpec
else encloseRawVegaScript vid vegaSpec
renderChartWithJson :: IncludeTraceData -> ChartType -> Int -> Value -> Text -> Html
renderChartWithJson itd ct k dat vegaSpec = do
script $ insertJsonData dat
renderChart itd ct True k vegaSpec
templateString :: Header -> Value -> Maybe Value -> Maybe Html -> Args -> String
templateString header' dat cc_descs closure_descs as =
renderHtml $ template header' dat cc_descs closure_descs as
ppHeapProfileType :: HeapProfBreakdown -> Text
ppHeapProfileType (HeapProfBreakdownCostCentre) = "Cost centre profiling (implied by -hc)"
ppHeapProfileType (HeapProfBreakdownModule) = "Profiling by module (implied by -hm)"
ppHeapProfileType (HeapProfBreakdownClosureDescr) = "Profiling by closure description (implied by -hd)"
ppHeapProfileType (HeapProfBreakdownTypeDescr) = "Profiling by type (implied by -hy)"
ppHeapProfileType (HeapProfBreakdownRetainer) = "Retainer profiling (implied by -hr)"
ppHeapProfileType (HeapProfBreakdownBiography) = "Biographical profiling (implied by -hb)"
ppHeapProfileType (HeapProfBreakdownClosureType) = "Basic heap profile (implied by -hT)"
ppHeapProfileType (HeapProfBreakdownInfoTable) = "Info table profile (implied by -hi)"
| null | https://raw.githubusercontent.com/mpickering/eventlog2html/a18ec810328c71122ccc630fccfcea5b48c0e937/src/Eventlog/HtmlTemplate.hs | haskell | # LANGUAGE OverloadedStrings #
import Text.Blaze.Html
Dynamically bound in ccs tree
chart is created to avoid duplicating it. For the vega chart, this
causes a harmless error so we just don't do it.
Include this last to overwrite some milligram styling | module Eventlog.HtmlTemplate where
import Data.Aeson (Value, encode)
import Data.Aeson.Text (encodeToLazyText)
import Data.String
import Data.Text (Text, append)
import qualified Data.Text as T
import qualified Data.Text.Lazy.Encoding as T
import qualified Data.Text.Lazy as TL
import Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes as A
import Text.Blaze.Html.Renderer.String
import Eventlog.Javascript
import Eventlog.Args
import Eventlog.Types (Header(..), HeapProfBreakdown(..))
import Eventlog.VegaTemplate
import Eventlog.AssetVersions
import Paths_eventlog2html
import Data.Version
import Control.Monad
import Data.Maybe
type VizID = Int
insertJsonData :: Value -> Html
insertJsonData dat = preEscapedToHtml $ T.unlines [
"data_json= " `append` dat' `append` ";"
, "console.log(data_json);" ]
where
dat' = TL.toStrict (T.decodeUtf8 (encode dat))
insertJsonDesc :: Value -> Html
insertJsonDesc dat = preEscapedToHtml $ T.unlines [
"desc_json= " `append` dat' `append` ";"
, "console.log(desc_json);" ]
where
dat' = TL.toStrict (T.decodeUtf8 (encode dat))
insertColourScheme :: Text -> Html
insertColourScheme scheme = preEscapedToHtml $ T.unlines [
"colour_scheme= \"" `append` scheme `append` "\";"
, "console.log(colour_scheme);" ]
data_sets :: [Text] -> [Text]
data_sets itd = Prelude.map line itd
where
line t = "res.view.insert(\"data_json_" <> t <>"\", data_json."<> t <>");"
data IncludeTraceData = TraceData | NoTraceData
encloseScript :: [Text] -> VizID -> Text -> Html
encloseScript = encloseScriptX
encloseRawVegaScript :: VizID -> Text -> Html
encloseRawVegaScript = encloseScriptX []
encloseScriptX :: [Text] -> VizID -> Text -> Html
encloseScriptX insert_data_sets vid vegaspec = preEscapedToHtml $ T.unlines ([
"var yourVlSpec" `append` vidt `append`"= " `append` vegaspec `append` ";"
, "vegaEmbed('#vis" `append` vidt `append` "', yourVlSpec" `append` vidt `append` ")"
, ".then((res) => { " ]
For the 4 vega lite charts we dynamically insert the data after the
++ (data_sets insert_data_sets) ++
[ "; res.view.resize()"
, "; res.view.runAsync()"
, "})" ])
where
vidt = T.pack $ show vid
jsScript :: String -> Html
jsScript url = script ! src (fromString $ url) $ ""
css :: AttributeValue -> Html
css url = link ! rel "stylesheet" ! href url
htmlHeader :: Value -> Maybe Value -> Args -> Html
htmlHeader dat desc as =
H.head $ do
H.title "eventlog2html - Heap Profile"
meta ! charset "UTF-8"
script $ insertJsonData dat
maybe (return ()) (script . insertJsonDesc) desc
script $ insertColourScheme (userColourScheme as)
if not (noIncludejs as)
then do
script $ preEscapedToHtml vegaLite
script $ preEscapedToHtml vega
script $ preEscapedToHtml vegaEmbed
script $ preEscapedToHtml jquery
H.style $ preEscapedToHtml bootstrapCSS
script $ preEscapedToHtml bootstrap
script $ preEscapedToHtml fancytable
script $ preEscapedToHtml sparkline
else do
jsScript vegaURL
jsScript vegaLiteURL
jsScript vegaEmbedURL
jsScript jqueryURL
css (preEscapedStringValue bootstrapCSSURL)
jsScript bootstrapURL
css "//fonts.googleapis.com/css?family=Roboto:300,300italic,700,700italic"
jsScript fancyTableURL
jsScript sparklinesURL
H.style $ preEscapedToHtml stylesheet
template :: Header -> Value -> Maybe Value -> Maybe Html -> Args -> Html
template header' dat cc_descs closure_descs as = docTypeHtml $ do
H.stringComment $ "Generated with eventlog2html-" <> showVersion version
htmlHeader dat cc_descs as
body $ H.div ! class_ "container" $ do
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
h1 $ a ! href "" $ "eventlog2html"
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
"Options: "
code $ toHtml $ hJob header'
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
"Created at: "
code $ toHtml $ hDate header'
forM_ (hHeapProfileType header') $ \prof_type -> do
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
"Type of profile: "
code $ toHtml $ ppHeapProfileType prof_type
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
"Sampling rate in seconds: "
code $ toHtml $ hSamplingRate header'
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
button ! class_ "tablink button-black" ! onclick "changeTab('areachart', this)" ! A.id "defaultOpen" $ "Area Chart"
button ! class_ "tablink button-black" ! onclick "changeTab('normalizedchart', this)" $ "Normalized"
button ! class_ "tablink button-black" ! onclick "changeTab('streamgraph', this)" $ "Streamgraph"
button ! class_ "tablink button-black" ! onclick "changeTab('linechart', this)" $ "Linechart"
button ! class_ "tablink button-black" ! onclick "changeTab('heapchart', this)" $ "Heap"
when (isJust cc_descs) $ do
button ! class_ "tablink button-black" ! onclick "changeTab('cost-centres', this)" $ "Cost Centres"
when (isJust closure_descs) $ do
button ! class_ "tablink button-black" ! onclick "changeTab('closures', this)" $ "Detailed"
H.div ! class_ "row" $ do
H.div ! class_ "column" $ do
let itd = if (noTraces as) then NoTraceData else TraceData
mapM_ (\(vid, chartname, conf) ->
H.div ! A.id chartname ! class_ "tabviz" $ do
renderChart itd conf True vid
(TL.toStrict (encodeToLazyText (vegaJson (htmlConf as conf)))))
[(1, "areachart", AreaChart Stacked)
,(2, "normalizedchart", AreaChart Normalized)
,(3, "streamgraph", AreaChart StreamGraph)
,(4, "linechart", LineChart)
,(5, "heapchart", HeapChart) ]
when (isJust cc_descs) $ do
H.div ! A.id "cost-centres" ! class_ "tabviz" $ do
renderChart itd LineChart False 6 treevega
forM_ closure_descs $ \v -> do
H.div ! A.id "closures" ! class_ "tabviz" $ do
v
script $ preEscapedToHtml tablogic
select_data :: IncludeTraceData -> ChartType -> [Text]
select_data itd c =
case c of
AreaChart {} -> prof_data
LineChart {} -> prof_data
HeapChart {} -> ["heap"] ++ ["traces" | TraceData <- [itd]]
where
prof_data = ["samples"] ++ ["traces" | TraceData <- [itd]]
htmlConf :: Args -> ChartType -> ChartConfig
htmlConf as ct = ChartConfig 1200 1000 (not (noTraces as)) (userColourScheme as) "set1" ct (fromIntegral <$> (fixedYAxis as))
renderChart :: IncludeTraceData -> ChartType -> Bool -> VizID -> Text -> Html
renderChart itd ct vega_lite vid vegaSpec = do
let fields = select_data itd ct
H.div ! A.id (fromString $ "vis" ++ show vid) ! class_ "chart" $ ""
script ! type_ "text/javascript" $ do
if vega_lite
then encloseScript fields vid vegaSpec
else encloseRawVegaScript vid vegaSpec
renderChartWithJson :: IncludeTraceData -> ChartType -> Int -> Value -> Text -> Html
renderChartWithJson itd ct k dat vegaSpec = do
script $ insertJsonData dat
renderChart itd ct True k vegaSpec
templateString :: Header -> Value -> Maybe Value -> Maybe Html -> Args -> String
templateString header' dat cc_descs closure_descs as =
renderHtml $ template header' dat cc_descs closure_descs as
ppHeapProfileType :: HeapProfBreakdown -> Text
ppHeapProfileType (HeapProfBreakdownCostCentre) = "Cost centre profiling (implied by -hc)"
ppHeapProfileType (HeapProfBreakdownModule) = "Profiling by module (implied by -hm)"
ppHeapProfileType (HeapProfBreakdownClosureDescr) = "Profiling by closure description (implied by -hd)"
ppHeapProfileType (HeapProfBreakdownTypeDescr) = "Profiling by type (implied by -hy)"
ppHeapProfileType (HeapProfBreakdownRetainer) = "Retainer profiling (implied by -hr)"
ppHeapProfileType (HeapProfBreakdownBiography) = "Biographical profiling (implied by -hb)"
ppHeapProfileType (HeapProfBreakdownClosureType) = "Basic heap profile (implied by -hT)"
ppHeapProfileType (HeapProfBreakdownInfoTable) = "Info table profile (implied by -hi)"
|
31c341455ae0bafc84b06d03239a8a2c1fdd6cca756ce7107b7765859b6146d5 | elaforge/karya | ControlFunction.hs | Copyright 2014
-- This program is distributed under the terms of the GNU General Public
-- License 3.0, see COPYING or -3.0.txt
-- | Calls and functions for 'DeriveT.ControlFunction's.
module Derive.C.Prelude.ControlFunction where
import qualified Data.List as List
import qualified Data.Map as Map
import qualified System.Random.Mersenne.Pure64 as Pure64
import qualified Util.Doc as Doc
import qualified Util.Num as Num
import qualified Util.Seq as Seq
import qualified Derive.Call as Call
import qualified Derive.Call.ControlUtil as ControlUtil
import qualified Derive.Call.Make as Make
import qualified Derive.Call.Module as Module
import qualified Derive.Call.Tags as Tags
import qualified Derive.Controls as Controls
import qualified Derive.Derive as Derive
import qualified Derive.DeriveT as DeriveT
import qualified Derive.Env as Env
import qualified Derive.EnvKey as EnvKey
import qualified Derive.Expr as Expr
import qualified Derive.Library as Library
import qualified Derive.ScoreT as ScoreT
import qualified Derive.ShowVal as ShowVal
import qualified Derive.Sig as Sig
import qualified Derive.Typecheck as Typecheck
import qualified Derive.Warp as Warp
import qualified Perform.RealTime as RealTime
import qualified Perform.Signal as Signal
import qualified Ui.Meter.Mark as Mark
import qualified Ui.Meter.Meter as Meter
import qualified Ui.Ruler as Ruler
import qualified Ui.ScoreTime as ScoreTime
import Global
import Types
library :: Library.Library
library = Library.vals $
[ ("cf-rnd", c_cf_rnd const)
, ("cf-rnd+", c_cf_rnd (+))
, ("cf-rnd*", c_cf_rnd (*))
, ("cf-rnd-a", c_cf_rnd_around const)
, ("cf-rnd-a+", c_cf_rnd_around (+))
, ("cf-rnd-a*", c_cf_rnd_around (*))
, ("cf-rnd01", c_cf_rnd01)
, ("cf-swing", c_cf_swing)
, ("cf-clamp", c_cf_clamp)
] ++ map (make_call Nothing . snd) ControlUtil.standard_curves
++ map (uncurry make_call . first Just) curves
make_call :: Maybe Doc.Doc -> ControlUtil.CurveD
-> (Expr.Symbol, Derive.ValCall)
make_call doc curve =
( "cf-" <> Expr.Symbol (ControlUtil.curve_name curve)
, ControlUtil.make_curve_call doc curve
)
data Distribution =
Uniform
-- | Approximate a bounded normal distribution.
| Normal
-- | This is like Normal, but rotated, so the peaks are at the extremities.
| Bimodal
deriving (Bounded, Eq, Enum, Show)
instance ShowVal.ShowVal Distribution
instance Typecheck.Typecheck Distribution
c_cf_rnd :: (Signal.Y -> Signal.Y -> Signal.Y) -> Derive.ValCall
c_cf_rnd combine = val_call "cf-rnd"
(Tags.control_function <> Tags.random)
"Randomize a control. Normally it replaces the control of the same name,\
\ while the `+` and `*` variants add to and multiply with it."
$ Sig.call ((,,)
<$> Sig.required "low" "Low end of the range."
<*> Sig.required "high" "High end of the range."
<*> Sig.environ "distribution" Sig.Prefixed Normal "Random distribution."
) $ \(low, high, distribution) _args -> return $!
DeriveT.ControlFunction "cf-rnd" $ \control dyn pos ->
ScoreT.untyped $ combine
(cf_rnd distribution low high
(random_stream pos (dyn_seed dyn)))
(dyn_control dyn control pos)
c_cf_rnd_around :: (Signal.Y -> Signal.Y -> Signal.Y) -> Derive.ValCall
c_cf_rnd_around combine = val_call "cf-rnd-a"
(Tags.control_function <> Tags.random)
"Randomize a control around a center point.\
\ Normally it replaces the control of the same name,\
\ while the `+` and `*` variants add to and multiply with it."
$ Sig.call ((,,)
<$> Sig.required "range" "Range this far from the center."
<*> Sig.defaulted "center" 0 "Center of the range."
<*> Sig.environ "distribution" Sig.Prefixed Normal "Random distribution."
) $ \(range, center, distribution) _args -> return $!
DeriveT.ControlFunction "cf-rnd-a" $ \control dyn pos ->
ScoreT.untyped $ combine
(cf_rnd distribution (center-range) (center+range)
(random_stream pos (dyn_seed dyn)))
(dyn_control dyn control pos)
c_cf_rnd01 :: Derive.ValCall
c_cf_rnd01 = Make.modify_vcall (c_cf_rnd (+)) Module.prelude "cf-rnd01"
"This is an abbreviation for `(cf-clamp (cf-rnd+ ..) 0 1)`." $
\val -> case Typecheck.from_val_simple val of
Just cf -> Typecheck.to_val $ cf_compose "cf-clamp" (Num.clamp 0 1) cf
Nothing -> val
cf_rnd :: Distribution -> Double -> Double -> [Double] -> Double
cf_rnd dist low high rnds = Num.scale low high $ case dist of
Uniform -> head rnds
Normal -> Call.make_normal 1 rnds
Bimodal
| v >= 0.5 -> v - 0.5
| otherwise -> v + 0.5
where v = Call.make_normal 1 rnds
random_stream :: RealTime -> Double -> [Double]
random_stream pos =
List.unfoldr (Just . Pure64.randomDouble) . Pure64.pureMT . floor
. (+ RealTime.to_seconds pos)
-- * cf-swing
c_cf_swing :: Derive.ValCall
c_cf_swing = val_call "cf-swing" Tags.control_function
("Add a curved offset to the control, suitable for swing tempo when added\
\ to " <> ShowVal.doc Controls.start_s <> ". The curve is a sine wave,\
\ from trough to trough.")
$ Sig.call ((,)
<$> Sig.defaulted "rank" Meter.Q
"The time steps are on the beat, and midway between offset by the\
\ given amount."
<*> Sig.defaulted "amount" (DeriveT.real_control "swing" (1/3))
"Swing amount, multiplied by the rank duration / 2."
) $ \(rank, amount) _args -> return $!
DeriveT.ControlFunction "cf-swing" (cf_swing_ rank amount)
where
cf_swing_ rank amount control dyn pos
| Just marks <- maybe_marks = ScoreT.untyped $
dyn_control dyn control pos + RealTime.to_seconds
(cf_swing (real dyn) rank
(to_function dyn 0 amount) marks (score dyn pos))
| otherwise = ScoreT.untyped 0
where
maybe_marks = snd <$>
Map.lookup Ruler.meter_name (DeriveT.dyn_ruler dyn)
cf_swing :: (ScoreTime -> RealTime) -> Meter.Rank -> Typecheck.Function
-> Mark.Marklist -> ScoreTime -> RealTime
cf_swing to_real rank amount marks pos = case marks_around rank marks pos of
Nothing -> 0
Just (pre, post) -> (to_real post - to_real pre) / 2
* RealTime.seconds (amount (to_real pos))
* swing (Num.normalize pre post pos)
marks_around :: Meter.Rank -> Mark.Marklist -> ScoreTime
-> Maybe (ScoreTime, ScoreTime)
marks_around rank marks pos =
(,) <$> get (Mark.descending pos marks) <*> get (Mark.ascending pos marks)
where get = fmap fst . Seq.head . filter ((<=rank) . Mark.mark_rank . snd)
^ time from this beat to the next , normalized 0 to 1
^ amount of swing offset , also normalized 0 to 1
swing = RealTime.seconds . Num.normalize (-1) 1 . sin . (*pi)
. Num.scale (-0.5) 1.5 . ScoreTime.to_double
-- * cf-clamp
c_cf_clamp :: Derive.ValCall
c_cf_clamp = val_call "cf-clamp" Tags.control_function
"Clamp the output of a control function to the given range."
$ Sig.call ((,,)
<$> Sig.required "cf" "Control function."
<*> Sig.defaulted "low" 0 "Low value."
<*> Sig.defaulted "high" 1 "High value."
) $ \(cf, low, high) _args ->
return $ cf_compose "cf-clamp" (Num.clamp low high) cf
cf_compose :: Text -> (Signal.Y -> Signal.Y) -> DeriveT.ControlFunction
-> DeriveT.ControlFunction
cf_compose name f (DeriveT.ControlFunction cf_name cf) =
DeriveT.ControlFunction (name <> " . " <> cf_name)
(\c dyn x -> f <$> cf c dyn x)
-- * curve interpolators
curves :: [(Doc.Doc, ControlUtil.CurveD)]
curves =
[ ( "Jump to the destination at 0.5."
, ControlUtil.CurveD "jump" (pure ()) $
\() -> ControlUtil.Function $ \n -> if n < 0.5 then 0 else 1
)
, ("No interpolation.", ControlUtil.CurveD "const" (pure ()) $
\() -> ControlUtil.Function (const 0))
]
-- * DeriveT.Dynamic
dyn_seed :: DeriveT.Dynamic -> Double
dyn_seed dyn = fromIntegral (DeriveT.dyn_event_serial dyn) + seed dyn
where
seed = fromMaybe 0 . Env.maybe_val EnvKey.seed . DeriveT.dyn_environ
dyn_control :: DeriveT.Dynamic -> ScoreT.Control -> RealTime -> Double
dyn_control dyn control pos = maybe 0 (Signal.at pos . ScoreT.typed_val) $
Map.lookup control $ DeriveT.dyn_controls dyn
real :: DeriveT.Dynamic -> ScoreTime -> RealTime
real dyn = Warp.warp (DeriveT.dyn_warp dyn)
score :: DeriveT.Dynamic -> RealTime -> ScoreTime
score dyn = Warp.unwarp (DeriveT.dyn_warp dyn)
-- ** ControlRef
to_function :: DeriveT.Dynamic -> Signal.Y -> DeriveT.ControlRef
-> Typecheck.Function
to_function dyn deflt =
(ScoreT.typed_val .) . to_typed_function dyn (ScoreT.untyped deflt)
to_typed_function :: DeriveT.Dynamic -> ScoreT.Typed Signal.Y
-> DeriveT.ControlRef -> Typecheck.TypedFunction
to_typed_function dyn deflt control =
case to_signal_or_function dyn control of
Nothing -> const deflt
Just (Left sig) -> Derive.signal_function sig
Just (Right f) -> DeriveT.call_control_function f score_control dyn
where
score_control = case control of
DeriveT.ControlSignal {} -> Controls.null
DeriveT.DefaultedControl cont _ -> cont
DeriveT.LiteralControl cont -> cont
to_signal_or_function :: DeriveT.Dynamic -> DeriveT.ControlRef
-> Maybe (Either (ScoreT.Typed Signal.Control) DeriveT.ControlFunction)
to_signal_or_function dyn control = case control of
DeriveT.ControlSignal sig -> return $ Left sig
DeriveT.DefaultedControl cont deflt ->
get_control (ScoreT.type_of deflt) (return $ Left deflt) cont
DeriveT.LiteralControl cont ->
get_control ScoreT.Untyped Nothing cont
where
get_control default_type deflt cont = case get_function cont of
Just f -> return $ Right $
DeriveT.modify_control_function (inherit_type default_type .) f
Nothing -> case get_signal cont of
Just sig -> return $ Left sig
Nothing -> deflt
get_function cont = Map.lookup cont $ DeriveT.dyn_control_functions dyn
get_signal cont = Map.lookup cont $ DeriveT.dyn_controls dyn
-- If the signal was untyped, it gets the type of the default, since
-- presumably the caller expects that type.
inherit_type default_type val =
val { ScoreT.type_of = ScoreT.type_of val <> default_type }
-- * misc
val_call :: Typecheck.ToVal a => Derive.CallName -> Tags.Tags -> Doc.Doc
-> Derive.WithArgDoc (Derive.PassedArgs Derive.Tagged -> Derive.Deriver a)
-> Derive.ValCall
val_call = Derive.val_call Module.prelude
| null | https://raw.githubusercontent.com/elaforge/karya/a6638f16da9f018686023977c1292d6ce5095e28/Derive/C/Prelude/ControlFunction.hs | haskell | This program is distributed under the terms of the GNU General Public
License 3.0, see COPYING or -3.0.txt
| Calls and functions for 'DeriveT.ControlFunction's.
| Approximate a bounded normal distribution.
| This is like Normal, but rotated, so the peaks are at the extremities.
* cf-swing
* cf-clamp
* curve interpolators
* DeriveT.Dynamic
** ControlRef
If the signal was untyped, it gets the type of the default, since
presumably the caller expects that type.
* misc | Copyright 2014
module Derive.C.Prelude.ControlFunction where
import qualified Data.List as List
import qualified Data.Map as Map
import qualified System.Random.Mersenne.Pure64 as Pure64
import qualified Util.Doc as Doc
import qualified Util.Num as Num
import qualified Util.Seq as Seq
import qualified Derive.Call as Call
import qualified Derive.Call.ControlUtil as ControlUtil
import qualified Derive.Call.Make as Make
import qualified Derive.Call.Module as Module
import qualified Derive.Call.Tags as Tags
import qualified Derive.Controls as Controls
import qualified Derive.Derive as Derive
import qualified Derive.DeriveT as DeriveT
import qualified Derive.Env as Env
import qualified Derive.EnvKey as EnvKey
import qualified Derive.Expr as Expr
import qualified Derive.Library as Library
import qualified Derive.ScoreT as ScoreT
import qualified Derive.ShowVal as ShowVal
import qualified Derive.Sig as Sig
import qualified Derive.Typecheck as Typecheck
import qualified Derive.Warp as Warp
import qualified Perform.RealTime as RealTime
import qualified Perform.Signal as Signal
import qualified Ui.Meter.Mark as Mark
import qualified Ui.Meter.Meter as Meter
import qualified Ui.Ruler as Ruler
import qualified Ui.ScoreTime as ScoreTime
import Global
import Types
library :: Library.Library
library = Library.vals $
[ ("cf-rnd", c_cf_rnd const)
, ("cf-rnd+", c_cf_rnd (+))
, ("cf-rnd*", c_cf_rnd (*))
, ("cf-rnd-a", c_cf_rnd_around const)
, ("cf-rnd-a+", c_cf_rnd_around (+))
, ("cf-rnd-a*", c_cf_rnd_around (*))
, ("cf-rnd01", c_cf_rnd01)
, ("cf-swing", c_cf_swing)
, ("cf-clamp", c_cf_clamp)
] ++ map (make_call Nothing . snd) ControlUtil.standard_curves
++ map (uncurry make_call . first Just) curves
make_call :: Maybe Doc.Doc -> ControlUtil.CurveD
-> (Expr.Symbol, Derive.ValCall)
make_call doc curve =
( "cf-" <> Expr.Symbol (ControlUtil.curve_name curve)
, ControlUtil.make_curve_call doc curve
)
data Distribution =
Uniform
| Normal
| Bimodal
deriving (Bounded, Eq, Enum, Show)
instance ShowVal.ShowVal Distribution
instance Typecheck.Typecheck Distribution
c_cf_rnd :: (Signal.Y -> Signal.Y -> Signal.Y) -> Derive.ValCall
c_cf_rnd combine = val_call "cf-rnd"
(Tags.control_function <> Tags.random)
"Randomize a control. Normally it replaces the control of the same name,\
\ while the `+` and `*` variants add to and multiply with it."
$ Sig.call ((,,)
<$> Sig.required "low" "Low end of the range."
<*> Sig.required "high" "High end of the range."
<*> Sig.environ "distribution" Sig.Prefixed Normal "Random distribution."
) $ \(low, high, distribution) _args -> return $!
DeriveT.ControlFunction "cf-rnd" $ \control dyn pos ->
ScoreT.untyped $ combine
(cf_rnd distribution low high
(random_stream pos (dyn_seed dyn)))
(dyn_control dyn control pos)
c_cf_rnd_around :: (Signal.Y -> Signal.Y -> Signal.Y) -> Derive.ValCall
c_cf_rnd_around combine = val_call "cf-rnd-a"
(Tags.control_function <> Tags.random)
"Randomize a control around a center point.\
\ Normally it replaces the control of the same name,\
\ while the `+` and `*` variants add to and multiply with it."
$ Sig.call ((,,)
<$> Sig.required "range" "Range this far from the center."
<*> Sig.defaulted "center" 0 "Center of the range."
<*> Sig.environ "distribution" Sig.Prefixed Normal "Random distribution."
) $ \(range, center, distribution) _args -> return $!
DeriveT.ControlFunction "cf-rnd-a" $ \control dyn pos ->
ScoreT.untyped $ combine
(cf_rnd distribution (center-range) (center+range)
(random_stream pos (dyn_seed dyn)))
(dyn_control dyn control pos)
c_cf_rnd01 :: Derive.ValCall
c_cf_rnd01 = Make.modify_vcall (c_cf_rnd (+)) Module.prelude "cf-rnd01"
"This is an abbreviation for `(cf-clamp (cf-rnd+ ..) 0 1)`." $
\val -> case Typecheck.from_val_simple val of
Just cf -> Typecheck.to_val $ cf_compose "cf-clamp" (Num.clamp 0 1) cf
Nothing -> val
cf_rnd :: Distribution -> Double -> Double -> [Double] -> Double
cf_rnd dist low high rnds = Num.scale low high $ case dist of
Uniform -> head rnds
Normal -> Call.make_normal 1 rnds
Bimodal
| v >= 0.5 -> v - 0.5
| otherwise -> v + 0.5
where v = Call.make_normal 1 rnds
random_stream :: RealTime -> Double -> [Double]
random_stream pos =
List.unfoldr (Just . Pure64.randomDouble) . Pure64.pureMT . floor
. (+ RealTime.to_seconds pos)
c_cf_swing :: Derive.ValCall
c_cf_swing = val_call "cf-swing" Tags.control_function
("Add a curved offset to the control, suitable for swing tempo when added\
\ to " <> ShowVal.doc Controls.start_s <> ". The curve is a sine wave,\
\ from trough to trough.")
$ Sig.call ((,)
<$> Sig.defaulted "rank" Meter.Q
"The time steps are on the beat, and midway between offset by the\
\ given amount."
<*> Sig.defaulted "amount" (DeriveT.real_control "swing" (1/3))
"Swing amount, multiplied by the rank duration / 2."
) $ \(rank, amount) _args -> return $!
DeriveT.ControlFunction "cf-swing" (cf_swing_ rank amount)
where
cf_swing_ rank amount control dyn pos
| Just marks <- maybe_marks = ScoreT.untyped $
dyn_control dyn control pos + RealTime.to_seconds
(cf_swing (real dyn) rank
(to_function dyn 0 amount) marks (score dyn pos))
| otherwise = ScoreT.untyped 0
where
maybe_marks = snd <$>
Map.lookup Ruler.meter_name (DeriveT.dyn_ruler dyn)
cf_swing :: (ScoreTime -> RealTime) -> Meter.Rank -> Typecheck.Function
-> Mark.Marklist -> ScoreTime -> RealTime
cf_swing to_real rank amount marks pos = case marks_around rank marks pos of
Nothing -> 0
Just (pre, post) -> (to_real post - to_real pre) / 2
* RealTime.seconds (amount (to_real pos))
* swing (Num.normalize pre post pos)
marks_around :: Meter.Rank -> Mark.Marklist -> ScoreTime
-> Maybe (ScoreTime, ScoreTime)
marks_around rank marks pos =
(,) <$> get (Mark.descending pos marks) <*> get (Mark.ascending pos marks)
where get = fmap fst . Seq.head . filter ((<=rank) . Mark.mark_rank . snd)
^ time from this beat to the next , normalized 0 to 1
^ amount of swing offset , also normalized 0 to 1
swing = RealTime.seconds . Num.normalize (-1) 1 . sin . (*pi)
. Num.scale (-0.5) 1.5 . ScoreTime.to_double
c_cf_clamp :: Derive.ValCall
c_cf_clamp = val_call "cf-clamp" Tags.control_function
"Clamp the output of a control function to the given range."
$ Sig.call ((,,)
<$> Sig.required "cf" "Control function."
<*> Sig.defaulted "low" 0 "Low value."
<*> Sig.defaulted "high" 1 "High value."
) $ \(cf, low, high) _args ->
return $ cf_compose "cf-clamp" (Num.clamp low high) cf
cf_compose :: Text -> (Signal.Y -> Signal.Y) -> DeriveT.ControlFunction
-> DeriveT.ControlFunction
cf_compose name f (DeriveT.ControlFunction cf_name cf) =
DeriveT.ControlFunction (name <> " . " <> cf_name)
(\c dyn x -> f <$> cf c dyn x)
curves :: [(Doc.Doc, ControlUtil.CurveD)]
curves =
[ ( "Jump to the destination at 0.5."
, ControlUtil.CurveD "jump" (pure ()) $
\() -> ControlUtil.Function $ \n -> if n < 0.5 then 0 else 1
)
, ("No interpolation.", ControlUtil.CurveD "const" (pure ()) $
\() -> ControlUtil.Function (const 0))
]
dyn_seed :: DeriveT.Dynamic -> Double
dyn_seed dyn = fromIntegral (DeriveT.dyn_event_serial dyn) + seed dyn
where
seed = fromMaybe 0 . Env.maybe_val EnvKey.seed . DeriveT.dyn_environ
dyn_control :: DeriveT.Dynamic -> ScoreT.Control -> RealTime -> Double
dyn_control dyn control pos = maybe 0 (Signal.at pos . ScoreT.typed_val) $
Map.lookup control $ DeriveT.dyn_controls dyn
real :: DeriveT.Dynamic -> ScoreTime -> RealTime
real dyn = Warp.warp (DeriveT.dyn_warp dyn)
score :: DeriveT.Dynamic -> RealTime -> ScoreTime
score dyn = Warp.unwarp (DeriveT.dyn_warp dyn)
to_function :: DeriveT.Dynamic -> Signal.Y -> DeriveT.ControlRef
-> Typecheck.Function
to_function dyn deflt =
(ScoreT.typed_val .) . to_typed_function dyn (ScoreT.untyped deflt)
to_typed_function :: DeriveT.Dynamic -> ScoreT.Typed Signal.Y
-> DeriveT.ControlRef -> Typecheck.TypedFunction
to_typed_function dyn deflt control =
case to_signal_or_function dyn control of
Nothing -> const deflt
Just (Left sig) -> Derive.signal_function sig
Just (Right f) -> DeriveT.call_control_function f score_control dyn
where
score_control = case control of
DeriveT.ControlSignal {} -> Controls.null
DeriveT.DefaultedControl cont _ -> cont
DeriveT.LiteralControl cont -> cont
to_signal_or_function :: DeriveT.Dynamic -> DeriveT.ControlRef
-> Maybe (Either (ScoreT.Typed Signal.Control) DeriveT.ControlFunction)
to_signal_or_function dyn control = case control of
DeriveT.ControlSignal sig -> return $ Left sig
DeriveT.DefaultedControl cont deflt ->
get_control (ScoreT.type_of deflt) (return $ Left deflt) cont
DeriveT.LiteralControl cont ->
get_control ScoreT.Untyped Nothing cont
where
get_control default_type deflt cont = case get_function cont of
Just f -> return $ Right $
DeriveT.modify_control_function (inherit_type default_type .) f
Nothing -> case get_signal cont of
Just sig -> return $ Left sig
Nothing -> deflt
get_function cont = Map.lookup cont $ DeriveT.dyn_control_functions dyn
get_signal cont = Map.lookup cont $ DeriveT.dyn_controls dyn
inherit_type default_type val =
val { ScoreT.type_of = ScoreT.type_of val <> default_type }
val_call :: Typecheck.ToVal a => Derive.CallName -> Tags.Tags -> Doc.Doc
-> Derive.WithArgDoc (Derive.PassedArgs Derive.Tagged -> Derive.Deriver a)
-> Derive.ValCall
val_call = Derive.val_call Module.prelude
|
c8599de68436dc246b0027f4febeee2421ea9e2904ef633a4fd599d8042a80a3 | dmitryvk/sbcl-win32-threads | fixup.lisp | fixups , extracted from codegen.lisp by WHN 19990227 in order
;;;; to help with cross-compiling bootstrapping
This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!C")
;;; a fixup of some kind
(defstruct (fixup
(:constructor make-fixup (name flavor &optional offset))
(:copier nil))
;; the name and flavor of the fixup. The assembler makes no
;; assumptions about the contents of these fields; their semantics
;; are imposed by the dumper.
name
flavor
OFFSET is an optional offset from whatever external label this
;; fixup refers to. Or in the case of the :CODE-OBJECT flavor of
;; fixups on the :X86 architecture, NAME is always NIL, so this
fixup does n't refer to an external label , and OFFSET is an offset
;; from the beginning of the current code block.
offset)
(defstruct (fixup-note
(:constructor make-fixup-note (kind fixup position))
(:copier nil))
kind
fixup
position)
(defvar *fixup-notes*)
;;; Setting this variable lets you see what's going on as items are
;;; being pushed onto *FIXUPS*.
#!+sb-show (defvar *show-fixups-being-pushed-p* nil)
;;; This function is called by assembler instruction emitters when
;;; they find themselves trying to deal with a fixup.
(defun note-fixup (segment kind fixup)
(sb!assem:emit-back-patch segment
0
(lambda (segment posn)
(declare (ignore segment))
;; Why use EMIT-BACK-PATCH to cause this PUSH to
;; be done later, instead of just doing it now?
;; I'm not sure. Perhaps there's some concern
;; that POSN isn't known accurately now? Perhaps
;; there's a desire for all fixing up to go
;; through EMIT-BACK-PATCH whether it needs to or
;; not? -- WHN 19990905
#!+sb-show
(when *show-fixups-being-pushed-p*
(/show "PUSHING FIXUP" kind fixup posn))
(push (make-fixup-note kind fixup posn) *fixup-notes*)))
(values))
| null | https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/compiler/fixup.lisp | lisp | to help with cross-compiling bootstrapping
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
a fixup of some kind
the name and flavor of the fixup. The assembler makes no
assumptions about the contents of these fields; their semantics
are imposed by the dumper.
fixup refers to. Or in the case of the :CODE-OBJECT flavor of
fixups on the :X86 architecture, NAME is always NIL, so this
from the beginning of the current code block.
Setting this variable lets you see what's going on as items are
being pushed onto *FIXUPS*.
This function is called by assembler instruction emitters when
they find themselves trying to deal with a fixup.
Why use EMIT-BACK-PATCH to cause this PUSH to
be done later, instead of just doing it now?
I'm not sure. Perhaps there's some concern
that POSN isn't known accurately now? Perhaps
there's a desire for all fixing up to go
through EMIT-BACK-PATCH whether it needs to or
not? -- WHN 19990905 | fixups , extracted from codegen.lisp by WHN 19990227 in order
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!C")
(defstruct (fixup
(:constructor make-fixup (name flavor &optional offset))
(:copier nil))
name
flavor
OFFSET is an optional offset from whatever external label this
fixup does n't refer to an external label , and OFFSET is an offset
offset)
(defstruct (fixup-note
(:constructor make-fixup-note (kind fixup position))
(:copier nil))
kind
fixup
position)
(defvar *fixup-notes*)
#!+sb-show (defvar *show-fixups-being-pushed-p* nil)
(defun note-fixup (segment kind fixup)
(sb!assem:emit-back-patch segment
0
(lambda (segment posn)
(declare (ignore segment))
#!+sb-show
(when *show-fixups-being-pushed-p*
(/show "PUSHING FIXUP" kind fixup posn))
(push (make-fixup-note kind fixup posn) *fixup-notes*)))
(values))
|
4ab612269b544a9c915f80fe27098a70ea22281208b932f52831cfc00fa24f70 | lehitoskin/typed-stack | typed-stack.rkt | (module typed-stack
typed/racket
; typed-stack.rkt
; LIFO stack
; "top" refers to the beginning of the list
; "bottom" or "end" refers to the end of the list
(provide make-stack empty-stack stack->list stack->string
stack-empty? stack-length stack=? top in-stack
pop pop! push push! push* push*! push-dup push-dup!
pop-all! swap swap! push-over push-over!
rotate rotate! reverse-rotate reverse-rotate!
pop-nip pop-nip! push-tuck push-tuck!
push-pick push-pick! roll roll! Stack
(rename-out [Stack? stack?]))
(struct (A) Stack ([contents : (Listof A)]) #:mutable #:transparent)
; produces a mutable stack
(: make-stack (All (A) (A * -> (Stack A))))
(define (make-stack . lst)
(Stack lst))
(: empty-stack (-> (Stack Any)))
(define (empty-stack)
(Stack '()))
; creates a list from the stack as-is
(: stack->list (All (A) ((Stack A) -> (Listof A))))
(define (stack->list stk)
(Stack-contents stk))
; builds a string representation of the stack
; with ordering from bottom to top
(: stack->string (All (A) ((Stack A) -> String)))
(define (stack->string stk)
(with-output-to-string (λ () (printf "~a" (reverse (stack->list stk))))))
(: stack-empty? (All (A) ((Stack A) -> Boolean)))
(define (stack-empty? stk)
(empty? (stack->list stk)))
(: stack-length (All (A) ((Stack A) -> Nonnegative-Integer)))
(define (stack-length stk)
(length (stack->list stk)))
(: stack=? (All (A B) ((Stack A) (Stack B) -> Boolean)))
(define (stack=? stk1 stk2)
(equal? (stack->list stk1) (stack->list stk2)))
(: top (All (A) ((Stack A) -> A)))
(define (top stk)
(if (stack-empty? stk)
(raise-argument-error 'top "stack-length >= 1" 0)
(first (stack->list stk))))
; returns a sequence to use with stacks
(: in-stack (All (A) ((Stack A) -> (Sequenceof A))))
(define (in-stack stk)
(in-list (stack->list stk)))
; pops the stack
(: pop (All (A) ((Stack A) -> (Values A (Stack A)))))
(define (pop stk)
(values (top stk) (Stack (rest (stack->list stk)))))
(: pop! (All (A) ((Stack A) -> A)))
(define (pop! stk)
(define t (top stk))
(set-Stack-contents! stk (rest (stack->list stk)))
t)
(: push (All (A) ((Stack A) A -> (Stack A))))
(define (push stk val)
(Stack (cons val (stack->list stk))))
(: push! (All (A) ((Stack A) A -> Void)))
(define (push! stk val)
(set-Stack-contents! stk (cons val (stack->list stk))))
; push multiple values to the stack from right to left
; the order in the procedure call is the same as the
; stack order
(: push* (All (A) ((Stack A) A * -> (Stack A))))
(define (push* stk . lst)
(Stack (append lst (stack->list stk))))
(: push*! (All (A) ((Stack A) A * -> Void)))
(define (push*! stk . lst)
(set-Stack-contents! stk (append lst (stack->list stk))))
; push a copy of the top of the stack onto the stack
(: push-dup (All (A) ((Stack A) -> (Stack A))))
(define (push-dup stk)
(push stk (top stk)))
(: push-dup! (All (A) ((Stack A) -> Void)))
(define (push-dup! stk)
(push! stk (top stk)))
; removes all items from the stack
(: pop-all! (All (A) ((Stack A) -> Void)))
(define (pop-all! stk)
(unless (stack-empty? stk)
(set-Stack-contents! stk empty)))
swaps the location of the two topmost items
(: swap (All (A) ((Stack A) -> (Stack A))))
(define (swap stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'swap "stack-length >= 2" (stack-length stk))]
[else
(define-values (one rst1) (pop stk))
(define two (top rst1))
(define-values (three rst2) (pop rst1))
(push (push rst2 one) two)]))
(: swap! (All (A) ((Stack A) -> Void)))
(define (swap! stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'swap! "stack-length >= 2" (stack-length stk))]
[else
(define one (pop! stk))
(define two (pop! stk))
(push! stk one)
(push! stk two)]))
push a copy of the second topmost item onto the stack
(: push-over (All (A) ((Stack A) -> (Stack A))))
(define (push-over stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'push-over "stack-length >= 2"
(stack-length stk))]
[else
(define-values (one rst) (pop stk))
(define two (top rst))
(push stk two)]))
(: push-over! (All (A) ((Stack A) -> Void)))
(define (push-over! stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'push-over! "stack-length >= 2"
(stack-length stk))]
[else
(define-values (one rst) (pop stk))
(define two (top rst))
(push! stk two)]))
rotates the top three items downward
( stack ' ( 3 2 1 ) ) - > ( stack ' ( 1 3 2 ) )
(: rotate (All (A) ((Stack A) -> (Stack A))))
(define (rotate stk)
(cond [(< (stack-length stk) 3)
(raise-argument-error 'rotate "stack-length >= 3" (stack-length stk))]
[else (roll stk 2)]))
(: rotate! (All (A) ((Stack A) -> Void)))
(define (rotate! stk)
(cond [(< (stack-length stk) 3)
(raise-argument-error 'rotate! "stack-length >= 3" (stack-length stk))]
[else (roll! stk 2)]))
rotates the top three items upward
( stack ' ( 3 2 1 ) ) - > ( stack ' ( 2 1 3 ) )
; equivalent to rotating twice
(: reverse-rotate (All (A) ((Stack A) -> (Stack A))))
(define (reverse-rotate stk)
(cond [(< (stack-length stk) 3)
(raise-argument-error 'reverse-rotate "stack-length >= 3"
(stack-length stk))]
[else (roll (roll stk 2) 2)]))
(: reverse-rotate! (All (A) ((Stack A) -> Void)))
(define (reverse-rotate! stk)
(cond [(< (stack-length stk) 3)
(raise-argument-error 'reverse-rotate! "stack-length >= 3"
(stack-length stk))]
[else
(roll! stk 2)
(roll! stk 2)]))
removes the second topmost item from the stack
(: pop-nip (All (A) ((Stack A) -> (Stack A))))
(define (pop-nip stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'pop-nip "stack-length >= 2" (stack-length stk))]
[else
(define-values (one rst1) (pop stk))
(define-values (two rst2) (pop rst1))
(push rst2 one)]))
(: pop-nip! (All (A) ((Stack A) -> Void)))
(define (pop-nip! stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'pop-nip! "stack-length >= 2"
(stack-length stk))]
[else
(define val (top stk))
(pop! stk)
(pop! stk)
(push! stk val)]))
swaps the top two items and then pushes a copy of the former top item
(: push-tuck (All (A) ((Stack A) -> (Stack A))))
(define (push-tuck stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'push-tuck "stack-length >= 2"
(stack-length stk))]
[else
(push-over (swap stk))]))
(: push-tuck! (All (A) ((Stack A) -> Void)))
(define (push-tuck! stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'push-tuck! "stack-length >= 2"
(stack-length stk))]
[else
(swap! stk)
(push-over! stk)]))
; pushes a copy of the specified index
(: push-pick (All (A) ((Stack A) Nonnegative-Integer -> (Stack A))))
(define (push-pick stk i)
(define stk-len (stack-length stk))
(cond [(>= i stk-len)
(raise-argument-error 'push-pick! (format "i < ~a" stk-len) i)]
[else
(define val (list-ref (stack->list stk) i))
(push stk val)]))
(: push-pick! (All (A) ((Stack A) Nonnegative-Integer -> Void)))
(define (push-pick! stk i)
(define stk-len (stack-length stk))
(cond [(>= i stk-len)
(raise-argument-error 'push-pick! (format "i < ~a" stk-len) i)]
[else
(define val (list-ref (stack->list stk) i))
(push! stk val)]))
; removes the item at the index and pushes to the top of the stack
(: roll (All (A) ((Stack A) Nonnegative-Integer -> (Stack A))))
(define (roll stk i)
(define stk-len (stack-length stk))
(cond [(>= i stk-len)
(raise-argument-error 'roll (format "i < ~a" stk-len) i)]
[else
(define lst (stack->list stk))
(define val (list-ref lst i))
(define-values (a b) (split-at lst i))
(push (Stack (append a (cdr b))) val)]))
(: roll! (All (A) ((Stack A) Nonnegative-Integer -> Void)))
(define (roll! stk i)
(define stk-len (stack-length stk))
(cond [(>= i stk-len)
(raise-argument-error 'roll! (format "i < ~a" stk-len) i)]
[else
(define lst (stack->list stk))
(define val (list-ref lst i))
(define-values (a b) (split-at lst i))
(set-Stack-contents! stk (cons val (append a (rest b))))]))
)
| null | https://raw.githubusercontent.com/lehitoskin/typed-stack/5bcf55322b3a97ecfb0233ed77f282507eb2f6ad/typed-stack.rkt | racket | typed-stack.rkt
LIFO stack
"top" refers to the beginning of the list
"bottom" or "end" refers to the end of the list
produces a mutable stack
creates a list from the stack as-is
builds a string representation of the stack
with ordering from bottom to top
returns a sequence to use with stacks
pops the stack
push multiple values to the stack from right to left
the order in the procedure call is the same as the
stack order
push a copy of the top of the stack onto the stack
removes all items from the stack
equivalent to rotating twice
pushes a copy of the specified index
removes the item at the index and pushes to the top of the stack | (module typed-stack
typed/racket
(provide make-stack empty-stack stack->list stack->string
stack-empty? stack-length stack=? top in-stack
pop pop! push push! push* push*! push-dup push-dup!
pop-all! swap swap! push-over push-over!
rotate rotate! reverse-rotate reverse-rotate!
pop-nip pop-nip! push-tuck push-tuck!
push-pick push-pick! roll roll! Stack
(rename-out [Stack? stack?]))
(struct (A) Stack ([contents : (Listof A)]) #:mutable #:transparent)
(: make-stack (All (A) (A * -> (Stack A))))
(define (make-stack . lst)
(Stack lst))
(: empty-stack (-> (Stack Any)))
(define (empty-stack)
(Stack '()))
(: stack->list (All (A) ((Stack A) -> (Listof A))))
(define (stack->list stk)
(Stack-contents stk))
(: stack->string (All (A) ((Stack A) -> String)))
(define (stack->string stk)
(with-output-to-string (λ () (printf "~a" (reverse (stack->list stk))))))
(: stack-empty? (All (A) ((Stack A) -> Boolean)))
(define (stack-empty? stk)
(empty? (stack->list stk)))
(: stack-length (All (A) ((Stack A) -> Nonnegative-Integer)))
(define (stack-length stk)
(length (stack->list stk)))
(: stack=? (All (A B) ((Stack A) (Stack B) -> Boolean)))
(define (stack=? stk1 stk2)
(equal? (stack->list stk1) (stack->list stk2)))
(: top (All (A) ((Stack A) -> A)))
(define (top stk)
(if (stack-empty? stk)
(raise-argument-error 'top "stack-length >= 1" 0)
(first (stack->list stk))))
(: in-stack (All (A) ((Stack A) -> (Sequenceof A))))
(define (in-stack stk)
(in-list (stack->list stk)))
(: pop (All (A) ((Stack A) -> (Values A (Stack A)))))
(define (pop stk)
(values (top stk) (Stack (rest (stack->list stk)))))
(: pop! (All (A) ((Stack A) -> A)))
(define (pop! stk)
(define t (top stk))
(set-Stack-contents! stk (rest (stack->list stk)))
t)
(: push (All (A) ((Stack A) A -> (Stack A))))
(define (push stk val)
(Stack (cons val (stack->list stk))))
(: push! (All (A) ((Stack A) A -> Void)))
(define (push! stk val)
(set-Stack-contents! stk (cons val (stack->list stk))))
(: push* (All (A) ((Stack A) A * -> (Stack A))))
(define (push* stk . lst)
(Stack (append lst (stack->list stk))))
(: push*! (All (A) ((Stack A) A * -> Void)))
(define (push*! stk . lst)
(set-Stack-contents! stk (append lst (stack->list stk))))
(: push-dup (All (A) ((Stack A) -> (Stack A))))
(define (push-dup stk)
(push stk (top stk)))
(: push-dup! (All (A) ((Stack A) -> Void)))
(define (push-dup! stk)
(push! stk (top stk)))
(: pop-all! (All (A) ((Stack A) -> Void)))
(define (pop-all! stk)
(unless (stack-empty? stk)
(set-Stack-contents! stk empty)))
swaps the location of the two topmost items
(: swap (All (A) ((Stack A) -> (Stack A))))
(define (swap stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'swap "stack-length >= 2" (stack-length stk))]
[else
(define-values (one rst1) (pop stk))
(define two (top rst1))
(define-values (three rst2) (pop rst1))
(push (push rst2 one) two)]))
(: swap! (All (A) ((Stack A) -> Void)))
(define (swap! stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'swap! "stack-length >= 2" (stack-length stk))]
[else
(define one (pop! stk))
(define two (pop! stk))
(push! stk one)
(push! stk two)]))
push a copy of the second topmost item onto the stack
(: push-over (All (A) ((Stack A) -> (Stack A))))
(define (push-over stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'push-over "stack-length >= 2"
(stack-length stk))]
[else
(define-values (one rst) (pop stk))
(define two (top rst))
(push stk two)]))
(: push-over! (All (A) ((Stack A) -> Void)))
(define (push-over! stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'push-over! "stack-length >= 2"
(stack-length stk))]
[else
(define-values (one rst) (pop stk))
(define two (top rst))
(push! stk two)]))
rotates the top three items downward
( stack ' ( 3 2 1 ) ) - > ( stack ' ( 1 3 2 ) )
(: rotate (All (A) ((Stack A) -> (Stack A))))
(define (rotate stk)
(cond [(< (stack-length stk) 3)
(raise-argument-error 'rotate "stack-length >= 3" (stack-length stk))]
[else (roll stk 2)]))
(: rotate! (All (A) ((Stack A) -> Void)))
(define (rotate! stk)
(cond [(< (stack-length stk) 3)
(raise-argument-error 'rotate! "stack-length >= 3" (stack-length stk))]
[else (roll! stk 2)]))
rotates the top three items upward
( stack ' ( 3 2 1 ) ) - > ( stack ' ( 2 1 3 ) )
(: reverse-rotate (All (A) ((Stack A) -> (Stack A))))
(define (reverse-rotate stk)
(cond [(< (stack-length stk) 3)
(raise-argument-error 'reverse-rotate "stack-length >= 3"
(stack-length stk))]
[else (roll (roll stk 2) 2)]))
(: reverse-rotate! (All (A) ((Stack A) -> Void)))
(define (reverse-rotate! stk)
(cond [(< (stack-length stk) 3)
(raise-argument-error 'reverse-rotate! "stack-length >= 3"
(stack-length stk))]
[else
(roll! stk 2)
(roll! stk 2)]))
removes the second topmost item from the stack
(: pop-nip (All (A) ((Stack A) -> (Stack A))))
(define (pop-nip stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'pop-nip "stack-length >= 2" (stack-length stk))]
[else
(define-values (one rst1) (pop stk))
(define-values (two rst2) (pop rst1))
(push rst2 one)]))
(: pop-nip! (All (A) ((Stack A) -> Void)))
(define (pop-nip! stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'pop-nip! "stack-length >= 2"
(stack-length stk))]
[else
(define val (top stk))
(pop! stk)
(pop! stk)
(push! stk val)]))
swaps the top two items and then pushes a copy of the former top item
(: push-tuck (All (A) ((Stack A) -> (Stack A))))
(define (push-tuck stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'push-tuck "stack-length >= 2"
(stack-length stk))]
[else
(push-over (swap stk))]))
(: push-tuck! (All (A) ((Stack A) -> Void)))
(define (push-tuck! stk)
(cond [(< (stack-length stk) 2)
(raise-argument-error 'push-tuck! "stack-length >= 2"
(stack-length stk))]
[else
(swap! stk)
(push-over! stk)]))
(: push-pick (All (A) ((Stack A) Nonnegative-Integer -> (Stack A))))
(define (push-pick stk i)
(define stk-len (stack-length stk))
(cond [(>= i stk-len)
(raise-argument-error 'push-pick! (format "i < ~a" stk-len) i)]
[else
(define val (list-ref (stack->list stk) i))
(push stk val)]))
(: push-pick! (All (A) ((Stack A) Nonnegative-Integer -> Void)))
(define (push-pick! stk i)
(define stk-len (stack-length stk))
(cond [(>= i stk-len)
(raise-argument-error 'push-pick! (format "i < ~a" stk-len) i)]
[else
(define val (list-ref (stack->list stk) i))
(push! stk val)]))
(: roll (All (A) ((Stack A) Nonnegative-Integer -> (Stack A))))
(define (roll stk i)
(define stk-len (stack-length stk))
(cond [(>= i stk-len)
(raise-argument-error 'roll (format "i < ~a" stk-len) i)]
[else
(define lst (stack->list stk))
(define val (list-ref lst i))
(define-values (a b) (split-at lst i))
(push (Stack (append a (cdr b))) val)]))
(: roll! (All (A) ((Stack A) Nonnegative-Integer -> Void)))
(define (roll! stk i)
(define stk-len (stack-length stk))
(cond [(>= i stk-len)
(raise-argument-error 'roll! (format "i < ~a" stk-len) i)]
[else
(define lst (stack->list stk))
(define val (list-ref lst i))
(define-values (a b) (split-at lst i))
(set-Stack-contents! stk (cons val (append a (rest b))))]))
)
|
dd732baadf67414c2d9955fee0cc075a84ea681198fceb272b399b10420619d2 | mbutterick/sugar | list.rkt | #lang racket/base
(require (for-syntax
racket/base)
racket/list
racket/match
racket/function
"define.rkt")
(define (increasing-nonnegative-list? x)
(and (list? x) (or (empty? x) (apply < -1 x))))
(define+provide+safe (trimf xs test-proc)
(list? procedure? . -> . list?)
(unless (list? xs)
(raise-argument-error 'trimf "list?" xs))
(dropf-right (dropf xs test-proc) test-proc))
(define (slicef-and-filter-split-helper xs pred [separate-negated? #f])
(let loop ([xs xs][negating? #f][acc empty][negated-acc empty])
(match xs
[(? empty?) (if separate-negated?
(values (reverse acc) (reverse negated-acc))
(reverse acc))]
[(list* (? (if negating? (negate pred) pred) pred-xs) ... other-xs)
(cond
[(and negating? separate-negated?)
(loop other-xs
(not negating?)
acc
(match pred-xs
[(? empty?) negated-acc]
[_ (cons pred-xs negated-acc)]))]
[else
(loop other-xs
(not negating?)
(match pred-xs
[(? empty?) acc]
[_ (cons pred-xs acc)])
negated-acc)])])))
(define+provide+safe (slicef xs pred)
(list? procedure? . -> . (listof list?))
(unless (list? xs)
(raise-argument-error 'slicef "list?" xs))
(slicef-and-filter-split-helper xs pred))
(define+provide+safe (slicef-at xs pred [force? #f])
((list? procedure?) (boolean?) . ->* . (listof list?))
(unless (list? xs)
(raise-argument-error 'slicef-at "list?" xs))
(unless (procedure? pred)
(raise-argument-error 'slicef-at "procedure?" pred))
(let loop ([xs xs][acc empty])
(match xs
[(== empty) (reverse acc)]
[(list* (? pred pred-x) (? (negate pred) not-pred-xs) ... tail)
(loop tail (cons (cons pred-x not-pred-xs) acc))]
[(list* (? (negate pred) not-pred-xs) ... tail)
(loop tail (if force? acc (cons not-pred-xs acc)))])))
(define+provide+safe (slicef-after xs pred [force? #f])
((list? procedure?) (boolean?) . ->* . (listof list?))
(unless (list? xs)
(raise-argument-error 'slicef-after "list?" xs))
(unless (procedure? pred)
(raise-argument-error 'slicef-after "procedure?" pred))
(let loop ([xs xs][acc empty])
(match xs
[(== empty) (reverse acc)]
[(list* (? (negate pred) not-pred-xs) ... (? pred pred-x) tail)
(loop tail (cons (append not-pred-xs (list pred-x)) acc))]
[tail (loop empty (if force? acc (cons tail acc)))])))
(define+provide+safe (slice-at xs len [force? #f])
((list? exact-nonnegative-integer?) (boolean?) . ->* . (listof list?))
(unless (list? xs)
(raise-argument-error 'slice-at "list?" xs))
(unless (and (integer? len) (positive? len))
(raise-argument-error 'slice-at "positive integer for sublist length" len))
(let loop ([xs xs][slices empty])
(if (< (length xs) len)
(reverse (if (or force? (empty? xs))
slices
(cons xs slices)))
(match/values (split-at xs len)
[(subxs rest) (loop rest (cons subxs slices))]))))
(define+provide+safe (partition* pred xs)
(predicate/c list? . -> . (values list? list?))
(unless (list? xs)
(raise-argument-error 'partition* "list?" xs))
(slicef-and-filter-split-helper xs pred 'drop-negated))
(define+provide+safe (filter-split xs pred)
(list? predicate/c . -> . (listof list?))
(unless (list? xs)
(raise-argument-error 'filter-split "list?" xs))
;; same idea as slicef, but the negated items are dropped
(define-values (negated-pred-xs _) (partition* (negate pred) xs))
negated-pred-xs)
(define+provide+safe (frequency-hash xs)
(list? . -> . hash?)
(unless (list? xs)
(raise-argument-error 'frequency-hash "list?" xs))
(define counter (make-hash))
(for ([item (in-list xs)])
(hash-update! counter item add1 0))
counter)
(define (->list x)
(match x
[(? list? x) x]
[(? vector?) (vector->list x)]
[(? string?) (string->list x)]
[else (raise-argument-error '->list "item that can be converted to list" x)]))
(define+provide+safe (members-unique? x)
((or/c list? vector? string?) . -> . boolean?)
(match (->list x)
[(? list? x) (= (length (remove-duplicates x)) (length x))]
[_ (raise-argument-error 'members-unique? "list, vector, or string" x)]))
(define+provide+safe (members-unique?/error x)
((or/c list? vector? string?) . -> . boolean?)
(match (members-unique? x)
[(== #false)
(define duplicate-keys (filter values (hash-map (frequency-hash (->list x))
(λ (element freq) (and (> freq 1) element)))))
(error (string-append "members-unique? failed because " (if (= (length duplicate-keys) 1)
"item isn't"
"items aren't") " unique:") duplicate-keys)]
[result result]))
(provide+safe values->list)
(define-syntax (values->list stx)
(syntax-case stx ()
[(_ VALUES-EXPR) #'(call-with-values (λ () VALUES-EXPR) list)]))
(define+provide+safe (sublist xs i j)
(list? exact-nonnegative-integer? exact-nonnegative-integer? . -> . list?)
(unless (list? xs)
(raise-argument-error 'sublist "list?" xs))
(cond
[(> j (length xs)) (error 'sublist (format "ending index ~a exceeds length of list" j))]
[(>= j i) (for/list ([(x idx) (in-indexed xs)]
#:when (<= i idx (sub1 j)))
x)]
[else (raise-argument-error 'sublist (format "starting index larger than ending index" (list i j)))]))
(define+provide+safe (break-at xs bps-in)
(list? any/c . -> . (listof list?))
(unless (list? xs)
(raise-argument-error 'break-at "list" xs))
(define bps ((if (list? bps-in) values list) bps-in))
(when (ormap (λ (bp) (<= (length xs) bp)) bps)
(raise-argument-error 'break-at
(format "breakpoints not greater than or equal to input list length = ~a" (length xs)) bps))
(unless (increasing-nonnegative-list? bps)
(raise-argument-error 'break-at "increasing-nonnegative-list" bps))
;; easier to do back to front, because then the list index for each item won't change during the recursion
cons a zero onto bps ( which may already start with zero ) and then use that as the terminating condition
because breaking at zero means we 've reached the start of the list
(let loop ([xs xs][bps (reverse (cons 0 bps))][acc empty])
(match bps
[(cons (? zero?) _) (cons xs acc)] ; return whatever's left, because no more splits are possible
[_ (match/values (split-at xs (car bps))
[(head tail) (loop head (cdr bps) (cons tail acc))])])))
(define (shift-base xs how-far fill-item cycle caller)
(unless (list? xs)
(raise-argument-error caller "list?" xs))
(define abs-how-far (if cycle
(modulo (abs how-far) (length xs))
(abs how-far)))
(define (make-fill thing) (if cycle thing (make-list abs-how-far fill-item)))
(cond
[(> abs-how-far (length xs))
(raise-argument-error caller
(format "index not larger than list length ~a" (length xs))
(* (if (eq? caller 'shift-left) -1 1) how-far))]
[(zero? how-far) xs]
[(positive? how-far)
(match/values (split-at-right xs abs-how-far)
[(head tail) (append (make-fill tail) head)])]
[else ; how-far is negative
(match/values (split-at xs abs-how-far)
[(head tail) (append tail (make-fill head))])]))
(define+provide+safe (shift xs how-far [fill-item #f] [cycle #f])
((list? integer?) (any/c boolean?) . ->* . list?)
(shift-base xs how-far fill-item cycle 'shift))
(define+provide+safe (shift-left xs how-far [fill-item #f] [cycle #f])
((list? integer?) (any/c boolean?) . ->* . list?)
(shift-base xs (- how-far) fill-item cycle 'shift-left))
(define+provide+safe (shift-cycle xs how-far)
(list? integer? . -> . list?)
(shift-base xs how-far #false #true 'shift-cycle))
(define+provide+safe (shift-left-cycle xs how-far)
(list? integer? . -> . list?)
(shift-base xs (- how-far) #false #true 'shift-left-cycle))
(define+provide+safe (shifts xs how-fars [fill-item #f] [cycle #f])
((list? (listof integer?)) (any/c boolean?) . ->* . (listof list?))
(unless (list? xs)
(raise-argument-error 'shifts "list?" xs))
(map (λ (how-far) (shift xs how-far fill-item cycle)) how-fars))
(define+provide+safe (shift/values xs shift-amount-or-amounts [fill-item #f] [cycle #f])
((list? (or/c (listof integer?) integer?)) (any/c boolean?) . ->* . any)
(apply values ((if (list? shift-amount-or-amounts)
shifts
shift) xs shift-amount-or-amounts fill-item cycle))) | null | https://raw.githubusercontent.com/mbutterick/sugar/990b0b589274a36a58e27197e771500c5898b5a2/sugar/list.rkt | racket | same idea as slicef, but the negated items are dropped
easier to do back to front, because then the list index for each item won't change during the recursion
return whatever's left, because no more splits are possible
how-far is negative | #lang racket/base
(require (for-syntax
racket/base)
racket/list
racket/match
racket/function
"define.rkt")
(define (increasing-nonnegative-list? x)
(and (list? x) (or (empty? x) (apply < -1 x))))
(define+provide+safe (trimf xs test-proc)
(list? procedure? . -> . list?)
(unless (list? xs)
(raise-argument-error 'trimf "list?" xs))
(dropf-right (dropf xs test-proc) test-proc))
(define (slicef-and-filter-split-helper xs pred [separate-negated? #f])
(let loop ([xs xs][negating? #f][acc empty][negated-acc empty])
(match xs
[(? empty?) (if separate-negated?
(values (reverse acc) (reverse negated-acc))
(reverse acc))]
[(list* (? (if negating? (negate pred) pred) pred-xs) ... other-xs)
(cond
[(and negating? separate-negated?)
(loop other-xs
(not negating?)
acc
(match pred-xs
[(? empty?) negated-acc]
[_ (cons pred-xs negated-acc)]))]
[else
(loop other-xs
(not negating?)
(match pred-xs
[(? empty?) acc]
[_ (cons pred-xs acc)])
negated-acc)])])))
(define+provide+safe (slicef xs pred)
(list? procedure? . -> . (listof list?))
(unless (list? xs)
(raise-argument-error 'slicef "list?" xs))
(slicef-and-filter-split-helper xs pred))
(define+provide+safe (slicef-at xs pred [force? #f])
((list? procedure?) (boolean?) . ->* . (listof list?))
(unless (list? xs)
(raise-argument-error 'slicef-at "list?" xs))
(unless (procedure? pred)
(raise-argument-error 'slicef-at "procedure?" pred))
(let loop ([xs xs][acc empty])
(match xs
[(== empty) (reverse acc)]
[(list* (? pred pred-x) (? (negate pred) not-pred-xs) ... tail)
(loop tail (cons (cons pred-x not-pred-xs) acc))]
[(list* (? (negate pred) not-pred-xs) ... tail)
(loop tail (if force? acc (cons not-pred-xs acc)))])))
(define+provide+safe (slicef-after xs pred [force? #f])
((list? procedure?) (boolean?) . ->* . (listof list?))
(unless (list? xs)
(raise-argument-error 'slicef-after "list?" xs))
(unless (procedure? pred)
(raise-argument-error 'slicef-after "procedure?" pred))
(let loop ([xs xs][acc empty])
(match xs
[(== empty) (reverse acc)]
[(list* (? (negate pred) not-pred-xs) ... (? pred pred-x) tail)
(loop tail (cons (append not-pred-xs (list pred-x)) acc))]
[tail (loop empty (if force? acc (cons tail acc)))])))
(define+provide+safe (slice-at xs len [force? #f])
((list? exact-nonnegative-integer?) (boolean?) . ->* . (listof list?))
(unless (list? xs)
(raise-argument-error 'slice-at "list?" xs))
(unless (and (integer? len) (positive? len))
(raise-argument-error 'slice-at "positive integer for sublist length" len))
(let loop ([xs xs][slices empty])
(if (< (length xs) len)
(reverse (if (or force? (empty? xs))
slices
(cons xs slices)))
(match/values (split-at xs len)
[(subxs rest) (loop rest (cons subxs slices))]))))
(define+provide+safe (partition* pred xs)
(predicate/c list? . -> . (values list? list?))
(unless (list? xs)
(raise-argument-error 'partition* "list?" xs))
(slicef-and-filter-split-helper xs pred 'drop-negated))
(define+provide+safe (filter-split xs pred)
(list? predicate/c . -> . (listof list?))
(unless (list? xs)
(raise-argument-error 'filter-split "list?" xs))
(define-values (negated-pred-xs _) (partition* (negate pred) xs))
negated-pred-xs)
(define+provide+safe (frequency-hash xs)
(list? . -> . hash?)
(unless (list? xs)
(raise-argument-error 'frequency-hash "list?" xs))
(define counter (make-hash))
(for ([item (in-list xs)])
(hash-update! counter item add1 0))
counter)
(define (->list x)
(match x
[(? list? x) x]
[(? vector?) (vector->list x)]
[(? string?) (string->list x)]
[else (raise-argument-error '->list "item that can be converted to list" x)]))
(define+provide+safe (members-unique? x)
((or/c list? vector? string?) . -> . boolean?)
(match (->list x)
[(? list? x) (= (length (remove-duplicates x)) (length x))]
[_ (raise-argument-error 'members-unique? "list, vector, or string" x)]))
(define+provide+safe (members-unique?/error x)
((or/c list? vector? string?) . -> . boolean?)
(match (members-unique? x)
[(== #false)
(define duplicate-keys (filter values (hash-map (frequency-hash (->list x))
(λ (element freq) (and (> freq 1) element)))))
(error (string-append "members-unique? failed because " (if (= (length duplicate-keys) 1)
"item isn't"
"items aren't") " unique:") duplicate-keys)]
[result result]))
(provide+safe values->list)
(define-syntax (values->list stx)
(syntax-case stx ()
[(_ VALUES-EXPR) #'(call-with-values (λ () VALUES-EXPR) list)]))
(define+provide+safe (sublist xs i j)
(list? exact-nonnegative-integer? exact-nonnegative-integer? . -> . list?)
(unless (list? xs)
(raise-argument-error 'sublist "list?" xs))
(cond
[(> j (length xs)) (error 'sublist (format "ending index ~a exceeds length of list" j))]
[(>= j i) (for/list ([(x idx) (in-indexed xs)]
#:when (<= i idx (sub1 j)))
x)]
[else (raise-argument-error 'sublist (format "starting index larger than ending index" (list i j)))]))
(define+provide+safe (break-at xs bps-in)
(list? any/c . -> . (listof list?))
(unless (list? xs)
(raise-argument-error 'break-at "list" xs))
(define bps ((if (list? bps-in) values list) bps-in))
(when (ormap (λ (bp) (<= (length xs) bp)) bps)
(raise-argument-error 'break-at
(format "breakpoints not greater than or equal to input list length = ~a" (length xs)) bps))
(unless (increasing-nonnegative-list? bps)
(raise-argument-error 'break-at "increasing-nonnegative-list" bps))
cons a zero onto bps ( which may already start with zero ) and then use that as the terminating condition
because breaking at zero means we 've reached the start of the list
(let loop ([xs xs][bps (reverse (cons 0 bps))][acc empty])
(match bps
[_ (match/values (split-at xs (car bps))
[(head tail) (loop head (cdr bps) (cons tail acc))])])))
(define (shift-base xs how-far fill-item cycle caller)
(unless (list? xs)
(raise-argument-error caller "list?" xs))
(define abs-how-far (if cycle
(modulo (abs how-far) (length xs))
(abs how-far)))
(define (make-fill thing) (if cycle thing (make-list abs-how-far fill-item)))
(cond
[(> abs-how-far (length xs))
(raise-argument-error caller
(format "index not larger than list length ~a" (length xs))
(* (if (eq? caller 'shift-left) -1 1) how-far))]
[(zero? how-far) xs]
[(positive? how-far)
(match/values (split-at-right xs abs-how-far)
[(head tail) (append (make-fill tail) head)])]
(match/values (split-at xs abs-how-far)
[(head tail) (append tail (make-fill head))])]))
(define+provide+safe (shift xs how-far [fill-item #f] [cycle #f])
((list? integer?) (any/c boolean?) . ->* . list?)
(shift-base xs how-far fill-item cycle 'shift))
(define+provide+safe (shift-left xs how-far [fill-item #f] [cycle #f])
((list? integer?) (any/c boolean?) . ->* . list?)
(shift-base xs (- how-far) fill-item cycle 'shift-left))
(define+provide+safe (shift-cycle xs how-far)
(list? integer? . -> . list?)
(shift-base xs how-far #false #true 'shift-cycle))
(define+provide+safe (shift-left-cycle xs how-far)
(list? integer? . -> . list?)
(shift-base xs (- how-far) #false #true 'shift-left-cycle))
(define+provide+safe (shifts xs how-fars [fill-item #f] [cycle #f])
((list? (listof integer?)) (any/c boolean?) . ->* . (listof list?))
(unless (list? xs)
(raise-argument-error 'shifts "list?" xs))
(map (λ (how-far) (shift xs how-far fill-item cycle)) how-fars))
(define+provide+safe (shift/values xs shift-amount-or-amounts [fill-item #f] [cycle #f])
((list? (or/c (listof integer?) integer?)) (any/c boolean?) . ->* . any)
(apply values ((if (list? shift-amount-or-amounts)
shifts
shift) xs shift-amount-or-amounts fill-item cycle))) |
ef0817a3910fd3ad1d4631da4aede96617a0758751c7109e515014bd83c8b044 | 2600hz/kazoo | knm_errors.erl | %%%-----------------------------------------------------------------------------
( C ) 2010 - 2020 , 2600Hz
%%% @doc
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(knm_errors).
-export([unspecified/2
,unauthorized/0
,number_exists/1
,invalid_state_transition/3
,no_change_required/1
,service_restriction/2
,carrier_not_specified/1
,billing_issue/2
,invalid/2
,multiple_choice/2
,assign_failure/2
,database_error/2
,number_is_porting/1
,by_carrier/3
]).
-export([to_json/1, to_json/2, to_json/3
,code/1
,error/1
,cause/1
,message/1
]).
-export([failed_to_proplist/1
,failed_to_json/1
]).
-include("knm.hrl").
-define(CODE, <<"code">>).
-define(ERROR, <<"error">>).
-define(CAUSE, <<"cause">>).
-define(MESSAGE, <<"message">>).
-type binatom_reason() :: atom() | kz_term:ne_binary().
-type error() :: kz_json:object().
%% used by knm_pipe
-type reason() :: error() | atom().
-type reasons() :: [reason()].
-type failed() :: #{kz_term:ne_binary() => reason()}.
-type proplist() :: [{kz_term:ne_binary(), reason()}].
-type thrown_error() :: {'error', atom()} |
{'error', atom(), any()} |
{'error', atom(), any(), any()}.
-export_type([error/0
,failed/0
,proplist/0
,reason/0, reasons/0
,thrown_error/0
]).
%%------------------------------------------------------------------------------
%% @doc Convert knm_pipe failed map to proplist.
%% @end
%%------------------------------------------------------------------------------
-spec failed_to_proplist(failed()) -> proplist().
failed_to_proplist(Failed) ->
maps:to_list(Failed).
%%------------------------------------------------------------------------------
%% @doc Convert knm_pipe failed map to proplist.
%% @end
%%------------------------------------------------------------------------------
-spec failed_to_json(failed()) -> kz_json:object().
failed_to_json(Failed) ->
kz_json:from_map(Failed).
-spec unspecified(any(), knm_phone_number:record() | kz_term:ne_binary()) -> no_return().
unspecified(Error, PN) ->
throw({'error', Error, PN}).
-spec unauthorized() -> no_return().
unauthorized() ->
throw({'error', 'unauthorized'}).
-spec number_exists(kz_term:ne_binary()) -> no_return().
number_exists(DID) ->
throw({'error', 'number_exists', DID}).
-spec invalid_state_transition(knm_phone_number:record() | knm_phone_number:record(), kz_term:api_ne_binary(), kz_term:ne_binary()) -> no_return().
invalid_state_transition(PN, undefined, ToState) ->
invalid_state_transition(PN, <<"(nothing)">>, ToState);
invalid_state_transition(PN, FromState, ToState) ->
Reason = <<"from ", FromState/binary, " to ", ToState/binary>>,
throw({'error', 'invalid_state_transition', PN, Reason}).
-spec no_change_required(knm_phone_number:record()) -> no_return().
no_change_required(PN) ->
throw({'error', 'no_change_required', PN}).
-spec service_restriction(knm_phone_number:record(), kz_term:ne_binary()) -> no_return().
service_restriction(PN, Message) ->
throw({'error', 'service_restriction', PN, Message}).
-spec carrier_not_specified(knm_phone_number:record()) -> no_return().
carrier_not_specified(PN) ->
throw({'error', 'carrier_not_specified', PN}).
-spec billing_issue(kz_term:ne_binary(), kz_json:object()) -> no_return().
billing_issue(AccountId, Reason) ->
throw({'error', 'billing_issue', AccountId, Reason}).
-spec invalid(knm_phone_number:record(), kz_term:ne_binary()) -> no_return().
invalid(PN, Reason) ->
throw({'error', 'invalid', PN, Reason}).
-spec multiple_choice(knm_phone_number:record(), kz_json:object()) -> no_return().
multiple_choice(PN, Update) ->
throw({'error', 'multiple_choice', PN, Update}).
-spec assign_failure(knm_phone_number:record(), any()) -> no_return().
assign_failure(PN, E) ->
throw({'error', 'assign_failure', PN, E}).
-spec database_error(kz_data:data_errors(), knm_phone_number:record()) -> no_return().
database_error(E, PN) ->
throw({'error', 'database_error', PN, E}).
-spec number_is_porting(kz_term:ne_binary()) -> no_return().
number_is_porting(Num) ->
throw({'error', 'number_is_porting', Num}).
-spec by_carrier(module(), binatom_reason(), kz_term:api_ne_binary() | knm_phone_number:record()) -> no_return().
by_carrier(Carrier, E, 'undefined') ->
throw_by_carrier(Carrier, E, <<"unknown">>);
by_carrier(Carrier, E, <<Num/binary>>) ->
throw_by_carrier(Carrier, E, Num);
by_carrier(Carrier, E, PN) ->
throw_by_carrier(Carrier, E, knm_phone_number:number(PN)).
-spec throw_by_carrier(module(), binatom_reason(), kz_term:ne_binary()) -> no_return().
throw_by_carrier(Carrier, E, Num) ->
throw({'error', 'by_carrier', Num, {Carrier, E}}).
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
-spec to_json(binatom_reason()) -> error().
to_json(Reason)->
to_json(Reason, 'undefined').
-spec to_json(binatom_reason(), kz_term:api_ne_binary()) -> error().
to_json(Reason, Num)->
to_json(Reason, Num, 'undefined').
-spec to_json(binatom_reason(), kz_term:api_ne_binary() | kz_term:ne_binaries(), atom() | kz_term:ne_binary() | any()) -> error().
to_json('number_is_porting', Num=?NE_BINARY, _) ->
Message = <<"number ", Num/binary, " is porting">>,
build_error(400, 'number_is_porting', Message, Num);
to_json('number_exists', Num=?NE_BINARY, _) ->
Message = <<"number ", Num/binary, " already exists">>,
build_error(409, 'number_exists', Message, Num);
to_json('not_found', Num=?NE_BINARY, _) ->
Message = <<"number ", Num/binary, " not found">>,
build_error(404, 'not_found', Message, Num);
to_json('not_reconcilable', Num=?NE_BINARY, _) ->
Message = <<"number ", Num/binary, " is not reconcilable">>,
build_error(404, 'not_reconcilable', Message, Num);
to_json('unauthorized', _, Cause) ->
Message = <<"requestor is unauthorized to perform operation">>,
build_error(403, 'forbidden', Message, Cause);
to_json('service_restriction', Num=?NE_BINARY, Cause) ->
build_error(402, 'service_restriction', Cause, Num);
to_json('no_change_required', _, Cause) ->
Message = <<"no change required">>,
build_error(400, 'no_change_required', Message, Cause);
to_json('invalid_state_transition', _, Cause) ->
Message = <<"invalid state transition">>,
build_error(400, 'invalid_state_transition', Message, Cause);
to_json('assign_failure', _, Cause) ->
Message = <<"invalid account to assign to">>,
build_error(400, 'assign_failure', Message, Cause);
to_json(Reason='invalid', _, Cause) ->
Message = <<"invalid">>,
build_error(400, Reason, Message, Cause);
to_json('by_carrier', Num, {_Carrier,_Cause}) ->
lager:error("carrier ~s fault: ~p", [_Carrier, _Cause]),
build_error(500, 'unspecified_fault', <<"fault by carrier">>, Num);
to_json('not_enough_credit', AccountId, Reason) ->
Message = io_lib:format("account ~s does not have enough credit to perform the operation"
,[AccountId]
),
build_error(402, 'not_enough_credit', kz_term:to_binary(Message), Reason);
to_json(Reason='internal_error', _, _Cause) ->
lager:error("internal error: ~p", [_Cause]),
build_error(500, Reason, 'internal_error', 'undefined');
to_json(Reason, _, Cause) ->
?LOG_ERROR("funky 500 error: ~p/~p", [Reason, Cause]),
build_error(500, 'unspecified_fault', Reason, Cause).
%%%=============================================================================
Internal functions
%%%=============================================================================
%%------------------------------------------------------------------------------
%% @doc
%% @end
%%------------------------------------------------------------------------------
-spec build_error(integer(), atom(), binatom_reason(), binatom_reason()) ->
error().
build_error(Code, Error, Message, Cause) ->
kz_json:from_list(
[{?CODE, Code}]
++ [{K, kz_term:to_binary(V)}
|| {K, V} <- [{?ERROR, Error}
,{?CAUSE, Cause}
,{?MESSAGE, Message}
],
V =/= 'undefined'
]
).
-spec code(error()) -> kz_term:api_integer().
code(JObj) ->
kz_json:get_value(?CODE, JObj).
-spec error(error()) -> kz_term:api_binary().
error(JObj) ->
kz_json:get_value(?ERROR, JObj).
-spec cause(error()) -> kz_term:api_binary().
cause(JObj) ->
kz_json:get_value(?CAUSE, JObj).
-spec message(error()) -> kz_term:api_binary().
message(JObj) ->
kz_json:get_value(?MESSAGE, JObj).
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/core/kazoo_numbers/src/knm_errors.erl | erlang | -----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
used by knm_pipe
------------------------------------------------------------------------------
@doc Convert knm_pipe failed map to proplist.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Convert knm_pipe failed map to proplist.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
=============================================================================
=============================================================================
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------ | ( C ) 2010 - 2020 , 2600Hz
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(knm_errors).
-export([unspecified/2
,unauthorized/0
,number_exists/1
,invalid_state_transition/3
,no_change_required/1
,service_restriction/2
,carrier_not_specified/1
,billing_issue/2
,invalid/2
,multiple_choice/2
,assign_failure/2
,database_error/2
,number_is_porting/1
,by_carrier/3
]).
-export([to_json/1, to_json/2, to_json/3
,code/1
,error/1
,cause/1
,message/1
]).
-export([failed_to_proplist/1
,failed_to_json/1
]).
-include("knm.hrl").
-define(CODE, <<"code">>).
-define(ERROR, <<"error">>).
-define(CAUSE, <<"cause">>).
-define(MESSAGE, <<"message">>).
-type binatom_reason() :: atom() | kz_term:ne_binary().
-type error() :: kz_json:object().
-type reason() :: error() | atom().
-type reasons() :: [reason()].
-type failed() :: #{kz_term:ne_binary() => reason()}.
-type proplist() :: [{kz_term:ne_binary(), reason()}].
-type thrown_error() :: {'error', atom()} |
{'error', atom(), any()} |
{'error', atom(), any(), any()}.
-export_type([error/0
,failed/0
,proplist/0
,reason/0, reasons/0
,thrown_error/0
]).
-spec failed_to_proplist(failed()) -> proplist().
failed_to_proplist(Failed) ->
maps:to_list(Failed).
-spec failed_to_json(failed()) -> kz_json:object().
failed_to_json(Failed) ->
kz_json:from_map(Failed).
-spec unspecified(any(), knm_phone_number:record() | kz_term:ne_binary()) -> no_return().
unspecified(Error, PN) ->
throw({'error', Error, PN}).
-spec unauthorized() -> no_return().
unauthorized() ->
throw({'error', 'unauthorized'}).
-spec number_exists(kz_term:ne_binary()) -> no_return().
number_exists(DID) ->
throw({'error', 'number_exists', DID}).
-spec invalid_state_transition(knm_phone_number:record() | knm_phone_number:record(), kz_term:api_ne_binary(), kz_term:ne_binary()) -> no_return().
invalid_state_transition(PN, undefined, ToState) ->
invalid_state_transition(PN, <<"(nothing)">>, ToState);
invalid_state_transition(PN, FromState, ToState) ->
Reason = <<"from ", FromState/binary, " to ", ToState/binary>>,
throw({'error', 'invalid_state_transition', PN, Reason}).
-spec no_change_required(knm_phone_number:record()) -> no_return().
no_change_required(PN) ->
throw({'error', 'no_change_required', PN}).
-spec service_restriction(knm_phone_number:record(), kz_term:ne_binary()) -> no_return().
service_restriction(PN, Message) ->
throw({'error', 'service_restriction', PN, Message}).
-spec carrier_not_specified(knm_phone_number:record()) -> no_return().
carrier_not_specified(PN) ->
throw({'error', 'carrier_not_specified', PN}).
-spec billing_issue(kz_term:ne_binary(), kz_json:object()) -> no_return().
billing_issue(AccountId, Reason) ->
throw({'error', 'billing_issue', AccountId, Reason}).
-spec invalid(knm_phone_number:record(), kz_term:ne_binary()) -> no_return().
invalid(PN, Reason) ->
throw({'error', 'invalid', PN, Reason}).
-spec multiple_choice(knm_phone_number:record(), kz_json:object()) -> no_return().
multiple_choice(PN, Update) ->
throw({'error', 'multiple_choice', PN, Update}).
-spec assign_failure(knm_phone_number:record(), any()) -> no_return().
assign_failure(PN, E) ->
throw({'error', 'assign_failure', PN, E}).
-spec database_error(kz_data:data_errors(), knm_phone_number:record()) -> no_return().
database_error(E, PN) ->
throw({'error', 'database_error', PN, E}).
-spec number_is_porting(kz_term:ne_binary()) -> no_return().
number_is_porting(Num) ->
throw({'error', 'number_is_porting', Num}).
-spec by_carrier(module(), binatom_reason(), kz_term:api_ne_binary() | knm_phone_number:record()) -> no_return().
by_carrier(Carrier, E, 'undefined') ->
throw_by_carrier(Carrier, E, <<"unknown">>);
by_carrier(Carrier, E, <<Num/binary>>) ->
throw_by_carrier(Carrier, E, Num);
by_carrier(Carrier, E, PN) ->
throw_by_carrier(Carrier, E, knm_phone_number:number(PN)).
-spec throw_by_carrier(module(), binatom_reason(), kz_term:ne_binary()) -> no_return().
throw_by_carrier(Carrier, E, Num) ->
throw({'error', 'by_carrier', Num, {Carrier, E}}).
-spec to_json(binatom_reason()) -> error().
to_json(Reason)->
to_json(Reason, 'undefined').
-spec to_json(binatom_reason(), kz_term:api_ne_binary()) -> error().
to_json(Reason, Num)->
to_json(Reason, Num, 'undefined').
-spec to_json(binatom_reason(), kz_term:api_ne_binary() | kz_term:ne_binaries(), atom() | kz_term:ne_binary() | any()) -> error().
to_json('number_is_porting', Num=?NE_BINARY, _) ->
Message = <<"number ", Num/binary, " is porting">>,
build_error(400, 'number_is_porting', Message, Num);
to_json('number_exists', Num=?NE_BINARY, _) ->
Message = <<"number ", Num/binary, " already exists">>,
build_error(409, 'number_exists', Message, Num);
to_json('not_found', Num=?NE_BINARY, _) ->
Message = <<"number ", Num/binary, " not found">>,
build_error(404, 'not_found', Message, Num);
to_json('not_reconcilable', Num=?NE_BINARY, _) ->
Message = <<"number ", Num/binary, " is not reconcilable">>,
build_error(404, 'not_reconcilable', Message, Num);
to_json('unauthorized', _, Cause) ->
Message = <<"requestor is unauthorized to perform operation">>,
build_error(403, 'forbidden', Message, Cause);
to_json('service_restriction', Num=?NE_BINARY, Cause) ->
build_error(402, 'service_restriction', Cause, Num);
to_json('no_change_required', _, Cause) ->
Message = <<"no change required">>,
build_error(400, 'no_change_required', Message, Cause);
to_json('invalid_state_transition', _, Cause) ->
Message = <<"invalid state transition">>,
build_error(400, 'invalid_state_transition', Message, Cause);
to_json('assign_failure', _, Cause) ->
Message = <<"invalid account to assign to">>,
build_error(400, 'assign_failure', Message, Cause);
to_json(Reason='invalid', _, Cause) ->
Message = <<"invalid">>,
build_error(400, Reason, Message, Cause);
to_json('by_carrier', Num, {_Carrier,_Cause}) ->
lager:error("carrier ~s fault: ~p", [_Carrier, _Cause]),
build_error(500, 'unspecified_fault', <<"fault by carrier">>, Num);
to_json('not_enough_credit', AccountId, Reason) ->
Message = io_lib:format("account ~s does not have enough credit to perform the operation"
,[AccountId]
),
build_error(402, 'not_enough_credit', kz_term:to_binary(Message), Reason);
to_json(Reason='internal_error', _, _Cause) ->
lager:error("internal error: ~p", [_Cause]),
build_error(500, Reason, 'internal_error', 'undefined');
to_json(Reason, _, Cause) ->
?LOG_ERROR("funky 500 error: ~p/~p", [Reason, Cause]),
build_error(500, 'unspecified_fault', Reason, Cause).
Internal functions
-spec build_error(integer(), atom(), binatom_reason(), binatom_reason()) ->
error().
build_error(Code, Error, Message, Cause) ->
kz_json:from_list(
[{?CODE, Code}]
++ [{K, kz_term:to_binary(V)}
|| {K, V} <- [{?ERROR, Error}
,{?CAUSE, Cause}
,{?MESSAGE, Message}
],
V =/= 'undefined'
]
).
-spec code(error()) -> kz_term:api_integer().
code(JObj) ->
kz_json:get_value(?CODE, JObj).
-spec error(error()) -> kz_term:api_binary().
error(JObj) ->
kz_json:get_value(?ERROR, JObj).
-spec cause(error()) -> kz_term:api_binary().
cause(JObj) ->
kz_json:get_value(?CAUSE, JObj).
-spec message(error()) -> kz_term:api_binary().
message(JObj) ->
kz_json:get_value(?MESSAGE, JObj).
|
f0eb50ea25ef4ae406fc3d635164a4111eec23bf1e85c5af744154cf2e504fd7 | gergoerdi/tandoori | wildcard.hs | foo _ = []
| null | https://raw.githubusercontent.com/gergoerdi/tandoori/515142ce76b96efa75d7044c9077d85394585556/input/wildcard.hs | haskell | foo _ = []
|
|
20e884b8bf80d7118a37baaedbd39c34633d586319647823a09615119d2edcff | stevenvar/OMicroB | device_config.ml | (******************************************************************************)
let default_arm_cxx_options = [ "-mthumb";
"-g"; "-fno-exceptions"; "-fno-unwind-tables";
"-O2"; "-g"; "-Wall"; "-Wl,-Os";
"-fdata-sections"; "-ffunction-sections";
"-O"; "-g"; "-Wall"; "-Wl,-Os";
"-Wl,--gc-sections" ]
let compile_ml_to_byte ~ppx_options ~mlopts ~cxxopts ~local ~trace ~verbose
inputs output =
let libdir = libdir local in
let vars = [ ("CAMLLIB", libdir) ] in
let cmd = [ Config.ocamlc ] @ default_ocamlc_options @ ppx_options @ [ "-custom" ] @ mlopts in
let cmd = if trace > 0 then cmd @ [ "-ccopt"; "-DDEBUG=" ^ string_of_int trace ] else cmd in
let cmd = cmd @ List.flatten (List.map (fun cxxopt -> [ "-ccopt"; cxxopt ]) cxxopts) in
let cmd = cmd @ [ "-I"; Filename.concat libdir "targets/microbit";
Filename.concat libdir "targets/microbit/microbit.cma";
"-open"; "Microbit" ] in
let cmd = cmd @ inputs @ [ "-o"; output ] in
run ~vars ~verbose cmd
let compile_c_to_hex ~cpu ~linkscript ~startup ~microbian ~local ~trace:_ ~verbose input output =
let includedir = includedir local in
let microbitdir =
if local then Filename.concat Config.builddir "src/byterun/microbit"
else Filename.concat Config.includedir "microbit" in
let arm_o_file = (Filename.remove_extension input)^".arm_o" in
let arm_elf_file = (Filename.remove_extension input)^".arm_elf" in
(* let arm_map_file = (Filename.remove_extension input)^".map" in *)
(* Compile a .c into a .arm_o *)
let conc_microbit s = Filename.concat microbitdir s in
let cmd = [ Config.arm_cxx ] @ [ "-mcpu="^cpu ] @ default_arm_cxx_options in
let cmd = cmd @ [ "-D__MICROBIT__" ] in
let cmd = cmd @ [ "-I"; Filename.concat includedir "microbit" ] in
let cmd = cmd @ [ "-o"; arm_o_file ] @ [ "-c"; input ] in
Printf.printf "################## Compile a .c into a .arm_o\n";
run ~verbose cmd;
Printf.printf "################## Compiled a .c into a .arm_o\n";
(* Compile a .arm_o into a .arm_elf *)
let cmd = [ Config.arm_cxx ] @ [ "-mcpu="^cpu ] @ default_arm_cxx_options in
let cmd = cmd @ [ "-specs=nosys.specs" ] in
let cmd = cmd @ [ "-D__MICROBIT__" ] in
let cmd = cmd @ [ "-T"; conc_microbit linkscript; "-nostdlib" ] in
let cmd = cmd @ [ arm_o_file;
conc_microbit startup;
conc_microbit microbian] in
let cmd = cmd @ [ "-lm"; "-lc"; "-lgcc"; "-lnosys" ] in
let cmd = cmd @ [ "-o" ; arm_elf_file ] in
List.iter (Printf.printf "%s ") cmd;
run ~verbose cmd;
Printf.printf "################## Compiled a .arm_o into a .arm_elf\n";
Compile a .arm_elf into a .hex
let cmd = [ Config.arm_objcopy; "-O"; "ihex"; arm_elf_file; output ] in
run ~verbose cmd
module MicroBitConfig : DEVICECONFIG = struct
let compile_ml_to_byte = compile_ml_to_byte
let compile_c_to_hex = compile_c_to_hex
~cpu:"cortex-m0" ~linkscript:"nRF51822.ld" ~startup:"startup1.o" ~microbian:"microbian1.a"
let simul_flag = "__SIMUL_MICROBIT_1__"
let flash ~sudo:_ ~verbose:_ hexfile =
failwith
(Printf.sprintf
"To flash a microbit, simply copy %s to your microbit device"
hexfile)
end
module MicroBit2Config : DEVICECONFIG = struct
let compile_ml_to_byte = compile_ml_to_byte
let simul_flag = "__SIMUL_MICROBIT_2__"
let compile_c_to_hex = compile_c_to_hex
~cpu:"cortex-m4" ~linkscript:"nRF52833.ld" ~startup:"startup2.o" ~microbian:"microbian2.a"
let flash ~sudo:_ ~verbose:_ hexfile =
failwith
(Printf.sprintf
"To flash a microbit, simply copy %s to your microbit device"
hexfile)
end
(******************************************************************************)
(** Choose correct config according to name *)
let get_config name = match name with
| "microbit" | "microbit1" -> (module MicroBitConfig : DEVICECONFIG)
| "microbit2" -> (module MicroBit2Config : DEVICECONFIG)
| _ -> get_config name
(** Get the names of all configs *)
let all_config_names () = [
"microbit1"; "microbit2"
]@(all_config_names ())
(******************************************************************************)
(******************************************************************************)
(******************************************************************************)
| null | https://raw.githubusercontent.com/stevenvar/OMicroB/99a2e781f9511137090aaba3c09e2e920c0dbc77/targets/microbit/device_config.ml | ocaml | ****************************************************************************
let arm_map_file = (Filename.remove_extension input)^".map" in
Compile a .c into a .arm_o
Compile a .arm_o into a .arm_elf
****************************************************************************
* Choose correct config according to name
* Get the names of all configs
****************************************************************************
****************************************************************************
**************************************************************************** |
let default_arm_cxx_options = [ "-mthumb";
"-g"; "-fno-exceptions"; "-fno-unwind-tables";
"-O2"; "-g"; "-Wall"; "-Wl,-Os";
"-fdata-sections"; "-ffunction-sections";
"-O"; "-g"; "-Wall"; "-Wl,-Os";
"-Wl,--gc-sections" ]
let compile_ml_to_byte ~ppx_options ~mlopts ~cxxopts ~local ~trace ~verbose
inputs output =
let libdir = libdir local in
let vars = [ ("CAMLLIB", libdir) ] in
let cmd = [ Config.ocamlc ] @ default_ocamlc_options @ ppx_options @ [ "-custom" ] @ mlopts in
let cmd = if trace > 0 then cmd @ [ "-ccopt"; "-DDEBUG=" ^ string_of_int trace ] else cmd in
let cmd = cmd @ List.flatten (List.map (fun cxxopt -> [ "-ccopt"; cxxopt ]) cxxopts) in
let cmd = cmd @ [ "-I"; Filename.concat libdir "targets/microbit";
Filename.concat libdir "targets/microbit/microbit.cma";
"-open"; "Microbit" ] in
let cmd = cmd @ inputs @ [ "-o"; output ] in
run ~vars ~verbose cmd
let compile_c_to_hex ~cpu ~linkscript ~startup ~microbian ~local ~trace:_ ~verbose input output =
let includedir = includedir local in
let microbitdir =
if local then Filename.concat Config.builddir "src/byterun/microbit"
else Filename.concat Config.includedir "microbit" in
let arm_o_file = (Filename.remove_extension input)^".arm_o" in
let arm_elf_file = (Filename.remove_extension input)^".arm_elf" in
let conc_microbit s = Filename.concat microbitdir s in
let cmd = [ Config.arm_cxx ] @ [ "-mcpu="^cpu ] @ default_arm_cxx_options in
let cmd = cmd @ [ "-D__MICROBIT__" ] in
let cmd = cmd @ [ "-I"; Filename.concat includedir "microbit" ] in
let cmd = cmd @ [ "-o"; arm_o_file ] @ [ "-c"; input ] in
Printf.printf "################## Compile a .c into a .arm_o\n";
run ~verbose cmd;
Printf.printf "################## Compiled a .c into a .arm_o\n";
let cmd = [ Config.arm_cxx ] @ [ "-mcpu="^cpu ] @ default_arm_cxx_options in
let cmd = cmd @ [ "-specs=nosys.specs" ] in
let cmd = cmd @ [ "-D__MICROBIT__" ] in
let cmd = cmd @ [ "-T"; conc_microbit linkscript; "-nostdlib" ] in
let cmd = cmd @ [ arm_o_file;
conc_microbit startup;
conc_microbit microbian] in
let cmd = cmd @ [ "-lm"; "-lc"; "-lgcc"; "-lnosys" ] in
let cmd = cmd @ [ "-o" ; arm_elf_file ] in
List.iter (Printf.printf "%s ") cmd;
run ~verbose cmd;
Printf.printf "################## Compiled a .arm_o into a .arm_elf\n";
Compile a .arm_elf into a .hex
let cmd = [ Config.arm_objcopy; "-O"; "ihex"; arm_elf_file; output ] in
run ~verbose cmd
module MicroBitConfig : DEVICECONFIG = struct
let compile_ml_to_byte = compile_ml_to_byte
let compile_c_to_hex = compile_c_to_hex
~cpu:"cortex-m0" ~linkscript:"nRF51822.ld" ~startup:"startup1.o" ~microbian:"microbian1.a"
let simul_flag = "__SIMUL_MICROBIT_1__"
let flash ~sudo:_ ~verbose:_ hexfile =
failwith
(Printf.sprintf
"To flash a microbit, simply copy %s to your microbit device"
hexfile)
end
module MicroBit2Config : DEVICECONFIG = struct
let compile_ml_to_byte = compile_ml_to_byte
let simul_flag = "__SIMUL_MICROBIT_2__"
let compile_c_to_hex = compile_c_to_hex
~cpu:"cortex-m4" ~linkscript:"nRF52833.ld" ~startup:"startup2.o" ~microbian:"microbian2.a"
let flash ~sudo:_ ~verbose:_ hexfile =
failwith
(Printf.sprintf
"To flash a microbit, simply copy %s to your microbit device"
hexfile)
end
let get_config name = match name with
| "microbit" | "microbit1" -> (module MicroBitConfig : DEVICECONFIG)
| "microbit2" -> (module MicroBit2Config : DEVICECONFIG)
| _ -> get_config name
let all_config_names () = [
"microbit1"; "microbit2"
]@(all_config_names ())
|
4fe94b4797fc737f046a38d0f3110de580423387b68ce9004d9b7ae16c47a52b | ermine/xmpp | punycode_test.ml | open Printf
let _ =
let str = Sys.argv.(1) in
printf "original: [%s]\n" str;
let decoded = Punycode.decode str in
printf "decoded: [%s]\n" (UTF8.encode decoded);
let encoded = Punycode.encode (Array.of_list decoded) in
printf "encoded: [%s]\n" encoded;
if str <> encoded then
failwith("FAIL")
| null | https://raw.githubusercontent.com/ermine/xmpp/85f31c5aaac6e3b5042694ff33ac8afdfb845c06/tests/punycode_test.ml | ocaml | open Printf
let _ =
let str = Sys.argv.(1) in
printf "original: [%s]\n" str;
let decoded = Punycode.decode str in
printf "decoded: [%s]\n" (UTF8.encode decoded);
let encoded = Punycode.encode (Array.of_list decoded) in
printf "encoded: [%s]\n" encoded;
if str <> encoded then
failwith("FAIL")
|
|
5a1ed4e81fa41a15c4604c0895b125554bd7145d216d1f1a0a878872f77b5fc8 | chrisdone/hulk | Server.hs | {-# OPTIONS -Wall -fno-warn-missing-signatures #-}
module Main where
import Network
import System.Console.CmdArgs
import System.Posix
import Hulk.Config (getConfig)
import Hulk.Options (options,optionsConf)
import Hulk.Server (start)
import Hulk.Types ()
main :: IO ()
main = withSocketsDo $ do
_ <- installHandler sigPIPE Ignore Nothing
cmdArgs options >>= getConfig . optionsConf >>= start
| null | https://raw.githubusercontent.com/chrisdone/hulk/8a7c2cf975a2d40778a26750591c8c69d3ea4ae7/src/main/Server.hs | haskell | # OPTIONS -Wall -fno-warn-missing-signatures # | module Main where
import Network
import System.Console.CmdArgs
import System.Posix
import Hulk.Config (getConfig)
import Hulk.Options (options,optionsConf)
import Hulk.Server (start)
import Hulk.Types ()
main :: IO ()
main = withSocketsDo $ do
_ <- installHandler sigPIPE Ignore Nothing
cmdArgs options >>= getConfig . optionsConf >>= start
|
ebaa6615c38df7c3607c4127a0095bca552cbc97e642b09b3e553cfb36736aeb | MaskRay/OJHaskell | 104.hs | import Data.Int
import Data.List
import Data.Numbers.Fibonacci
modulus :: Integral a => a
modulus = 10^9
newtype ModP = ModP Int64 deriving (Show, Eq)
instance Num ModP where
ModP a + ModP b = ModP $ (a + b) `rem` modulus
ModP a - ModP b = ModP $ (a - b) `rem` modulus
ModP a * ModP b = ModP $ (a * b) `rem` modulus
fromInteger = ModP . fromIntegral . (`rem` modulus)
main = print . head $ dropWhile (\x -> not (checkLast (fib x) && checkFirst (fib x))) [0..]
where
checkLast (ModP x) = (==['1'..'9']) . sort . show $ x
checkFirst = (==['1'..'9']) . sort . take 9 . show
| null | https://raw.githubusercontent.com/MaskRay/OJHaskell/ba24050b2480619f10daa7d37fca558182ba006c/Project%20Euler/104.hs | haskell | import Data.Int
import Data.List
import Data.Numbers.Fibonacci
modulus :: Integral a => a
modulus = 10^9
newtype ModP = ModP Int64 deriving (Show, Eq)
instance Num ModP where
ModP a + ModP b = ModP $ (a + b) `rem` modulus
ModP a - ModP b = ModP $ (a - b) `rem` modulus
ModP a * ModP b = ModP $ (a * b) `rem` modulus
fromInteger = ModP . fromIntegral . (`rem` modulus)
main = print . head $ dropWhile (\x -> not (checkLast (fib x) && checkFirst (fib x))) [0..]
where
checkLast (ModP x) = (==['1'..'9']) . sort . show $ x
checkFirst = (==['1'..'9']) . sort . take 9 . show
|
|
abf1e20f5f195a7282fdd15dfb87e8ac5fd80eb4b4e6a868bc9217fe7feb96c9 | nyu-acsys/drift | Ackermann01.ml | let rec bot _ = bot ()
let fail _ = assert false
let rec ack_without_checking_1087 x_DO_NOT_CARE_1093 x_DO_NOT_CARE_1094 x_DO_NOT_CARE_1095 m_1031 set_flag_ack_1075 s_ack_m_1070 s_ack_n_1071 n_1032 =
let set_flag_ack_1075 = true
in
let s_ack_n_1071 = n_1032
in
let s_ack_m_1070 = m_1031
in
if m_1031 = 0 then
n_1032 + 1
else
if n_1032 = 0 then
ack_without_checking_1087 set_flag_ack_1075 s_ack_m_1070
s_ack_n_1071 (m_1031 - 1) set_flag_ack_1075 s_ack_m_1070
s_ack_n_1071 1
else
ack_without_checking_1087 set_flag_ack_1075 s_ack_m_1070
s_ack_n_1071 (m_1031 - 1) set_flag_ack_1075 s_ack_m_1070
s_ack_n_1071
(ack_without_checking_1087 set_flag_ack_1075 s_ack_m_1070
s_ack_n_1071 m_1031 set_flag_ack_1075 s_ack_m_1070 s_ack_n_1071
(n_1032 - 1))
let rec ack_1030 x_DO_NOT_CARE_1089 x_DO_NOT_CARE_1090 x_DO_NOT_CARE_1091 m_1031 prev_set_flag_ack_1074 s_prev_ack_m_1072 s_prev_ack_n_1073 n_1032 =
let u =
if prev_set_flag_ack_1074 then
if ((0 * 1) + (0 * s_prev_ack_m_1072)) + (1 * s_prev_ack_n_1073)
> ((0 * 1) + (0 * m_1031)) + (1 * n_1032) &&
((0 * 1) + (0 * m_1031)) + (1 * n_1032) >= 0 then
()
else
let u_2812 = fail ()
in
bot()
else
()
in
ack_without_checking_1087 x_DO_NOT_CARE_1089 x_DO_NOT_CARE_1090
x_DO_NOT_CARE_1091 m_1031 prev_set_flag_ack_1074
s_prev_ack_m_1072 s_prev_ack_n_1073 n_1032
let main (m_1035:int(*-:{v:Int | true}*)) (n_1036:int(*-:{v:Int | true}*)) =
let set_flag_ack_1075 = false in
let s_ack_m_1070 = 0 in
let s_ack_n_1071 = 0 in
if n_1036 > 0 && m_1035 > 0 then
ack_1030 set_flag_ack_1075 s_ack_m_1070 s_ack_n_1071 m_1035
set_flag_ack_1075 s_ack_m_1070 s_ack_n_1071 n_1036
else
0
| null | https://raw.githubusercontent.com/nyu-acsys/drift/51a3160d74b761626180da4f7dd0bb950cfe40c0/tests/benchmarks/r_type/termination/Ackermann01.ml | ocaml | -:{v:Int | true}
-:{v:Int | true} | let rec bot _ = bot ()
let fail _ = assert false
let rec ack_without_checking_1087 x_DO_NOT_CARE_1093 x_DO_NOT_CARE_1094 x_DO_NOT_CARE_1095 m_1031 set_flag_ack_1075 s_ack_m_1070 s_ack_n_1071 n_1032 =
let set_flag_ack_1075 = true
in
let s_ack_n_1071 = n_1032
in
let s_ack_m_1070 = m_1031
in
if m_1031 = 0 then
n_1032 + 1
else
if n_1032 = 0 then
ack_without_checking_1087 set_flag_ack_1075 s_ack_m_1070
s_ack_n_1071 (m_1031 - 1) set_flag_ack_1075 s_ack_m_1070
s_ack_n_1071 1
else
ack_without_checking_1087 set_flag_ack_1075 s_ack_m_1070
s_ack_n_1071 (m_1031 - 1) set_flag_ack_1075 s_ack_m_1070
s_ack_n_1071
(ack_without_checking_1087 set_flag_ack_1075 s_ack_m_1070
s_ack_n_1071 m_1031 set_flag_ack_1075 s_ack_m_1070 s_ack_n_1071
(n_1032 - 1))
let rec ack_1030 x_DO_NOT_CARE_1089 x_DO_NOT_CARE_1090 x_DO_NOT_CARE_1091 m_1031 prev_set_flag_ack_1074 s_prev_ack_m_1072 s_prev_ack_n_1073 n_1032 =
let u =
if prev_set_flag_ack_1074 then
if ((0 * 1) + (0 * s_prev_ack_m_1072)) + (1 * s_prev_ack_n_1073)
> ((0 * 1) + (0 * m_1031)) + (1 * n_1032) &&
((0 * 1) + (0 * m_1031)) + (1 * n_1032) >= 0 then
()
else
let u_2812 = fail ()
in
bot()
else
()
in
ack_without_checking_1087 x_DO_NOT_CARE_1089 x_DO_NOT_CARE_1090
x_DO_NOT_CARE_1091 m_1031 prev_set_flag_ack_1074
s_prev_ack_m_1072 s_prev_ack_n_1073 n_1032
let set_flag_ack_1075 = false in
let s_ack_m_1070 = 0 in
let s_ack_n_1071 = 0 in
if n_1036 > 0 && m_1035 > 0 then
ack_1030 set_flag_ack_1075 s_ack_m_1070 s_ack_n_1071 m_1035
set_flag_ack_1075 s_ack_m_1070 s_ack_n_1071 n_1036
else
0
|
2b105b95fad1b084eac7d185289d74ee4f17fed369faa4d357a9644b4f2e00aa | haskell/haskell-language-server | Fourmolu.hs | {-# LANGUAGE DataKinds #-}
# LANGUAGE DisambiguateRecordFields #
# LANGUAGE LambdaCase #
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TypeApplications #
module Ide.Plugin.Fourmolu (
descriptor,
provider,
LogEvent,
) where
import Control.Exception (IOException, try)
import Control.Lens ((^.))
import Control.Monad
import Control.Monad.IO.Class
import Data.Bifunctor (bimap, first)
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import Development.IDE hiding (pluginHandlers)
import Development.IDE.GHC.Compat as Compat hiding (Cpp, Warning,
hang, vcat)
import qualified Development.IDE.GHC.Compat.Util as S
import GHC.LanguageExtensions.Type (Extension (Cpp))
import Ide.Plugin.Fourmolu.Shim
import Ide.Plugin.Properties
import Ide.PluginUtils (makeDiffTextEdit)
import Ide.Types
import Language.LSP.Server hiding (defaultConfig)
import Language.LSP.Types hiding (line)
import Language.LSP.Types.Lens (HasTabSize (tabSize))
import Ormolu
import System.Exit
import System.FilePath
import System.Process.Run (cwd, proc)
import System.Process.Text (readCreateProcessWithExitCode)
import Text.Read (readMaybe)
descriptor :: Recorder (WithPriority LogEvent) -> PluginId -> PluginDescriptor IdeState
descriptor recorder plId =
(defaultPluginDescriptor plId)
{ pluginHandlers = mkFormattingHandlers $ provider recorder plId
, pluginConfigDescriptor = defaultConfigDescriptor{configCustomConfig = mkCustomConfig properties}
}
properties :: Properties '[ 'PropertyKey "external" 'TBoolean]
properties =
emptyProperties
& defineBooleanProperty
#external
"Call out to an external \"fourmolu\" executable, rather than using the bundled library"
False
provider :: Recorder (WithPriority LogEvent) -> PluginId -> FormattingHandler IdeState
provider recorder plId ideState typ contents fp fo = withIndefiniteProgress title Cancellable $ do
fileOpts <-
maybe [] (convertDynFlags . hsc_dflags . hscEnv)
<$> liftIO (runAction "Fourmolu" ideState $ use GhcSession fp)
useCLI <- liftIO $ runAction "Fourmolu" ideState $ usePropertyAction #external plId properties
if useCLI
then liftIO
. fmap (join . first (mkError . show))
. try @IOException
$ do
check Fourmolu version so that we know which flags to use
(exitCode, out, _err) <- readCreateProcessWithExitCode ( proc "fourmolu" ["-v"] ) ""
let version = do
guard $ exitCode == ExitSuccess
"fourmolu" : v : _ <- pure $ T.words out
traverse (readMaybe @Int . T.unpack) $ T.splitOn "." v
case version of
Just v -> pure CLIVersionInfo
{ noCabal = v >= [0, 7]
}
Nothing -> do
logWith recorder Warning $ NoVersion out
pure CLIVersionInfo
{ noCabal = True
}
run Fourmolu
readCreateProcessWithExitCode
( proc "fourmolu" $
map ("-o" <>) fileOpts
<> mwhen noCabal ["--no-cabal"]
<> catMaybes
[ ("--start-line=" <>) . show <$> regionStartLine region
, ("--end-line=" <>) . show <$> regionEndLine region
]
){cwd = Just $ takeDirectory fp'}
contents
case exitCode of
ExitSuccess -> do
logWith recorder Debug $ StdErr err
pure . Right $ makeDiffTextEdit contents out
ExitFailure n -> do
logWith recorder Info $ StdErr err
pure . Left . responseError $ "Fourmolu failed with exit code " <> T.pack (show n)
else do
let format fourmoluConfig =
bimap (mkError . show) (makeDiffTextEdit contents)
<$> try @OrmoluException (ormolu config fp' (T.unpack contents))
where
printerOpts = cfgFilePrinterOpts fourmoluConfig
config =
addFixityOverrides (cfgFileFixities fourmoluConfig) $
defaultConfig
{ cfgDynOptions = map DynOption fileOpts
, cfgRegion = region
, cfgDebug = False
, cfgPrinterOpts =
fillMissingPrinterOpts
(printerOpts <> lspPrinterOpts)
defaultPrinterOpts
}
in liftIO (loadConfigFile fp') >>= \case
ConfigLoaded file opts -> liftIO $ do
logWith recorder Info $ ConfigPath file
format opts
ConfigNotFound searchDirs -> liftIO $ do
logWith recorder Info $ NoConfigPath searchDirs
format emptyConfig
ConfigParseError f err -> do
sendNotification SWindowShowMessage $
ShowMessageParams
{ _xtype = MtError
, _message = errorMessage
}
return . Left $ responseError errorMessage
where
errorMessage = "Failed to load " <> T.pack f <> ": " <> T.pack (showParseError err)
where
fp' = fromNormalizedFilePath fp
title = "Formatting " <> T.pack (takeFileName fp')
mkError = responseError . ("Fourmolu: " <>) . T.pack
lspPrinterOpts = mempty{poIndentation = Just $ fromIntegral $ fo ^. tabSize}
region = case typ of
FormatText ->
RegionIndices Nothing Nothing
FormatRange (Range (Position sl _) (Position el _)) ->
RegionIndices (Just $ fromIntegral $ sl + 1) (Just $ fromIntegral $ el + 1)
data LogEvent
= NoVersion Text
| ConfigPath FilePath
| NoConfigPath [FilePath]
| StdErr Text
deriving (Show)
instance Pretty LogEvent where
pretty = \case
NoVersion t -> "Couldn't get Fourmolu version:" <> line <> indent 2 (pretty t)
ConfigPath p -> "Loaded Fourmolu config from: " <> pretty (show p)
NoConfigPath ps -> "No " <> pretty configFileName <> " found in any of:"
<> line <> indent 2 (vsep (map (pretty . show) ps))
StdErr t -> "Fourmolu stderr:" <> line <> indent 2 (pretty t)
convertDynFlags :: DynFlags -> [String]
convertDynFlags df =
let pp = ["-pgmF=" <> p | not (null p)]
p = sPgm_F $ Compat.settings df
pm = map (("-fplugin=" <>) . moduleNameString) $ pluginModNames df
ex = map showExtension $ S.toList $ extensionFlags df
showExtension = \case
Cpp -> "-XCPP"
x -> "-X" ++ show x
in pp <> pm <> ex
newtype CLIVersionInfo = CLIVersionInfo
{ noCabal :: Bool
}
mwhen :: Monoid a => Bool -> a -> a
mwhen b x = if b then x else mempty
| null | https://raw.githubusercontent.com/haskell/haskell-language-server/6f5a73507f8d9266a486feaf8695c052362b9b95/plugins/hls-fourmolu-plugin/src/Ide/Plugin/Fourmolu.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE NamedFieldPuns #
# LANGUAGE OverloadedLabels #
# LANGUAGE OverloadedStrings # | # LANGUAGE DisambiguateRecordFields #
# LANGUAGE LambdaCase #
# LANGUAGE TypeApplications #
module Ide.Plugin.Fourmolu (
descriptor,
provider,
LogEvent,
) where
import Control.Exception (IOException, try)
import Control.Lens ((^.))
import Control.Monad
import Control.Monad.IO.Class
import Data.Bifunctor (bimap, first)
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import Development.IDE hiding (pluginHandlers)
import Development.IDE.GHC.Compat as Compat hiding (Cpp, Warning,
hang, vcat)
import qualified Development.IDE.GHC.Compat.Util as S
import GHC.LanguageExtensions.Type (Extension (Cpp))
import Ide.Plugin.Fourmolu.Shim
import Ide.Plugin.Properties
import Ide.PluginUtils (makeDiffTextEdit)
import Ide.Types
import Language.LSP.Server hiding (defaultConfig)
import Language.LSP.Types hiding (line)
import Language.LSP.Types.Lens (HasTabSize (tabSize))
import Ormolu
import System.Exit
import System.FilePath
import System.Process.Run (cwd, proc)
import System.Process.Text (readCreateProcessWithExitCode)
import Text.Read (readMaybe)
descriptor :: Recorder (WithPriority LogEvent) -> PluginId -> PluginDescriptor IdeState
descriptor recorder plId =
(defaultPluginDescriptor plId)
{ pluginHandlers = mkFormattingHandlers $ provider recorder plId
, pluginConfigDescriptor = defaultConfigDescriptor{configCustomConfig = mkCustomConfig properties}
}
properties :: Properties '[ 'PropertyKey "external" 'TBoolean]
properties =
emptyProperties
& defineBooleanProperty
#external
"Call out to an external \"fourmolu\" executable, rather than using the bundled library"
False
provider :: Recorder (WithPriority LogEvent) -> PluginId -> FormattingHandler IdeState
provider recorder plId ideState typ contents fp fo = withIndefiniteProgress title Cancellable $ do
fileOpts <-
maybe [] (convertDynFlags . hsc_dflags . hscEnv)
<$> liftIO (runAction "Fourmolu" ideState $ use GhcSession fp)
useCLI <- liftIO $ runAction "Fourmolu" ideState $ usePropertyAction #external plId properties
if useCLI
then liftIO
. fmap (join . first (mkError . show))
. try @IOException
$ do
check Fourmolu version so that we know which flags to use
(exitCode, out, _err) <- readCreateProcessWithExitCode ( proc "fourmolu" ["-v"] ) ""
let version = do
guard $ exitCode == ExitSuccess
"fourmolu" : v : _ <- pure $ T.words out
traverse (readMaybe @Int . T.unpack) $ T.splitOn "." v
case version of
Just v -> pure CLIVersionInfo
{ noCabal = v >= [0, 7]
}
Nothing -> do
logWith recorder Warning $ NoVersion out
pure CLIVersionInfo
{ noCabal = True
}
run Fourmolu
readCreateProcessWithExitCode
( proc "fourmolu" $
map ("-o" <>) fileOpts
<> mwhen noCabal ["--no-cabal"]
<> catMaybes
[ ("--start-line=" <>) . show <$> regionStartLine region
, ("--end-line=" <>) . show <$> regionEndLine region
]
){cwd = Just $ takeDirectory fp'}
contents
case exitCode of
ExitSuccess -> do
logWith recorder Debug $ StdErr err
pure . Right $ makeDiffTextEdit contents out
ExitFailure n -> do
logWith recorder Info $ StdErr err
pure . Left . responseError $ "Fourmolu failed with exit code " <> T.pack (show n)
else do
let format fourmoluConfig =
bimap (mkError . show) (makeDiffTextEdit contents)
<$> try @OrmoluException (ormolu config fp' (T.unpack contents))
where
printerOpts = cfgFilePrinterOpts fourmoluConfig
config =
addFixityOverrides (cfgFileFixities fourmoluConfig) $
defaultConfig
{ cfgDynOptions = map DynOption fileOpts
, cfgRegion = region
, cfgDebug = False
, cfgPrinterOpts =
fillMissingPrinterOpts
(printerOpts <> lspPrinterOpts)
defaultPrinterOpts
}
in liftIO (loadConfigFile fp') >>= \case
ConfigLoaded file opts -> liftIO $ do
logWith recorder Info $ ConfigPath file
format opts
ConfigNotFound searchDirs -> liftIO $ do
logWith recorder Info $ NoConfigPath searchDirs
format emptyConfig
ConfigParseError f err -> do
sendNotification SWindowShowMessage $
ShowMessageParams
{ _xtype = MtError
, _message = errorMessage
}
return . Left $ responseError errorMessage
where
errorMessage = "Failed to load " <> T.pack f <> ": " <> T.pack (showParseError err)
where
fp' = fromNormalizedFilePath fp
title = "Formatting " <> T.pack (takeFileName fp')
mkError = responseError . ("Fourmolu: " <>) . T.pack
lspPrinterOpts = mempty{poIndentation = Just $ fromIntegral $ fo ^. tabSize}
region = case typ of
FormatText ->
RegionIndices Nothing Nothing
FormatRange (Range (Position sl _) (Position el _)) ->
RegionIndices (Just $ fromIntegral $ sl + 1) (Just $ fromIntegral $ el + 1)
data LogEvent
= NoVersion Text
| ConfigPath FilePath
| NoConfigPath [FilePath]
| StdErr Text
deriving (Show)
instance Pretty LogEvent where
pretty = \case
NoVersion t -> "Couldn't get Fourmolu version:" <> line <> indent 2 (pretty t)
ConfigPath p -> "Loaded Fourmolu config from: " <> pretty (show p)
NoConfigPath ps -> "No " <> pretty configFileName <> " found in any of:"
<> line <> indent 2 (vsep (map (pretty . show) ps))
StdErr t -> "Fourmolu stderr:" <> line <> indent 2 (pretty t)
convertDynFlags :: DynFlags -> [String]
convertDynFlags df =
let pp = ["-pgmF=" <> p | not (null p)]
p = sPgm_F $ Compat.settings df
pm = map (("-fplugin=" <>) . moduleNameString) $ pluginModNames df
ex = map showExtension $ S.toList $ extensionFlags df
showExtension = \case
Cpp -> "-XCPP"
x -> "-X" ++ show x
in pp <> pm <> ex
newtype CLIVersionInfo = CLIVersionInfo
{ noCabal :: Bool
}
mwhen :: Monoid a => Bool -> a -> a
mwhen b x = if b then x else mempty
|
414bc283fc1d90e33c79b68769adbb091d3534b7c01da9c338f43c3385e3b756 | marcelosousa/llvmvf | Util.hs | -------------------------------------------------------------------------------
Module : Language . . Util
Copyright : ( c ) 2012
-------------------------------------------------------------------------------
module Language.LLVMIR.Extractor.Util where
import qualified LLVM.FFI.Core as FFI
import LLVM.Core hiding (Value)
import Foreign.C.Types
type Value = FFI.ValueRef
cUInt2Bool :: CUInt -> Bool
cUInt2Bool 0 = False
cUInt2Bool _ = True
cInt2Bool :: CInt -> Bool
cInt2Bool 0 = False
cInt2Bool _ = True
| null | https://raw.githubusercontent.com/marcelosousa/llvmvf/c314e43aa8bc8bb7fd9c83cebfbdcabee4ecfe1b/src/Language/LLVMIR/Extractor/Util.hs | haskell | -----------------------------------------------------------------------------
----------------------------------------------------------------------------- | Module : Language . . Util
Copyright : ( c ) 2012
module Language.LLVMIR.Extractor.Util where
import qualified LLVM.FFI.Core as FFI
import LLVM.Core hiding (Value)
import Foreign.C.Types
type Value = FFI.ValueRef
cUInt2Bool :: CUInt -> Bool
cUInt2Bool 0 = False
cUInt2Bool _ = True
cInt2Bool :: CInt -> Bool
cInt2Bool 0 = False
cInt2Bool _ = True
|
3cc136078aae5f6add8cf166a2d5304974777fd3b61f134c056501a3bf6e02d4 | ragkousism/Guix-on-Hurd | gperf.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2012 , 2013 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages gperf)
#:use-module (guix licenses)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu))
(define-public gperf
(package
(name "gperf")
(version "3.1")
(source
(origin
(method url-fetch)
(uri (string-append "mirror-"
version ".tar.gz"))
(sha256
(base32
"1qispg6i508rq8pkajh26cznwimbnj06wq9sd85vg95v8nwld1aq"))))
(build-system gnu-build-system)
(arguments '(#:parallel-tests? #f))
(home-page "/")
(synopsis "Perfect hash function generator")
(description
"gperf is a perfect hash function generator. For a given list of
strings, it produces a hash function and hash table in C or C++ code. That
the hash function is perfect means that no collisions can exist and that
look-ups can be made by single string comparisons.")
(license gpl3+)))
| null | https://raw.githubusercontent.com/ragkousism/Guix-on-Hurd/e951bb2c0c4961dc6ac2bda8f331b9c4cee0da95/gnu/packages/gperf.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
| Copyright © 2012 , 2013 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages gperf)
#:use-module (guix licenses)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu))
(define-public gperf
(package
(name "gperf")
(version "3.1")
(source
(origin
(method url-fetch)
(uri (string-append "mirror-"
version ".tar.gz"))
(sha256
(base32
"1qispg6i508rq8pkajh26cznwimbnj06wq9sd85vg95v8nwld1aq"))))
(build-system gnu-build-system)
(arguments '(#:parallel-tests? #f))
(home-page "/")
(synopsis "Perfect hash function generator")
(description
"gperf is a perfect hash function generator. For a given list of
strings, it produces a hash function and hash table in C or C++ code. That
the hash function is perfect means that no collisions can exist and that
look-ups can be made by single string comparisons.")
(license gpl3+)))
|
a1f4e2ccc0758cbd8cd5d910b3caaa4affbf5fedd84947bd445fd601986abb63 | processone/ejabberd | mod_pubsub_opt.erl | %% Generated automatically
%% DO NOT EDIT: run `make options` instead
-module(mod_pubsub_opt).
-export([access_createnode/1]).
-export([db_type/1]).
-export([default_node_config/1]).
-export([force_node_config/1]).
-export([host/1]).
-export([hosts/1]).
-export([ignore_pep_from_offline/1]).
-export([last_item_cache/1]).
-export([max_item_expire_node/1]).
-export([max_items_node/1]).
-export([max_nodes_discoitems/1]).
-export([max_subscriptions_node/1]).
-export([name/1]).
-export([nodetree/1]).
-export([pep_mapping/1]).
-export([plugins/1]).
-export([vcard/1]).
-spec access_createnode(gen_mod:opts() | global | binary()) -> 'all' | acl:acl().
access_createnode(Opts) when is_map(Opts) ->
gen_mod:get_opt(access_createnode, Opts);
access_createnode(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, access_createnode).
-spec db_type(gen_mod:opts() | global | binary()) -> atom().
db_type(Opts) when is_map(Opts) ->
gen_mod:get_opt(db_type, Opts);
db_type(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, db_type).
-spec default_node_config(gen_mod:opts() | global | binary()) -> [{atom(),atom() | integer()}].
default_node_config(Opts) when is_map(Opts) ->
gen_mod:get_opt(default_node_config, Opts);
default_node_config(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, default_node_config).
-spec force_node_config(gen_mod:opts() | global | binary()) -> [{re:mp(),[{atom(),atom() | integer()}]}].
force_node_config(Opts) when is_map(Opts) ->
gen_mod:get_opt(force_node_config, Opts);
force_node_config(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, force_node_config).
-spec host(gen_mod:opts() | global | binary()) -> binary().
host(Opts) when is_map(Opts) ->
gen_mod:get_opt(host, Opts);
host(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, host).
-spec hosts(gen_mod:opts() | global | binary()) -> [binary()].
hosts(Opts) when is_map(Opts) ->
gen_mod:get_opt(hosts, Opts);
hosts(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, hosts).
-spec ignore_pep_from_offline(gen_mod:opts() | global | binary()) -> boolean().
ignore_pep_from_offline(Opts) when is_map(Opts) ->
gen_mod:get_opt(ignore_pep_from_offline, Opts);
ignore_pep_from_offline(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, ignore_pep_from_offline).
-spec last_item_cache(gen_mod:opts() | global | binary()) -> boolean().
last_item_cache(Opts) when is_map(Opts) ->
gen_mod:get_opt(last_item_cache, Opts);
last_item_cache(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, last_item_cache).
-spec max_item_expire_node(gen_mod:opts() | global | binary()) -> 'infinity' | pos_integer().
max_item_expire_node(Opts) when is_map(Opts) ->
gen_mod:get_opt(max_item_expire_node, Opts);
max_item_expire_node(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, max_item_expire_node).
-spec max_items_node(gen_mod:opts() | global | binary()) -> 'unlimited' | non_neg_integer().
max_items_node(Opts) when is_map(Opts) ->
gen_mod:get_opt(max_items_node, Opts);
max_items_node(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, max_items_node).
-spec max_nodes_discoitems(gen_mod:opts() | global | binary()) -> 'infinity' | non_neg_integer().
max_nodes_discoitems(Opts) when is_map(Opts) ->
gen_mod:get_opt(max_nodes_discoitems, Opts);
max_nodes_discoitems(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, max_nodes_discoitems).
-spec max_subscriptions_node(gen_mod:opts() | global | binary()) -> 'undefined' | non_neg_integer().
max_subscriptions_node(Opts) when is_map(Opts) ->
gen_mod:get_opt(max_subscriptions_node, Opts);
max_subscriptions_node(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, max_subscriptions_node).
-spec name(gen_mod:opts() | global | binary()) -> binary().
name(Opts) when is_map(Opts) ->
gen_mod:get_opt(name, Opts);
name(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, name).
-spec nodetree(gen_mod:opts() | global | binary()) -> binary().
nodetree(Opts) when is_map(Opts) ->
gen_mod:get_opt(nodetree, Opts);
nodetree(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, nodetree).
-spec pep_mapping(gen_mod:opts() | global | binary()) -> [{binary(),binary()}].
pep_mapping(Opts) when is_map(Opts) ->
gen_mod:get_opt(pep_mapping, Opts);
pep_mapping(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, pep_mapping).
-spec plugins(gen_mod:opts() | global | binary()) -> [binary()].
plugins(Opts) when is_map(Opts) ->
gen_mod:get_opt(plugins, Opts);
plugins(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, plugins).
-spec vcard(gen_mod:opts() | global | binary()) -> 'undefined' | tuple().
vcard(Opts) when is_map(Opts) ->
gen_mod:get_opt(vcard, Opts);
vcard(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, vcard).
| null | https://raw.githubusercontent.com/processone/ejabberd/b173ec0a780b1cd6d8da1f25f58c1880fbae293a/src/mod_pubsub_opt.erl | erlang | Generated automatically
DO NOT EDIT: run `make options` instead |
-module(mod_pubsub_opt).
-export([access_createnode/1]).
-export([db_type/1]).
-export([default_node_config/1]).
-export([force_node_config/1]).
-export([host/1]).
-export([hosts/1]).
-export([ignore_pep_from_offline/1]).
-export([last_item_cache/1]).
-export([max_item_expire_node/1]).
-export([max_items_node/1]).
-export([max_nodes_discoitems/1]).
-export([max_subscriptions_node/1]).
-export([name/1]).
-export([nodetree/1]).
-export([pep_mapping/1]).
-export([plugins/1]).
-export([vcard/1]).
-spec access_createnode(gen_mod:opts() | global | binary()) -> 'all' | acl:acl().
access_createnode(Opts) when is_map(Opts) ->
gen_mod:get_opt(access_createnode, Opts);
access_createnode(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, access_createnode).
-spec db_type(gen_mod:opts() | global | binary()) -> atom().
db_type(Opts) when is_map(Opts) ->
gen_mod:get_opt(db_type, Opts);
db_type(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, db_type).
-spec default_node_config(gen_mod:opts() | global | binary()) -> [{atom(),atom() | integer()}].
default_node_config(Opts) when is_map(Opts) ->
gen_mod:get_opt(default_node_config, Opts);
default_node_config(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, default_node_config).
-spec force_node_config(gen_mod:opts() | global | binary()) -> [{re:mp(),[{atom(),atom() | integer()}]}].
force_node_config(Opts) when is_map(Opts) ->
gen_mod:get_opt(force_node_config, Opts);
force_node_config(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, force_node_config).
-spec host(gen_mod:opts() | global | binary()) -> binary().
host(Opts) when is_map(Opts) ->
gen_mod:get_opt(host, Opts);
host(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, host).
-spec hosts(gen_mod:opts() | global | binary()) -> [binary()].
hosts(Opts) when is_map(Opts) ->
gen_mod:get_opt(hosts, Opts);
hosts(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, hosts).
-spec ignore_pep_from_offline(gen_mod:opts() | global | binary()) -> boolean().
ignore_pep_from_offline(Opts) when is_map(Opts) ->
gen_mod:get_opt(ignore_pep_from_offline, Opts);
ignore_pep_from_offline(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, ignore_pep_from_offline).
-spec last_item_cache(gen_mod:opts() | global | binary()) -> boolean().
last_item_cache(Opts) when is_map(Opts) ->
gen_mod:get_opt(last_item_cache, Opts);
last_item_cache(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, last_item_cache).
-spec max_item_expire_node(gen_mod:opts() | global | binary()) -> 'infinity' | pos_integer().
max_item_expire_node(Opts) when is_map(Opts) ->
gen_mod:get_opt(max_item_expire_node, Opts);
max_item_expire_node(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, max_item_expire_node).
-spec max_items_node(gen_mod:opts() | global | binary()) -> 'unlimited' | non_neg_integer().
max_items_node(Opts) when is_map(Opts) ->
gen_mod:get_opt(max_items_node, Opts);
max_items_node(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, max_items_node).
-spec max_nodes_discoitems(gen_mod:opts() | global | binary()) -> 'infinity' | non_neg_integer().
max_nodes_discoitems(Opts) when is_map(Opts) ->
gen_mod:get_opt(max_nodes_discoitems, Opts);
max_nodes_discoitems(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, max_nodes_discoitems).
-spec max_subscriptions_node(gen_mod:opts() | global | binary()) -> 'undefined' | non_neg_integer().
max_subscriptions_node(Opts) when is_map(Opts) ->
gen_mod:get_opt(max_subscriptions_node, Opts);
max_subscriptions_node(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, max_subscriptions_node).
-spec name(gen_mod:opts() | global | binary()) -> binary().
name(Opts) when is_map(Opts) ->
gen_mod:get_opt(name, Opts);
name(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, name).
-spec nodetree(gen_mod:opts() | global | binary()) -> binary().
nodetree(Opts) when is_map(Opts) ->
gen_mod:get_opt(nodetree, Opts);
nodetree(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, nodetree).
-spec pep_mapping(gen_mod:opts() | global | binary()) -> [{binary(),binary()}].
pep_mapping(Opts) when is_map(Opts) ->
gen_mod:get_opt(pep_mapping, Opts);
pep_mapping(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, pep_mapping).
-spec plugins(gen_mod:opts() | global | binary()) -> [binary()].
plugins(Opts) when is_map(Opts) ->
gen_mod:get_opt(plugins, Opts);
plugins(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, plugins).
-spec vcard(gen_mod:opts() | global | binary()) -> 'undefined' | tuple().
vcard(Opts) when is_map(Opts) ->
gen_mod:get_opt(vcard, Opts);
vcard(Host) ->
gen_mod:get_module_opt(Host, mod_pubsub, vcard).
|
d831e293227393042f1505580af02c5f992f0c408f4209ae3e1e6af7ef256093 | wavewave/hoodle | Link.hs | {-# LANGUAGE GADTs #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TupleSections #
module Hoodle.Coroutine.Link where
import Control.Applicative
import Control.Lens (at, set, view, (^.), _2)
import Control.Monad hiding (forM_)
import Control.Monad.State (get, gets, liftIO)
import Control.Monad.Trans.Maybe
import qualified Data.ByteString.Char8 as B
import Data.Foldable (forM_)
import Data.Hoodle.BBox
import Data.Hoodle.Generic
import Data.Hoodle.Simple
import Data.Hoodle.Zipper
import qualified Data.IntMap as IM
import qualified Data.Map as M
import Data.Maybe (mapMaybe)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.UUID.V4 (nextRandom)
import Graphics.Hoodle.Render.Item
import Graphics.Hoodle.Render.Type
import Graphics.Hoodle.Render.Type.HitTest
import Graphics.Hoodle.Render.Util.HitTest
import qualified Graphics.UI.Gtk as Gtk
import Hoodle.Accessor
import Hoodle.Coroutine.Dialog
import Hoodle.Coroutine.Draw
import Hoodle.Coroutine.Page (changePage)
import Hoodle.Coroutine.Select.Clipboard
import Hoodle.Coroutine.TextInput
import Hoodle.Coroutine.Window
import Hoodle.Device
import Hoodle.ModelAction.ContextMenu
import Hoodle.ModelAction.File (makeNewItemImage)
import Hoodle.ModelAction.Select
import Hoodle.Type.Canvas
import Hoodle.Type.Coroutine
import Hoodle.Type.Enum
import Hoodle.Type.Event
import Hoodle.Type.HoodleState
import Hoodle.Type.PageArrangement
import Hoodle.Util
import Hoodle.View.Coordinate
import System.Directory
import System.FilePath
import System.Process (createProcess, proc)
--
import Prelude hiding (mapM_)
-- | Follow a link target.  A local file is opened in a tab (reusing an
--   already-open tab when the canonical paths match); an http url is
--   handed off to @xdg-open@.  After opening a file, optionally jump to
--   the given anchor.
openLinkAction ::
  UrlPath ->
  -- | optional (docid, anchorid) to jump to after opening
  Maybe (T.Text, T.Text) ->
  MainCoroutine ()
openLinkAction (FileUrl fp) mid = do
  mtab <- liftIO . checkPreviouslyOpenedFile fp =<< get
  case mtab of
    Just tabkey -> do
      switchTab tabkey
      forM_ mid (uncurry goToAnchorPos)
    Nothing -> do
      addTab (LocalDir (Just fp))
      forM_ mid (uncurry goToAnchorPos)
openLinkAction (HttpUrl url) _ =
  liftIO . void $ createProcess (proc "xdg-open" [url])
-- | Return the tab key of an already-open unit hoodle whose backing file
--   has the same canonical path as @fp@, or 'Nothing' when the file is
--   not open in any tab.
checkPreviouslyOpenedFile :: FilePath -> HoodleState -> IO (Maybe Int)
checkPreviouslyOpenedFile fp xst = do
  cfp <- canonicalizePath fp
  -- keep only the unit hoodles whose file matches the canonical path
  lst <- filterM (checker cfp . snd) (IM.assocs (xst ^. unitHoodles . _2))
  case lst of
    x : _ -> return (Just (fst x))
    _ -> return Nothing
  where
    -- NOTE(review): '#' is presumably a reverse-application operator from a
    -- project module (e.g. Hoodle.Util); confirm against upstream.  A unit
    -- hoodle with no file path is treated as a non-match.
    checker cfp uhdl = getHoodleFilePath uhdl
      # maybe (return False)
      $ \fp' -> do
        cfp' <- canonicalizePath fp'
        return (cfp == cfp')
-- | Render a string as a pango-text SVG and, when the given canvas
--   position falls on a page, move the bounding box's upper-left corner
--   to that page coordinate; otherwise the rendering is returned as-is.
makeTextSVGFromStringAt ::
  String ->
  CanvasId ->
  UnitHoodle ->
  CanvasCoordinate ->
  IO (B.ByteString, BBox)
makeTextSVGFromStringAt str cid uhdl ccoord = do
  rdr <- makePangoTextSVG (0, 0) (T.pack str) -- for the time being, I use string
  geometry <- getCanvasGeometryCvsId cid uhdl
  -- translate the canvas position into a page coordinate (if on a page)
  let mpgcoord = (desktop2Page geometry . canvas2Desktop geometry) ccoord
  return $ case mpgcoord of
    Nothing -> rdr
    Just (_, PageCoord (x', y')) ->
      let bbox' = moveBBoxULCornerTo (x', y') (snd rdr)
      in (fst rdr, bbox')
-- | On pointer motion, highlight the link item under the pointer (if any)
--   and redraw only the affected desktop region.  The per-canvas
--   'notifiedItem' remembers the currently highlighted link so it can be
--   un-highlighted when the pointer leaves it.
notifyLink :: CanvasId -> PointerCoord -> MainCoroutine ()
notifyLink cid pcoord = do
  uhdl <- gets (view (unitHoodles . currentUnit))
  forBoth' unboxBiAct (f uhdl) (getCanvasInfo cid uhdl)
  where
    f :: forall b. UnitHoodle -> CanvasInfo b -> MainCoroutine ()
    f uhdl cvsInfo = do
      let cpn = PageNum . view currentPageNum $ cvsInfo
          arr = view (viewInfo . pageArrangement) cvsInfo
          mnotifyitem = view notifiedItem cvsInfo
          canvas = view drawArea cvsInfo
      geometry <- liftIO $ makeCanvasGeometry cpn arr canvas
      -- mresult is Nothing when nothing changed; otherwise it carries the
      -- new notified-link state plus the desktop bbox to invalidate
      mresult <-
        case (desktop2Page geometry . device2Desktop geometry) pcoord of
          Nothing -> return Nothing
          Just (pnum, PageCoord (x, y)) -> do
            let hdl = getHoodle uhdl
                mpage = view (gpages . at (unPageNum pnum)) hdl
            case mpage of
              Nothing -> return Nothing
              Just page -> do
                -- link items of the current layer hit-tested at (x, y)
                let itms = (view gitems . current . view glayers) page
                    lnks = filter isLinkInRItem itms
                    hlnks = hltFilteredBy (\itm -> isPointInBBox (getBBox itm) (x, y)) lnks
                    hitted = takeHitted hlnks
                case mnotifyitem of
                  Nothing ->
                    if (not . null) hitted
                      then Just <$> newNotify geometry pnum (head hitted) Nothing
                      else return Nothing
                  Just (opnum, obbx, _) -> do
                    let obbx_desk = xformBBox (unDeskCoord . page2Desktop geometry . (opnum,) . PageCoord) obbx
                    if pnum == opnum && isPointInBBox obbx (x, y)
                      -- still inside the same link: nothing to update
                      then return Nothing
                      else
                        if (not . null) hitted
                          then Just <$> newNotify geometry pnum (head hitted) (Just obbx_desk)
                          -- left the old link without entering a new one
                          else return (Just (Nothing, obbx_desk))
      forM_
        mresult
        ( \(mnewnotified, bbx_desk) -> do
            let ncinfobox = (set (unboxLens notifiedItem) mnewnotified . getCanvasInfo cid) uhdl
            pureUpdateUhdl $ setCanvasInfo (cid, ncinfobox)
            invalidateInBBox (Just bbx_desk) Efficient cid
        )
    ----
    -- record a newly hit link; returns the new notified state and the
    -- union of old/new bounding boxes (desktop coordinates) to repaint
    newNotify ::
      CanvasGeometry ->
      PageNum ->
      RItem ->
      Maybe BBox ->
      MainCoroutine (Maybe (PageNum, BBox, RItem), BBox)
    newNotify geometry pnum lnk mobbx_desk = do
      let bbx = getBBox lnk
          bbx_desk = xformBBox (unDeskCoord . page2Desktop geometry . (pnum,) . PageCoord) bbx
          nbbx_desk = maybe bbx_desk (unionBBox bbx_desk) mobbx_desk
      return (Just (pnum, bbx, lnk), nbbx_desk)
-- | Handle a drag-and-drop payload dropped at canvas position (x, y).
--   The payload is either @"uuid,filepath"@ (an internal link), a file
--   url (png/jpg files are inserted as images), or an http url (inserted
--   as a text link, or replacing the current selection after confirmation).
gotLink :: Maybe String -> (Int, Int) -> MainCoroutine ()
gotLink mstr (x, y) = do
  xst <- get
  cache <- renderCache
  let uhdl = view (unitHoodles . currentUnit) xst
      cid = getCurrentCanvasId uhdl
  -- try to split the payload as "uuid,filepath"
  mr <- runMaybeT $ do
    str <- (MaybeT . return) mstr
    let (str1, rem1) = break (== ',') str
    guard ((not . null) rem1)
    return (B.pack str1, tail rem1)
  case mr of
    Nothing -> do
      -- not an internal link: try to parse the payload as a url
      mr2 <- runMaybeT $ do
        str <- (MaybeT . return) mstr
        (MaybeT . return) (urlParse str)
      case mr2 of
        Nothing -> return ()
        Just (FileUrl file) -> do
          let ext = takeExtension file
          when (ext == ".png" || ext == ".PNG" || ext == ".jpg" || ext == ".JPG") $ do
            let isembedded = view (settings . doesEmbedImage) xst
            -- render the image item asynchronously, then wait for it
            callRenderer $
              GotRItem
                <$> (cnstrctRItem =<< liftIO (makeNewItemImage isembedded file))
            RenderEv (GotRItem nitm) <-
              waitSomeEvent (\case RenderEv (GotRItem _) -> True; _ -> False)
            geometry <- liftIO $ getCanvasGeometryCvsId cid uhdl
            let ccoord = CvsCoord (fromIntegral x, fromIntegral y)
                mpgcoord = (desktop2Page geometry . canvas2Desktop geometry) ccoord
            insertItemAt mpgcoord nitm
        Just (HttpUrl url) -> do
          case getSelectedItmsFromUnitHoodle uhdl of
            Nothing -> do
              -- no selection: drop a fresh text link at the drop position
              uuidbstr <- liftIO $ B.pack . show <$> nextRandom
              rdrbbx <- liftIO $ makeTextSVGFromStringAt url cid uhdl (CvsCoord (fromIntegral x, fromIntegral y))
              linkInsert "simple" (uuidbstr, url) url rdrbbx
            Just hititms -> do
              b <- okCancelMessageBox ("replace selected item with link to " ++ url ++ "?")
              when b $ do
                -- union bbox of the selection; Middle means non-empty
                let ulbbox = (unUnion . mconcat . fmap (Union . Middle . getBBox)) hititms
                case ulbbox of
                  Middle bbox -> do
                    svg <- liftIO $ makeSVGFromSelection cache cid hititms bbox
                    uuidbstr <- liftIO $ B.pack . show <$> nextRandom
                    deleteSelection
                    linkInsert "simple" (uuidbstr, url) url (svg_render svg, bbox)
                  _ -> return ()
    Just (uuidbstr, fp) -> do
      let fn = takeFileName fp
      case getSelectedItmsFromUnitHoodle uhdl of
        Nothing -> do
          -- no selection: render the file name as a text link at the drop
          -- position (when it maps to a page)
          rdr <- liftIO (makePangoTextSVG (0, 0) (T.pack fn))
          geometry <- liftIO $ getCanvasGeometryCvsId cid uhdl
          let ccoord = CvsCoord (fromIntegral x, fromIntegral y)
              mpgcoord = (desktop2Page geometry . canvas2Desktop geometry) ccoord
              rdr' = case mpgcoord of
                Nothing -> rdr
                Just (_, PageCoord (x', y')) ->
                  let bbox' = moveBBoxULCornerTo (x', y') (snd rdr) in (fst rdr, bbox')
          linkInsert "simple" (uuidbstr, fp) fn rdr'
        Just hititms -> do
          b <- okCancelMessageBox ("replace selected item with link to " ++ fn ++ "?")
          when b $ do
            let ulbbox = (unUnion . mconcat . fmap (Union . Middle . getBBox)) hititms
            case ulbbox of
              Middle bbox -> do
                svg <- liftIO $ makeSVGFromSelection cache cid hititms bbox
                uuid <- liftIO nextRandom
                let uuidbstr' = B.pack (show uuid)
                deleteSelection
                linkInsert "simple" (uuidbstr', fp) fn (svg_render svg, bbox)
              _ -> return ()
-- | Ask the user for a target file and a link label (via a modal dialog
--   with a text view), then insert a pango-rendered text link to that
--   file.  Cancelling either step inserts nothing.
addLink :: MainCoroutine ()
addLink = do
  mfilename <- fileChooser Gtk.FileChooserActionOpen Nothing
  doIOaction $ const (action mfilename)
  AddLink minput <- waitSomeEvent (\case AddLink _ -> True; _ -> False)
  case minput of
    Nothing -> return ()
    Just (str, fname) -> do
      uuid <- liftIO nextRandom
      let uuidbstr = B.pack (show uuid)
      rdr <- liftIO (makePangoTextSVG (0, 0) (T.pack str))
      linkInsert "simple" (uuidbstr, fname) str rdr
  where
    -- modal Gtk dialog with a text view; wraps the entered label paired
    -- with the chosen file (if any) in an AddLink user event
    action mfn = do
      dialog <-
        Gtk.messageDialogNew
          Nothing
          [Gtk.DialogModal]
          Gtk.MessageQuestion
          Gtk.ButtonsOkCancel
          ("add link" :: String)
      upper <- fmap Gtk.castToContainer (Gtk.dialogGetContentArea dialog)
      vbox <- Gtk.vBoxNew False 0
      Gtk.containerAdd upper vbox
      txtvw <- Gtk.textViewNew
      Gtk.boxPackStart vbox txtvw Gtk.PackGrow 0
      Gtk.widgetShowAll dialog
      res <- Gtk.dialogRun dialog
      case res of
        Gtk.ResponseOk -> do
          -- read the whole buffer as the link label
          buf <- Gtk.textViewGetBuffer txtvw
          (istart, iend) <-
            (,) <$> Gtk.textBufferGetStartIter buf
              <*> Gtk.textBufferGetEndIter buf
          l <- Gtk.textBufferGetText buf istart iend True
          Gtk.widgetDestroy dialog
          return (UsrEv (AddLink ((l,) <$> mfn)))
        _ -> Gtk.widgetDestroy dialog >> return (UsrEv (AddLink Nothing))
-- | Dump the anchor map of the currently focused hoodle to the message
--   log (debug aid).
listAnchors :: MainCoroutine ()
listAnchors = do
  uhdl <- gets (view (unitHoodles . currentUnit))
  let anchormap = getAnchorMap (rHoodle2Hoodle (getHoodle uhdl))
  msgShout (show anchormap)
-- | Map every anchor id in the hoodle to its (page number, position).
--   Pages are numbered from 0.  Because both folds are right folds, for a
--   duplicate anchor id the earliest occurrence (first page, first item)
--   ends up winning -- its insert runs last and overwrites the others.
getAnchorMap :: Hoodle -> M.Map T.Text (Int, (Double, Double))
getAnchorMap hdl =
  let pgs = view pages hdl
      -- all items of a page, across all of its layers
      itemsInPage pg = [i | l <- view layers pg, i <- view items l]
      anchorsWithPageNum :: [(Int, [Anchor])]
      anchorsWithPageNum = zip [0 ..] (map (mapMaybe lookupAnchor . itemsInPage) pgs)
  in foldr (\(p, ys) m -> foldr (insertAnchor p) m ys) M.empty anchorsWithPageNum
  where
    lookupAnchor (ItemAnchor a) = Just a
    lookupAnchor _ = Nothing
    insertAnchor pgnum Anchor {..} = M.insert (TE.decodeUtf8 anchor_id) (pgnum, anchor_pos)
-- | Jump to the page holding the given anchor, provided the anchor's
--   document id matches the currently open hoodle.  Unknown anchor ids
--   are silently ignored.
goToAnchorPos :: T.Text -> T.Text -> MainCoroutine ()
goToAnchorPos docid anchorid = do
  rhdl <- gets (getHoodle . view (unitHoodles . currentUnit))
  let currentDocId = TE.decodeUtf8 (view ghoodleID rhdl)
      anchormap = getAnchorMap (rHoodle2Hoodle rhdl)
  when (docid == currentDocId) $
    forM_ (M.lookup anchorid anchormap) $ \(pgnum, _) ->
      changePage (const pgnum)
| null | https://raw.githubusercontent.com/wavewave/hoodle/b026641ef9375818c996c1fa4968e192a0d48e7c/core/src/Hoodle/Coroutine/Link.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
|
| (docid,anchorid)
|
for the time being, I use string
|
--
| got a link address (or embedded image) from drag and drop
|
|
| | # LANGUAGE LambdaCase #
# LANGUAGE RecordWildCards #
# LANGUAGE TupleSections #
module Hoodle.Coroutine.Link where
import Control.Applicative
import Control.Lens (at, set, view, (^.), _2)
import Control.Monad hiding (forM_)
import Control.Monad.State (get, gets, liftIO)
import Control.Monad.Trans.Maybe
import qualified Data.ByteString.Char8 as B
import Data.Foldable (forM_)
import Data.Hoodle.BBox
import Data.Hoodle.Generic
import Data.Hoodle.Simple
import Data.Hoodle.Zipper
import qualified Data.IntMap as IM
import qualified Data.Map as M
import Data.Maybe (mapMaybe)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.UUID.V4 (nextRandom)
import Graphics.Hoodle.Render.Item
import Graphics.Hoodle.Render.Type
import Graphics.Hoodle.Render.Type.HitTest
import Graphics.Hoodle.Render.Util.HitTest
import qualified Graphics.UI.Gtk as Gtk
import Hoodle.Accessor
import Hoodle.Coroutine.Dialog
import Hoodle.Coroutine.Draw
import Hoodle.Coroutine.Page (changePage)
import Hoodle.Coroutine.Select.Clipboard
import Hoodle.Coroutine.TextInput
import Hoodle.Coroutine.Window
import Hoodle.Device
import Hoodle.ModelAction.ContextMenu
import Hoodle.ModelAction.File (makeNewItemImage)
import Hoodle.ModelAction.Select
import Hoodle.Type.Canvas
import Hoodle.Type.Coroutine
import Hoodle.Type.Enum
import Hoodle.Type.Event
import Hoodle.Type.HoodleState
import Hoodle.Type.PageArrangement
import Hoodle.Util
import Hoodle.View.Coordinate
import System.Directory
import System.FilePath
import System.Process (createProcess, proc)
import Prelude hiding (mapM_)
openLinkAction ::
UrlPath ->
Maybe (T.Text, T.Text) ->
MainCoroutine ()
openLinkAction urlpath mid = do
case urlpath of
FileUrl fp -> do
mk <- liftIO . checkPreviouslyOpenedFile fp =<< get
case mk of
Just k -> switchTab k >> forM_ mid (uncurry goToAnchorPos)
Nothing ->
addTab (LocalDir (Just fp))
>> forM_ mid (uncurry goToAnchorPos)
HttpUrl url -> liftIO $ void $ createProcess (proc "xdg-open" [url])
checkPreviouslyOpenedFile :: FilePath -> HoodleState -> IO (Maybe Int)
checkPreviouslyOpenedFile fp xst = do
cfp <- canonicalizePath fp
lst <- filterM (checker cfp . snd) (IM.assocs (xst ^. unitHoodles . _2))
case lst of
x : _ -> return (Just (fst x))
_ -> return Nothing
where
checker cfp uhdl = getHoodleFilePath uhdl
# maybe (return False)
$ \fp' -> do
cfp' <- canonicalizePath fp'
return (cfp == cfp')
makeTextSVGFromStringAt ::
String ->
CanvasId ->
UnitHoodle ->
CanvasCoordinate ->
IO (B.ByteString, BBox)
makeTextSVGFromStringAt str cid uhdl ccoord = do
geometry <- getCanvasGeometryCvsId cid uhdl
let mpgcoord = (desktop2Page geometry . canvas2Desktop geometry) ccoord
return $ case mpgcoord of
Nothing -> rdr
Just (_, PageCoord (x', y')) ->
let bbox' = moveBBoxULCornerTo (x', y') (snd rdr)
in (fst rdr, bbox')
notifyLink :: CanvasId -> PointerCoord -> MainCoroutine ()
notifyLink cid pcoord = do
uhdl <- gets (view (unitHoodles . currentUnit))
forBoth' unboxBiAct (f uhdl) (getCanvasInfo cid uhdl)
where
f :: forall b. UnitHoodle -> CanvasInfo b -> MainCoroutine ()
f uhdl cvsInfo = do
let cpn = PageNum . view currentPageNum $ cvsInfo
arr = view (viewInfo . pageArrangement) cvsInfo
mnotifyitem = view notifiedItem cvsInfo
canvas = view drawArea cvsInfo
geometry <- liftIO $ makeCanvasGeometry cpn arr canvas
mresult <-
case (desktop2Page geometry . device2Desktop geometry) pcoord of
Nothing -> return Nothing
Just (pnum, PageCoord (x, y)) -> do
let hdl = getHoodle uhdl
mpage = view (gpages . at (unPageNum pnum)) hdl
case mpage of
Nothing -> return Nothing
Just page -> do
let itms = (view gitems . current . view glayers) page
lnks = filter isLinkInRItem itms
hlnks = hltFilteredBy (\itm -> isPointInBBox (getBBox itm) (x, y)) lnks
hitted = takeHitted hlnks
case mnotifyitem of
Nothing ->
if (not . null) hitted
then Just <$> newNotify geometry pnum (head hitted) Nothing
else return Nothing
Just (opnum, obbx, _) -> do
let obbx_desk = xformBBox (unDeskCoord . page2Desktop geometry . (opnum,) . PageCoord) obbx
if pnum == opnum && isPointInBBox obbx (x, y)
then return Nothing
else
if (not . null) hitted
then Just <$> newNotify geometry pnum (head hitted) (Just obbx_desk)
else return (Just (Nothing, obbx_desk))
forM_
mresult
( \(mnewnotified, bbx_desk) -> do
let ncinfobox = (set (unboxLens notifiedItem) mnewnotified . getCanvasInfo cid) uhdl
pureUpdateUhdl $ setCanvasInfo (cid, ncinfobox)
invalidateInBBox (Just bbx_desk) Efficient cid
)
newNotify ::
CanvasGeometry ->
PageNum ->
RItem ->
Maybe BBox ->
MainCoroutine (Maybe (PageNum, BBox, RItem), BBox)
newNotify geometry pnum lnk mobbx_desk = do
let bbx = getBBox lnk
bbx_desk = xformBBox (unDeskCoord . page2Desktop geometry . (pnum,) . PageCoord) bbx
nbbx_desk = maybe bbx_desk (unionBBox bbx_desk) mobbx_desk
return (Just (pnum, bbx, lnk), nbbx_desk)
gotLink :: Maybe String -> (Int, Int) -> MainCoroutine ()
gotLink mstr (x, y) = do
xst <- get
cache <- renderCache
let uhdl = view (unitHoodles . currentUnit) xst
cid = getCurrentCanvasId uhdl
mr <- runMaybeT $ do
str <- (MaybeT . return) mstr
let (str1, rem1) = break (== ',') str
guard ((not . null) rem1)
return (B.pack str1, tail rem1)
case mr of
Nothing -> do
mr2 <- runMaybeT $ do
str <- (MaybeT . return) mstr
(MaybeT . return) (urlParse str)
case mr2 of
Nothing -> return ()
Just (FileUrl file) -> do
let ext = takeExtension file
when (ext == ".png" || ext == ".PNG" || ext == ".jpg" || ext == ".JPG") $ do
let isembedded = view (settings . doesEmbedImage) xst
callRenderer $
GotRItem
<$> (cnstrctRItem =<< liftIO (makeNewItemImage isembedded file))
RenderEv (GotRItem nitm) <-
waitSomeEvent (\case RenderEv (GotRItem _) -> True; _ -> False)
geometry <- liftIO $ getCanvasGeometryCvsId cid uhdl
let ccoord = CvsCoord (fromIntegral x, fromIntegral y)
mpgcoord = (desktop2Page geometry . canvas2Desktop geometry) ccoord
insertItemAt mpgcoord nitm
Just (HttpUrl url) -> do
case getSelectedItmsFromUnitHoodle uhdl of
Nothing -> do
uuidbstr <- liftIO $ B.pack . show <$> nextRandom
rdrbbx <- liftIO $ makeTextSVGFromStringAt url cid uhdl (CvsCoord (fromIntegral x, fromIntegral y))
linkInsert "simple" (uuidbstr, url) url rdrbbx
Just hititms -> do
b <- okCancelMessageBox ("replace selected item with link to " ++ url ++ "?")
when b $ do
let ulbbox = (unUnion . mconcat . fmap (Union . Middle . getBBox)) hititms
case ulbbox of
Middle bbox -> do
svg <- liftIO $ makeSVGFromSelection cache cid hititms bbox
uuidbstr <- liftIO $ B.pack . show <$> nextRandom
deleteSelection
linkInsert "simple" (uuidbstr, url) url (svg_render svg, bbox)
_ -> return ()
Just (uuidbstr, fp) -> do
let fn = takeFileName fp
case getSelectedItmsFromUnitHoodle uhdl of
Nothing -> do
rdr <- liftIO (makePangoTextSVG (0, 0) (T.pack fn))
geometry <- liftIO $ getCanvasGeometryCvsId cid uhdl
let ccoord = CvsCoord (fromIntegral x, fromIntegral y)
mpgcoord = (desktop2Page geometry . canvas2Desktop geometry) ccoord
rdr' = case mpgcoord of
Nothing -> rdr
Just (_, PageCoord (x', y')) ->
let bbox' = moveBBoxULCornerTo (x', y') (snd rdr) in (fst rdr, bbox')
linkInsert "simple" (uuidbstr, fp) fn rdr'
Just hititms -> do
b <- okCancelMessageBox ("replace selected item with link to " ++ fn ++ "?")
when b $ do
let ulbbox = (unUnion . mconcat . fmap (Union . Middle . getBBox)) hititms
case ulbbox of
Middle bbox -> do
svg <- liftIO $ makeSVGFromSelection cache cid hititms bbox
uuid <- liftIO nextRandom
let uuidbstr' = B.pack (show uuid)
deleteSelection
linkInsert "simple" (uuidbstr', fp) fn (svg_render svg, bbox)
_ -> return ()
addLink :: MainCoroutine ()
addLink = do
mfilename <- fileChooser Gtk.FileChooserActionOpen Nothing
doIOaction $ const (action mfilename)
AddLink minput <- waitSomeEvent (\case AddLink _ -> True; _ -> False)
case minput of
Nothing -> return ()
Just (str, fname) -> do
uuid <- liftIO nextRandom
let uuidbstr = B.pack (show uuid)
rdr <- liftIO (makePangoTextSVG (0, 0) (T.pack str))
linkInsert "simple" (uuidbstr, fname) str rdr
where
action mfn = do
dialog <-
Gtk.messageDialogNew
Nothing
[Gtk.DialogModal]
Gtk.MessageQuestion
Gtk.ButtonsOkCancel
("add link" :: String)
upper <- fmap Gtk.castToContainer (Gtk.dialogGetContentArea dialog)
vbox <- Gtk.vBoxNew False 0
Gtk.containerAdd upper vbox
txtvw <- Gtk.textViewNew
Gtk.boxPackStart vbox txtvw Gtk.PackGrow 0
Gtk.widgetShowAll dialog
res <- Gtk.dialogRun dialog
case res of
Gtk.ResponseOk -> do
buf <- Gtk.textViewGetBuffer txtvw
(istart, iend) <-
(,) <$> Gtk.textBufferGetStartIter buf
<*> Gtk.textBufferGetEndIter buf
l <- Gtk.textBufferGetText buf istart iend True
Gtk.widgetDestroy dialog
return (UsrEv (AddLink ((l,) <$> mfn)))
_ -> Gtk.widgetDestroy dialog >> return (UsrEv (AddLink Nothing))
listAnchors :: MainCoroutine ()
listAnchors = msgShout . show . getAnchorMap . rHoodle2Hoodle . getHoodle . view (unitHoodles . currentUnit) =<< get
getAnchorMap :: Hoodle -> M.Map T.Text (Int, (Double, Double))
getAnchorMap hdl =
let pgs = view pages hdl
itemsInPage pg = [i | l <- view layers pg, i <- view items l]
anchorsWithPageNum :: [(Int, [Anchor])]
anchorsWithPageNum = zip [0 ..] (map (mapMaybe lookupAnchor . itemsInPage) pgs)
in foldr (\(p, ys) m -> foldr (insertAnchor p) m ys) M.empty anchorsWithPageNum
where
lookupAnchor (ItemAnchor a) = Just a
lookupAnchor _ = Nothing
insertAnchor pgnum Anchor {..} = M.insert (TE.decodeUtf8 anchor_id) (pgnum, anchor_pos)
goToAnchorPos :: T.Text -> T.Text -> MainCoroutine ()
goToAnchorPos docid anchorid = do
rhdl <- gets (getHoodle . view (unitHoodles . currentUnit))
let hdl = rHoodle2Hoodle rhdl
when (docid == (TE.decodeUtf8 . view ghoodleID) rhdl) $ do
let anchormap = getAnchorMap hdl
forM_ (M.lookup anchorid anchormap) $ \(pgnum, _) -> changePage (const pgnum)
|
1c9357ac157d4b717a3a8dcd6af70435a02a3f0a59231922686c5e2161744424 | aldosolorzano/structurizr-clj | view.clj | (ns structurizr-clj.view)
(defn configuration
"Gets configuration for given views"
[views]
(.getConfiguration views))
(defn get-key
  "Returns the key that identifies the given view."
  [view]
  (.getKey view))
(defn styles
  "Get styles HashSet for given views"
  [views]
  (-> views configuration .getStyles))
(defn system-landscapes
  "Returns the system landscape views of the given views collection."
  [views]
  (.getSystemLandscapeViews views))

(defn system-contexts
  "Returns the system context views of the given views collection."
  [views]
  (.getSystemContextViews views))

(defn containers
  "Returns the container views of the given views collection."
  [views]
  (.getContainerViews views))

(defn components
  "Returns the component views of the given views collection."
  [views]
  (.getComponentViews views))
(defn create-system-landscape
"Creates SystemLandscape view "
[views key description]
(.createSystemLandscapeView views key description))
(defn create-system-context
"Creates SystemContextView for given software-system"
[views software-system key description]
(.createSystemContextView views software-system key description))
(defn create-container
"Creates ContainerView for given software-system"
[views software-system key description]
(.createContainerView views software-system key description))
(defn create-component
"Creates ComponentView for given container"
[views container key description]
(.createComponentView views container key description))
(defn remove-element
"Removes an individual element from this view"
[view element]
(.remove view element))
(defn add-element
"Adds te given element to this view, including relationships to/from that element"
[view element]
(.add view element))
(defn add-elements
"Adds all elements to the view"
[view]
(.addAllElements view))
(defn add-people
"Adds all person elements to the view"
[view]
(.addAllPeople view))
(defn add-software-systems
"Adds all software-systems to the view"
[view]
(.addAllSoftwareSystems view))
(defn add-containers
"Adds all containers to the view"
[view]
(.addAllContainers view))
(defn add-components
"Adds all components to the view"
[view]
(.addAllComponents view))
| null | https://raw.githubusercontent.com/aldosolorzano/structurizr-clj/20354fc97c660dd5ce44972f4a1d3935a4f32e4c/src/structurizr_clj/view.clj | clojure | (ns structurizr-clj.view)
(defn configuration
"Gets configuration for given views"
[views]
(.getConfiguration views))
(defn get-key
[view]
(.getKey view))
(defn styles
"Get styles HashSet for given views"
[views]
(.getStyles (configuration views)))
(defn system-landscapes
[views]
(.getSystemLandscapeViews views))
(defn system-contexts
[views]
(.getSystemContextViews views))
(defn containers
[views]
(.getContainerViews views))
(defn components
[views]
(.getComponentViews views))
(defn create-system-landscape
"Creates SystemLandscape view "
[views key description]
(.createSystemLandscapeView views key description))
(defn create-system-context
"Creates SystemContextView for given software-system"
[views software-system key description]
(.createSystemContextView views software-system key description))
(defn create-container
"Creates ContainerView for given software-system"
[views software-system key description]
(.createContainerView views software-system key description))
(defn create-component
"Creates ComponentView for given container"
[views container key description]
(.createComponentView views container key description))
(defn remove-element
"Removes an individual element from this view"
[view element]
(.remove view element))
(defn add-element
"Adds te given element to this view, including relationships to/from that element"
[view element]
(.add view element))
(defn add-elements
"Adds all elements to the view"
[view]
(.addAllElements view))
(defn add-people
"Adds all person elements to the view"
[view]
(.addAllPeople view))
(defn add-software-systems
"Adds all software-systems to the view"
[view]
(.addAllSoftwareSystems view))
(defn add-containers
"Adds all containers to the view"
[view]
(.addAllContainers view))
(defn add-components
"Adds all components to the view"
[view]
(.addAllComponents view))
|
|
1966360594297880b9b557a1e157455f7ca66faaf39e39151f6c47377a2d077b | huangz1990/SICP-answers | test-p33-prime.scm | (load "test-manager/load.scm")
(load "p33-prime.scm")
(define-each-check
(prime? 7)
(false? (prime? 10))
)
(run-registered-tests)
| null | https://raw.githubusercontent.com/huangz1990/SICP-answers/15e3475003ef10eb738cf93c1932277bc56bacbe/chp2/code/test-p33-prime.scm | scheme | (load "test-manager/load.scm")
(load "p33-prime.scm")
(define-each-check
(prime? 7)
(false? (prime? 10))
)
(run-registered-tests)
|
|
4bd5afe0dcfcc1e490800dae9cec09e8cc431d403fe910003c6662ec8a0bf906 | renanroberto/whatsyourquestion-bot | Flow.hs | module Flow (flow, (?>)) where
type FlowT a = Either a ()
class Flow a where
flowFail :: a
flow :: FlowT a -> a
flow (Left x) = x
flow (Right _) = flowFail
(?>) :: Bool -> a -> FlowT a
bool ?> value = if bool then Left value else Right ()
infix 3 ?>
instance Flow Bool where
flowFail = False
instance Flow (Maybe a) where
flowFail = Nothing
instance Flow e => Flow (Either e a) where
flowFail = Left flowFail
| null | https://raw.githubusercontent.com/renanroberto/whatsyourquestion-bot/3207eefbb66004b2cc258a46d9cc9c9ed31fcc38/src/Flow.hs | haskell | module Flow (flow, (?>)) where
type FlowT a = Either a ()
class Flow a where
flowFail :: a
flow :: FlowT a -> a
flow (Left x) = x
flow (Right _) = flowFail
(?>) :: Bool -> a -> FlowT a
bool ?> value = if bool then Left value else Right ()
infix 3 ?>
instance Flow Bool where
flowFail = False
instance Flow (Maybe a) where
flowFail = Nothing
instance Flow e => Flow (Either e a) where
flowFail = Left flowFail
|
|
aacb24acbcf5ed60c103a0e3c9962eeca0933f8749f68573d3812a74cf31a809 | TrustInSoft/tis-kernel | aorai_utils.mli | (**************************************************************************)
(* *)
This file is part of .
(* *)
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
(* *)
is released under GPLv2
(* *)
(**************************************************************************)
(**************************************************************************)
(* *)
This file is part of Aorai plug - in of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
( Institut National de Recherche en Informatique et en
(* Automatique) *)
INSA ( Institut National des Sciences Appliquees )
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
open Cil_types
open Promelaast
(** Given a transition a function and a function status (call or return)
it returns if the cross condition can be statisfied
with only function status.
*)
val isCrossable:
(typed_condition * action) trans -> kernel_function -> funcStatus -> bool
(** Given a transition and the main entry point it returns if
the cross condition can be statisfied at the beginning of the program. *)
val isCrossableAtInit:
(typed_condition * action) trans -> kernel_function -> bool
* This function rewrites a cross condition into an ACSL expression .
Moreover , by giving current operation name and its status ( call or
return ) the generation simplifies the generated expression .
Moreover, by giving current operation name and its status (call or
return) the generation simplifies the generated expression.
*)
val crosscond_to_pred:
typed_condition -> kernel_function -> funcStatus ->
Cil_types.predicate Cil_types.named
(** {b Globals management} *)
(** Copy the file pointer locally in the class in order to easiest globals management and initializes some tables. *)
val initFile : Cil_types.file -> unit
(** Given the name of the main function, this function computes all newly introduced globals (variables, enumeration structure, invariants, etc.) *)
val initGlobals : Cil_types.kernel_function -> bool -> unit
(* ************************************************************************* *)
(** {b Pre/post management} *)
(**{b Pre and post condition of C functions} In our point of view, the pre or
the post condition of a C function are defined by the set of states
authorized just before/after the call, as such as the set of crossable
transitions. The following functions generates abstract pre and post-conditions
by using only informations deduced from the buchi automata.
*)
(** base lhost corresponding to curState. *)
val host_state_term: unit -> Cil_types.term_lval
(** Returns the predicate saying that automaton is in
corresponding state. *)
val is_state_pred: state -> Cil_types.predicate Cil_types.named
(** Returns the statement saying the state is affected *)
val is_state_stmt: state * Cil_types.varinfo -> location -> Cil_types.stmt
(** Returns the boolean expression saying the state is affected *)
val is_state_exp: state -> location -> Cil_types.exp
(** Returns the predicate saying that automaton is NOT
in corresponding state. *)
val is_out_of_state_pred: state -> Cil_types.predicate Cil_types.named
(** Returns the statement saying the automaton is not in the corresponding
state.
@raise AbortFatal in the deterministic case, as such an assignment is
meaningless in this context: we only assign the state variable to be
in the (unique by definition) state currently active
*)
val is_out_of_state_stmt:
state * Cil_types.varinfo -> location -> Cil_types.stmt
(** Returns the expression testing that automaton is NOT
in the corresponding state.*)
val is_out_of_state_exp: state -> location -> Cil_types.exp
* returns assigns clause corresponding to updating automaton 's state , and
assigning auxiliary variable depending on the possible transitions made
in the function .
@since Nitrogen-20111001
@since Neon-20140301 adds kf argument
assigning auxiliary variable depending on the possible transitions made
in the function.
@since Nitrogen-20111001
@since Neon-20140301 adds kf argument
*)
val aorai_assigns:
Data_for_aorai.state ->
Cil_types.location -> Cil_types.identified_term Cil_types.assigns
* returns the list of predicates expressing that for each current state
the automaton currently is in , there is at least one transition that is
crossed .
the automaton currently is in, there is at least one transition that is
crossed.
*)
val force_transition:
Cil_types.location -> kernel_function -> Promelaast.funcStatus ->
Data_for_aorai.state -> Cil_types.identified_predicate list
(** return list of preconditions for the given auxiliary function
(f_pre_func or f_post_func). *)
val auto_func_preconditions:
Cil_types.location -> kernel_function -> Promelaast.funcStatus ->
Data_for_aorai.state -> Cil_types.identified_predicate list
* auto_func_behaviors f st ( st_status , tr_status )
generates behaviors corresponding to the transitions authorized by
tr_status for function f in status st
@since
generates behaviors corresponding to the transitions authorized by
tr_status for function f in status st
@since Nitrogen-20111001
*)
val auto_func_behaviors:
Cil_types.location -> kernel_function -> Promelaast.funcStatus ->
Data_for_aorai.state -> Cil_types.funbehavior list
* [ f status ]
generates the body of pre & post functions .
res must be [ None ] for a pre - function and [ Some v ] for a post - func where
[ v ] is the formal corresponding to the value returned by the original
function . If the original function returns [ Void ] , [ res ] must be [ None ] .
It also returns the local variables list declared in the body .
generates the body of pre & post functions.
res must be [None] for a pre-function and [Some v] for a post-func where
[v] is the formal corresponding to the value returned by the original
function. If the original function returns [Void], [res] must be [None].
It also returns the local variables list declared in the body. *)
val auto_func_block:
Cil_types.location -> kernel_function -> Promelaast.funcStatus ->
Data_for_aorai.state -> Cil_types.varinfo option ->
Cil_types.block * Cil_types.varinfo list
val get_preds_pre_wrt_params :
kernel_function -> Cil_types.predicate Cil_types.named
val get_preds_post_bc_wrt_params :
kernel_function -> Cil_types.predicate Cil_types.named
(** Returns a list of predicate giving for each possible start state the
disjunction of possible current states
*)
val possible_states_preds:
Data_for_aorai.state -> Cil_types.predicate Cil_types.named list
(** Possible values of the given auxiliary variable under the current path,
[start]ing from the given point
@since Neon-20140301 add logic_label argument
*)
val update_to_pred:
start: Cil_types.logic_label ->
pre_state:Promelaast.state -> post_state:Promelaast.state ->
Cil_types.term -> Data_for_aorai.Intervals.t -> predicate named
(** for a given starting and ending state, returns the post-conditions
related to the possible values of the auxiliary variables at current point
the function, guarded by the fact that we have followed this path, from
the given program point
@modify Neon-20130301 add logic_label argument
*)
val action_to_pred:
start:Cil_types.logic_label ->
pre_state:Promelaast.state -> post_state:Promelaast.state ->
Data_for_aorai.Vals.t -> predicate named list
(** All actions that might have been performed on aux variables from the
given program point, guarded by the path followed.
@modify Neon-20140301 add logic_label argument
*)
val all_actions_preds:
Cil_types.logic_label ->
Data_for_aorai.state -> predicate named list
(** Return an integer constant term with the 0 value. *)
val zero_term : unit -> Cil_types.term
(** Given an lval term 'host' and an integer value 'off', it returns a lval term host[off]. *)
val mk_offseted_array : Cil_types.term_lval -> int -> Cil_types.term
val mk_offseted_array_states_as_enum :
Cil_types.term_lval -> int -> Cil_types.term
(** Returns a term representing the given logic variable
(usually a fresh quantified variable). *)
val mk_term_from_vi : Cil_types.varinfo -> Cil_types.term
val make_enum_states: unit -> unit
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/TrustInSoft/tis-kernel/748d28baba90c03c0f5f4654d2e7bb47dfbe4e7d/src/plugins/aorai/aorai_utils.mli | ocaml | ************************************************************************
************************************************************************
************************************************************************
alternatives)
Automatique)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* Given a transition a function and a function status (call or return)
it returns if the cross condition can be statisfied
with only function status.
* Given a transition and the main entry point it returns if
the cross condition can be statisfied at the beginning of the program.
* {b Globals management}
* Copy the file pointer locally in the class in order to easiest globals management and initializes some tables.
* Given the name of the main function, this function computes all newly introduced globals (variables, enumeration structure, invariants, etc.)
*************************************************************************
* {b Pre/post management}
*{b Pre and post condition of C functions} In our point of view, the pre or
the post condition of a C function are defined by the set of states
authorized just before/after the call, as such as the set of crossable
transitions. The following functions generates abstract pre and post-conditions
by using only informations deduced from the buchi automata.
* base lhost corresponding to curState.
* Returns the predicate saying that automaton is in
corresponding state.
* Returns the statement saying the state is affected
* Returns the boolean expression saying the state is affected
* Returns the predicate saying that automaton is NOT
in corresponding state.
* Returns the statement saying the automaton is not in the corresponding
state.
@raise AbortFatal in the deterministic case, as such an assignment is
meaningless in this context: we only assign the state variable to be
in the (unique by definition) state currently active
* Returns the expression testing that automaton is NOT
in the corresponding state.
* return list of preconditions for the given auxiliary function
(f_pre_func or f_post_func).
* Returns a list of predicate giving for each possible start state the
disjunction of possible current states
* Possible values of the given auxiliary variable under the current path,
[start]ing from the given point
@since Neon-20140301 add logic_label argument
* for a given starting and ending state, returns the post-conditions
related to the possible values of the auxiliary variables at current point
the function, guarded by the fact that we have followed this path, from
the given program point
@modify Neon-20130301 add logic_label argument
* All actions that might have been performed on aux variables from the
given program point, guarded by the path followed.
@modify Neon-20140301 add logic_label argument
* Return an integer constant term with the 0 value.
* Given an lval term 'host' and an integer value 'off', it returns a lval term host[off].
* Returns a term representing the given logic variable
(usually a fresh quantified variable).
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of .
is a fork of Frama - C. All the differences are :
Copyright ( C ) 2016 - 2017
is released under GPLv2
This file is part of Aorai plug - in of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
( Institut National de Recherche en Informatique et en
INSA ( Institut National des Sciences Appliquees )
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Cil_types
open Promelaast
val isCrossable:
(typed_condition * action) trans -> kernel_function -> funcStatus -> bool
val isCrossableAtInit:
(typed_condition * action) trans -> kernel_function -> bool
* This function rewrites a cross condition into an ACSL expression .
Moreover , by giving current operation name and its status ( call or
return ) the generation simplifies the generated expression .
Moreover, by giving current operation name and its status (call or
return) the generation simplifies the generated expression.
*)
val crosscond_to_pred:
typed_condition -> kernel_function -> funcStatus ->
Cil_types.predicate Cil_types.named
val initFile : Cil_types.file -> unit
val initGlobals : Cil_types.kernel_function -> bool -> unit
val host_state_term: unit -> Cil_types.term_lval
val is_state_pred: state -> Cil_types.predicate Cil_types.named
val is_state_stmt: state * Cil_types.varinfo -> location -> Cil_types.stmt
val is_state_exp: state -> location -> Cil_types.exp
val is_out_of_state_pred: state -> Cil_types.predicate Cil_types.named
val is_out_of_state_stmt:
state * Cil_types.varinfo -> location -> Cil_types.stmt
val is_out_of_state_exp: state -> location -> Cil_types.exp
* returns assigns clause corresponding to updating automaton 's state , and
assigning auxiliary variable depending on the possible transitions made
in the function .
@since Nitrogen-20111001
@since Neon-20140301 adds kf argument
assigning auxiliary variable depending on the possible transitions made
in the function.
@since Nitrogen-20111001
@since Neon-20140301 adds kf argument
*)
val aorai_assigns:
Data_for_aorai.state ->
Cil_types.location -> Cil_types.identified_term Cil_types.assigns
* returns the list of predicates expressing that for each current state
the automaton currently is in , there is at least one transition that is
crossed .
the automaton currently is in, there is at least one transition that is
crossed.
*)
val force_transition:
Cil_types.location -> kernel_function -> Promelaast.funcStatus ->
Data_for_aorai.state -> Cil_types.identified_predicate list
val auto_func_preconditions:
Cil_types.location -> kernel_function -> Promelaast.funcStatus ->
Data_for_aorai.state -> Cil_types.identified_predicate list
* auto_func_behaviors f st ( st_status , tr_status )
generates behaviors corresponding to the transitions authorized by
tr_status for function f in status st
@since
generates behaviors corresponding to the transitions authorized by
tr_status for function f in status st
@since Nitrogen-20111001
*)
val auto_func_behaviors:
Cil_types.location -> kernel_function -> Promelaast.funcStatus ->
Data_for_aorai.state -> Cil_types.funbehavior list
* [ f status ]
generates the body of pre & post functions .
res must be [ None ] for a pre - function and [ Some v ] for a post - func where
[ v ] is the formal corresponding to the value returned by the original
function . If the original function returns [ Void ] , [ res ] must be [ None ] .
It also returns the local variables list declared in the body .
generates the body of pre & post functions.
res must be [None] for a pre-function and [Some v] for a post-func where
[v] is the formal corresponding to the value returned by the original
function. If the original function returns [Void], [res] must be [None].
It also returns the local variables list declared in the body. *)
val auto_func_block:
Cil_types.location -> kernel_function -> Promelaast.funcStatus ->
Data_for_aorai.state -> Cil_types.varinfo option ->
Cil_types.block * Cil_types.varinfo list
val get_preds_pre_wrt_params :
kernel_function -> Cil_types.predicate Cil_types.named
val get_preds_post_bc_wrt_params :
kernel_function -> Cil_types.predicate Cil_types.named
val possible_states_preds:
Data_for_aorai.state -> Cil_types.predicate Cil_types.named list
val update_to_pred:
start: Cil_types.logic_label ->
pre_state:Promelaast.state -> post_state:Promelaast.state ->
Cil_types.term -> Data_for_aorai.Intervals.t -> predicate named
val action_to_pred:
start:Cil_types.logic_label ->
pre_state:Promelaast.state -> post_state:Promelaast.state ->
Data_for_aorai.Vals.t -> predicate named list
val all_actions_preds:
Cil_types.logic_label ->
Data_for_aorai.state -> predicate named list
val zero_term : unit -> Cil_types.term
val mk_offseted_array : Cil_types.term_lval -> int -> Cil_types.term
val mk_offseted_array_states_as_enum :
Cil_types.term_lval -> int -> Cil_types.term
val mk_term_from_vi : Cil_types.varinfo -> Cil_types.term
val make_enum_states: unit -> unit
|
82fc3525beb0bc9099cfeec876d40cfcde40572034b48b35f2dceee125e8b62d | gator1/jepsen | timeline.clj | (ns jepsen.checker.timeline
"Renders an HTML timeline of a history."
(:require [clojure.core.reducers :as r]
[clojure.string :as str]
[hiccup.core :as h]
[knossos.history :as history]
[jepsen.util :as util :refer [name+]]
[jepsen.store :as store]
[jepsen.checker :as checker]))
(defn style
"Generate a CSS style fragment from a map."
[m]
(->> m
(map (fn [kv] (str (name (key kv)) ":" (val kv))))
(str/join ";")))
(def timescale "Nanoseconds per pixel" 1e6)
(def col-width "pixels" 100)
(def gutter-width "pixels" 106)
(def height "pixels" 16)
(def stylesheet
(str ".ops { position: absolute; }\n"
".op { position: absolute;
padding: 2px; }\n"
".op.invoke { background: #C1DEFF; }\n"
".op.ok { background: #B7FFB7; }\n"
".op.fail { background: #FFD4D5; }\n"
".op.info { background: #FEFFC1; }\n"))
(defn pairs
"Pairs up ops from each process in a history. Yields a lazy sequence of [info]
or [invoke, ok|fail|info] pairs."
([history]
(pairs {} history))
([invocations [op & ops]]
(lazy-seq
(when op
(case (:type op)
:info (if (contains? invocations (:process op))
; Info following invoke
(cons [(get invocations (:process op)) op]
(pairs (dissoc invocations (:process op)) ops))
; Unmatched info
(cons [op] (pairs invocations ops)))
:invoke (do (assert (not (contains? invocations (:process op))))
(pairs (assoc invocations (:process op) op) ops))
(:ok :fail) (do (assert (contains? invocations (:process op)))
(cons [(get invocations (:process op)) op]
(pairs (dissoc invocations (:process op))
ops))))))))
(defn pair->div
"Turns a pair of start/stop operations into a div."
[history process-index [start stop]]
(let [p (:process start)
op (or stop start)
s {:width col-width
:left (* gutter-width (get process-index p))
:top (* height (:index start))}]
[:div {:class (str "op " (name (:type op)))
:style (style (cond (= :info (:type stop))
(assoc s :height (* height
(- (inc (count history))
(:index start))))
stop
(assoc s :height (* height
(- (:index stop)
(:index start))))
true
(assoc s :height height)))
:title (str (when stop (str (long (util/nanos->ms
(- (:time stop) (:time start))))
" ms\n"))
(pr-str (:error op)))}
(str (:process op) " " (name+ (:f op)) " " (:value start)
(when (not= (:value start) (:value stop))
(str "<br />" (:value stop))))]))
(defn process-index
"Maps processes to columns"
[history]
(->> history
history/processes
history/sort-processes
(reduce (fn [m p] (assoc m p (count m)))
{})))
(defn html
[]
(reify checker/Checker
(check [this test model history opts]
(->> (h/html [:html
[:head
[:style stylesheet]]
[:body
[:h1 (:name test)]
[:p (str (:start-time test))]
[:div {:class "ops"}
(->> history
history/complete
history/index
pairs
(map (partial pair->div
history
(process-index history))))]]])
(spit (store/path! test (:subdirectory opts) "timeline.html")))
{:valid? true})))
| null | https://raw.githubusercontent.com/gator1/jepsen/1932cbd72cbc1f6c2a27abe0fe347ea989f0cfbb/jepsen/src/jepsen/checker/timeline.clj | clojure |
}\n"
Info following invoke
Unmatched info | (ns jepsen.checker.timeline
"Renders an HTML timeline of a history."
(:require [clojure.core.reducers :as r]
[clojure.string :as str]
[hiccup.core :as h]
[knossos.history :as history]
[jepsen.util :as util :refer [name+]]
[jepsen.store :as store]
[jepsen.checker :as checker]))
(defn style
"Generate a CSS style fragment from a map."
[m]
(->> m
(map (fn [kv] (str (name (key kv)) ":" (val kv))))
(str/join ";")))
(def timescale "Nanoseconds per pixel" 1e6)
(def col-width "pixels" 100)
(def gutter-width "pixels" 106)
(def height "pixels" 16)
(def stylesheet
(str ".ops { position: absolute; }\n"
".op.invoke { background: #C1DEFF; }\n"
".op.ok { background: #B7FFB7; }\n"
".op.fail { background: #FFD4D5; }\n"
".op.info { background: #FEFFC1; }\n"))
(defn pairs
"Pairs up ops from each process in a history. Yields a lazy sequence of [info]
or [invoke, ok|fail|info] pairs."
([history]
(pairs {} history))
([invocations [op & ops]]
(lazy-seq
(when op
(case (:type op)
:info (if (contains? invocations (:process op))
(cons [(get invocations (:process op)) op]
(pairs (dissoc invocations (:process op)) ops))
(cons [op] (pairs invocations ops)))
:invoke (do (assert (not (contains? invocations (:process op))))
(pairs (assoc invocations (:process op) op) ops))
(:ok :fail) (do (assert (contains? invocations (:process op)))
(cons [(get invocations (:process op)) op]
(pairs (dissoc invocations (:process op))
ops))))))))
(defn pair->div
"Turns a pair of start/stop operations into a div."
[history process-index [start stop]]
(let [p (:process start)
op (or stop start)
s {:width col-width
:left (* gutter-width (get process-index p))
:top (* height (:index start))}]
[:div {:class (str "op " (name (:type op)))
:style (style (cond (= :info (:type stop))
(assoc s :height (* height
(- (inc (count history))
(:index start))))
stop
(assoc s :height (* height
(- (:index stop)
(:index start))))
true
(assoc s :height height)))
:title (str (when stop (str (long (util/nanos->ms
(- (:time stop) (:time start))))
" ms\n"))
(pr-str (:error op)))}
(str (:process op) " " (name+ (:f op)) " " (:value start)
(when (not= (:value start) (:value stop))
(str "<br />" (:value stop))))]))
(defn process-index
"Maps processes to columns"
[history]
(->> history
history/processes
history/sort-processes
(reduce (fn [m p] (assoc m p (count m)))
{})))
(defn html
[]
(reify checker/Checker
(check [this test model history opts]
(->> (h/html [:html
[:head
[:style stylesheet]]
[:body
[:h1 (:name test)]
[:p (str (:start-time test))]
[:div {:class "ops"}
(->> history
history/complete
history/index
pairs
(map (partial pair->div
history
(process-index history))))]]])
(spit (store/path! test (:subdirectory opts) "timeline.html")))
{:valid? true})))
|
40cfced2a66f8396b20aa7386a33aa6a1133fcdb13afdb2552681448e9e7cb10 | qfpl/reflex-workshop | Template.hs | |
Copyright : ( c ) 2018 , Commonwealth Scientific and Industrial Research Organisation
License : :
Stability : experimental
Portability : non - portable
Copyright : (c) 2018, Commonwealth Scientific and Industrial Research Organisation
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
module Util.Template (
mkTemplate
) where
import Data.Semigroup ((<>))
import qualified Data.Map as Map
import Data.Text (Text)
import Reflex.Dom.Core
import Reflex.Dom.Template
import Util.Bootstrap
mkTemplate :: MonadWidget t m => Rule m -> Text -> m (Event t ())
mkTemplate rule t = do
ePostBuild <- getPostBuild
eTemplate <- loadTemplate (rule <> sourceCodeRule) (t <$ ePostBuild)
let
(eError, eSuccess) = fanEither eTemplate
loadingDiv = divClass "alert alert-secondary" $ text "Loading..." >> getPostBuild
errorDiv = divClass "alert alert-error" $ text "Error loading page" >> getPostBuild
wrapPostBuild w = w >> getPostBuild
fmap switchDyn . widgetHold loadingDiv . leftmost $ [wrapPostBuild <$> eSuccess, errorDiv <$ eError]
sourceCodeRule :: MonadWidget t m => Rule m
sourceCodeRule =
Rule $ \l childFn ->
case l of
RTElement t a es ->
case t of
"div" -> do
cls <- Map.lookup "class" a
case cls of
"sourceCode" -> Just . card . divClass "sourceCode" $ childFn es
_ -> Nothing
_ -> Nothing
_ -> Nothing
| null | https://raw.githubusercontent.com/qfpl/reflex-workshop/244ef13fb4b2e884f455eccc50072e98d1668c9e/src/Util/Template.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE GADTs # | |
Copyright : ( c ) 2018 , Commonwealth Scientific and Industrial Research Organisation
License : :
Stability : experimental
Portability : non - portable
Copyright : (c) 2018, Commonwealth Scientific and Industrial Research Organisation
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Util.Template (
mkTemplate
) where
import Data.Semigroup ((<>))
import qualified Data.Map as Map
import Data.Text (Text)
import Reflex.Dom.Core
import Reflex.Dom.Template
import Util.Bootstrap
mkTemplate :: MonadWidget t m => Rule m -> Text -> m (Event t ())
mkTemplate rule t = do
ePostBuild <- getPostBuild
eTemplate <- loadTemplate (rule <> sourceCodeRule) (t <$ ePostBuild)
let
(eError, eSuccess) = fanEither eTemplate
loadingDiv = divClass "alert alert-secondary" $ text "Loading..." >> getPostBuild
errorDiv = divClass "alert alert-error" $ text "Error loading page" >> getPostBuild
wrapPostBuild w = w >> getPostBuild
fmap switchDyn . widgetHold loadingDiv . leftmost $ [wrapPostBuild <$> eSuccess, errorDiv <$ eError]
sourceCodeRule :: MonadWidget t m => Rule m
sourceCodeRule =
Rule $ \l childFn ->
case l of
RTElement t a es ->
case t of
"div" -> do
cls <- Map.lookup "class" a
case cls of
"sourceCode" -> Just . card . divClass "sourceCode" $ childFn es
_ -> Nothing
_ -> Nothing
_ -> Nothing
|
18027c2ba2773247f9702346b4b4459d5c758315df76eca0f21694004216f503 | parapluu/Concuerror | depend_6_3.erl | -module(depend_6_3).
-export([result/0, procs/0, run/1]).
result() -> io:format("36").
procs() -> io:format("4").
run(Procs) ->
[S] = io_lib:format("~p",[Procs]),
initial(),
run_aux(S),
block().
run_aux([]) -> ok;
run_aux([P|R]) ->
spawn(fun() -> proc(P) end),
run_aux(R).
block() ->
receive
after infinity -> never
end.
initial() ->
ets:new(table, [public, named_table]),
ets:insert(table, {y, 0}),
ets:insert(table, {z, 0}).
proc($1) ->
ets:lookup(table, y),
ets:lookup(table, z);
proc($2) ->
ets:lookup(table, y);
proc($3) ->
ets:insert(table, {y, 1});
proc($4) ->
ets:insert(table, {y, 2}),
ets:insert(table, {z, 1}).
| null | https://raw.githubusercontent.com/parapluu/Concuerror/152a5ccee0b6e97d8c3329c2167166435329d261/resources/perm_tests/src/depend_6_3.erl | erlang | -module(depend_6_3).
-export([result/0, procs/0, run/1]).
result() -> io:format("36").
procs() -> io:format("4").
run(Procs) ->
[S] = io_lib:format("~p",[Procs]),
initial(),
run_aux(S),
block().
run_aux([]) -> ok;
run_aux([P|R]) ->
spawn(fun() -> proc(P) end),
run_aux(R).
block() ->
receive
after infinity -> never
end.
initial() ->
ets:new(table, [public, named_table]),
ets:insert(table, {y, 0}),
ets:insert(table, {z, 0}).
proc($1) ->
ets:lookup(table, y),
ets:lookup(table, z);
proc($2) ->
ets:lookup(table, y);
proc($3) ->
ets:insert(table, {y, 1});
proc($4) ->
ets:insert(table, {y, 2}),
ets:insert(table, {z, 1}).
|
|
030c9257afd18a10b8796bd0d6ee99941087a8c9932a65173f11d139bfda4bb5 | 0install/0install | impl.ml | Copyright ( C ) 2014 , the README file for details , or visit .
* See the README file for details, or visit .
*)
open Support
open Support.Common
module U = Support.Utils
module FeedAttr = Constants.FeedAttr
module AttrMap = Support.Qdom.AttrMap
type importance =
[ `Essential (* Must select a version of the dependency *)
| `Recommended (* Prefer to select a version, if possible *)
| `Restricts ] (* Just adds restrictions without expressing any opinion *)
type distro_retrieval_method = {
distro_size : Int64.t option;
distro_install_info : (string * string); (* In some format meaningful to the distribution *)
}
type package_state =
[ `Installed
| `Uninstalled of distro_retrieval_method ]
type package_impl = {
package_distro : string;
mutable package_state : package_state;
}
type cache_impl = {
digests : Manifest.digest list;
retrieval_methods : [`Archive | `File | `Recipe] Element.t list;
}
type existing =
[ `Cache_impl of cache_impl
| `Local_impl of filepath
| `Package_impl of package_impl ]
type impl_type =
[ existing
| `Binary_of of existing t ]
and restriction = < to_string : string; meets_restriction : impl_type t -> bool >
and binding = Element.binding_node Element.t
and dependency = {
dep_qdom : Element.dependency_node Element.t;
dep_importance : importance;
dep_iface: Sigs.iface_uri;
dep_src: bool;
dep_restrictions: restriction list;
dep_required_commands: string list;
dep_if_os : Arch.os option; (* The badly-named 'os' attribute *)
dep_use : string option; (* Deprecated 'use' attribute *)
}
and command = {
mutable command_qdom : [`Command] Element.t;
command_requires : dependency list;
command_bindings : binding list;
}
and properties = {
attrs : AttrMap.t;
requires : dependency list;
bindings : binding list;
commands : command XString.Map.t;
}
and +'a t = {
qdom : [ `Implementation | `Package_impl ] Element.t;
props : properties;
stability : Stability.t;
Required OS ; the first part of the ' arch ' attribute . None for ' * '
Required CPU ; the second part of the ' arch ' attribute . None for ' * '
parsed_version : Version.t;
impl_type : 'a;
}
type generic_implementation = impl_type t
type distro_implementation = [ `Package_impl of package_impl ] t
let make ~elem ~props ~stability ~os ~machine ~version impl_type =
{
qdom = (elem :> [`Implementation | `Package_impl] Element.t);
props;
stability;
os;
machine;
parsed_version = version;
impl_type;
}
let with_stability stability t = {t with stability}
let make_command ?source_hint name path : command =
let elem = Element.make_command ~path ~source_hint name in
{
command_qdom = elem;
command_requires = [];
command_bindings = [];
}
let make_distribtion_restriction distros =
object
method meets_restriction impl =
ListLabels.exists (Str.split XString.re_space distros) ~f:(fun distro ->
match distro, impl.impl_type with
| "0install", `Package_impl _ -> false
| "0install", `Cache_impl _ -> true
| "0install", `Local_impl _ -> true
| distro, `Package_impl {package_distro;_} -> package_distro = distro
| _ -> false
)
method to_string = "distribution:" ^ distros
end
let get_attr_ex name impl =
AttrMap.get_no_ns name impl.props.attrs |? lazy (Safe_exn.failf "Missing '%s' attribute for %a" name Element.pp impl.qdom)
let parse_version_element elem =
let before = Element.before elem in
let not_before = Element.not_before elem in
let test = Version.make_range_restriction not_before before in
object
method meets_restriction impl = test impl.parsed_version
method to_string =
match not_before, before with
| None, None -> "(no restriction)"
| Some low, None -> "version " ^ low ^ ".."
| None, Some high -> "version ..!" ^ high
| Some low, Some high -> "version " ^ low ^ "..!" ^ high
end
let make_impossible_restriction msg =
object
method meets_restriction _impl = false
method to_string = Printf.sprintf "<impossible: %s>" msg
end
let re_exact_id = Str.regexp "^=\\(.+\\)/\\([^/]*\\)$"
let make_version_restriction expr =
try
if Str.string_match re_exact_id expr 0 then (
(* =FEED/ID exact implementation spec *)
let req_feed = Str.matched_group 1 expr in
let req_id = Str.matched_group 2 expr |> Escape.ununderscore_escape in
object
method meets_restriction impl =
get_attr_ex FeedAttr.id impl = req_id &&
get_attr_ex FeedAttr.from_feed impl = req_feed
method to_string = Printf.sprintf "feed=%S, impl=%S" req_feed req_id
end
) else (
(* version-based test *)
let test = Version.parse_expr expr in
object
method meets_restriction impl = test impl.parsed_version
method to_string = "version " ^ expr
end
)
with Safe_exn.T e as ex ->
let msg = Format.asprintf "Can't parse version restriction '%s': %s" expr (Safe_exn.msg e) in
log_warning ~ex:ex "%s" msg;
make_impossible_restriction msg
let local_dir_of impl =
match impl.impl_type with
| `Local_impl path -> Some path
| _ -> None
let parse_dep local_dir node =
let dep = Element.classify_dep node in
let iface, node =
let raw_iface = Element.interface node in
if XString.starts_with raw_iface "." then (
match local_dir with
| Some dir ->
let iface = U.normpath @@ dir +/ raw_iface in
(iface, Element.with_interface iface node)
| None ->
Safe_exn.failf "Relative interface URI '%s' in non-local feed" raw_iface
) else (
(raw_iface, node)
) in
let commands = ref XString.Set.empty in
let restrictions = Element.restrictions node |> List.map (fun (`Version child) -> parse_version_element child) in
Element.bindings node |> List.iter (fun child ->
let binding = Binding.parse_binding child in
match Binding.get_command binding with
| None -> ()
| Some name -> commands := XString.Set.add name !commands
);
let needs_src = Element.source node |> default false in
let restrictions = match Element.version_opt node with
| None -> restrictions
| Some expr -> make_version_restriction expr :: restrictions
in
begin match dep with
| `Runner r -> commands := XString.Set.add (default "run" @@ Element.command r) !commands
| `Requires _ | `Restricts _ -> () end;
let importance =
match dep with
| `Restricts _ -> `Restricts
| `Requires r | `Runner r -> Element.importance r in
let restrictions =
match Element.distribution node with
| Some distros -> make_distribtion_restriction distros :: restrictions
| None -> restrictions in
{
dep_qdom = (node :> Element.dependency_node Element.t);
dep_iface = iface;
dep_src = needs_src;
dep_restrictions = restrictions;
dep_required_commands = XString.Set.elements !commands;
dep_importance = importance;
dep_use = Element.use node;
dep_if_os = Element.os node;
}
let parse_command local_dir elem : command =
let deps = ref [] in
let bindings = ref [] in
Element.command_children elem |> List.iter (function
| #Element.dependency as d ->
deps := parse_dep local_dir (Element.element_of_dependency d) :: !deps
| #Element.binding as b ->
bindings := Element.element_of_binding b :: !bindings
| _ -> ()
);
{
command_qdom = elem;
command_requires = !deps;
command_bindings = !bindings;
}
let is_source impl = Arch.is_src impl.machine
let needs_compilation = function
| {impl_type = `Binary_of _; _} -> true
| {impl_type = #existing; _} -> false
let existing_source = function
| {impl_type = `Binary_of source; _} -> source
| {impl_type = #existing; _} as existing -> existing
let get_command_opt command_name impl = XString.Map.find_opt command_name impl.props.commands
let get_command_ex command_name impl : command =
XString.Map.find_opt command_name impl.props.commands |? lazy (Safe_exn.failf "Command '%s' not found in %a" command_name Element.pp impl.qdom)
(** The list of languages provided by this implementation. *)
let get_langs impl =
let langs =
match AttrMap.get_no_ns "langs" impl.props.attrs with
| Some langs -> Str.split XString.re_space langs
| None -> ["en"] in
List.filter_map Support.Locale.parse_lang langs
let is_retrievable_without_network cache_impl =
let ok_without_network elem =
match Recipe.parse_retrieval_method elem with
| Some recipe -> not @@ Recipe.recipe_requires_network recipe
| None -> false in
List.exists ok_without_network cache_impl.retrieval_methods
let get_id impl =
let feed_url = get_attr_ex FeedAttr.from_feed impl in
Feed_url.({feed = Feed_url.parse feed_url; id = get_attr_ex FeedAttr.id impl})
let pp f impl = Element.pp f impl.qdom
| null | https://raw.githubusercontent.com/0install/0install/22eebdbe51a9f46cda29eed3e9e02e37e36b2d18/src/zeroinstall/impl.ml | ocaml | Must select a version of the dependency
Prefer to select a version, if possible
Just adds restrictions without expressing any opinion
In some format meaningful to the distribution
The badly-named 'os' attribute
Deprecated 'use' attribute
=FEED/ID exact implementation spec
version-based test
* The list of languages provided by this implementation. | Copyright ( C ) 2014 , the README file for details , or visit .
* See the README file for details, or visit .
*)
open Support
open Support.Common
module U = Support.Utils
module FeedAttr = Constants.FeedAttr
module AttrMap = Support.Qdom.AttrMap
type importance =
type distro_retrieval_method = {
distro_size : Int64.t option;
}
type package_state =
[ `Installed
| `Uninstalled of distro_retrieval_method ]
type package_impl = {
package_distro : string;
mutable package_state : package_state;
}
type cache_impl = {
digests : Manifest.digest list;
retrieval_methods : [`Archive | `File | `Recipe] Element.t list;
}
type existing =
[ `Cache_impl of cache_impl
| `Local_impl of filepath
| `Package_impl of package_impl ]
type impl_type =
[ existing
| `Binary_of of existing t ]
and restriction = < to_string : string; meets_restriction : impl_type t -> bool >
and binding = Element.binding_node Element.t
and dependency = {
dep_qdom : Element.dependency_node Element.t;
dep_importance : importance;
dep_iface: Sigs.iface_uri;
dep_src: bool;
dep_restrictions: restriction list;
dep_required_commands: string list;
}
and command = {
mutable command_qdom : [`Command] Element.t;
command_requires : dependency list;
command_bindings : binding list;
}
and properties = {
attrs : AttrMap.t;
requires : dependency list;
bindings : binding list;
commands : command XString.Map.t;
}
and +'a t = {
qdom : [ `Implementation | `Package_impl ] Element.t;
props : properties;
stability : Stability.t;
Required OS ; the first part of the ' arch ' attribute . None for ' * '
Required CPU ; the second part of the ' arch ' attribute . None for ' * '
parsed_version : Version.t;
impl_type : 'a;
}
type generic_implementation = impl_type t
type distro_implementation = [ `Package_impl of package_impl ] t
let make ~elem ~props ~stability ~os ~machine ~version impl_type =
{
qdom = (elem :> [`Implementation | `Package_impl] Element.t);
props;
stability;
os;
machine;
parsed_version = version;
impl_type;
}
let with_stability stability t = {t with stability}
let make_command ?source_hint name path : command =
let elem = Element.make_command ~path ~source_hint name in
{
command_qdom = elem;
command_requires = [];
command_bindings = [];
}
let make_distribtion_restriction distros =
object
method meets_restriction impl =
ListLabels.exists (Str.split XString.re_space distros) ~f:(fun distro ->
match distro, impl.impl_type with
| "0install", `Package_impl _ -> false
| "0install", `Cache_impl _ -> true
| "0install", `Local_impl _ -> true
| distro, `Package_impl {package_distro;_} -> package_distro = distro
| _ -> false
)
method to_string = "distribution:" ^ distros
end
let get_attr_ex name impl =
AttrMap.get_no_ns name impl.props.attrs |? lazy (Safe_exn.failf "Missing '%s' attribute for %a" name Element.pp impl.qdom)
let parse_version_element elem =
let before = Element.before elem in
let not_before = Element.not_before elem in
let test = Version.make_range_restriction not_before before in
object
method meets_restriction impl = test impl.parsed_version
method to_string =
match not_before, before with
| None, None -> "(no restriction)"
| Some low, None -> "version " ^ low ^ ".."
| None, Some high -> "version ..!" ^ high
| Some low, Some high -> "version " ^ low ^ "..!" ^ high
end
let make_impossible_restriction msg =
object
method meets_restriction _impl = false
method to_string = Printf.sprintf "<impossible: %s>" msg
end
let re_exact_id = Str.regexp "^=\\(.+\\)/\\([^/]*\\)$"
let make_version_restriction expr =
try
if Str.string_match re_exact_id expr 0 then (
let req_feed = Str.matched_group 1 expr in
let req_id = Str.matched_group 2 expr |> Escape.ununderscore_escape in
object
method meets_restriction impl =
get_attr_ex FeedAttr.id impl = req_id &&
get_attr_ex FeedAttr.from_feed impl = req_feed
method to_string = Printf.sprintf "feed=%S, impl=%S" req_feed req_id
end
) else (
let test = Version.parse_expr expr in
object
method meets_restriction impl = test impl.parsed_version
method to_string = "version " ^ expr
end
)
with Safe_exn.T e as ex ->
let msg = Format.asprintf "Can't parse version restriction '%s': %s" expr (Safe_exn.msg e) in
log_warning ~ex:ex "%s" msg;
make_impossible_restriction msg
let local_dir_of impl =
match impl.impl_type with
| `Local_impl path -> Some path
| _ -> None
let parse_dep local_dir node =
let dep = Element.classify_dep node in
let iface, node =
let raw_iface = Element.interface node in
if XString.starts_with raw_iface "." then (
match local_dir with
| Some dir ->
let iface = U.normpath @@ dir +/ raw_iface in
(iface, Element.with_interface iface node)
| None ->
Safe_exn.failf "Relative interface URI '%s' in non-local feed" raw_iface
) else (
(raw_iface, node)
) in
let commands = ref XString.Set.empty in
let restrictions = Element.restrictions node |> List.map (fun (`Version child) -> parse_version_element child) in
Element.bindings node |> List.iter (fun child ->
let binding = Binding.parse_binding child in
match Binding.get_command binding with
| None -> ()
| Some name -> commands := XString.Set.add name !commands
);
let needs_src = Element.source node |> default false in
let restrictions = match Element.version_opt node with
| None -> restrictions
| Some expr -> make_version_restriction expr :: restrictions
in
begin match dep with
| `Runner r -> commands := XString.Set.add (default "run" @@ Element.command r) !commands
| `Requires _ | `Restricts _ -> () end;
let importance =
match dep with
| `Restricts _ -> `Restricts
| `Requires r | `Runner r -> Element.importance r in
let restrictions =
match Element.distribution node with
| Some distros -> make_distribtion_restriction distros :: restrictions
| None -> restrictions in
{
dep_qdom = (node :> Element.dependency_node Element.t);
dep_iface = iface;
dep_src = needs_src;
dep_restrictions = restrictions;
dep_required_commands = XString.Set.elements !commands;
dep_importance = importance;
dep_use = Element.use node;
dep_if_os = Element.os node;
}
let parse_command local_dir elem : command =
let deps = ref [] in
let bindings = ref [] in
Element.command_children elem |> List.iter (function
| #Element.dependency as d ->
deps := parse_dep local_dir (Element.element_of_dependency d) :: !deps
| #Element.binding as b ->
bindings := Element.element_of_binding b :: !bindings
| _ -> ()
);
{
command_qdom = elem;
command_requires = !deps;
command_bindings = !bindings;
}
let is_source impl = Arch.is_src impl.machine
let needs_compilation = function
| {impl_type = `Binary_of _; _} -> true
| {impl_type = #existing; _} -> false
let existing_source = function
| {impl_type = `Binary_of source; _} -> source
| {impl_type = #existing; _} as existing -> existing
let get_command_opt command_name impl = XString.Map.find_opt command_name impl.props.commands
let get_command_ex command_name impl : command =
XString.Map.find_opt command_name impl.props.commands |? lazy (Safe_exn.failf "Command '%s' not found in %a" command_name Element.pp impl.qdom)
let get_langs impl =
let langs =
match AttrMap.get_no_ns "langs" impl.props.attrs with
| Some langs -> Str.split XString.re_space langs
| None -> ["en"] in
List.filter_map Support.Locale.parse_lang langs
let is_retrievable_without_network cache_impl =
let ok_without_network elem =
match Recipe.parse_retrieval_method elem with
| Some recipe -> not @@ Recipe.recipe_requires_network recipe
| None -> false in
List.exists ok_without_network cache_impl.retrieval_methods
let get_id impl =
let feed_url = get_attr_ex FeedAttr.from_feed impl in
Feed_url.({feed = Feed_url.parse feed_url; id = get_attr_ex FeedAttr.id impl})
let pp f impl = Element.pp f impl.qdom
|
794b6df6ec23f677f779ac2a615c2904be1bbd09d7812b6917ba1cac09263e90 | 7even/endless-ships | parser.clj | (ns endless-ships.parser
(:require [camel-snake-kebab.core :refer [->kebab-case-keyword]]
[clojure.java.io :refer [file resource]]
[clojure.string :as str]
[instaparse.core :as insta])
(:import [java.lang Float Integer]))
(def files
"All files containing game data."
(->> "game/data"
resource
file
file-seq
(filter #(.endsWith (.getName %) ".txt"))))
(defn- transform-block [[_ name & args] & child-blocks]
(let [processed-children (reduce (fn [children [child-name & child-contents]]
(update children
child-name
#(conj (or % [])
(vec child-contents))))
{}
child-blocks)]
[name (vec args) processed-children]))
(def transform-options
{:data vector
:0-indented-block transform-block
:1-indented-block transform-block
:2-indented-block transform-block
:3-indented-block transform-block
:4-indented-block transform-block
:5-indented-block transform-block
:6-indented-block transform-block
:string identity
:integer #(Long/parseLong %)
:float #(Float/parseFloat (str/replace % "," "."))})
(defn parse [data]
(let [parser (-> "parser.bnf"
resource
insta/parser)]
(->> (parser data)
(insta/transform transform-options))))
(def data
(time
(->> files
(mapcat (fn [file]
(let [filename (.getName file)
objects (-> file slurp (str "\n") parse)]
(map #(assoc-in % [2 "file"] filename) objects))))
doall)))
(defn ->map [m]
(reduce (fn [data [attr-name attr-value]]
(assoc data
(->kebab-case-keyword attr-name)
(get-in attr-value [0 0 0])))
{}
m))
(comment
;; object counts by type
(->> data
(map first)
(reduce (fn [counts object]
(update counts object #(inc (or % 0))))
{})
(sort-by last >))
;; ship counts by file
(->> data
(filter #(and (= (first %) "ship") (= (count (second %)) 1)))
(remove #(= (second %) ["Unknown Ship Type"]))
(map #(get-in % [2 "file"]))
(reduce (fn [counts object]
(update counts object #(inc (or % 0))))
{})
(sort-by last >))
;; outfit counts by file
(->> data
(filter #(= (first %) "outfit"))
(map #(get-in % [2 "file"]))
(reduce (fn [counts object]
(update counts object #(inc (or % 0))))
{})
(sort-by last >))
;; all systems with their positions
(->> data
(filter #(= (first %) "system"))
(map (juxt #(get-in % [1 0]) #(get-in % [2 "pos" 0 0]))))
;; parsing errors
(->> data
(filter #(keyword? (first %)))))
| null | https://raw.githubusercontent.com/7even/endless-ships/1cb0519b66e493f092adea9b22768ad0980dbbab/src/clj/endless_ships/parser.clj | clojure | object counts by type
ship counts by file
outfit counts by file
all systems with their positions
parsing errors | (ns endless-ships.parser
(:require [camel-snake-kebab.core :refer [->kebab-case-keyword]]
[clojure.java.io :refer [file resource]]
[clojure.string :as str]
[instaparse.core :as insta])
(:import [java.lang Float Integer]))
(def files
"All files containing game data."
(->> "game/data"
resource
file
file-seq
(filter #(.endsWith (.getName %) ".txt"))))
(defn- transform-block [[_ name & args] & child-blocks]
(let [processed-children (reduce (fn [children [child-name & child-contents]]
(update children
child-name
#(conj (or % [])
(vec child-contents))))
{}
child-blocks)]
[name (vec args) processed-children]))
(def transform-options
{:data vector
:0-indented-block transform-block
:1-indented-block transform-block
:2-indented-block transform-block
:3-indented-block transform-block
:4-indented-block transform-block
:5-indented-block transform-block
:6-indented-block transform-block
:string identity
:integer #(Long/parseLong %)
:float #(Float/parseFloat (str/replace % "," "."))})
(defn parse [data]
(let [parser (-> "parser.bnf"
resource
insta/parser)]
(->> (parser data)
(insta/transform transform-options))))
(def data
(time
(->> files
(mapcat (fn [file]
(let [filename (.getName file)
objects (-> file slurp (str "\n") parse)]
(map #(assoc-in % [2 "file"] filename) objects))))
doall)))
(defn ->map [m]
(reduce (fn [data [attr-name attr-value]]
(assoc data
(->kebab-case-keyword attr-name)
(get-in attr-value [0 0 0])))
{}
m))
(comment
(->> data
(map first)
(reduce (fn [counts object]
(update counts object #(inc (or % 0))))
{})
(sort-by last >))
(->> data
(filter #(and (= (first %) "ship") (= (count (second %)) 1)))
(remove #(= (second %) ["Unknown Ship Type"]))
(map #(get-in % [2 "file"]))
(reduce (fn [counts object]
(update counts object #(inc (or % 0))))
{})
(sort-by last >))
(->> data
(filter #(= (first %) "outfit"))
(map #(get-in % [2 "file"]))
(reduce (fn [counts object]
(update counts object #(inc (or % 0))))
{})
(sort-by last >))
(->> data
(filter #(= (first %) "system"))
(map (juxt #(get-in % [1 0]) #(get-in % [2 "pos" 0 0]))))
(->> data
(filter #(keyword? (first %)))))
|
e63ce70d762733e32ac34ec76dd4296521017cf6e25eaca8af653c7ca0ad9c48 | dongcarl/guix | pkg-config.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2012 , 2013 , 2014 , 2016 < >
Copyright © 2019 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages pkg-config)
#:use-module (guix licenses)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu)
#:use-module (guix build-system trivial)
#:use-module (guix memoization)
#:export (pkg-config))
;; This is the "primitive" pkg-config package. People should use `pkg-config'
;; (see below) rather than `%pkg-config', but we export `%pkg-config' so that
;; `fold-packages' finds it.
(define-public %pkg-config
(package
(name "pkg-config")
(version "0.29.2")
(source (origin
(method url-fetch)
(uri (list
(string-append
"-config-" version
".tar.gz")
;; FIXME: The following URL redirects to HTTPS, which
;; creates bootstrapping problems:
< > .
(string-append
"-config-"
version ".tar.gz")))
(sha256
(base32
"14fmwzki1rlz8bs2p810lk6jqdxsk966d8drgsjmi54cd00rrikg"))))
(build-system gnu-build-system)
(arguments
`(#:configure-flags
'("--with-internal-glib"
;; Those variables are guessed incorrectly when cross-compiling.
;; See: -cross-compiling.html.
,@(if (%current-target-system)
'("glib_cv_stack_grows=no"
"glib_cv_uscore=no"
"ac_cv_func_posix_getpwuid_r=yes"
"ac_cv_func_posix_getgrgid_r=yes")
'()))))
(native-search-paths
(list (search-path-specification
(variable "PKG_CONFIG_PATH")
(files '("lib/pkgconfig" "lib64/pkgconfig" "share/pkgconfig")))))
(home-page "-config")
(license gpl2+)
(synopsis "Helper tool used when compiling applications and libraries")
(description
"pkg-config is a helper tool used when compiling applications and
libraries. It helps you insert the correct compiler options on the
command line so an application can use gcc -o test test.c `pkg-config
--libs --cflags glib-2.0` for instance, rather than hard-coding values
on where to find glib (or other libraries). It is language-agnostic, so
it can be used for defining the location of documentation tools, for
instance.")))
(define cross-pkg-config
(mlambda (target)
"Return a pkg-config for TARGET, essentially just a wrapper called
`TARGET-pkg-config', as `configure' scripts like it."
;; See <-mythbuster/pkgconfig/cross-compiling.html>
;; for details.
(package
(inherit %pkg-config)
(name (string-append (package-name %pkg-config) "-" target))
(build-system trivial-build-system)
(arguments
`(#:modules ((guix build utils))
#:builder (begin
(use-modules (guix build utils))
(let* ((in (assoc-ref %build-inputs "pkg-config"))
(out (assoc-ref %outputs "out"))
(bin (string-append out "/bin"))
(prog (string-append ,target "-pkg-config"))
(native (string-append in "/bin/pkg-config")))
(mkdir-p bin)
;; Create a `TARGET-pkg-config' -> `pkg-config' symlink.
;; This satisfies the pkg.m4 macros, which use
AC_PROG_TOOL to determine the ` pkg - config ' program
;; name.
(symlink native (string-append bin "/" prog))
;; Also make 'pkg.m4' available, some packages might
;; expect it.
(mkdir-p (string-append out "/share"))
(symlink (string-append in "/share/aclocal")
(string-append out "/share/aclocal"))
#t))))
(native-inputs `(("pkg-config" ,%pkg-config)))
;; Ignore native inputs, and set `PKG_CONFIG_PATH' for target inputs.
(native-search-paths '())
(search-paths (package-native-search-paths %pkg-config)))))
(define (pkg-config-for-target target)
"Return a pkg-config package for TARGET, which may be either #f for a native
build, or a GNU triplet."
(if target
(cross-pkg-config target)
%pkg-config))
;; This hack allows us to automatically choose the native or the cross
;; `pkg-config' depending on whether it's being used in a cross-build
;; environment or not.
(define-syntax pkg-config
(identifier-syntax (pkg-config-for-target (%current-target-system))))
| null | https://raw.githubusercontent.com/dongcarl/guix/82543e9649da2da9a5285ede4ec4f718fd740fcb/gnu/packages/pkg-config.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
This is the "primitive" pkg-config package. People should use `pkg-config'
(see below) rather than `%pkg-config', but we export `%pkg-config' so that
`fold-packages' finds it.
FIXME: The following URL redirects to HTTPS, which
creates bootstrapping problems:
Those variables are guessed incorrectly when cross-compiling.
See: -cross-compiling.html.
See <-mythbuster/pkgconfig/cross-compiling.html>
for details.
Create a `TARGET-pkg-config' -> `pkg-config' symlink.
This satisfies the pkg.m4 macros, which use
name.
Also make 'pkg.m4' available, some packages might
expect it.
Ignore native inputs, and set `PKG_CONFIG_PATH' for target inputs.
This hack allows us to automatically choose the native or the cross
`pkg-config' depending on whether it's being used in a cross-build
environment or not. | Copyright © 2012 , 2013 , 2014 , 2016 < >
Copyright © 2019 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages pkg-config)
#:use-module (guix licenses)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu)
#:use-module (guix build-system trivial)
#:use-module (guix memoization)
#:export (pkg-config))
(define-public %pkg-config
(package
(name "pkg-config")
(version "0.29.2")
(source (origin
(method url-fetch)
(uri (list
(string-append
"-config-" version
".tar.gz")
< > .
(string-append
"-config-"
version ".tar.gz")))
(sha256
(base32
"14fmwzki1rlz8bs2p810lk6jqdxsk966d8drgsjmi54cd00rrikg"))))
(build-system gnu-build-system)
(arguments
`(#:configure-flags
'("--with-internal-glib"
,@(if (%current-target-system)
'("glib_cv_stack_grows=no"
"glib_cv_uscore=no"
"ac_cv_func_posix_getpwuid_r=yes"
"ac_cv_func_posix_getgrgid_r=yes")
'()))))
(native-search-paths
(list (search-path-specification
(variable "PKG_CONFIG_PATH")
(files '("lib/pkgconfig" "lib64/pkgconfig" "share/pkgconfig")))))
(home-page "-config")
(license gpl2+)
(synopsis "Helper tool used when compiling applications and libraries")
(description
"pkg-config is a helper tool used when compiling applications and
libraries. It helps you insert the correct compiler options on the
command line so an application can use gcc -o test test.c `pkg-config
--libs --cflags glib-2.0` for instance, rather than hard-coding values
on where to find glib (or other libraries). It is language-agnostic, so
it can be used for defining the location of documentation tools, for
instance.")))
(define cross-pkg-config
(mlambda (target)
"Return a pkg-config for TARGET, essentially just a wrapper called
`TARGET-pkg-config', as `configure' scripts like it."
(package
(inherit %pkg-config)
(name (string-append (package-name %pkg-config) "-" target))
(build-system trivial-build-system)
(arguments
`(#:modules ((guix build utils))
#:builder (begin
(use-modules (guix build utils))
(let* ((in (assoc-ref %build-inputs "pkg-config"))
(out (assoc-ref %outputs "out"))
(bin (string-append out "/bin"))
(prog (string-append ,target "-pkg-config"))
(native (string-append in "/bin/pkg-config")))
(mkdir-p bin)
AC_PROG_TOOL to determine the ` pkg - config ' program
(symlink native (string-append bin "/" prog))
(mkdir-p (string-append out "/share"))
(symlink (string-append in "/share/aclocal")
(string-append out "/share/aclocal"))
#t))))
(native-inputs `(("pkg-config" ,%pkg-config)))
(native-search-paths '())
(search-paths (package-native-search-paths %pkg-config)))))
(define (pkg-config-for-target target)
"Return a pkg-config package for TARGET, which may be either #f for a native
build, or a GNU triplet."
(if target
(cross-pkg-config target)
%pkg-config))
(define-syntax pkg-config
(identifier-syntax (pkg-config-for-target (%current-target-system))))
|
9b5a60f314c837aafd49be90a3a7a15b0c271e4668e2047bbc0eff09186a3baa | onyx-platform/onyx | scheduler_test.clj | (ns onyx.scheduler.scheduler-test
(:require [clojure.test :refer :all]
[onyx.scheduling.common-job-scheduler :refer [reconfigure-cluster-workload]]
[onyx.log.generators :refer [one-group]]
[onyx.api]))
(deftest ^:broken jitter-on-no-change
(let [initial-allocations
{:j1 {:t1 [:p2 :p1]
:t2 [:p4 :p3]
:t3 [:p5]}
:j2 {:t4 [:p8 :p7 :p6]
:t5 [:p9 :p10]}}]
(is
(= initial-allocations
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2]
:allocations initial-allocations
:peers [:p1 :p2 :p3 :p4 :p5 :p6 :p7 :p8 :p9 :p10]
:tasks {:j1 [:t1 :t2 :t3]
:j2 [:t4 :t5]}
:saturation {:j1 5 :j2 5}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}})))))))
(deftest ^:broken jitter-on-add-peer
(is
(= {:j1 {:t1 [:p4 :p5] :t2 [:p2] :t3 [:p7]}
:j2 {:t4 [:p6] :t5 [:p3] :t6 [:p1]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2]
:allocations {:j1 {:t1 [:p4] :t2 [:p2] :t3 [:p7]}
:j2 {:t4 [:p6] :t5 [:p3] :t6 [:p1]}}
:peers [:p1 :p2 :p3 :p4 :p5 :p6 :p7]
:tasks {:j1 [:t1 :t2 :t3] :j2 [:t4 :t5 :t6]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken underwhelm-peers
(is
(= {:j1 {:t1 [:p1]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {:j1 {:t1 []}}
:peers [:p1 :p2 :p3]
:tasks {:j1 [:t1]}
:task-saturation {:j1 {:t1 1}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken not-enough-peers
(is
(= {}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {}
:peers [:p1 :p2 :p3]
:tasks {:j1 [:t1]}
:min-required-peers {:j1 {:t1 10}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken only-one-job-allocated
(is
(= {:j1 {:t1 [:p1 :p2 :p3]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2]
:allocations {:j1 {:t1 []} :j2 {:t2 []}}
:peers [:p1 :p2 :p3]
:tasks {:j1 [:t1] :j2 [:t2]}
:min-required-peers {:j1 {:t1 3} :j2 {:t2 3}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken even-distribution
(is
(= {:j1 {:t1 [:p5] :t2 [:p2] :t3 [:p1]}
:j2 {:t4 [:p6] :t5 [:p3] :t6 [:p4]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2]
:allocations {:j1 {:t1 []} :j2 {:t2 []}}
:peers [:p1 :p2 :p3 :p4 :p5 :p6]
:tasks {:j1 [:t1 :t2 :t3] :j2 [:t4 :t5 :t6]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken prefer-earlier-job
(is
(= {:j1 {:t1 [:p5 :p7] :t2 [:p2] :t3 [:p1]}
:j2 {:t4 [:p6] :t5 [:p3] :t6 [:p4]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2]
:allocations {:j1 {:t1 []} :j2 {:t2 []}}
:peers [:p1 :p2 :p3 :p4 :p5 :p6 :p7]
:tasks {:j1 [:t1 :t2 :t3] :j2 [:t4 :t5 :t6]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken skip-overloaded-jobs
(is
(= {:j1 {:t1 [:p4 :p5 :p6]}
:j3 {:t3 [:p1 :p2 :p3]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2 :j3]
:peers [:p1 :p2 :p3 :p4 :p5 :p6]
:tasks {:j1 [:t1] :j2 [:t2] :j3 [:t3]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced
:j3 :onyx.task-scheduler/balanced}
:min-required-peers {:j2 {:t2 100}}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken big-assignment
(let [peers (map #(keyword (str "p" %)) (range 100))
replica (reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {}
:peers peers
:tasks {:j1 [:t1]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))]
(is (= (into #{} peers)
(into #{} (get-in replica [:allocations :j1 :t1]))))))
(deftest ^:broken grouping-sticky-peers
(is
(= {:j1 {:t1 [:p1 :p6] :t2 [:p2 :p3 :p4] :t3 [:p5]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {:j1 {:t1 [:p1]
:t2 [:p2 :p3 :p4]
:t3 [:p5]}}
:peers [:p1 :p2 :p3 :p4 :p5 :p6]
:tasks {:j1 [:t1 :t2 :t3]}
:flux-policies {:j1 {:t2 :kill}}
:task-saturation {:j1 {:t1 100 :t2 100 :t3 100}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken grouping-recover-flux-policy
(is
(= {:j1 {:t1 [:p1] :t2 [:p2 :p3 :p5] :t3 [:p4]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {:j1 {:t1 [:p1]
:t2 [:p2 :p3]
:t3 [:p4]}}
:peers [:p1 :p2 :p3 :p4 :p5]
:tasks {:j1 [:t1 :t2 :t3]}
:flux-policies {:j1 {:t2 :recover}}
:task-saturation {:j1 {:t1 100 :t2 3 :t3 100}}
:min-required-peers {:j1 {:t2 3}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken promote-to-first-task
(is
(= {:j1 {:t1 [:p1 :p3] :t2 [:p2]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {:j1 {:t1 [:p1]
:t2 [:p2 :p3]}}
:peers [:p1 :p2 :p3]
:tasks {:j1 [:t1 :t2]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken percentage-grouping-task-tilt
(is
(= {:j1 {:t1 [:p5 :p8]
:t2 [:p1 :p7 :p10 :p6]
:t3 [:p2 :p4 :p3 :p9]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:job-scheduler :onyx.job-scheduler/greedy
:task-percentages {:j1 {:t1 20 :t2 30 :t3 50}}
:peers [:p7 :p10 :p9 :p1 :p2 :p6 :p4 :p3 :p5 :p8]
:min-required-peers {:j1 {:t1 1 :t2 4 :t3 1}}
:jobs [:j1]
:tasks {:j1 [:t1 :t2 :t3]}
:flux-policies {:j1 {:t2 :kill}}
:messaging {:onyx.messaging/impl :aeron}
:allocations {:j1 {}}
:task-schedulers {:j1 :onyx.task-scheduler/percentage}
:task-saturation {:j1 {:t1 1000 :t2 4 :t3 1000}}}))))))
(deftest ^:broken max-peers-jitter
(is
(= {:j1 {:t2 [:p1] :t3 [:p2] :t1 [:p5]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:job-scheduler :onyx.job-scheduler/greedy
:saturation {:j1 3}
:peers [:p1 :p2 :p3 :p4 :p5]
:min-required-peers {:j1 {:t1 1 :t2 1 :t3 1}}
:task-slot-ids {:j1 {:t2 {:p1 0} :t3 {:p2 0} :t1 {:p5 0}}}
:jobs [:j1]
:tasks {:j1 [:t1 :t2 :t3]}
:messaging {:onyx.messaging/impl :aeron}
:allocations {:j1 {:t2 [:p1] :t3 [:p2] :t1 [:p5]}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:task-saturation {:j1 {:t1 1 :t2 1 :t3 1}}}))))))
| null | https://raw.githubusercontent.com/onyx-platform/onyx/74f9ae58cdbcfcb1163464595f1e6ae6444c9782/test/onyx/scheduler/scheduler_test.clj | clojure | (ns onyx.scheduler.scheduler-test
(:require [clojure.test :refer :all]
[onyx.scheduling.common-job-scheduler :refer [reconfigure-cluster-workload]]
[onyx.log.generators :refer [one-group]]
[onyx.api]))
(deftest ^:broken jitter-on-no-change
(let [initial-allocations
{:j1 {:t1 [:p2 :p1]
:t2 [:p4 :p3]
:t3 [:p5]}
:j2 {:t4 [:p8 :p7 :p6]
:t5 [:p9 :p10]}}]
(is
(= initial-allocations
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2]
:allocations initial-allocations
:peers [:p1 :p2 :p3 :p4 :p5 :p6 :p7 :p8 :p9 :p10]
:tasks {:j1 [:t1 :t2 :t3]
:j2 [:t4 :t5]}
:saturation {:j1 5 :j2 5}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}})))))))
(deftest ^:broken jitter-on-add-peer
(is
(= {:j1 {:t1 [:p4 :p5] :t2 [:p2] :t3 [:p7]}
:j2 {:t4 [:p6] :t5 [:p3] :t6 [:p1]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2]
:allocations {:j1 {:t1 [:p4] :t2 [:p2] :t3 [:p7]}
:j2 {:t4 [:p6] :t5 [:p3] :t6 [:p1]}}
:peers [:p1 :p2 :p3 :p4 :p5 :p6 :p7]
:tasks {:j1 [:t1 :t2 :t3] :j2 [:t4 :t5 :t6]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken underwhelm-peers
(is
(= {:j1 {:t1 [:p1]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {:j1 {:t1 []}}
:peers [:p1 :p2 :p3]
:tasks {:j1 [:t1]}
:task-saturation {:j1 {:t1 1}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken not-enough-peers
(is
(= {}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {}
:peers [:p1 :p2 :p3]
:tasks {:j1 [:t1]}
:min-required-peers {:j1 {:t1 10}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken only-one-job-allocated
(is
(= {:j1 {:t1 [:p1 :p2 :p3]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2]
:allocations {:j1 {:t1 []} :j2 {:t2 []}}
:peers [:p1 :p2 :p3]
:tasks {:j1 [:t1] :j2 [:t2]}
:min-required-peers {:j1 {:t1 3} :j2 {:t2 3}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken even-distribution
(is
(= {:j1 {:t1 [:p5] :t2 [:p2] :t3 [:p1]}
:j2 {:t4 [:p6] :t5 [:p3] :t6 [:p4]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2]
:allocations {:j1 {:t1 []} :j2 {:t2 []}}
:peers [:p1 :p2 :p3 :p4 :p5 :p6]
:tasks {:j1 [:t1 :t2 :t3] :j2 [:t4 :t5 :t6]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken prefer-earlier-job
(is
(= {:j1 {:t1 [:p5 :p7] :t2 [:p2] :t3 [:p1]}
:j2 {:t4 [:p6] :t5 [:p3] :t6 [:p4]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2]
:allocations {:j1 {:t1 []} :j2 {:t2 []}}
:peers [:p1 :p2 :p3 :p4 :p5 :p6 :p7]
:tasks {:j1 [:t1 :t2 :t3] :j2 [:t4 :t5 :t6]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken skip-overloaded-jobs
(is
(= {:j1 {:t1 [:p4 :p5 :p6]}
:j3 {:t3 [:p1 :p2 :p3]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1 :j2 :j3]
:peers [:p1 :p2 :p3 :p4 :p5 :p6]
:tasks {:j1 [:t1] :j2 [:t2] :j3 [:t3]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced
:j2 :onyx.task-scheduler/balanced
:j3 :onyx.task-scheduler/balanced}
:min-required-peers {:j2 {:t2 100}}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken big-assignment
(let [peers (map #(keyword (str "p" %)) (range 100))
replica (reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {}
:peers peers
:tasks {:j1 [:t1]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))]
(is (= (into #{} peers)
(into #{} (get-in replica [:allocations :j1 :t1]))))))
(deftest ^:broken grouping-sticky-peers
(is
(= {:j1 {:t1 [:p1 :p6] :t2 [:p2 :p3 :p4] :t3 [:p5]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {:j1 {:t1 [:p1]
:t2 [:p2 :p3 :p4]
:t3 [:p5]}}
:peers [:p1 :p2 :p3 :p4 :p5 :p6]
:tasks {:j1 [:t1 :t2 :t3]}
:flux-policies {:j1 {:t2 :kill}}
:task-saturation {:j1 {:t1 100 :t2 100 :t3 100}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken grouping-recover-flux-policy
(is
(= {:j1 {:t1 [:p1] :t2 [:p2 :p3 :p5] :t3 [:p4]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {:j1 {:t1 [:p1]
:t2 [:p2 :p3]
:t3 [:p4]}}
:peers [:p1 :p2 :p3 :p4 :p5]
:tasks {:j1 [:t1 :t2 :t3]}
:flux-policies {:j1 {:t2 :recover}}
:task-saturation {:j1 {:t1 100 :t2 3 :t3 100}}
:min-required-peers {:j1 {:t2 3}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken promote-to-first-task
(is
(= {:j1 {:t1 [:p1 :p3] :t2 [:p2]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:jobs [:j1]
:allocations {:j1 {:t1 [:p1]
:t2 [:p2 :p3]}}
:peers [:p1 :p2 :p3]
:tasks {:j1 [:t1 :t2]}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:job-scheduler :onyx.job-scheduler/balanced
:messaging {:onyx.messaging/impl :aeron}}))))))
(deftest ^:broken percentage-grouping-task-tilt
(is
(= {:j1 {:t1 [:p5 :p8]
:t2 [:p1 :p7 :p10 :p6]
:t3 [:p2 :p4 :p3 :p9]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:job-scheduler :onyx.job-scheduler/greedy
:task-percentages {:j1 {:t1 20 :t2 30 :t3 50}}
:peers [:p7 :p10 :p9 :p1 :p2 :p6 :p4 :p3 :p5 :p8]
:min-required-peers {:j1 {:t1 1 :t2 4 :t3 1}}
:jobs [:j1]
:tasks {:j1 [:t1 :t2 :t3]}
:flux-policies {:j1 {:t2 :kill}}
:messaging {:onyx.messaging/impl :aeron}
:allocations {:j1 {}}
:task-schedulers {:j1 :onyx.task-scheduler/percentage}
:task-saturation {:j1 {:t1 1000 :t2 4 :t3 1000}}}))))))
(deftest ^:broken max-peers-jitter
(is
(= {:j1 {:t2 [:p1] :t3 [:p2] :t1 [:p5]}}
(:allocations
(reconfigure-cluster-workload
(one-group
{:job-scheduler :onyx.job-scheduler/greedy
:saturation {:j1 3}
:peers [:p1 :p2 :p3 :p4 :p5]
:min-required-peers {:j1 {:t1 1 :t2 1 :t3 1}}
:task-slot-ids {:j1 {:t2 {:p1 0} :t3 {:p2 0} :t1 {:p5 0}}}
:jobs [:j1]
:tasks {:j1 [:t1 :t2 :t3]}
:messaging {:onyx.messaging/impl :aeron}
:allocations {:j1 {:t2 [:p1] :t3 [:p2] :t1 [:p5]}}
:task-schedulers {:j1 :onyx.task-scheduler/balanced}
:task-saturation {:j1 {:t1 1 :t2 1 :t3 1}}}))))))
|
|
b86fd46d4670d1df3efa2aad2452e301ec4094c1c49e34177a84e97780c68af8 | orbitz/kaiju | kaiju_kv_transport.ml | open Core.Std
open Async.Std
type t = unit
module Callbacks = struct
module Init_args = struct
type t = { log : Zolog_std_event.t Zolog.t
; config : Konfig.t
; base_key : string list
; backend : Kaiju_kv_backend.t
}
end
type start = Init_args.t -> (t, unit) Deferred.Result.t
end
module Init_args = struct
type t = { log : Zolog_std_event.t Zolog.t
; config : Konfig.t
; base_key : string list
; backend : Kaiju_kv_backend.t
; start : Callbacks.start
}
end
let start init_args =
let module Cia = Callbacks.Init_args in
init_args.Init_args.start
{ Cia.log = init_args.Init_args.log
; config = init_args.Init_args.config
; base_key = init_args.Init_args.base_key
; backend = init_args.Init_args.backend
}
>>=? fun () ->
Deferred.return (Ok ())
| null | https://raw.githubusercontent.com/orbitz/kaiju/9da6a07770cdfddde7d33b12f636df2615c36676/lib/kaiju_kv/kaiju_kv_transport.ml | ocaml | open Core.Std
open Async.Std
type t = unit
module Callbacks = struct
module Init_args = struct
type t = { log : Zolog_std_event.t Zolog.t
; config : Konfig.t
; base_key : string list
; backend : Kaiju_kv_backend.t
}
end
type start = Init_args.t -> (t, unit) Deferred.Result.t
end
module Init_args = struct
type t = { log : Zolog_std_event.t Zolog.t
; config : Konfig.t
; base_key : string list
; backend : Kaiju_kv_backend.t
; start : Callbacks.start
}
end
let start init_args =
let module Cia = Callbacks.Init_args in
init_args.Init_args.start
{ Cia.log = init_args.Init_args.log
; config = init_args.Init_args.config
; base_key = init_args.Init_args.base_key
; backend = init_args.Init_args.backend
}
>>=? fun () ->
Deferred.return (Ok ())
|
|
3794409f3e76212f5a89c07562c3781255e8c27e893d39ec1e643ed79d5c9432 | brendanhay/gogol | Get.hs | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
-- |
Module : . Slides . Presentations . Get
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
--
-- Gets the latest version of the specified presentation.
--
-- /See:/ </ Google Slides API Reference> for @slides.presentations.get@.
module Gogol.Slides.Presentations.Get
( -- * Resource
SlidesPresentationsGetResource,
-- ** Constructing a Request
SlidesPresentationsGet (..),
newSlidesPresentationsGet,
)
where
import qualified Gogol.Prelude as Core
import Gogol.Slides.Types
-- | A resource alias for @slides.presentations.get@ method which the
' SlidesPresentationsGet ' request conforms to .
type SlidesPresentationsGetResource =
"v1"
Core.:> "presentations"
Core.:> Core.Capture "presentationId" Core.Text
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.Get '[Core.JSON] Presentation
-- | Gets the latest version of the specified presentation.
--
-- /See:/ 'newSlidesPresentationsGet' smart constructor.
data SlidesPresentationsGet = SlidesPresentationsGet
{ -- | V1 error format.
xgafv :: (Core.Maybe Xgafv),
-- | OAuth access token.
accessToken :: (Core.Maybe Core.Text),
| JSONP
callback :: (Core.Maybe Core.Text),
-- | The ID of the presentation to retrieve.
presentationId :: Core.Text,
| Legacy upload protocol for media ( e.g. \"media\ " , \"multipart\ " ) .
uploadType :: (Core.Maybe Core.Text),
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
| Creates a value of ' SlidesPresentationsGet ' with the minimum fields required to make a request .
newSlidesPresentationsGet ::
| The ID of the presentation to retrieve . See ' presentationId ' .
Core.Text ->
SlidesPresentationsGet
newSlidesPresentationsGet presentationId =
SlidesPresentationsGet
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
callback = Core.Nothing,
presentationId = presentationId,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance Core.GoogleRequest SlidesPresentationsGet where
type Rs SlidesPresentationsGet = Presentation
type
Scopes SlidesPresentationsGet =
'[ Drive'FullControl,
Drive'File,
Drive'Readonly,
Presentations'FullControl,
Presentations'Readonly
]
requestClient SlidesPresentationsGet {..} =
go
presentationId
xgafv
accessToken
callback
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
slidesService
where
go =
Core.buildClient
( Core.Proxy ::
Core.Proxy SlidesPresentationsGetResource
)
Core.mempty
| null | https://raw.githubusercontent.com/brendanhay/gogol/fffd4d98a1996d0ffd4cf64545c5e8af9c976cda/lib/services/gogol-slides/gen/Gogol/Slides/Presentations/Get.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
Gets the latest version of the specified presentation.
/See:/ </ Google Slides API Reference> for @slides.presentations.get@.
* Resource
** Constructing a Request
| A resource alias for @slides.presentations.get@ method which the
| Gets the latest version of the specified presentation.
/See:/ 'newSlidesPresentationsGet' smart constructor.
| V1 error format.
| OAuth access token.
| The ID of the presentation to retrieve.
| Upload protocol for media (e.g. \"raw\", \"multipart\"). | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Module : . Slides . Presentations . Get
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Gogol.Slides.Presentations.Get
SlidesPresentationsGetResource,
SlidesPresentationsGet (..),
newSlidesPresentationsGet,
)
where
import qualified Gogol.Prelude as Core
import Gogol.Slides.Types
' SlidesPresentationsGet ' request conforms to .
type SlidesPresentationsGetResource =
"v1"
Core.:> "presentations"
Core.:> Core.Capture "presentationId" Core.Text
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.Get '[Core.JSON] Presentation
data SlidesPresentationsGet = SlidesPresentationsGet
xgafv :: (Core.Maybe Xgafv),
accessToken :: (Core.Maybe Core.Text),
| JSONP
callback :: (Core.Maybe Core.Text),
presentationId :: Core.Text,
| Legacy upload protocol for media ( e.g. \"media\ " , \"multipart\ " ) .
uploadType :: (Core.Maybe Core.Text),
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
| Creates a value of ' SlidesPresentationsGet ' with the minimum fields required to make a request .
newSlidesPresentationsGet ::
| The ID of the presentation to retrieve . See ' presentationId ' .
Core.Text ->
SlidesPresentationsGet
newSlidesPresentationsGet presentationId =
SlidesPresentationsGet
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
callback = Core.Nothing,
presentationId = presentationId,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance Core.GoogleRequest SlidesPresentationsGet where
type Rs SlidesPresentationsGet = Presentation
type
Scopes SlidesPresentationsGet =
'[ Drive'FullControl,
Drive'File,
Drive'Readonly,
Presentations'FullControl,
Presentations'Readonly
]
requestClient SlidesPresentationsGet {..} =
go
presentationId
xgafv
accessToken
callback
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
slidesService
where
go =
Core.buildClient
( Core.Proxy ::
Core.Proxy SlidesPresentationsGetResource
)
Core.mempty
|
cee7746b84cb329f6a158dd2115abc064217d6b4ceed7df910d96bf43531fce2 | VisionsGlobalEmpowerment/webchange | index.cljs | (ns webchange.ui.components.icon.system.index
(:require
[webchange.ui.components.icon.system.icon-account :as account]
[webchange.ui.components.icon.system.icon-account-add :as account-add]
[webchange.ui.components.icon.system.icon-account-remove :as account-remove]
[webchange.ui.components.icon.system.icon-align-left :as align-left]
[webchange.ui.components.icon.system.icon-align-right :as align-right]
[webchange.ui.components.icon.system.icon-align-center :as align-center]
[webchange.ui.components.icon.system.icon-archive :as archive]
[webchange.ui.components.icon.system.icon-arrow-left :as arrow-left]
[webchange.ui.components.icon.system.icon-arrow-right :as arrow-right]
[webchange.ui.components.icon.system.icon-arrow-up :as arrow-up]
[webchange.ui.components.icon.system.icon-book :as book]
[webchange.ui.components.icon.system.icon-build :as build]
[webchange.ui.components.icon.system.icon-calendar :as calendar]
[webchange.ui.components.icon.system.icon-caret-down :as caret-down]
[webchange.ui.components.icon.system.icon-caret-left :as caret-left]
[webchange.ui.components.icon.system.icon-caret-right :as caret-right]
[webchange.ui.components.icon.system.icon-caret-up :as caret-up]
[webchange.ui.components.icon.system.icon-change-position :as change-position]
[webchange.ui.components.icon.system.icon-character :as character]
[webchange.ui.components.icon.system.icon-check :as check]
[webchange.ui.components.icon.system.icon-close :as close]
[webchange.ui.components.icon.system.icon-contain :as contain]
[webchange.ui.components.icon.system.icon-copy :as copy]
[webchange.ui.components.icon.system.icon-cover :as cover]
[webchange.ui.components.icon.system.icon-cup :as cup]
[webchange.ui.components.icon.system.icon-dialogue :as dialogue]
[webchange.ui.components.icon.system.icon-dnd :as dnd]
[webchange.ui.components.icon.system.icon-download :as download]
[webchange.ui.components.icon.system.icon-drop :as drop]
[webchange.ui.components.icon.system.icon-duplicate :as duplicate]
[webchange.ui.components.icon.system.icon-edit :as edit]
[webchange.ui.components.icon.system.icon-edit-boxed :as edit-boxed]
[webchange.ui.components.icon.system.icon-effects :as effects]
[webchange.ui.components.icon.system.icon-emotion :as emotion]
[webchange.ui.components.icon.system.icon-flip :as flip]
[webchange.ui.components.icon.system.icon-global :as global]
[webchange.ui.components.icon.system.icon-image-broken :as image-broken]
[webchange.ui.components.icon.system.icon-images :as images]
[webchange.ui.components.icon.system.icon-info :as info]
[webchange.ui.components.icon.system.icon-lock :as lock]
[webchange.ui.components.icon.system.icon-move :as move]
[webchange.ui.components.icon.system.icon-movement :as movement]
[webchange.ui.components.icon.system.icon-play :as play]
[webchange.ui.components.icon.system.icon-plus :as plus]
[webchange.ui.components.icon.system.icon-question :as question]
[webchange.ui.components.icon.system.icon-record :as record]
[webchange.ui.components.icon.system.icon-restore :as restore]
[webchange.ui.components.icon.system.icon-rewind-backward :as rewind-backward]
[webchange.ui.components.icon.system.icon-rewind-forward :as rewind-forward]
[webchange.ui.components.icon.system.icon-search :as search]
[webchange.ui.components.icon.system.icon-select :as select]
[webchange.ui.components.icon.system.icon-statistics :as statistics]
[webchange.ui.components.icon.system.icon-stop-recording :as stop-recording]
[webchange.ui.components.icon.system.icon-template :as template]
[webchange.ui.components.icon.system.icon-translate :as translate]
[webchange.ui.components.icon.system.icon-trash :as trash]
[webchange.ui.components.icon.system.icon-upload :as upload]
[webchange.ui.components.icon.system.icon-visibility-off :as visibility-off]
[webchange.ui.components.icon.system.icon-visibility-on :as visibility-on]
[webchange.ui.components.icon.system.icon-zoom-in :as zoom-in]
[webchange.ui.components.icon.system.icon-zoom-out :as zoom-out]
[webchange.ui.components.icon.utils :refer [with-prefix]]))
(def data (with-prefix "system"
{"account" account/data
"account-add" account-add/data
"account-remove" account-remove/data
"align-left" align-left/data
"align-right" align-right/data
"align-center" align-center/data
"archive" archive/data
"arrow-left" arrow-left/data
"arrow-right" arrow-right/data
"arrow-up" arrow-up/data
"book" book/data
"build" build/data
"system-book" book/data
"calendar" calendar/data
"caret-down" caret-down/data
"caret-left" caret-left/data
"caret-right" caret-right/data
"caret-up" caret-up/data
"change-position" change-position/data
"character" character/data
"check" check/data
"close" close/data
"contain" contain/data
"copy" copy/data
"cover" cover/data
"cup" cup/data
"dialogue" dialogue/data
"dnd" dnd/data
"download" download/data
"drop" drop/data
"duplicate" duplicate/data
"edit" edit/data
"edit-boxed" edit-boxed/data
"effects" effects/data
"emotion" emotion/data
"flip" flip/data
"global" global/data
"image-broken" image-broken/data
"images" images/data
"info" info/data
"lock" lock/data
"move" move/data
"movement" movement/data
"play" play/data
"plus" plus/data
"question" question/data
"record" record/data
"restore" restore/data
"rewind-backward" rewind-backward/data
"rewind-forward" rewind-forward/data
"search" search/data
"select" select/data
"statistics" statistics/data
"stop" stop-recording/data
"template" template/data
"translate" translate/data
"trash" trash/data
"upload" upload/data
"visibility-off" visibility-off/data
"visibility-on" visibility-on/data
"zoom-in" zoom-in/data
"zoom-out" zoom-out/data}))
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/5d5b492712fe34db3ea3b23230c8ad87f94e5865/src/cljs/webchange/ui/components/icon/system/index.cljs | clojure | (ns webchange.ui.components.icon.system.index
(:require
[webchange.ui.components.icon.system.icon-account :as account]
[webchange.ui.components.icon.system.icon-account-add :as account-add]
[webchange.ui.components.icon.system.icon-account-remove :as account-remove]
[webchange.ui.components.icon.system.icon-align-left :as align-left]
[webchange.ui.components.icon.system.icon-align-right :as align-right]
[webchange.ui.components.icon.system.icon-align-center :as align-center]
[webchange.ui.components.icon.system.icon-archive :as archive]
[webchange.ui.components.icon.system.icon-arrow-left :as arrow-left]
[webchange.ui.components.icon.system.icon-arrow-right :as arrow-right]
[webchange.ui.components.icon.system.icon-arrow-up :as arrow-up]
[webchange.ui.components.icon.system.icon-book :as book]
[webchange.ui.components.icon.system.icon-build :as build]
[webchange.ui.components.icon.system.icon-calendar :as calendar]
[webchange.ui.components.icon.system.icon-caret-down :as caret-down]
[webchange.ui.components.icon.system.icon-caret-left :as caret-left]
[webchange.ui.components.icon.system.icon-caret-right :as caret-right]
[webchange.ui.components.icon.system.icon-caret-up :as caret-up]
[webchange.ui.components.icon.system.icon-change-position :as change-position]
[webchange.ui.components.icon.system.icon-character :as character]
[webchange.ui.components.icon.system.icon-check :as check]
[webchange.ui.components.icon.system.icon-close :as close]
[webchange.ui.components.icon.system.icon-contain :as contain]
[webchange.ui.components.icon.system.icon-copy :as copy]
[webchange.ui.components.icon.system.icon-cover :as cover]
[webchange.ui.components.icon.system.icon-cup :as cup]
[webchange.ui.components.icon.system.icon-dialogue :as dialogue]
[webchange.ui.components.icon.system.icon-dnd :as dnd]
[webchange.ui.components.icon.system.icon-download :as download]
[webchange.ui.components.icon.system.icon-drop :as drop]
[webchange.ui.components.icon.system.icon-duplicate :as duplicate]
[webchange.ui.components.icon.system.icon-edit :as edit]
[webchange.ui.components.icon.system.icon-edit-boxed :as edit-boxed]
[webchange.ui.components.icon.system.icon-effects :as effects]
[webchange.ui.components.icon.system.icon-emotion :as emotion]
[webchange.ui.components.icon.system.icon-flip :as flip]
[webchange.ui.components.icon.system.icon-global :as global]
[webchange.ui.components.icon.system.icon-image-broken :as image-broken]
[webchange.ui.components.icon.system.icon-images :as images]
[webchange.ui.components.icon.system.icon-info :as info]
[webchange.ui.components.icon.system.icon-lock :as lock]
[webchange.ui.components.icon.system.icon-move :as move]
[webchange.ui.components.icon.system.icon-movement :as movement]
[webchange.ui.components.icon.system.icon-play :as play]
[webchange.ui.components.icon.system.icon-plus :as plus]
[webchange.ui.components.icon.system.icon-question :as question]
[webchange.ui.components.icon.system.icon-record :as record]
[webchange.ui.components.icon.system.icon-restore :as restore]
[webchange.ui.components.icon.system.icon-rewind-backward :as rewind-backward]
[webchange.ui.components.icon.system.icon-rewind-forward :as rewind-forward]
[webchange.ui.components.icon.system.icon-search :as search]
[webchange.ui.components.icon.system.icon-select :as select]
[webchange.ui.components.icon.system.icon-statistics :as statistics]
[webchange.ui.components.icon.system.icon-stop-recording :as stop-recording]
[webchange.ui.components.icon.system.icon-template :as template]
[webchange.ui.components.icon.system.icon-translate :as translate]
[webchange.ui.components.icon.system.icon-trash :as trash]
[webchange.ui.components.icon.system.icon-upload :as upload]
[webchange.ui.components.icon.system.icon-visibility-off :as visibility-off]
[webchange.ui.components.icon.system.icon-visibility-on :as visibility-on]
[webchange.ui.components.icon.system.icon-zoom-in :as zoom-in]
[webchange.ui.components.icon.system.icon-zoom-out :as zoom-out]
[webchange.ui.components.icon.utils :refer [with-prefix]]))
(def data (with-prefix "system"
{"account" account/data
"account-add" account-add/data
"account-remove" account-remove/data
"align-left" align-left/data
"align-right" align-right/data
"align-center" align-center/data
"archive" archive/data
"arrow-left" arrow-left/data
"arrow-right" arrow-right/data
"arrow-up" arrow-up/data
"book" book/data
"build" build/data
"system-book" book/data
"calendar" calendar/data
"caret-down" caret-down/data
"caret-left" caret-left/data
"caret-right" caret-right/data
"caret-up" caret-up/data
"change-position" change-position/data
"character" character/data
"check" check/data
"close" close/data
"contain" contain/data
"copy" copy/data
"cover" cover/data
"cup" cup/data
"dialogue" dialogue/data
"dnd" dnd/data
"download" download/data
"drop" drop/data
"duplicate" duplicate/data
"edit" edit/data
"edit-boxed" edit-boxed/data
"effects" effects/data
"emotion" emotion/data
"flip" flip/data
"global" global/data
"image-broken" image-broken/data
"images" images/data
"info" info/data
"lock" lock/data
"move" move/data
"movement" movement/data
"play" play/data
"plus" plus/data
"question" question/data
"record" record/data
"restore" restore/data
"rewind-backward" rewind-backward/data
"rewind-forward" rewind-forward/data
"search" search/data
"select" select/data
"statistics" statistics/data
"stop" stop-recording/data
"template" template/data
"translate" translate/data
"trash" trash/data
"upload" upload/data
"visibility-off" visibility-off/data
"visibility-on" visibility-on/data
"zoom-in" zoom-in/data
"zoom-out" zoom-out/data}))
|
|
b190761d517bd0c06c544934f6806d765b435b02cb50d532b0365b2c0c69701c | bobbae/gosling-emacs | buff-menu.ml | ;
; Buffer menu main function and support functions. Autoloaded.
;
(declare-global &Buffer-menu-current-buffer& &Buffer-menu-argument&)
(defun
(buffer-menu
(setq &Buffer-menu-current-buffer& (current-buffer-name))
(setq &Buffer-menu-argument& prefix-argument-provided)
(&buffer-menu))
)
(defun
    (&buffer-menu old-buffer buffer-column	; written by Swt
				; Presents a menu of buffers in a buffer.
				; The user may position the cursor on the
				; line containing the desired buffer, then
				; hit space to switch to that buffer.
	; Build the "Buffer list" buffer, mark the previously-current
	; buffer with ".", leave the cursor on the line of the buffer we
	; came from, optionally strip Scratch buffers, and install the
	; single-key menu bindings.
	(setq old-buffer (current-buffer-name))
	(switch-to-buffer "Buffer list")
	(list-buffers)
	; Find the "Buffer" column heading; its column is where the
	; buffer names start on each following line.
	(search-forward "Buffer")
	(backward-word)
	(setq buffer-column (current-column))
	; Locate the line for &Buffer-menu-current-buffer&.  The search
	; can hit a substring of a longer name, so confirm each hit with
	; &Buffer-menu-name before stopping.  error-occured swallows the
	; search failure if the buffer is not listed.
	(error-occured cont
	    (setq cont 1)
	    (while cont
		(search-forward (concat " " &Buffer-menu-current-buffer& " "))
		(if (= (&Buffer-menu-name) &Buffer-menu-current-buffer&)
		    (setq cont 0)
		    (beginning-of-next-line))))
	; Replace the first character of that line with "." to mark the
	; buffer that was current when the menu was invoked.
	(beginning-of-line)
	(delete-next-character)
	(insert-string ".")
	; Now position the cursor on the line for the buffer the user
	; was actually in (old-buffer), using the same confirmed search.
	(beginning-of-file)
	(error-occured cont
	    (setq cont 1)
	    (while cont
		(search-forward (concat " " old-buffer " "))
		(if (= (&Buffer-menu-name) old-buffer)
		    (setq cont 0)
		    (beginning-of-next-line))))
	(beginning-of-line)
	; Unless a prefix argument was given, hide Scratch buffers:
	; every line whose mode column reads " Scr " is moved out to the
	; SCRATCH buffer (including its trailing newline, if any).
	(save-excursion
	    (beginning-of-file)
	    (if (= &Buffer-menu-argument& 0)
		(while (! (error-occured
			      (search-forward " Scr ")))
		       (beginning-of-line)
		       (set-mark)
		       (next-line)
		       (delete-region-to-buffer "SCRATCH")
		       (if (= (following-char) '^J')
			   (delete-next-character))))
	)
	; Single-key command bindings, local to the menu buffer.
	(local-bind-to-key "Select-buffer-menu" '\040')	; space
	(local-bind-to-key "Select-buffer-menu" '^m')
	(local-bind-to-key "Select-buffer-menu" '^j')
	(local-bind-to-key "Buffer-menu-2-window" '2')
	(local-bind-to-key "Buffer-menu-1-window" '1')
	(local-bind-to-key "Buffer-menu-." '.')
	(local-bind-to-key "Buffer-menu-save-file" 's')
	(local-bind-to-key "Buffer-menu-save-file" 'S')
	(local-bind-to-key "Buffer-menu-delete-buffer" 'd')
	(local-bind-to-key "Buffer-menu-delete-buffer" 'D')
	(local-bind-to-key "Buffer-menu-not-modified" "~")
	(local-bind-to-key "Buffer-menu-help" "?")
	(local-bind-to-key "Buffer-menu-all" "\030\002"); ^X^B
	; Long buffer/file names must not wrap, or the column-based
	; parsing in &Buffer-menu-name would break.  The selection
	; commands restore wrap-long-lines to 1 on exit.
	(setq wrap-long-lines 0)
    )
)
(defun
    (Buffer-menu-help
	; "?" command.  Without a prefix argument, show a one-line
	; summary in the echo area; with one (^U?), pop up a "Help"
	; buffer listing every menu command.
	(if (= prefix-argument-provided 0)
	    (message
  "Commands are space, newline, 1, 2, s, ~, d, ., ^X^B.  ^U? gets more help")
	    (save-excursion
		(pop-to-buffer "Help")
		(erase-buffer)
		(insert-string
		    (concat
 "space, newline	Select buffer on line.\n"
 "1\t\tSelect buffer and switch to 1 window mode.\n"
 "2\t\tSelect buffer in the other window.\n"
 ".\t\tSelect the buffer you were in before.\n"
 "s,S\t\tSave the file in this buffer.\n"
 "~\t\tUnmodify this buffer.\n"
 "d,D\t\tKill this buffer.\n"
 "^X^B\t\tRedisplay with Scratch buffers too.")))
	)
    )
)
(defun
    (Select-buffer-menu
	; Space/newline command: switch to the buffer named on the
	; current menu line.
	(setq wrap-long-lines 1)	; undo the setting made by &buffer-menu
	(use-old-buffer (&Buffer-menu-name)))
)
(defun
    (Buffer-menu-1-window		; by SWT
				; select a buffer and delete all other
				; windows at the same time.
	; "1" command.
	(setq wrap-long-lines 1)	; undo the setting made by &buffer-menu
	(use-old-buffer (&Buffer-menu-name))
	(delete-other-windows))
)
(defun
    (Buffer-menu-2-window new-buffer
	; "2" command: show the buffer on the current line in the other
	; window, while this window returns to the buffer the user came
	; from.  The line's buffer name must be read before leaving the
	; menu buffer, hence the temporary new-buffer.
	(setq new-buffer (&Buffer-menu-name))
	(setq wrap-long-lines 1)	; undo the setting made by &buffer-menu
	(use-old-buffer &Buffer-menu-current-buffer&)
	(pop-to-buffer new-buffer))
)
(defun
    (Buffer-menu-.
	; "." command: abandon the menu and return to the buffer that
	; was current when buffer-menu was invoked.
	(setq wrap-long-lines 1)	; undo the setting made by &buffer-menu
	(use-old-buffer &Buffer-menu-current-buffer&)
    )
)
(defun
    (Buffer-menu-save-file
	; "s"/"S" command: write the file of the buffer on the current
	; line to disk, then rebuild the menu so the modified-flag
	; column is brought up to date.
	; (The original declared an unused local "buffer-name" and
	; wrapped the body in a redundant (progn ...); a defun body is
	; an implicit progn, as in every other command in this file.)
	(use-old-buffer (&Buffer-menu-name))
	(Write-current-file)
	(&buffer-menu)
    )
)
(defun
(Buffer-menu-delete-buffer
(delete-buffer (&Buffer-menu-name))
(beginning-of-line)
(save-excursion
(set-mark)
(next-line)
(delete-region-to-buffer "SCRATCH"))
(if (eobp) (previous-line)))
)
(defun
(Buffer-menu-not-modified
(use-old-buffer (&Buffer-menu-name))
(Not-modified)
(&buffer-menu)
)
)
(defun
(&Buffer-menu-name buffer-column mode-column
(if (!= (current-buffer-name) "Buffer list")
(error-message "Not in Buffer menu")
)
(save-excursion
(beginning-of-file)
(search-forward "Buffer")
(backward-word)
(setq buffer-column (- (current-column) 1))
(search-forward "Mode")
(backward-word)
(setq mode-column (- (current-column) 1))
)
(beginning-of-line)
(provide-prefix-argument buffer-column (forward-character))
(set-mark)
(search-forward " ")
(while (& (!= (following-char) '\040')
(< (current-column) mode-column))
(search-forward " "))
(backward-character)
(region-to-string)
)
)
(defun
(Buffer-menu-all
(use-old-buffer (&Buffer-menu-name))
(setq &Buffer-menu-argument& 1)
(&buffer-menu))
)
| null | https://raw.githubusercontent.com/bobbae/gosling-emacs/8fdda532abbffb0c952251a0b5a4857e0f27495a/lib/maclib/utah/buff-menu.ml | ocaml | ;
; Buffer menu main function and support functions. Autoloaded.
;
(declare-global &Buffer-menu-current-buffer& &Buffer-menu-argument&)
(defun
(buffer-menu
(setq &Buffer-menu-current-buffer& (current-buffer-name))
(setq &Buffer-menu-argument& prefix-argument-provided)
(&buffer-menu))
)
(defun
(&buffer-menu old-buffer buffer-column ; written by Swt
; Presents a menu of buffers in a buffer.
; The user may position the cursor on the
; line containing the desired buffer, then
; hit space to switch to that buffer.
(setq old-buffer (current-buffer-name))
(switch-to-buffer "Buffer list")
(list-buffers)
(search-forward "Buffer")
(backward-word)
(setq buffer-column (current-column))
(error-occured cont
(setq cont 1)
(while cont
(search-forward (concat " " &Buffer-menu-current-buffer& " "))
(if (= (&Buffer-menu-name) &Buffer-menu-current-buffer&)
(setq cont 0)
(beginning-of-next-line))))
(beginning-of-line)
(delete-next-character)
(insert-string ".")
(beginning-of-file)
(error-occured cont
(setq cont 1)
(while cont
(search-forward (concat " " old-buffer " "))
(if (= (&Buffer-menu-name) old-buffer)
(setq cont 0)
(beginning-of-next-line))))
(beginning-of-line)
(save-excursion
(beginning-of-file)
(if (= &Buffer-menu-argument& 0)
(while (! (error-occured
(search-forward " Scr ")))
(beginning-of-line)
(set-mark)
(next-line)
(delete-region-to-buffer "SCRATCH")
(if (= (following-char) '^J')
(delete-next-character))))
)
(local-bind-to-key "Select-buffer-menu" '\040')
(local-bind-to-key "Select-buffer-menu" '^m')
(local-bind-to-key "Select-buffer-menu" '^j')
(local-bind-to-key "Buffer-menu-2-window" '2')
(local-bind-to-key "Buffer-menu-1-window" '1')
(local-bind-to-key "Buffer-menu-." '.')
(local-bind-to-key "Buffer-menu-save-file" 's')
(local-bind-to-key "Buffer-menu-save-file" 'S')
(local-bind-to-key "Buffer-menu-delete-buffer" 'd')
(local-bind-to-key "Buffer-menu-delete-buffer" 'D')
(local-bind-to-key "Buffer-menu-not-modified" "~")
(local-bind-to-key "Buffer-menu-help" "?")
(local-bind-to-key "Buffer-menu-all" "\030\002"); ^X^B
(setq wrap-long-lines 0)
)
)
(defun
(Buffer-menu-help
(if (= prefix-argument-provided 0)
(message
"Commands are space, newline, 1, 2, s, ~, d, ., ^X^B. ^U? gets more help")
(save-excursion
(pop-to-buffer "Help")
(erase-buffer)
(insert-string
(concat
"space, newline Select buffer on line.\n"
"1\t\tSelect buffer and switch to 1 window mode.\n"
"2\t\tSelect buffer in the other window.\n"
".\t\tSelect the buffer you were in before.\n"
"s,S\t\tSave the file in this buffer.\n"
"~\t\tUnmodify this buffer.\n"
"d,D\t\tKill this buffer.\n"
"^X^B\t\tRedisplay with Scratch buffers too.")))
)
)
)
(defun
(Select-buffer-menu
(setq wrap-long-lines 1)
(use-old-buffer (&Buffer-menu-name)))
)
(defun
(Buffer-menu-1-window ; by SWT
; select a buffer and delete all other
; windows at the same time.
(setq wrap-long-lines 1)
(use-old-buffer (&Buffer-menu-name))
(delete-other-windows))
)
(defun
(Buffer-menu-2-window new-buffer
(setq new-buffer (&Buffer-menu-name))
(setq wrap-long-lines 1)
(use-old-buffer &Buffer-menu-current-buffer&)
(pop-to-buffer new-buffer))
)
(defun
(Buffer-menu-.
(setq wrap-long-lines 1)
(use-old-buffer &Buffer-menu-current-buffer&)
)
)
(defun
(Buffer-menu-save-file buffer-name
(progn
(use-old-buffer (&Buffer-menu-name))
(Write-current-file)
(&buffer-menu)
)
)
)
(defun
(Buffer-menu-delete-buffer
(delete-buffer (&Buffer-menu-name))
(beginning-of-line)
(save-excursion
(set-mark)
(next-line)
(delete-region-to-buffer "SCRATCH"))
(if (eobp) (previous-line)))
)
(defun
(Buffer-menu-not-modified
(use-old-buffer (&Buffer-menu-name))
(Not-modified)
(&buffer-menu)
)
)
(defun
(&Buffer-menu-name buffer-column mode-column
(if (!= (current-buffer-name) "Buffer list")
(error-message "Not in Buffer menu")
)
(save-excursion
(beginning-of-file)
(search-forward "Buffer")
(backward-word)
(setq buffer-column (- (current-column) 1))
(search-forward "Mode")
(backward-word)
(setq mode-column (- (current-column) 1))
)
(beginning-of-line)
(provide-prefix-argument buffer-column (forward-character))
(set-mark)
(search-forward " ")
(while (& (!= (following-char) '\040')
(< (current-column) mode-column))
(search-forward " "))
(backward-character)
(region-to-string)
)
)
(defun
(Buffer-menu-all
(use-old-buffer (&Buffer-menu-name))
(setq &Buffer-menu-argument& 1)
(&buffer-menu))
)
|
|
fb5688893043124625083bc7bdba899d0519ad7290439d8658cc98ab024bb73c | bmeurer/ocamljit2 | odoc_test.ml | (***********************************************************************)
(* OCamldoc *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2004 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ Id$
(** Custom generator to perform test on ocamldoc. *)
open Odoc_info
open Odoc_info.Module
open Odoc_info.Type
type test_kind =
Types_display
let p = Format.fprintf
class string_gen =
object(self)
inherit Odoc_info.Scan.scanner
val mutable test_kinds = []
val mutable fmt = Format.str_formatter
method must_display_types = List.mem Types_display test_kinds
method set_test_kinds_from_module m =
test_kinds <- List.fold_left
(fun acc (s, _) ->
match s with
"test_types_display" -> Types_display :: acc
| _ -> acc
)
[]
(
match m.m_info with
None -> []
| Some i -> i.i_custom
)
method! scan_type t =
match test_kinds with
[] -> ()
| _ ->
p fmt "# type %s:\n" t.ty_name;
if self#must_display_types then
(
p fmt "# manifest (Odoc_info.string_of_type_expr):\n<[%s]>\n"
(match t.ty_manifest with
None -> "None"
| Some e -> Odoc_info.string_of_type_expr e
);
);
method! scan_module_pre m =
p fmt "#\n# module %s:\n" m.m_name ;
if self#must_display_types then
(
p fmt "# Odoc_info.string_of_module_type:\n<[%s]>\n"
(Odoc_info.string_of_module_type m.m_type);
p fmt "# Odoc_info.string_of_module_type ~complete: true :\n<[%s]>\n"
(Odoc_info.string_of_module_type ~complete: true m.m_type);
);
true
method! scan_module_type_pre m =
p fmt "#\n# module type %s:\n" m.mt_name ;
if self#must_display_types then
(
p fmt "# Odoc_info.string_of_module_type:\n<[%s]>\n"
(match m.mt_type with
None -> "None"
| Some t -> Odoc_info.string_of_module_type t
);
p fmt "# Odoc_info.string_of_module_type ~complete: true :\n<[%s]>\n"
(match m.mt_type with
None -> "None"
| Some t -> Odoc_info.string_of_module_type ~complete: true t
);
);
true
method generate (module_list: Odoc_info.Module.t_module list) =
let oc = open_out !Odoc_info.Args.out_file in
fmt <- Format.formatter_of_out_channel oc;
(
try
List.iter
(fun m ->
self#set_test_kinds_from_module m;
self#scan_module_list [m];
)
module_list
with
e ->
prerr_endline (Printexc.to_string e)
);
Format.pp_print_flush fmt ();
close_out oc
end
let my_generator = new string_gen
let _ = Odoc_info.Args.set_doc_generator
(Some (my_generator :> Odoc_info.Args.doc_generator))
| null | https://raw.githubusercontent.com/bmeurer/ocamljit2/ef06db5c688c1160acc1de1f63c29473bcd0055c/testsuite/tests/tool-ocamldoc/odoc_test.ml | ocaml | *********************************************************************
OCamldoc
*********************************************************************
* Custom generator to perform test on ocamldoc. | , projet Cristal , INRIA Rocquencourt
Copyright 2004 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ Id$
open Odoc_info
open Odoc_info.Module
open Odoc_info.Type
type test_kind =
Types_display
let p = Format.fprintf
class string_gen =
object(self)
inherit Odoc_info.Scan.scanner
val mutable test_kinds = []
val mutable fmt = Format.str_formatter
method must_display_types = List.mem Types_display test_kinds
method set_test_kinds_from_module m =
test_kinds <- List.fold_left
(fun acc (s, _) ->
match s with
"test_types_display" -> Types_display :: acc
| _ -> acc
)
[]
(
match m.m_info with
None -> []
| Some i -> i.i_custom
)
method! scan_type t =
match test_kinds with
[] -> ()
| _ ->
p fmt "# type %s:\n" t.ty_name;
if self#must_display_types then
(
p fmt "# manifest (Odoc_info.string_of_type_expr):\n<[%s]>\n"
(match t.ty_manifest with
None -> "None"
| Some e -> Odoc_info.string_of_type_expr e
);
);
method! scan_module_pre m =
p fmt "#\n# module %s:\n" m.m_name ;
if self#must_display_types then
(
p fmt "# Odoc_info.string_of_module_type:\n<[%s]>\n"
(Odoc_info.string_of_module_type m.m_type);
p fmt "# Odoc_info.string_of_module_type ~complete: true :\n<[%s]>\n"
(Odoc_info.string_of_module_type ~complete: true m.m_type);
);
true
method! scan_module_type_pre m =
p fmt "#\n# module type %s:\n" m.mt_name ;
if self#must_display_types then
(
p fmt "# Odoc_info.string_of_module_type:\n<[%s]>\n"
(match m.mt_type with
None -> "None"
| Some t -> Odoc_info.string_of_module_type t
);
p fmt "# Odoc_info.string_of_module_type ~complete: true :\n<[%s]>\n"
(match m.mt_type with
None -> "None"
| Some t -> Odoc_info.string_of_module_type ~complete: true t
);
);
true
method generate (module_list: Odoc_info.Module.t_module list) =
let oc = open_out !Odoc_info.Args.out_file in
fmt <- Format.formatter_of_out_channel oc;
(
try
List.iter
(fun m ->
self#set_test_kinds_from_module m;
self#scan_module_list [m];
)
module_list
with
e ->
prerr_endline (Printexc.to_string e)
);
Format.pp_print_flush fmt ();
close_out oc
end
let my_generator = new string_gen
let _ = Odoc_info.Args.set_doc_generator
(Some (my_generator :> Odoc_info.Args.doc_generator))
|
9d5ec881506ffa950a7640af4ae16c6fcc7fdbe19b1b66b3b81a2e1a30b76cf0 | testdouble/baizen | account_identifier_test.clj | (ns baizen.formats.account-identifier-test
(:require [clojure.test :refer :all]
[baizen.formats :refer :all]
[baizen.formats.account-identifier :refer :all])
(:import [baizen.formats.account_identifier AccountIdentifier]))
(def simple-account-identifier-line
["03" "0975312468" "GBP"
"010" "500000" "1" "0/"])
(def account-identifier-line-with-multiple-summaries
["03" "0975312468" "GBP"
"010" "500000" "1" "0"
"056" "2000" "" "0"
"057" "4000" "" "2/"])
(deftest account-identifier-test
(testing "account identifier fields"
(testing "with only one summary"
(let [account-identifier (dissect (AccountIdentifier. simple-account-identifier-line))
summary (first (:summaries account-identifier))]
(is (= "03" (:record-code account-identifier)))
(is (= "0975312468" (:customer-account-number account-identifier)))
(is (= "GBP" (:currency-code account-identifier)))
(is (= {:code "010" :transaction "NA" :level "Status" :description "Opening Ledger"}
(:type-code summary)))
(is (= "500000" (:amount summary)))
(is (= "1" (:item-count summary)))
(is (= "0" (:funds-type summary)))))
(testing "with multiple summaries"
(let [account-identifier (dissect (AccountIdentifier. account-identifier-line-with-multiple-summaries))
first-summary (first (:summaries account-identifier))
second-summary (second (:summaries account-identifier))
third-summary (last (:summaries account-identifier))]
(is (= "03" (:record-code account-identifier)))
(is (= "0975312468" (:customer-account-number account-identifier)))
(is (= "GBP" (:currency-code account-identifier)))
(is (= {:code "010" :transaction "NA" :level "Status" :description "Opening Ledger"}
(:type-code first-summary)))
(is (= "500000" (:amount first-summary)))
(is (= "1" (:item-count first-summary)))
(is (= "0" (:funds-type first-summary)))
(is (= {:code "056" :transaction "NA" :level "Status" :description "Loan Balance"}
(:type-code second-summary)))
(is (= "2000" (:amount second-summary)))
(is (= "" (:item-count second-summary)))
(is (= "0" (:funds-type second-summary)))
(is (= {:code "057" :transaction "NA" :level "Status" :description "Total Investment Position"}
(:type-code third-summary)))
(is (= "4000" (:amount third-summary)))
(is (= "" (:item-count third-summary)))
(is (= "2" (:funds-type third-summary))))))
(testing "default :current-code should be the same as the group currency code")
(testing "default :item-count is 'unknown'")
(testing "default :funds-type is 'Z'")
(testing "handle :funds-type is 'D' -- there will be extra records"))
| null | https://raw.githubusercontent.com/testdouble/baizen/66bc3da3b660bc88317e1be6fce266a95db883fb/test/baizen/formats/account_identifier_test.clj | clojure | (ns baizen.formats.account-identifier-test
(:require [clojure.test :refer :all]
[baizen.formats :refer :all]
[baizen.formats.account-identifier :refer :all])
(:import [baizen.formats.account_identifier AccountIdentifier]))
(def simple-account-identifier-line
["03" "0975312468" "GBP"
"010" "500000" "1" "0/"])
(def account-identifier-line-with-multiple-summaries
["03" "0975312468" "GBP"
"010" "500000" "1" "0"
"056" "2000" "" "0"
"057" "4000" "" "2/"])
(deftest account-identifier-test
(testing "account identifier fields"
(testing "with only one summary"
(let [account-identifier (dissect (AccountIdentifier. simple-account-identifier-line))
summary (first (:summaries account-identifier))]
(is (= "03" (:record-code account-identifier)))
(is (= "0975312468" (:customer-account-number account-identifier)))
(is (= "GBP" (:currency-code account-identifier)))
(is (= {:code "010" :transaction "NA" :level "Status" :description "Opening Ledger"}
(:type-code summary)))
(is (= "500000" (:amount summary)))
(is (= "1" (:item-count summary)))
(is (= "0" (:funds-type summary)))))
(testing "with multiple summaries"
(let [account-identifier (dissect (AccountIdentifier. account-identifier-line-with-multiple-summaries))
first-summary (first (:summaries account-identifier))
second-summary (second (:summaries account-identifier))
third-summary (last (:summaries account-identifier))]
(is (= "03" (:record-code account-identifier)))
(is (= "0975312468" (:customer-account-number account-identifier)))
(is (= "GBP" (:currency-code account-identifier)))
(is (= {:code "010" :transaction "NA" :level "Status" :description "Opening Ledger"}
(:type-code first-summary)))
(is (= "500000" (:amount first-summary)))
(is (= "1" (:item-count first-summary)))
(is (= "0" (:funds-type first-summary)))
(is (= {:code "056" :transaction "NA" :level "Status" :description "Loan Balance"}
(:type-code second-summary)))
(is (= "2000" (:amount second-summary)))
(is (= "" (:item-count second-summary)))
(is (= "0" (:funds-type second-summary)))
(is (= {:code "057" :transaction "NA" :level "Status" :description "Total Investment Position"}
(:type-code third-summary)))
(is (= "4000" (:amount third-summary)))
(is (= "" (:item-count third-summary)))
(is (= "2" (:funds-type third-summary))))))
(testing "default :current-code should be the same as the group currency code")
(testing "default :item-count is 'unknown'")
(testing "default :funds-type is 'Z'")
(testing "handle :funds-type is 'D' -- there will be extra records"))
|
|
281bf35a539ec31675908c950bcef7e16fdefbf4b5b10c3e83fce40021e1054d | janestreet/lwt-async | lwt_ssl.mli | Lightweight thread library for
* Interface Lwt_ssl
* Copyright ( C ) 2005 - 2008
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation , with linking exceptions ;
* either version 2.1 of the License , or ( at your option ) any later
* version . See COPYING file for details .
*
* This program is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA
* 02111 - 1307 , USA .
*
* Interface Lwt_ssl
* Copyright (C) 2005-2008 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, with linking exceptions;
* either version 2.1 of the License, or (at your option) any later
* version. See COPYING file for details.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*)
(** OCaml-SSL integration *)
type socket
(** Wrapper for SSL sockets.
It is either a plain socket, either a real SSL socket. *)
val ssl_socket : socket -> Ssl.socket option
(** Returns the underlying SSL socket used for this wrapper. If it is
a plain socket it returns [None]. *)
val is_ssl : socket -> bool
(** Are we using an SSL socket? *)
val ssl_accept : Lwt_unix.file_descr -> Ssl.context -> socket Lwt.t
val ssl_connect : Lwt_unix.file_descr -> Ssl.context -> socket Lwt.t
val plain : Lwt_unix.file_descr -> socket
val embed_socket : Lwt_unix.file_descr -> Ssl.context -> socket
val read : socket -> string -> int -> int -> int Lwt.t
val write : socket -> string -> int -> int -> int Lwt.t
val read_bytes : socket -> Lwt_bytes.t -> int -> int -> int Lwt.t
val write_bytes : socket -> Lwt_bytes.t -> int -> int -> int Lwt.t
(* Really wait on a plain socket, just yield over SSL *)
val wait_read : socket -> unit Lwt.t
val wait_write : socket -> unit Lwt.t
val shutdown : socket -> Unix.shutdown_command -> unit
val close : socket -> unit Lwt.t
val in_channel_of_descr : socket -> Lwt_io.input_channel
val out_channel_of_descr : socket -> Lwt_io.output_channel
val ssl_shutdown : socket -> unit Lwt.t
val abort : socket -> exn -> unit
val get_fd : socket -> Lwt_unix.file_descr
val get_unix_fd : socket -> Unix.file_descr
val getsockname : socket -> Unix.sockaddr
val getpeername : socket -> Unix.sockaddr
| null | https://raw.githubusercontent.com/janestreet/lwt-async/c738e6202c1c7409e079e513c7bdf469f7f9984c/src/ssl/lwt_ssl.mli | ocaml | * OCaml-SSL integration
* Wrapper for SSL sockets.
It is either a plain socket, either a real SSL socket.
* Returns the underlying SSL socket used for this wrapper. If it is
a plain socket it returns [None].
* Are we using an SSL socket?
Really wait on a plain socket, just yield over SSL | Lightweight thread library for
* Interface Lwt_ssl
* Copyright ( C ) 2005 - 2008
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation , with linking exceptions ;
* either version 2.1 of the License , or ( at your option ) any later
* version . See COPYING file for details .
*
* This program is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA
* 02111 - 1307 , USA .
*
* Interface Lwt_ssl
* Copyright (C) 2005-2008 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, with linking exceptions;
* either version 2.1 of the License, or (at your option) any later
* version. See COPYING file for details.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*)
type socket
val ssl_socket : socket -> Ssl.socket option
val is_ssl : socket -> bool
val ssl_accept : Lwt_unix.file_descr -> Ssl.context -> socket Lwt.t
val ssl_connect : Lwt_unix.file_descr -> Ssl.context -> socket Lwt.t
val plain : Lwt_unix.file_descr -> socket
val embed_socket : Lwt_unix.file_descr -> Ssl.context -> socket
val read : socket -> string -> int -> int -> int Lwt.t
val write : socket -> string -> int -> int -> int Lwt.t
val read_bytes : socket -> Lwt_bytes.t -> int -> int -> int Lwt.t
val write_bytes : socket -> Lwt_bytes.t -> int -> int -> int Lwt.t
val wait_read : socket -> unit Lwt.t
val wait_write : socket -> unit Lwt.t
val shutdown : socket -> Unix.shutdown_command -> unit
val close : socket -> unit Lwt.t
val in_channel_of_descr : socket -> Lwt_io.input_channel
val out_channel_of_descr : socket -> Lwt_io.output_channel
val ssl_shutdown : socket -> unit Lwt.t
val abort : socket -> exn -> unit
val get_fd : socket -> Lwt_unix.file_descr
val get_unix_fd : socket -> Unix.file_descr
val getsockname : socket -> Unix.sockaddr
val getpeername : socket -> Unix.sockaddr
|
0dc2b4a62dbae957e831bcd11bd407ab3e417814bae0e10b3bcd62e65cf9b24d | silkapp/girella | Run.hs | -- | Import this module if you only want to run queries
module Girella.Run (module X) where
import Girella.Config as X
import Girella.Range as X
import Girella.Transaction as X
| null | https://raw.githubusercontent.com/silkapp/girella/f2fbbaaa2536f1afc5bc1a493d02f92cec81da18/src/Girella/Run.hs | haskell | | Import this module if you only want to run queries | module Girella.Run (module X) where
import Girella.Config as X
import Girella.Range as X
import Girella.Transaction as X
|
8b6868885b0b66a08bde3bf2114c268d5531e901d284e327f1b145042d20537c | rampion/tree-traversals | doctests.hs | module Main where
import Test.DocTest
main :: IO ()
main = doctest $ words "--preserve-it src/"
| null | https://raw.githubusercontent.com/rampion/tree-traversals/c2fa94eebe40eb1fcd7903d7758698aee57efdc8/doctests.hs | haskell | module Main where
import Test.DocTest
main :: IO ()
main = doctest $ words "--preserve-it src/"
|
|
78a042d6d2c48d37aed5b807b1c06af4890fc5d5aaaf563f92e0367c76330ee6 | BinaryAnalysisPlatform/bap | bil_piqi.ml | open Core_kernel[@@warning "-D"]
open Bap.Std
open Bil.Types
module P = Stmt_piqi
open Type
let casttype_to_piqi = function
| UNSIGNED -> `cast_unsigned
| SIGNED -> `cast_signed
| HIGH -> `cast_high
| LOW -> `cast_low
let casttype_of_piqi = function
| `cast_unsigned -> UNSIGNED
| `cast_signed -> SIGNED
| `cast_high -> HIGH
| `cast_low -> LOW
let unop_to_piqi : 'a -> Stmt_piqi.unop_type = function
| NEG -> `uneg
| NOT -> `unot
let unop_of_piqi = function
| `uneg -> NEG
| `unot -> NOT
let binop_to_piqi : 'a -> Stmt_piqi.binop_type = function
| PLUS -> `plus
| MINUS -> `minus
| TIMES -> `times
| DIVIDE -> `divide
| SDIVIDE -> `sdivide
| MOD -> `modbop
| SMOD -> `smod
| LSHIFT -> `lshift
| RSHIFT -> `rshift
| ARSHIFT -> `arshift
| AND -> `andbop
| OR -> `orbop
| XOR -> `x_or
| EQ -> `eq
| NEQ -> `neq
| LT -> `lt
| LE -> `le
| SLT -> `slt
| SLE -> `sle
let binop_of_piqi = function
| `plus -> PLUS
| `minus -> MINUS
| `times -> TIMES
| `divide -> DIVIDE
| `sdivide -> SDIVIDE
| `modbop -> MOD
| `smod -> SMOD
| `lshift -> LSHIFT
| `rshift -> RSHIFT
| `arshift -> ARSHIFT
| `andbop -> AND
| `orbop -> OR
| `x_or -> XOR
| `eq -> EQ
| `neq -> NEQ
| `lt -> LT
| `le -> LE
| `slt -> SLT
| `sle -> SLE
let type_to_piqi : typ -> Stmt_piqi.typ = function
| Imm s -> `imm s
| Mem (t, t') -> `mem {Stmt_piqi.Mem.index_type = t; element_type = t';}
| Unk -> `unk
let type_of_piqi = function
| `imm n -> Imm n
| `mem {P.Mem.index_type; element_type} -> Mem (index_type, element_type)
| `unk -> Type.Unk
let var_to_piqi v =
let module P = Stmt_piqi in {
P.Var.name = Var.name v;
P.Var.id = Var.index v;
P.Var.typ = type_to_piqi (Var.typ v);
P.Var.tmp = Var.is_virtual v;
}
let var_of_piqi { P.Var.name; id; typ; tmp} =
let module P = Stmt_piqi in
let v = Var.create ~is_virtual:tmp name (type_of_piqi typ) in
Var.with_index v id
let endianness_to_piqi : endian -> Stmt_piqi.endian = function
| LittleEndian -> `little_endian
| BigEndian -> `big_endian
let endianness_of_piqi = function
| `little_endian -> LittleEndian
| `big_endian -> BigEndian
let rec exp_to_piqi : exp -> Stmt_piqi.expr =
function
| Load (m, i, e, s) ->
let m = exp_to_piqi m in
let i = exp_to_piqi i in
let e = endianness_to_piqi e in
`load {P.Load.memory=m; address=i; endian=e; size=s;}
| Store (m, i, v, e, size) ->
let m = exp_to_piqi m in
let i = exp_to_piqi i in
let v = exp_to_piqi v in
let e = endianness_to_piqi e in
`store {P.Store.memory=m; address=i; value=v; endian=e; size;}
| BinOp (bop, e1, e2) ->
let bop = binop_to_piqi bop in
let e1 = exp_to_piqi e1 in
let e2 = exp_to_piqi e2 in
`binop {P.Binop.op=bop; lexp=e1; rexp=e2;}
| UnOp (uop, e) ->
let uop = unop_to_piqi uop in
let e = exp_to_piqi e in
`unop {P.Unop.op=uop; exp=e}
| Var v ->
`var (var_to_piqi v)
| Int v ->
`inte {P.Inte.int = Bitvector.to_string v;}
| Cast (ct, size, e) ->
let ct = casttype_to_piqi ct in
let e = exp_to_piqi e in
`cast {P.Cast.cast_type=ct; size; exp=e}
| Let (v, e, e') ->
let v = var_to_piqi v in
let e = exp_to_piqi e in
let e' = exp_to_piqi e' in
`let_exp {P.Let_exp.bound_var=v; definition=e; open_exp=e'}
| Unknown (s, t) ->
let t = type_to_piqi t in
`unknown {P.Unknown.descr=s; typ=t}
| Ite (e, te, fe) ->
let e = exp_to_piqi e in
let te = exp_to_piqi te in
let fe = exp_to_piqi fe in
`ite {P.Ite.condition=e; iftrue=te; iffalse=fe}
| Extract (h, l, e) ->
let e = exp_to_piqi e in
`extract {P.Extract.hbit=h; lbit=l; exp=e}
| Concat (e1, e2) ->
let e1 = exp_to_piqi e1 in
let e2 = exp_to_piqi e2 in
`concat {P.Concat.lexp=e1; rexp=e2}
let rec exp_of_piqi = function
| `load {P.Load.memory; address; endian; size} ->
let m = exp_of_piqi memory in
let i = exp_of_piqi address in
let e = endianness_of_piqi endian in
Load (m, i, e, size)
| `store {P.Store.memory; address; value; endian; size} ->
let m = exp_of_piqi memory in
let i = exp_of_piqi address in
let v = exp_of_piqi value in
let e = endianness_of_piqi endian in
Store (m, i, v, e, size)
| `binop {P.Binop.op; lexp; rexp} ->
let bop = binop_of_piqi op in
let e1 = exp_of_piqi lexp in
let e2 = exp_of_piqi rexp in
BinOp (bop, e1, e2)
| `unop {P.Unop.op; exp} ->
let uop = unop_of_piqi op in
let e = exp_of_piqi exp in
UnOp (uop, e)
| `var v ->
Var (var_of_piqi v)
| `inte {P.Inte.int=s} ->
Int (Bitvector.of_string s)
| `cast {P.Cast.cast_type; size; exp} ->
let ct = casttype_of_piqi cast_type in
let e = exp_of_piqi exp in
Cast (ct, size, e)
| `let_exp {P.Let_exp.bound_var; definition; open_exp} ->
let v = var_of_piqi bound_var in
let d = exp_of_piqi definition in
let e = exp_of_piqi open_exp in
Let (v, d, e)
| `unknown {P.Unknown.descr; typ} ->
let t = type_of_piqi typ in
Unknown (descr, t)
| `ite {P.Ite.condition; iftrue; iffalse} ->
let cond = exp_of_piqi condition in
let te = exp_of_piqi iftrue in
let fe = exp_of_piqi iffalse in
Ite (cond, te, fe)
| `extract {P.Extract.hbit; lbit; exp} ->
let e = exp_of_piqi exp in
Extract (hbit, lbit, e)
| `concat {P.Concat.lexp; rexp} ->
let e1 = exp_of_piqi lexp in
let e2 = exp_of_piqi rexp in
Concat (e1, e2)
let rec stmt_to_piqi : stmt -> Stmt_piqi.stmt = function
| Move (v, e) ->
let v = var_to_piqi v in
let e = exp_to_piqi e in
`move {P.Move.lvar=v; rexp=e}
| Jmp targ ->
let targ = exp_to_piqi targ in
`jmp {P.Jmp.target=targ}
| Special s -> `special s
| While (e, stmts) ->
let e = exp_to_piqi e in
let stmts = stmts_to_piqi stmts in
`while_stmt {P.While_stmt.cond=e; loop_body=stmts}
| If (e, then_branch, else_branch) ->
let e = exp_to_piqi e in
let then_branch = stmts_to_piqi then_branch in
let else_branch = stmts_to_piqi else_branch in
`if_stmt {P.If_stmt.cond=e; true_branch=then_branch; false_branch=else_branch}
| CpuExn n -> `cpuexn {P.Cpuexn.errnum=n}
and stmts_to_piqi l = List.map ~f:stmt_to_piqi l
let rec stmt_of_piqi = function
| `move {P.Move.lvar; rexp} ->
let v = var_of_piqi lvar in
let e = exp_of_piqi rexp in
Move (v, e)
| `jmp {P.Jmp.target} ->
let t = exp_of_piqi target in
Jmp t
| `special s -> Special s
| `while_stmt {P.While_stmt.cond; loop_body} ->
let e = exp_of_piqi cond in
let b = stmts_of_piqi loop_body in
While (e, b)
| `if_stmt {P.If_stmt.cond; true_branch; false_branch} ->
let e = exp_of_piqi cond in
let then_branch = stmts_of_piqi true_branch in
let else_branch = stmts_of_piqi false_branch in
If (e, then_branch, else_branch)
| `cpuexn {P.Cpuexn.errnum} -> CpuExn errnum
and stmts_of_piqi l = List.map ~f:stmt_of_piqi l
open Stmt_piqi_ext
type fmt = [ `json | `pb | `piq | `pib | `xml ] [@@deriving sexp, enumerate]
type out_fmt = [fmt | `json_pretty | `xml_pretty]
let loads f g fmt s = f (g s fmt)
let dumps g f fmt x = f (g x) (fmt : fmt :> out_fmt)
let bil_of_string = loads stmts_of_piqi parse_stmt_list
let stmt_of_string = loads stmt_of_piqi parse_stmt
let exp_of_string = loads exp_of_piqi parse_expr
let string_of_bil = dumps stmts_to_piqi gen_stmt_list
let string_of_stmt = dumps stmt_to_piqi gen_stmt
let string_of_exp = dumps exp_to_piqi gen_expr
let piqi_of_exp = exp_to_piqi
let piqi_of_var = var_to_piqi
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap/cbdf732d46c8e38df79d9942fc49bcb97915c657/lib/bap_piqi/bil_piqi.ml | ocaml | open Core_kernel[@@warning "-D"]
open Bap.Std
open Bil.Types
module P = Stmt_piqi
open Type
let casttype_to_piqi = function
| UNSIGNED -> `cast_unsigned
| SIGNED -> `cast_signed
| HIGH -> `cast_high
| LOW -> `cast_low
let casttype_of_piqi = function
| `cast_unsigned -> UNSIGNED
| `cast_signed -> SIGNED
| `cast_high -> HIGH
| `cast_low -> LOW
let unop_to_piqi : 'a -> Stmt_piqi.unop_type = function
| NEG -> `uneg
| NOT -> `unot
let unop_of_piqi = function
| `uneg -> NEG
| `unot -> NOT
let binop_to_piqi : 'a -> Stmt_piqi.binop_type = function
| PLUS -> `plus
| MINUS -> `minus
| TIMES -> `times
| DIVIDE -> `divide
| SDIVIDE -> `sdivide
| MOD -> `modbop
| SMOD -> `smod
| LSHIFT -> `lshift
| RSHIFT -> `rshift
| ARSHIFT -> `arshift
| AND -> `andbop
| OR -> `orbop
| XOR -> `x_or
| EQ -> `eq
| NEQ -> `neq
| LT -> `lt
| LE -> `le
| SLT -> `slt
| SLE -> `sle
let binop_of_piqi = function
| `plus -> PLUS
| `minus -> MINUS
| `times -> TIMES
| `divide -> DIVIDE
| `sdivide -> SDIVIDE
| `modbop -> MOD
| `smod -> SMOD
| `lshift -> LSHIFT
| `rshift -> RSHIFT
| `arshift -> ARSHIFT
| `andbop -> AND
| `orbop -> OR
| `x_or -> XOR
| `eq -> EQ
| `neq -> NEQ
| `lt -> LT
| `le -> LE
| `slt -> SLT
| `sle -> SLE
let type_to_piqi : typ -> Stmt_piqi.typ = function
| Imm s -> `imm s
| Mem (t, t') -> `mem {Stmt_piqi.Mem.index_type = t; element_type = t';}
| Unk -> `unk
let type_of_piqi = function
| `imm n -> Imm n
| `mem {P.Mem.index_type; element_type} -> Mem (index_type, element_type)
| `unk -> Type.Unk
let var_to_piqi v =
let module P = Stmt_piqi in {
P.Var.name = Var.name v;
P.Var.id = Var.index v;
P.Var.typ = type_to_piqi (Var.typ v);
P.Var.tmp = Var.is_virtual v;
}
let var_of_piqi { P.Var.name; id; typ; tmp} =
let module P = Stmt_piqi in
let v = Var.create ~is_virtual:tmp name (type_of_piqi typ) in
Var.with_index v id
let endianness_to_piqi : endian -> Stmt_piqi.endian = function
| LittleEndian -> `little_endian
| BigEndian -> `big_endian
let endianness_of_piqi = function
| `little_endian -> LittleEndian
| `big_endian -> BigEndian
let rec exp_to_piqi : exp -> Stmt_piqi.expr =
function
| Load (m, i, e, s) ->
let m = exp_to_piqi m in
let i = exp_to_piqi i in
let e = endianness_to_piqi e in
`load {P.Load.memory=m; address=i; endian=e; size=s;}
| Store (m, i, v, e, size) ->
let m = exp_to_piqi m in
let i = exp_to_piqi i in
let v = exp_to_piqi v in
let e = endianness_to_piqi e in
`store {P.Store.memory=m; address=i; value=v; endian=e; size;}
| BinOp (bop, e1, e2) ->
let bop = binop_to_piqi bop in
let e1 = exp_to_piqi e1 in
let e2 = exp_to_piqi e2 in
`binop {P.Binop.op=bop; lexp=e1; rexp=e2;}
| UnOp (uop, e) ->
let uop = unop_to_piqi uop in
let e = exp_to_piqi e in
`unop {P.Unop.op=uop; exp=e}
| Var v ->
`var (var_to_piqi v)
| Int v ->
`inte {P.Inte.int = Bitvector.to_string v;}
| Cast (ct, size, e) ->
let ct = casttype_to_piqi ct in
let e = exp_to_piqi e in
`cast {P.Cast.cast_type=ct; size; exp=e}
| Let (v, e, e') ->
let v = var_to_piqi v in
let e = exp_to_piqi e in
let e' = exp_to_piqi e' in
`let_exp {P.Let_exp.bound_var=v; definition=e; open_exp=e'}
| Unknown (s, t) ->
let t = type_to_piqi t in
`unknown {P.Unknown.descr=s; typ=t}
| Ite (e, te, fe) ->
let e = exp_to_piqi e in
let te = exp_to_piqi te in
let fe = exp_to_piqi fe in
`ite {P.Ite.condition=e; iftrue=te; iffalse=fe}
| Extract (h, l, e) ->
let e = exp_to_piqi e in
`extract {P.Extract.hbit=h; lbit=l; exp=e}
| Concat (e1, e2) ->
let e1 = exp_to_piqi e1 in
let e2 = exp_to_piqi e2 in
`concat {P.Concat.lexp=e1; rexp=e2}
let rec exp_of_piqi = function
| `load {P.Load.memory; address; endian; size} ->
let m = exp_of_piqi memory in
let i = exp_of_piqi address in
let e = endianness_of_piqi endian in
Load (m, i, e, size)
| `store {P.Store.memory; address; value; endian; size} ->
let m = exp_of_piqi memory in
let i = exp_of_piqi address in
let v = exp_of_piqi value in
let e = endianness_of_piqi endian in
Store (m, i, v, e, size)
| `binop {P.Binop.op; lexp; rexp} ->
let bop = binop_of_piqi op in
let e1 = exp_of_piqi lexp in
let e2 = exp_of_piqi rexp in
BinOp (bop, e1, e2)
| `unop {P.Unop.op; exp} ->
let uop = unop_of_piqi op in
let e = exp_of_piqi exp in
UnOp (uop, e)
| `var v ->
Var (var_of_piqi v)
| `inte {P.Inte.int=s} ->
Int (Bitvector.of_string s)
| `cast {P.Cast.cast_type; size; exp} ->
let ct = casttype_of_piqi cast_type in
let e = exp_of_piqi exp in
Cast (ct, size, e)
| `let_exp {P.Let_exp.bound_var; definition; open_exp} ->
let v = var_of_piqi bound_var in
let d = exp_of_piqi definition in
let e = exp_of_piqi open_exp in
Let (v, d, e)
| `unknown {P.Unknown.descr; typ} ->
let t = type_of_piqi typ in
Unknown (descr, t)
| `ite {P.Ite.condition; iftrue; iffalse} ->
let cond = exp_of_piqi condition in
let te = exp_of_piqi iftrue in
let fe = exp_of_piqi iffalse in
Ite (cond, te, fe)
| `extract {P.Extract.hbit; lbit; exp} ->
let e = exp_of_piqi exp in
Extract (hbit, lbit, e)
| `concat {P.Concat.lexp; rexp} ->
let e1 = exp_of_piqi lexp in
let e2 = exp_of_piqi rexp in
Concat (e1, e2)
let rec stmt_to_piqi : stmt -> Stmt_piqi.stmt = function
| Move (v, e) ->
let v = var_to_piqi v in
let e = exp_to_piqi e in
`move {P.Move.lvar=v; rexp=e}
| Jmp targ ->
let targ = exp_to_piqi targ in
`jmp {P.Jmp.target=targ}
| Special s -> `special s
| While (e, stmts) ->
let e = exp_to_piqi e in
let stmts = stmts_to_piqi stmts in
`while_stmt {P.While_stmt.cond=e; loop_body=stmts}
| If (e, then_branch, else_branch) ->
let e = exp_to_piqi e in
let then_branch = stmts_to_piqi then_branch in
let else_branch = stmts_to_piqi else_branch in
`if_stmt {P.If_stmt.cond=e; true_branch=then_branch; false_branch=else_branch}
| CpuExn n -> `cpuexn {P.Cpuexn.errnum=n}
and stmts_to_piqi l = List.map ~f:stmt_to_piqi l
let rec stmt_of_piqi = function
| `move {P.Move.lvar; rexp} ->
let v = var_of_piqi lvar in
let e = exp_of_piqi rexp in
Move (v, e)
| `jmp {P.Jmp.target} ->
let t = exp_of_piqi target in
Jmp t
| `special s -> Special s
| `while_stmt {P.While_stmt.cond; loop_body} ->
let e = exp_of_piqi cond in
let b = stmts_of_piqi loop_body in
While (e, b)
| `if_stmt {P.If_stmt.cond; true_branch; false_branch} ->
let e = exp_of_piqi cond in
let then_branch = stmts_of_piqi true_branch in
let else_branch = stmts_of_piqi false_branch in
If (e, then_branch, else_branch)
| `cpuexn {P.Cpuexn.errnum} -> CpuExn errnum
and stmts_of_piqi l = List.map ~f:stmt_of_piqi l
open Stmt_piqi_ext
type fmt = [ `json | `pb | `piq | `pib | `xml ] [@@deriving sexp, enumerate]
type out_fmt = [fmt | `json_pretty | `xml_pretty]
let loads f g fmt s = f (g s fmt)
let dumps g f fmt x = f (g x) (fmt : fmt :> out_fmt)
let bil_of_string = loads stmts_of_piqi parse_stmt_list
let stmt_of_string = loads stmt_of_piqi parse_stmt
let exp_of_string = loads exp_of_piqi parse_expr
let string_of_bil = dumps stmts_to_piqi gen_stmt_list
let string_of_stmt = dumps stmt_to_piqi gen_stmt
let string_of_exp = dumps exp_to_piqi gen_expr
let piqi_of_exp = exp_to_piqi
let piqi_of_var = var_to_piqi
|
|
10d4ba0668be2025c88ad6cfce8c7c87c36bff467ce71f77e238543e0e5e3ef8 | ygrek/mldonkey | bTProtocol.ml | Copyright 2001 , 2002 b8_bavard , b8_fee_carabine ,
This file is part of mldonkey .
mldonkey is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
mldonkey is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with mldonkey ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This file is part of mldonkey.
mldonkey is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
mldonkey is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mldonkey; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
1 . Download the .torrent file
* * * * * * * * * * * * * * * * * * * * * * * * * * * * *
{
" announce " = " :6969/announce " ;
" info " = {
" files " = [
{
" length " = 682164224 ;
" path " = [ " Mandrake91-cd1-inst.i586.iso " ; ]
}
;
{
" length " = 681279488 ;
" path " = [
" Mandrake91-cd2-ext.i586.iso " ;
]
;
}
;
{
" length " = 681574400 ;
" path " = [
" Mandrake91-cd3-i18n.i586.iso " ;
]
;
}
;
]
;
" name " = " mandrake9.1 " ;
" piece length " = 262144 ;
" pieces " = " [ EAd\155ã´gÛ ÓþËf\134Ê«\025\016ô͵,1U\150À
\132\147îª\n%ù\\é,\012ÿC\008GÈÓd!æ¾öuL!\134Ô\016\152&\017¾\008³¢d\029Ë3\031Ï\134
# » ×\025\137¡=¢. ® \019§´\138î.ñ\151O\137Ùÿ,£ç&\019Àۢç\156.ù\150 < Eªª\153\018\145\
149d\147[+J=º\155l\139Î\028¡dVÉ\000-\017 ° Ť\013\154¼>A¹Ã5ïIt\007\020 © ãÚÀÈÈ\014O ®
ô1\152UÄ\026K\021^ãúì5Í¿ü \026\149\131q\024\015¸]Òþ£\027&\148\\ã- © \028WMÂ5 ... " ;
}
;
}
2 . Extract BitTorrent information needed :
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Metainfo files are bencoded dictionaries with the following keys -
' announce '
The url of the tracker .
' info '
This maps to a dictionary , with keys described below .
The ' name ' key maps to a string which is the suggested name to save
the file ( or directory ) as . It is purely advisory .
' piece length ' maps to the number of bytes in each piece the file is
split into . For the purposes of transfer , files are split into
fixed - size pieces which are all the same length except for possibly
the last one which may be truncated . Piece length is almost always a
power of two , most commonly 2 ^ 20 .
' pieces ' maps to a string whose length is a multiple of 20 . It is to
be subdivided into strings of length 20 , each of which is the sha1
hash of the piece at the corresponding index .
There is also a key ' length ' or a key ' files ' , but not both or
neither . If ' length ' is present then the download represents a
single file , otherwise it represents a set of files which go in a
directory structure .
In the single file case , ' length ' maps to the length of the file in
bytes .
For the purposes of the other keys , the multi - file case is treated
as only having a single file by concatenating the files in the order
they appear in the files list . The files list is the value ' files '
maps to , and is a list of dictionaries containing the following keys -
' length '
The length of the file , in bytes . ' path '
A list of strings corresponding to subdirectory names , the last of
which is the actual file name ( a zero length list is an error case ) .
In the single file case , the ' name ' key is the name of a file , in the
muliple file case , it 's the name of a directory .
3 . Contact the tracker regularly to update file information
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Tracker GET requests have the following keys by HTTP :
' info_hash '
The 20 byte sha1 hash of the bencoded form of the ' info ' value from
the metainfo file . Note that this is a substring of the metainfo
file . This value will almost certainly have to be escaped .
' peer_id '
A string of length 20 which this downloader uses as its i d. Each
downloader generates its own i d at random at the start of a new
download . This value will also almost certainly have to be escaped .
' ip '
An optional parameter giving the ip ( or dns name ) which this peer is
at . Generally used for the origin if it 's on the same machine as the
tracker .
' port '
The port number this peer is listening on . Common behavior is for a
downloader to try to listen on port 6881 and if that port is taken
try 6882 , then 6883 , etc . and give up after 6889 .
' uploaded '
The total amount uploaded so far , encoded in base ten ascii .
' downloaded '
The total amount downloaded so far , encoded in base ten ascii .
' left '
The number of bytes this peer still has to download , encoded in base
ten ascii . Note that this ca n't be computed from downloaded and the
file length since it might be a resume , and there 's a chance that
some of the downloaded data failed an integrity check and had to be
re - downloaded .
' event '
This is an optional key which maps to ' started ' , ' completed ' , or
' stopped ' ( or '' , which is the same as not being present ) .
--- > bencoded replu :
{ ' failure reason ' = ... }
or
{
' interval ' = .... ; ( * before next request to tracker
1. Download the .torrent file
*****************************
{
"announce" = ":6969/announce";
"info" = {
"files" = [
{
"length" = 682164224;
"path" = [ "Mandrake91-cd1-inst.i586.iso"; ]
}
;
{
"length" = 681279488;
"path" = [
"Mandrake91-cd2-ext.i586.iso";
]
;
}
;
{
"length" = 681574400;
"path" = [
"Mandrake91-cd3-i18n.i586.iso";
]
;
}
;
]
;
"name" = "mandrake9.1";
"piece length" = 262144;
"pieces" = "[EAd\155ã´gÛ ÓþËf\134Ê«\025\016ô͵,1U\150À
\132\147îª\n%ù\\é,\012ÿC\008GÈÓd!æ¾öuL!\134Ô\016\152&\017¾\008³¢d\029Ë3\031Ï\134
#»×\025\137¡=¢.®\019§´\138î.ñ\151O\137Ùÿ,£ç&\019Àۢç\156.ù\150<Eªª\153\018\145\
149d\147[+J=º\155l\139Î\028¡dVÉ\000-\017°Å¤\013\154¼>A¹Ã5ïIt\007\020©ãÚÀÈÈ\014O®
ô1\152UÄ\026K\021^ãúì5Í¿ü \026\149\131q\024\015¸]Òþ£\027&\148\\ã-©\028WMÂ5...";
}
;
}
2. Extract BitTorrent information needed:
*****************************************
Metainfo files are bencoded dictionaries with the following keys -
'announce'
The url of the tracker.
'info'
This maps to a dictionary, with keys described below.
The 'name' key maps to a string which is the suggested name to save
the file (or directory) as. It is purely advisory.
'piece length' maps to the number of bytes in each piece the file is
split into. For the purposes of transfer, files are split into
fixed-size pieces which are all the same length except for possibly
the last one which may be truncated. Piece length is almost always a
power of two, most commonly 2^20 .
'pieces' maps to a string whose length is a multiple of 20. It is to
be subdivided into strings of length 20, each of which is the sha1
hash of the piece at the corresponding index.
There is also a key 'length' or a key 'files', but not both or
neither. If 'length' is present then the download represents a
single file, otherwise it represents a set of files which go in a
directory structure.
In the single file case, 'length' maps to the length of the file in
bytes.
For the purposes of the other keys, the multi-file case is treated
as only having a single file by concatenating the files in the order
they appear in the files list. The files list is the value 'files'
maps to, and is a list of dictionaries containing the following keys -
'length'
The length of the file, in bytes. 'path'
A list of strings corresponding to subdirectory names, the last of
which is the actual file name (a zero length list is an error case).
In the single file case, the 'name' key is the name of a file, in the
multiple file case, it's the name of a directory.
3. Contact the tracker regularly to update file information
***********************************************************
Tracker GET requests have the following keys by HTTP:
'info_hash'
The 20 byte sha1 hash of the bencoded form of the 'info' value from
the metainfo file. Note that this is a substring of the metainfo
file. This value will almost certainly have to be escaped.
'peer_id'
A string of length 20 which this downloader uses as its id. Each
downloader generates its own id at random at the start of a new
download. This value will also almost certainly have to be escaped.
'ip'
An optional parameter giving the ip (or dns name) which this peer is
at. Generally used for the origin if it's on the same machine as the
tracker.
'port'
The port number this peer is listening on. Common behavior is for a
downloader to try to listen on port 6881 and if that port is taken
try 6882, then 6883, etc. and give up after 6889.
'uploaded'
The total amount uploaded so far, encoded in base ten ascii.
'downloaded'
The total amount downloaded so far, encoded in base ten ascii.
'left'
The number of bytes this peer still has to download, encoded in base
ten ascii. Note that this can't be computed from downloaded and the
file length since it might be a resume, and there's a chance that
some of the downloaded data failed an integrity check and had to be
re-downloaded.
'event'
This is an optional key which maps to 'started', 'completed', or
'stopped' (or '', which is the same as not being present).
---> bencoded replu:
{ 'failure reason' = ... }
or
{
'interval' = ....; (* before next request to tracker *)
'peers' = [
{
'peer id' = ....;
'ip' - ....;
'port' = ....;
};
....
]
}
4. Contact every peer regularly
*******************************
Handshake:
type int = BigEndian.int32
--->
string8 (prefixed by length): "BitTorrent protocol"
int8[8]: reserved(zeros)
int8[20 bytes]: Sha1.string (Bencode.encode file.file_info)
int8[20 bytes]: peer id
<---
string8 (prefixed by length): "BitTorrent protocol"
int8[8]: reserved(zeros)
int8[20 bytes]: Sha1.string (Bencode.encode file.file_info)
int8[20 bytes]: peer id
----> disconnect if sha1 don't match, or if peer id is unexpected
msg:
int: len of message (byte+payload) 0 -> keepalive sent every 2 minutes
byte8: opcode of message
int8[..]: payload
opcodes:
Connections start out choked and not interested.
No payload:
* 0 - choke: you have been blocked
* 1 - unchoke: you have been unblocked
* 2 - interested: I'm interested in downloading this file now
* 3 - not interested: I'm not interested in downloading this file now
With payload:
* 4 - have
int : index of new completed chunk
* 5 - bitfield:
string: a bitfield of bit 1 for downloaded chunks
byte: bits are inverted 0....7 ---> 7 .... 0
* 6 - request
int: index
int: begin
int: length (power of 2, 2 ^ 15)
* 7 - piece
int: index
int: begin
string: piece
* 8 - cancel: cancel a request
int: index
int: begin
int: length (power of 2, 2 ^ 15)
* 9 - DHT port announcement
int16: UDP port
* 20 - extended
byte: extended message ID (0 = handshake)
Choke/unchoke every 10 seconds
*)
open BasicSocket
open Printf2
open CommonOptions
open Md4
open CommonGlobals
open BigEndian
open TcpBufferedSocket
open AnyEndian
open BTTypes
(* Prefix prepended to every log line emitted by this module. *)
let log_prefix = "[BT]"
(* Printf-style logger that prepends [log_prefix] to each line. *)
let lprintf_nl fmt =
  lprintf_nl2 log_prefix fmt
(* Cookie echoed back in the reply to an Azureus "NATCHECK_HANDSHAKE"
   probe (see [handlers]); initialized to 0 here, presumably set
   elsewhere at startup -- confirm against the rest of the file. *)
let azureus_porttest_random = ref 0
(* Per-connection protocol state:
   - [BTHeader h]: still waiting for the handshake; [h] receives
     (protocol string, reserved bits, info_hash).
   - [Reader h]: handshake done; [h] consumes regular framed messages. *)
type ghandler =
  BTHeader of (gconn -> TcpBufferedSocket.t ->
    (string * string * Sha1.t) -> unit)
| Reader of (gconn -> TcpBufferedSocket.t -> unit)
and gconn = {
    mutable gconn_handler : ghandler; (* current parsing state *)
    mutable gconn_refill : (TcpBufferedSocket.t -> unit) list; (* queued writers *)
    mutable gconn_close_on_write : bool; (* shorten lifetime once queue drains *)
  }
(** Encoding and decoding of BitTorrent peer-wire messages (BEP 3).
    Every message on the wire is [<int32 length><int8 opcode><payload>];
    a length of zero (no opcode byte) is the keep-alive. *)
module TcpMessages = struct

    (* One peer-wire message.  [Piece (index, offset, s, pos, len)]
       keeps its payload as the substring of [s] starting at [pos] of
       length [len], to avoid copying the received block. *)
    type msg =
    | Choke
    | Unchoke
    | Interested
    | NotInterested
    | Have of int64
    | BitField of string
    | Request of int * int64 * int64
    | Piece of int * int64 * string * int * int
    | Cancel of int * int64 * int64
    | Ping
    | PeerID of string
    | DHT_Port of int
    | Extended of int * string

    (* Human-readable rendering, used only for protocol tracing. *)
    let to_string msg =
      match msg with
      | Choke -> "Choke"
      | Unchoke -> "Unchoke"
      | Interested -> "Interested"
      | NotInterested -> "NotInterested"
      | Have n -> Printf.sprintf "Have %Ld" n
      | BitField s -> Printf.sprintf "BitField %s" (String.escaped s)
      | Request (index, offset, len) ->
          Printf.sprintf "Request %d %Ld[%Ld]" index offset len
      | Piece (index, offset, s, pos, len) ->
          Printf.sprintf "Piece %d %Ld[%d]" index offset len
      | Cancel (index, offset, len) ->
          Printf.sprintf "Cancel %d %Ld[%Ld]" index offset len
      | Ping -> "Ping"
      | PeerID s -> Printf.sprintf "PeerID [%s]" (String.escaped s)
      | DHT_Port n -> Printf.sprintf "DHT_Port %d" n
      | Extended (n, s) -> Printf.sprintf "Extended [%d %s]" n (String.escaped s)

    (* Decode the payload [m] of a message with the given [opcode].
       Opcode -1 is a local pseudo-opcode for the raw 20-byte peer_id
       that trails the handshake.  Raises [Not_found] on an unknown
       opcode. *)
    let parsing opcode m =
      match opcode with
      | 0 -> Choke
      | 1 -> Unchoke
      | 2 -> Interested
      | 3 -> NotInterested
      | 4 -> Have (get_uint64_32 m 0)
      | 5 -> BitField m
      | 6 -> Request (get_int m 0, get_uint64_32 m 4, get_uint64_32 m 8)
      | 7 -> Piece (get_int m 0, get_uint64_32 m 4, m, 8, String.length m - 8)
      | 8 -> Cancel (get_int m 0, get_uint64_32 m 4, get_uint64_32 m 8)
      | 9 -> DHT_Port (get_int16 m 0)
      | 20 -> Extended (get_int8 m 0, String.sub m 1 (String.length m - 1))
      | -1 -> PeerID m
      | _ -> raise Not_found

    (* Scratch buffer shared by [write]; reset on each call, so [write]
       is not reentrant (callers run in the single event loop). *)
    let buf = Buffer.create 100

    (* Encode [msg] to its wire form, 4-byte length prefix included.
       The length slot is written as 0 first and patched at the end.
       [Ping] and [PeerID] encode as a bare zero-length keep-alive: the
       real peer_id is written during the handshake, not here. *)
    let write msg =
      Buffer.reset buf;
      begin
        buf_int buf 0;
        match msg with
        | Choke -> buf_int8 buf 0
        | Unchoke -> buf_int8 buf 1
        | Interested -> buf_int8 buf 2
        | NotInterested -> buf_int8 buf 3
        | Have i -> buf_int8 buf 4; buf_int64_32 buf i
        | BitField string -> buf_int8 buf 5; Buffer.add_string buf string
        | Request (index, pos, len) ->
            buf_int8 buf 6;
            buf_int buf index; buf_int64_32 buf pos; buf_int64_32 buf len
        | Piece (num, index, s, pos, len) ->
            buf_int8 buf 7;
            buf_int buf num;
            buf_int64_32 buf index;
            Buffer.add_substring buf s pos len
        | Cancel (index, pos, len) ->
            (* BUGFIX: previously encoded as an empty keep-alive, so
               request cancellations were never actually sent.  BEP 3:
               cancel = <id=8><index><begin><length>, mirroring the
               decoder for opcode 8 above. *)
            buf_int8 buf 8;
            buf_int buf index; buf_int64_32 buf pos; buf_int64_32 buf len
        | PeerID _ -> ()
        | Ping -> ()
        | DHT_Port n -> buf_int8 buf 9; buf_int16 buf n
        | Extended (n,msg) -> buf_int8 buf 20; buf_int8 buf n; Buffer.add_string buf msg
      end;
      let s = Buffer.contents buf in
      str_int s 0 (String.length s - 4);
      s
  end
(*************************************************************************)
(* *)
(* UdpMessages *)
(* *)
(*************************************************************************)
module UdpMessages = struct
type t =
PingReq of int * string * string
| of int * string * string
| NodePongReq of int * string
| UnknownReq of int * string
let extract_string s pos =
let end_pos = String.index_from \000 ' in
String.sub s pos ( end_pos - pos ) , pos + 1
let parse p =
match int_of_char p.[0 ] with
| 0x27 - >
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 1 in
let netname , pos = extract_string p 6 in
PingReq ( min_enc_type , unknown , netname )
| 0x28 - >
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 6 in
let netname , pos = extract_string p 11 in
( min_enc_type , unknown , netname )
| 0x29 - >
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 ( String.length p - 5 ) in
NodePongReq ( min_enc_type , unknown )
| n - > UnknownReq ( n , p )
let write p =
let b = Buffer.create 100 in
begin
match p with
| PingReq ( min_enc_type , unknown , netname ) - >
buf_int8 b 0x27 ;
buf_int b min_enc_type ;
Buffer.add_string b unknown ;
Buffer.add_string b netname ;
buf_int8 b 0x00
| ( min_enc_type , unknown , netname ) - >
buf_int8 b 0x28 ;
buf_int b min_enc_type ;
Buffer.add_string b unknown ;
Buffer.add_string b netname ;
buf_int8 b 0x00
| NodePongReq ( min_enc_type , unknown ) - >
buf_int8 b 0x29 ;
buf_int b min_enc_type ;
Buffer.add_string b unknown
( opcode , unknown ) - >
Buffer.add_string b unknown ;
end ;
Buffer.contents b
let to_string p =
let b = Buffer.create 100 in
begin
match p with
| PingReq ( min_enc_type , unknown , netname ) - >
Printf.bprintf b " ( % d , " min_enc_type ;
bprint_ints b unknown ;
Printf.bprintf b " , % s ) " netname
| ( min_enc_type , unknown , netname ) - >
Printf.bprintf b " ( % d , " min_enc_type ;
bprint_ints b unknown ;
Printf.bprintf b " , % s ) " netname
| NodePongReq ( min_enc_type , unknown ) - >
Printf.bprintf b " NodePong ( % d , " min_enc_type ;
bprint_ints b unknown ;
Printf.bprintf b " ) "
( opcode , unknown ) - >
Printf.bprintf b " Unknown \n " ;
bprint_ints b unknown ;
Printf.bprintf b " \n " ;
bprint_chars b unknown ;
Printf.bprintf b " \n "
end ;
Buffer.contents b
let udp_send t ip port ping msg =
if ! verbose_udp then begin
lprintf " Message UDP to % s:%d\n%s\n " ( Ip.to_string ip ) port
( to_string msg ) ;
end ;
try
let s = write msg in
UdpSocket.write t ping s ip port
with e - >
lprintf " FT : Exception % s in udp_send\n " ( Printexc2.to_string e )
end
module UdpMessages = struct
type t =
PingReq of int * string * string
| SupernodePongReq of int * string * string
| NodePongReq of int * string
| UnknownReq of int * string
let extract_string s pos =
let end_pos = String.index_from s pos '\000' in
String.sub s pos (end_pos - pos), pos + 1
let parse p =
match int_of_char p.[0] with
| 0x27 ->
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 1 in
let netname, pos = extract_string p 6 in
PingReq (min_enc_type, unknown, netname)
| 0x28 ->
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 6 in
let netname, pos = extract_string p 11 in
SupernodePongReq (min_enc_type, unknown, netname)
| 0x29 ->
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 (String.length p - 5) in
NodePongReq (min_enc_type, unknown)
| n -> UnknownReq (n, p)
let write p =
let b = Buffer.create 100 in
begin
match p with
| PingReq (min_enc_type, unknown, netname) ->
buf_int8 b 0x27;
buf_int b min_enc_type;
Buffer.add_string b unknown;
Buffer.add_string b netname;
buf_int8 b 0x00
| SupernodePongReq (min_enc_type, unknown, netname) ->
buf_int8 b 0x28;
buf_int b min_enc_type;
Buffer.add_string b unknown;
Buffer.add_string b netname;
buf_int8 b 0x00
| NodePongReq (min_enc_type, unknown) ->
buf_int8 b 0x29;
buf_int b min_enc_type;
Buffer.add_string b unknown
| UnknownReq (opcode, unknown) ->
Buffer.add_string b unknown;
end;
Buffer.contents b
let to_string p =
let b = Buffer.create 100 in
begin
match p with
| PingReq (min_enc_type, unknown, netname) ->
Printf.bprintf b "Ping (%d, " min_enc_type;
bprint_ints b unknown;
Printf.bprintf b ", %s)" netname
| SupernodePongReq (min_enc_type, unknown, netname) ->
Printf.bprintf b "SupernodePong (%d, " min_enc_type;
bprint_ints b unknown;
Printf.bprintf b ", %s)" netname
| NodePongReq (min_enc_type, unknown) ->
Printf.bprintf b "NodePong (%d, " min_enc_type;
bprint_ints b unknown;
Printf.bprintf b ")"
| UnknownReq (opcode, unknown) ->
Printf.bprintf b "Unknown \n ";
bprint_ints b unknown;
Printf.bprintf b "\n ";
bprint_chars b unknown;
Printf.bprintf b "\n"
end;
Buffer.contents b
let udp_send t ip port ping msg =
if !verbose_udp then begin
lprintf "Message UDP to %s:%d\n%s\n" (Ip.to_string ip) port
(to_string msg);
end;
try
let s = write msg in
UdpSocket.write t ping s ip port
with e ->
lprintf "FT: Exception %s in udp_send\n" (Printexc2.to_string e)
end
*)
(* Internal control-flow exception: not enough bytes buffered yet to
   parse the next item; the argument names the parsing stage. *)
exception Wait_for_more of string
(* Incremental reader for the BT peer-wire stream on [sock].  First
   consumes the 20-byte peer_id that trails the handshake (unless
   already seen), then repeatedly parses <int32 length><opcode><payload>
   frames with [parse_fun] and feeds the decoded message to [handler].
   Raises [Wait_for_more] internally when the buffer holds only a
   partial item.
   NOTE(review): several marker-less comment paragraphs and bare
   lprintf lines below appear to be comments whose delimiters were
   stripped during extraction -- confirm against upstream before
   treating them as live code. *)
let bt_handler parse_fun handler c sock =
try
let b = TcpBufferedSocket.buf sock in
if not c.client_received_peer_id then
begin
we get and parse the peer_id here because it may
not be sent from trackers that test us for NAT
( they just wait for our handshake response and
then drop the connection )
not be sent from trackers that test us for NAT
(they just wait for our handshake response and
then drop the connection) *)
if b.len >= 20 then
begin
let payload = String.sub b.buf b.pos 20 in
let p = parse_fun (-1) payload in
buf_used b 20;
c.client_received_peer_id <- true;
try
handler sock p;
with e ->
lprintf_nl "Exception %s in BTProtocol.parse_fun while handling peer_id"
(Printexc2.to_string e);
dump payload;
buf_used b b.len;
close sock Closed_by_user
end
else raise (Wait_for_more "peer_id");
must break the loop even if there is data , because the socket
could be closed beneath our feet and then b.buf seems to be zero length
regardless of what b.len tells ( this is a bug somewhere in
tcpBufferedSocket i think )
could be closed beneath our feet and then b.buf seems to be zero length
regardless of what b.len tells (this is a bug somewhere in
tcpBufferedSocket i think) *)
raise (Wait_for_more "after_peer_id");
end;
(* Frame loop: each iteration consumes one complete message. *)
while b.len >= 4 do
let msg_len = get_int b.buf b.pos in
(* A negative length means the stream is corrupt: drop the peer. *)
if msg_len < 0 then
begin
let (ip,port) = (TcpBufferedSocket.peer_addr sock) in
lprintf_nl "BT: Unknown message from %s:%d dropped!! peerid:%b data_len:%i msg_len:%i software: %s"
(Ip.to_string ip) port c.client_received_peer_id b.len msg_len (brand_to_string c.client_brand);
dump (String.sub b.buf b.pos (min b.len 30));
buf_used b b.len;
close sock Closed_by_user;
end
else if msg_len > 20000 then
We NEVER request pieces greater than size 20000 , this client is
trying to waste our bandwidth ?
trying to waste our bandwidth ? *)
begin
let (ip,port) = (TcpBufferedSocket.peer_addr sock) in
lprintf_nl "btprotocol.bt_handler: closed connection from %s:%d because of too much data!! data_len:%i msg_len:%i software: %s"
(Ip.to_string ip) port b.len msg_len (brand_to_string c.client_brand);
dump (String.sub b.buf b.pos (min b.len 30));
buf_used b b.len;
close sock Closed_by_user
end
(* Complete frame available: consume length prefix, then payload. *)
else if b.len >= 4 + msg_len then
begin
buf_used b 4;
(* lprintf "Message complete: %d\n" msg_len; *)
if msg_len > 0 then
let opcode = get_int8 b.buf b.pos in
(* FIXME sub *)
let payload = String.sub b.buf (b.pos+1) (msg_len-1) in
buf_used b msg_len;
lprintf " Opcode % d\n " opcode ;
try
We use opcodes < 0 and
they do n't occur in the spec
they don't occur in the spec
*)
if opcode < 0 then raise Not_found;
let p = parse_fun opcode payload in
lprintf " , calling handler\n " ;
handler sock p
with e ->
lprintf_nl "Exception %s in BTProtocol.parse_fun while handling message with opcode: %d"
(Printexc2.to_string e) opcode;
dump payload;
else
(*received a ping*)
set_lifetime sock 130.
end
else raise (Wait_for_more "message")
done;
(* Leftover partial frame: signal it so the outer handler can log. *)
if b.len != 0 then raise (Wait_for_more "loop")
with
| Wait_for_more s ->
if closed sock && s <> "after_peer_id" then
lprintf_nl "bt_handler: Socket was closed while waiting for more data in %s" s
| e ->
lprintf_nl "Exception %s in bt_handler"
(Printexc2.to_string e)
(* Build the low-level read callback for a connection.  In [BTHeader]
   state it waits for the full handshake -- 1 length byte + [slen]
   protocol bytes + 8 reserved bytes + 20 info_hash bytes, i.e.
   [slen + 29] total -- extracts (protocol, reserved bits, info_hash)
   and hands them to [h]; it also answers Azureus
   "NATCHECK_HANDSHAKE" port probes.  In [Reader] state it simply
   delegates to the installed message handler.
   NOTE(review): the bare [dump]/[lprintf]-looking lines below appear
   to be comments whose (* *) markers were stripped during extraction
   -- confirm against upstream. *)
let handlers info gconn =
let iter_read sock nread =
lprintf " iter_read % d\n " nread ;
let b = TcpBufferedSocket.buf sock in
if b.len > 0 then
match gconn.gconn_handler with
| BTHeader h ->
dump ( String.sub b.buf b.pos ( min b.len 100 ) ) ;
let slen = get_int8 b.buf b.pos in
if slen + 29 <= b.len then
begin
get proto and file_id from handshake ,
peer_id is not fetched here because
it might be late or not present
peer_id is not fetched here because
it might be late or not present
*)
let proto = String.sub b.buf ( b.pos+1 ) slen in
let file_id = Sha1.direct_of_string
(String.sub b.buf (b.pos+9+slen) 20) in
let proto,pos = get_string8 b.buf b.pos in
let rbits = (String.sub b.buf (b.pos+pos) 8) in
buf_used b (slen+29);
h gconn sock (proto, rbits, file_id);
end
else
(* Not a BT handshake: maybe an Azureus NAT-check probe. *)
if (String.sub b.buf b.pos (min b.len 100)) = "NATCHECK_HANDSHAKE" then
write_string sock (Printf.sprintf "azureus_rand_%d" !azureus_porttest_random)
else if (TcpBufferedSocket.closed sock) then
let (ip,port) = (TcpBufferedSocket.peer_addr sock) in
lprintf_nl "bt-handshake: closed sock from %s:%d b.len:%i slen:%i"
(Ip.to_string ip) port b.len slen;
| Reader h ->
h gconn sock
in
iter_read
(* Attach the BT protocol machinery to [sock]: install the incremental
   reader built by [handlers], and wire the refill queue so that the
   head writer runs on refill and the next queued writer runs whenever
   a write completes.  When the queue drains and
   [gconn_close_on_write] is set, the socket lifetime is shortened. *)
let set_bt_sock sock info ghandler =
let gconn = {
gconn_handler = ghandler;
gconn_refill = [];
gconn_close_on_write = false;
} in
TcpBufferedSocket.set_reader sock (handlers info gconn);
TcpBufferedSocket.set_refill sock (fun sock ->
match gconn.gconn_refill with
[] -> ()
| refill :: _ -> refill sock
);
TcpBufferedSocket.set_handler sock TcpBufferedSocket.WRITE_DONE (
fun sock ->
match gconn.gconn_refill with
[] -> ()
| _ :: tail ->
gconn.gconn_refill <- tail;
match tail with
[] ->
if gconn.gconn_close_on_write then
set_lifetime sock 30.
(* NOTE(review): the next line looks like a stripped comment; in
   upstream it appears to be disabled code -- confirm before
   treating it as live. *)
TcpBufferedSocket.close sock " write done "
| refill :: _ -> refill sock)
(* Serialize [msg] and send it on [client_sock]'s socket, doing nothing
   if the client is not currently connected.  When verbose client
   tracing is enabled the message is logged first; any exception during
   encoding or writing is caught and logged rather than propagated. *)
let send_client client_sock msg =
  do_if_connected client_sock (fun sock ->
    try
      let wire = TcpMessages.write msg in
      if !verbose_msg_clients then
        lprintf_nl "send message: %s" (TcpMessages.to_string msg);
      write_string sock wire
    with e ->
      lprintf_nl "CLIENT : Error %s in send_client"
        (Printexc2.to_string e))
| null | https://raw.githubusercontent.com/ygrek/mldonkey/333868a12bb6cd25fed49391dd2c3a767741cb51/src/networks/bittorrent/bTProtocol.ml | ocaml | before next request to tracker
***********************************************************************
UdpMessages
***********************************************************************
lprintf "Message complete: %d\n" msg_len;
FIXME sub
received a ping
dump s; | Copyright 2001 , 2002 b8_bavard , b8_fee_carabine ,
This file is part of mldonkey .
mldonkey is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
mldonkey is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with mldonkey ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This file is part of mldonkey.
mldonkey is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
mldonkey is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mldonkey; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
1 . Download the .torrent file
* * * * * * * * * * * * * * * * * * * * * * * * * * * * *
{
" announce " = " :6969/announce " ;
" info " = {
" files " = [
{
" length " = 682164224 ;
" path " = [ " Mandrake91-cd1-inst.i586.iso " ; ]
}
;
{
" length " = 681279488 ;
" path " = [
" Mandrake91-cd2-ext.i586.iso " ;
]
;
}
;
{
" length " = 681574400 ;
" path " = [
" Mandrake91-cd3-i18n.i586.iso " ;
]
;
}
;
]
;
" name " = " mandrake9.1 " ;
" piece length " = 262144 ;
" pieces " = " [ EAd\155ã´gÛ ÓþËf\134Ê«\025\016ô͵,1U\150À
\132\147îª\n%ù\\é,\012ÿC\008GÈÓd!æ¾öuL!\134Ô\016\152&\017¾\008³¢d\029Ë3\031Ï\134
# » ×\025\137¡=¢. ® \019§´\138î.ñ\151O\137Ùÿ,£ç&\019Àۢç\156.ù\150 < Eªª\153\018\145\
149d\147[+J=º\155l\139Î\028¡dVÉ\000-\017 ° Ť\013\154¼>A¹Ã5ïIt\007\020 © ãÚÀÈÈ\014O ®
ô1\152UÄ\026K\021^ãúì5Í¿ü \026\149\131q\024\015¸]Òþ£\027&\148\\ã- © \028WMÂ5 ... " ;
}
;
}
2 . Extract BitTorrent information needed :
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Metainfo files are bencoded dictionaries with the following keys -
' announce '
The url of the tracker .
' info '
This maps to a dictionary , with keys described below .
The ' name ' key maps to a string which is the suggested name to save
the file ( or directory ) as . It is purely advisory .
' piece length ' maps to the number of bytes in each piece the file is
split into . For the purposes of transfer , files are split into
fixed - size pieces which are all the same length except for possibly
the last one which may be truncated . Piece length is almost always a
power of two , most commonly 2 ^ 20 .
' pieces ' maps to a string whose length is a multiple of 20 . It is to
be subdivided into strings of length 20 , each of which is the sha1
hash of the piece at the corresponding index .
There is also a key ' length ' or a key ' files ' , but not both or
neither . If ' length ' is present then the download represents a
single file , otherwise it represents a set of files which go in a
directory structure .
In the single file case , ' length ' maps to the length of the file in
bytes .
For the purposes of the other keys , the multi - file case is treated
as only having a single file by concatenating the files in the order
they appear in the files list . The files list is the value ' files '
maps to , and is a list of dictionaries containing the following keys -
' length '
The length of the file , in bytes . ' path '
A list of strings corresponding to subdirectory names , the last of
which is the actual file name ( a zero length list is an error case ) .
In the single file case , the ' name ' key is the name of a file , in the
multiple file case, it's the name of a directory.
3 . Contact the tracker regularly to update file information
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Tracker GET requests have the following keys by HTTP :
' info_hash '
The 20 byte sha1 hash of the bencoded form of the ' info ' value from
the metainfo file . Note that this is a substring of the metainfo
file . This value will almost certainly have to be escaped .
' peer_id '
A string of length 20 which this downloader uses as its i d. Each
downloader generates its own i d at random at the start of a new
download . This value will also almost certainly have to be escaped .
' ip '
An optional parameter giving the ip ( or dns name ) which this peer is
at . Generally used for the origin if it 's on the same machine as the
tracker .
' port '
The port number this peer is listening on . Common behavior is for a
downloader to try to listen on port 6881 and if that port is taken
try 6882 , then 6883 , etc . and give up after 6889 .
' uploaded '
The total amount uploaded so far , encoded in base ten ascii .
' downloaded '
The total amount downloaded so far , encoded in base ten ascii .
' left '
The number of bytes this peer still has to download , encoded in base
ten ascii . Note that this ca n't be computed from downloaded and the
file length since it might be a resume , and there 's a chance that
some of the downloaded data failed an integrity check and had to be
re - downloaded .
' event '
This is an optional key which maps to ' started ' , ' completed ' , or
' stopped ' ( or '' , which is the same as not being present ) .
---> bencoded reply:
{ ' failure reason ' = ... }
or
{
' interval ' = .... ; ( * before next request to tracker
1. Download the .torrent file
*****************************
{
"announce" = ":6969/announce";
"info" = {
"files" = [
{
"length" = 682164224;
"path" = [ "Mandrake91-cd1-inst.i586.iso"; ]
}
;
{
"length" = 681279488;
"path" = [
"Mandrake91-cd2-ext.i586.iso";
]
;
}
;
{
"length" = 681574400;
"path" = [
"Mandrake91-cd3-i18n.i586.iso";
]
;
}
;
]
;
"name" = "mandrake9.1";
"piece length" = 262144;
"pieces" = "[EAd\155ã´gÛ ÓþËf\134Ê«\025\016ô͵,1U\150À
\132\147îª\n%ù\\é,\012ÿC\008GÈÓd!æ¾öuL!\134Ô\016\152&\017¾\008³¢d\029Ë3\031Ï\134
#»×\025\137¡=¢.®\019§´\138î.ñ\151O\137Ùÿ,£ç&\019Àۢç\156.ù\150<Eªª\153\018\145\
149d\147[+J=º\155l\139Î\028¡dVÉ\000-\017°Å¤\013\154¼>A¹Ã5ïIt\007\020©ãÚÀÈÈ\014O®
ô1\152UÄ\026K\021^ãúì5Í¿ü \026\149\131q\024\015¸]Òþ£\027&\148\\ã-©\028WMÂ5...";
}
;
}
2. Extract BitTorrent information needed:
*****************************************
Metainfo files are bencoded dictionaries with the following keys -
'announce'
The url of the tracker.
'info'
This maps to a dictionary, with keys described below.
The 'name' key maps to a string which is the suggested name to save
the file (or directory) as. It is purely advisory.
'piece length' maps to the number of bytes in each piece the file is
split into. For the purposes of transfer, files are split into
fixed-size pieces which are all the same length except for possibly
the last one which may be truncated. Piece length is almost always a
power of two, most commonly 2^20 .
'pieces' maps to a string whose length is a multiple of 20. It is to
be subdivided into strings of length 20, each of which is the sha1
hash of the piece at the corresponding index.
There is also a key 'length' or a key 'files', but not both or
neither. If 'length' is present then the download represents a
single file, otherwise it represents a set of files which go in a
directory structure.
In the single file case, 'length' maps to the length of the file in
bytes.
For the purposes of the other keys, the multi-file case is treated
as only having a single file by concatenating the files in the order
they appear in the files list. The files list is the value 'files'
maps to, and is a list of dictionaries containing the following keys -
'length'
The length of the file, in bytes. 'path'
A list of strings corresponding to subdirectory names, the last of
which is the actual file name (a zero length list is an error case).
In the single file case, the 'name' key is the name of a file, in the
multiple file case, it's the name of a directory.
3. Contact the tracker regularly to update file information
***********************************************************
Tracker GET requests have the following keys by HTTP:
'info_hash'
The 20 byte sha1 hash of the bencoded form of the 'info' value from
the metainfo file. Note that this is a substring of the metainfo
file. This value will almost certainly have to be escaped.
'peer_id'
A string of length 20 which this downloader uses as its id. Each
downloader generates its own id at random at the start of a new
download. This value will also almost certainly have to be escaped.
'ip'
An optional parameter giving the ip (or dns name) which this peer is
at. Generally used for the origin if it's on the same machine as the
tracker.
'port'
The port number this peer is listening on. Common behavior is for a
downloader to try to listen on port 6881 and if that port is taken
try 6882, then 6883, etc. and give up after 6889.
'uploaded'
The total amount uploaded so far, encoded in base ten ascii.
'downloaded'
The total amount downloaded so far, encoded in base ten ascii.
'left'
The number of bytes this peer still has to download, encoded in base
ten ascii. Note that this can't be computed from downloaded and the
file length since it might be a resume, and there's a chance that
some of the downloaded data failed an integrity check and had to be
re-downloaded.
'event'
This is an optional key which maps to 'started', 'completed', or
'stopped' (or '', which is the same as not being present).
---> bencoded reply:
{ 'failure reason' = ... }
or
{
'peers' = [
{
'peer id' = ....;
'ip' - ....;
'port' = ....;
};
....
]
}
4. Contact every peer regularly
*******************************
Handshake:
type int = BigEndian.int32
--->
string8 (prefixed by length): "BitTorrent protocol"
int8[8]: reserved(zeros)
int8[20 bytes]: Sha1.string (Bencode.encode file.file_info)
int8[20 bytes]: peer id
<---
string8 (prefixed by length): "BitTorrent protocol"
int8[8]: reserved(zeros)
int8[20 bytes]: Sha1.string (Bencode.encode file.file_info)
int8[20 bytes]: peer id
----> disconnect if sha1 don't match, or if peer id is unexpected
msg:
int: len of message (byte+payload) 0 -> keepalive sent every 2 minutes
byte8: opcode of message
int8[..]: payload
opcodes:
Connections start out choked and not interested.
No payload:
* 0 - choke: you have been blocked
* 1 - unchoke: you have been unblocked
* 2 - interested: I'm interested in downloading this file now
* 3 - not interested: I'm not interested in downloading this file now
With payload:
* 4 - have
int : index of new completed chunk
* 5 - bitfield:
string: a bitfield of bit 1 for downloaded chunks
byte: bits are inverted 0....7 ---> 7 .... 0
* 6 - request
int: index
int: begin
int: length (power of 2, 2 ^ 15)
* 7 - piece
int: index
int: begin
string: piece
* 8 - cancel: cancel a request
int: index
int: begin
int: length (power of 2, 2 ^ 15)
* 9 - DHT port announcement
int16: UDP port
* 20 - extended
byte: extended message ID (0 = handshake)
Choke/unchoke every 10 seconds
*)
open BasicSocket
open Printf2
open CommonOptions
open Md4
open CommonGlobals
open BigEndian
open TcpBufferedSocket
open AnyEndian
open BTTypes
(* Prefix prepended to every log line emitted by this module. *)
let log_prefix = "[BT]"
(* Module-local logging helper: forwards to [lprintf_nl2] with our prefix. *)
let lprintf_nl fmt =
  lprintf_nl2 log_prefix fmt
(* Random token used in the Azureus NAT/port-test reply ("azureus_rand_<n>"). *)
let azureus_porttest_random = ref 0
(* How incoming bytes on a connection are interpreted: [BTHeader] while
   waiting for the handshake (callback receives proto string, reserved
   bits and info_hash), then [Reader] for regular message traffic. *)
type ghandler =
  BTHeader of (gconn -> TcpBufferedSocket.t ->
    (string * string * Sha1.t) -> unit)
| Reader of (gconn -> TcpBufferedSocket.t -> unit)
(* Per-connection mutable state: the current byte-stream handler, the
   queue of pending refill callbacks, and a flag consulted once the last
   pending refill has completed (see [set_bt_sock]). *)
and gconn = {
    mutable gconn_handler : ghandler;
    mutable gconn_refill : (TcpBufferedSocket.t -> unit) list;
    mutable gconn_close_on_write : bool;
  }
(* Wire-level encoding/decoding of BitTorrent peer messages.
   [PeerID] uses the pseudo-opcode (-1): it is not a real wire opcode but
   marks the 20-byte peer_id read right after the handshake. *)
module TcpMessages = struct
    type msg =
    | Choke
    | Unchoke
    | Interested
    | NotInterested
    | Have of int64
    | BitField of string
    | Request of int * int64 * int64
    | Piece of int * int64 * string * int * int
    | Cancel of int * int64 * int64
    | Ping
    | PeerID of string
    | DHT_Port of int
    | Extended of int * string
    (* Human-readable rendering, used for verbose logging only. *)
    let to_string msg =
      match msg with
      | Choke -> "Choke"
      | Unchoke -> "Unchoke"
      | Interested -> "Interested"
      | NotInterested -> "NotInterested"
      | Have n -> Printf.sprintf "Have %Ld" n
      | BitField s -> Printf.sprintf "BitField %s" (String.escaped s)
      | Request (index, offset, len) ->
          Printf.sprintf "Request %d %Ld[%Ld]" index offset len
      | Piece (index, offset, s, pos, len) ->
          Printf.sprintf "Piece %d %Ld[%d]" index offset len
      | Cancel (index, offset, len) ->
          Printf.sprintf "Cancel %d %Ld[%Ld]" index offset len
      | Ping -> "Ping"
      | PeerID s -> Printf.sprintf "PeerID [%s]" (String.escaped s)
      | DHT_Port n -> Printf.sprintf "DHT_Port %d" n
      | Extended (n, s) -> Printf.sprintf "Extended [%d %s]" n (String.escaped s)
    (* Decode the payload [m] of a message with the given [opcode];
       raises [Not_found] on opcodes we do not understand. *)
    let parsing opcode m =
      match opcode with
      | 0 -> Choke
      | 1 -> Unchoke
      | 2 -> Interested
      | 3 -> NotInterested
      | 4 -> Have (get_uint64_32 m 0)
      | 5 -> BitField m
      | 6 -> Request (get_int m 0, get_uint64_32 m 4, get_uint64_32 m 8)
      | 7 -> Piece (get_int m 0, get_uint64_32 m 4, m, 8, String.length m - 8)
      | 8 -> Cancel (get_int m 0, get_uint64_32 m 4, get_uint64_32 m 8)
      | 9 -> DHT_Port (get_int16 m 0)
      | 20 -> Extended (get_int8 m 0, String.sub m 1 (String.length m - 1))
      | -1 -> PeerID m
      | _ -> raise Not_found
    (* Shared scratch buffer reused by every [write] call (not reentrant). *)
    let buf = Buffer.create 100
    (* Encode [msg] as a length-prefixed wire message: a 4-byte length
       placeholder is written first and patched at the end. *)
    let write msg =
      Buffer.reset buf;
      begin
        buf_int buf 0;
        match msg with
        | Choke -> buf_int8 buf 0
        | Unchoke -> buf_int8 buf 1
        | Interested -> buf_int8 buf 2
        | NotInterested -> buf_int8 buf 3
        | Have i -> buf_int8 buf 4; buf_int64_32 buf i
        | BitField string -> buf_int8 buf 5; Buffer.add_string buf string
        | Request (index, pos, len) ->
            buf_int8 buf 6;
            buf_int buf index; buf_int64_32 buf pos; buf_int64_32 buf len
        | Piece (num, index, s, pos, len) ->
            buf_int8 buf 7;
            buf_int buf num;
            buf_int64_32 buf index;
            Buffer.add_substring buf s pos len
        | Cancel _ -> ()
        | PeerID _ -> ()
        | Ping -> ()
        | DHT_Port n -> buf_int8 buf 9; buf_int16 buf n
        | Extended (n,msg) -> buf_int8 buf 20; buf_int8 buf n; Buffer.add_string buf msg
      end;
      let s = Buffer.contents buf in
      str_int s 0 (String.length s - 4);
      s
end
module UdpMessages = struct
type t =
PingReq of int * string * string
| of int * string * string
| NodePongReq of int * string
| UnknownReq of int * string
let extract_string s pos =
let end_pos = String.index_from \000 ' in
String.sub s pos ( end_pos - pos ) , pos + 1
let parse p =
match int_of_char p.[0 ] with
| 0x27 - >
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 1 in
let netname , pos = extract_string p 6 in
PingReq ( min_enc_type , unknown , netname )
| 0x28 - >
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 6 in
let netname , pos = extract_string p 11 in
( min_enc_type , unknown , netname )
| 0x29 - >
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 ( String.length p - 5 ) in
NodePongReq ( min_enc_type , unknown )
| n - > UnknownReq ( n , p )
let write p =
let b = Buffer.create 100 in
begin
match p with
| PingReq ( min_enc_type , unknown , netname ) - >
buf_int8 b 0x27 ;
buf_int b min_enc_type ;
Buffer.add_string b unknown ;
Buffer.add_string b netname ;
buf_int8 b 0x00
| ( min_enc_type , unknown , netname ) - >
buf_int8 b 0x28 ;
buf_int b min_enc_type ;
Buffer.add_string b unknown ;
Buffer.add_string b netname ;
buf_int8 b 0x00
| NodePongReq ( min_enc_type , unknown ) - >
buf_int8 b 0x29 ;
buf_int b min_enc_type ;
Buffer.add_string b unknown
( opcode , unknown ) - >
Buffer.add_string b unknown ;
end ;
Buffer.contents b
let to_string p =
let b = Buffer.create 100 in
begin
match p with
| PingReq ( min_enc_type , unknown , netname ) - >
Printf.bprintf b " ( % d , " min_enc_type ;
bprint_ints b unknown ;
Printf.bprintf b " , % s ) " netname
| ( min_enc_type , unknown , netname ) - >
Printf.bprintf b " ( % d , " min_enc_type ;
bprint_ints b unknown ;
Printf.bprintf b " , % s ) " netname
| NodePongReq ( min_enc_type , unknown ) - >
Printf.bprintf b " NodePong ( % d , " min_enc_type ;
bprint_ints b unknown ;
Printf.bprintf b " ) "
( opcode , unknown ) - >
Printf.bprintf b " Unknown \n " ;
bprint_ints b unknown ;
Printf.bprintf b " \n " ;
bprint_chars b unknown ;
Printf.bprintf b " \n "
end ;
Buffer.contents b
let udp_send t ip port ping msg =
if ! verbose_udp then begin
lprintf " Message UDP to % s:%d\n%s\n " ( Ip.to_string ip ) port
( to_string msg ) ;
end ;
try
let s = write msg in
UdpSocket.write t ping s ip port
with e - >
lprintf " FT : Exception % s in udp_send\n " ( Printexc2.to_string e )
end
module UdpMessages = struct
type t =
PingReq of int * string * string
| SupernodePongReq of int * string * string
| NodePongReq of int * string
| UnknownReq of int * string
let extract_string s pos =
let end_pos = String.index_from s pos '\000' in
String.sub s pos (end_pos - pos), pos + 1
let parse p =
match int_of_char p.[0] with
| 0x27 ->
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 1 in
let netname, pos = extract_string p 6 in
PingReq (min_enc_type, unknown, netname)
| 0x28 ->
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 6 in
let netname, pos = extract_string p 11 in
SupernodePongReq (min_enc_type, unknown, netname)
| 0x29 ->
let min_enc_type = get_int p 1 in
let unknown = String.sub p 5 (String.length p - 5) in
NodePongReq (min_enc_type, unknown)
| n -> UnknownReq (n, p)
let write p =
let b = Buffer.create 100 in
begin
match p with
| PingReq (min_enc_type, unknown, netname) ->
buf_int8 b 0x27;
buf_int b min_enc_type;
Buffer.add_string b unknown;
Buffer.add_string b netname;
buf_int8 b 0x00
| SupernodePongReq (min_enc_type, unknown, netname) ->
buf_int8 b 0x28;
buf_int b min_enc_type;
Buffer.add_string b unknown;
Buffer.add_string b netname;
buf_int8 b 0x00
| NodePongReq (min_enc_type, unknown) ->
buf_int8 b 0x29;
buf_int b min_enc_type;
Buffer.add_string b unknown
| UnknownReq (opcode, unknown) ->
Buffer.add_string b unknown;
end;
Buffer.contents b
let to_string p =
let b = Buffer.create 100 in
begin
match p with
| PingReq (min_enc_type, unknown, netname) ->
Printf.bprintf b "Ping (%d, " min_enc_type;
bprint_ints b unknown;
Printf.bprintf b ", %s)" netname
| SupernodePongReq (min_enc_type, unknown, netname) ->
Printf.bprintf b "SupernodePong (%d, " min_enc_type;
bprint_ints b unknown;
Printf.bprintf b ", %s)" netname
| NodePongReq (min_enc_type, unknown) ->
Printf.bprintf b "NodePong (%d, " min_enc_type;
bprint_ints b unknown;
Printf.bprintf b ")"
| UnknownReq (opcode, unknown) ->
Printf.bprintf b "Unknown \n ";
bprint_ints b unknown;
Printf.bprintf b "\n ";
bprint_chars b unknown;
Printf.bprintf b "\n"
end;
Buffer.contents b
let udp_send t ip port ping msg =
if !verbose_udp then begin
lprintf "Message UDP to %s:%d\n%s\n" (Ip.to_string ip) port
(to_string msg);
end;
try
let s = write msg in
UdpSocket.write t ping s ip port
with e ->
lprintf "FT: Exception %s in udp_send\n" (Printexc2.to_string e)
end
*)
(* Raised when the buffer does not yet hold a complete protocol unit; the
   reader will run again once more bytes arrive. *)
exception Wait_for_more of string

(* Generic BT message pump: splits the buffered input of [sock] into
   length-prefixed messages, decodes each with [parse_fun] and hands the
   result to [handler].  The 20 bytes following the handshake are the
   remote peer_id, decoded with the pseudo-opcode (-1).  Comment markers
   lost during extraction are restored, and the inner [try] is
   parenthesized so the following [else] parses correctly. *)
let bt_handler parse_fun handler c sock =
  try
    let b = TcpBufferedSocket.buf sock in
    if not c.client_received_peer_id then
      begin
        (* we get and parse the peer_id here because it may
           not be sent from trackers that test us for NAT
           (they just wait for our handshake response and
           then drop the connection) *)
        if b.len >= 20 then
          begin
            let payload = String.sub b.buf b.pos 20 in
            let p = parse_fun (-1) payload in
            buf_used b 20;
            c.client_received_peer_id <- true;
            (try
              handler sock p
            with e ->
              lprintf_nl "Exception %s in BTProtocol.parse_fun while handling peer_id"
                (Printexc2.to_string e);
              dump payload;
              buf_used b b.len;
              close sock Closed_by_user)
          end
        else raise (Wait_for_more "peer_id");
        (* must break the loop even if there is data, because the socket
           could be closed beneath our feet and then b.buf seems to be
           zero length regardless of what b.len tells (this is a bug
           somewhere in tcpBufferedSocket i think) *)
        raise (Wait_for_more "after_peer_id")
      end;
    while b.len >= 4 do
      let msg_len = get_int b.buf b.pos in
      if msg_len < 0 then
        begin
          (* negative length: protocol garbage, drop the connection *)
          let (ip,port) = (TcpBufferedSocket.peer_addr sock) in
          lprintf_nl "BT: Unknown message from %s:%d dropped!! peerid:%b data_len:%i msg_len:%i software: %s"
            (Ip.to_string ip) port c.client_received_peer_id b.len msg_len (brand_to_string c.client_brand);
          dump (String.sub b.buf b.pos (min b.len 30));
          buf_used b b.len;
          close sock Closed_by_user
        end
      else if msg_len > 20000 then
        (* We NEVER request pieces greater than size 20000, this client is
           trying to waste our bandwidth? *)
        begin
          let (ip,port) = (TcpBufferedSocket.peer_addr sock) in
          lprintf_nl "btprotocol.bt_handler: closed connection from %s:%d because of too much data!! data_len:%i msg_len:%i software: %s"
            (Ip.to_string ip) port b.len msg_len (brand_to_string c.client_brand);
          dump (String.sub b.buf b.pos (min b.len 30));
          buf_used b b.len;
          close sock Closed_by_user
        end
      else if b.len >= 4 + msg_len then
        begin
          buf_used b 4;
          if msg_len > 0 then
            let opcode = get_int8 b.buf b.pos in
            let payload = String.sub b.buf (b.pos+1) (msg_len-1) in
            buf_used b msg_len;
(*            lprintf "Opcode %d\n" opcode; *)
            (try
              (* We use opcodes < 0 internally;
                 they don't occur in the spec *)
              if opcode < 0 then raise Not_found;
              let p = parse_fun opcode payload in
(*              lprintf ", calling handler\n"; *)
              handler sock p
            with e ->
              lprintf_nl "Exception %s in BTProtocol.parse_fun while handling message with opcode: %d"
                (Printexc2.to_string e) opcode;
              dump payload)
          else
            (* zero-length message is a keepalive: just extend the lifetime *)
            set_lifetime sock 130.
        end
      else raise (Wait_for_more "message")
    done;
    if b.len != 0 then raise (Wait_for_more "loop")
  with
  | Wait_for_more s ->
      if closed sock && s <> "after_peer_id" then
        lprintf_nl "bt_handler: Socket was closed while waiting for more data in %s" s
  | e ->
      lprintf_nl "Exception %s in bt_handler"
        (Printexc2.to_string e)
(* Build the per-connection read callback: dispatches buffered input either
   to the handshake parser ([BTHeader]) or to the regular message reader
   ([Reader]).  Restores the comment markers lost around the old debug
   statements; the shadowed [let proto = String.sub ...] binding was dead
   code (it is rebound via [get_string8] below) and stays commented out. *)
let handlers info gconn =
  let iter_read sock nread =
(*    lprintf "iter_read %d\n" nread; *)
    let b = TcpBufferedSocket.buf sock in
    if b.len > 0 then
      match gconn.gconn_handler with
      | BTHeader h ->
(*          dump (String.sub b.buf b.pos (min b.len 100)); *)
          (* handshake layout: <len8><proto><8 reserved bytes><20-byte info_hash> *)
          let slen = get_int8 b.buf b.pos in
          if slen + 29 <= b.len then
            begin
              (* get proto and file_id from handshake;
                 peer_id is not fetched here because
                 it might be late or not present *)
(*              let proto = String.sub b.buf (b.pos+1) slen in *)
              let file_id = Sha1.direct_of_string
                  (String.sub b.buf (b.pos+9+slen) 20) in
              let proto,pos = get_string8 b.buf b.pos in
              let rbits = (String.sub b.buf (b.pos+pos) 8) in
              buf_used b (slen+29);
              h gconn sock (proto, rbits, file_id)
            end
          else
          if (String.sub b.buf b.pos (min b.len 100)) = "NATCHECK_HANDSHAKE" then
            (* Azureus NAT/port probe: echo back the expected random token *)
            write_string sock (Printf.sprintf "azureus_rand_%d" !azureus_porttest_random)
          else if (TcpBufferedSocket.closed sock) then
            let (ip,port) = (TcpBufferedSocket.peer_addr sock) in
            lprintf_nl "bt-handshake: closed sock from %s:%d b.len:%i slen:%i"
              (Ip.to_string ip) port b.len slen
      | Reader h ->
          h gconn sock
  in
  iter_read
(* Install the BT reader / refill / write-done handlers on [sock].
   [gconn] holds the current protocol state and the queue of refill
   continuations used to feed the socket when it can accept more data.
   The close-on-write line below was originally commented out; the lost
   comment markers made [set_lifetime sock 30.] an invalid application. *)
let set_bt_sock sock info ghandler =
  let gconn = {
      gconn_handler = ghandler;
      gconn_refill = [];
      gconn_close_on_write = false;
    } in
  TcpBufferedSocket.set_reader sock (handlers info gconn);
  TcpBufferedSocket.set_refill sock (fun sock ->
      match gconn.gconn_refill with
        [] -> ()
      | refill :: _ -> refill sock
  );
  TcpBufferedSocket.set_handler sock TcpBufferedSocket.WRITE_DONE (
    fun sock ->
      match gconn.gconn_refill with
        [] -> ()
      | _ :: tail ->
          gconn.gconn_refill <- tail;
          match tail with
            [] ->
              (* last pending refill finished: if requested, let the
                 socket expire on its own instead of closing abruptly *)
              if gconn.gconn_close_on_write then
                set_lifetime sock 30.
(*                TcpBufferedSocket.close sock "write done" *)
          | refill :: _ -> refill sock)
(* Serialize [msg] and push it on the client's socket, when connected.
   Serialization or write failures are logged and otherwise swallowed. *)
let send_client client_sock msg =
  let push sock =
    try
      let payload = TcpMessages.write msg in
      if !verbose_msg_clients then
        lprintf_nl "send message: %s" (TcpMessages.to_string msg);
      write_string sock payload
    with exn ->
      lprintf_nl "CLIENT : Error %s in send_client"
        (Printexc2.to_string exn)
  in
  do_if_connected client_sock push
|
cab962623b6837cb5e04ffd275cf728006511879ea1578af964d2a83f7be69b8 | CryptoKami/cryptokami-core | Command.hs | # LANGUAGE ExistentialQuantification #
module Lang.Command
( CommandProc(..)
, UnavailableCommand(..)
) where
import Universum
import Lang.Argument (ArgumentConsumer)
import Lang.Name (Name)
import Lang.Value (Value)
import Lang.Syntax (Arg)
-- | A single REPL command: its name, argument pre-processing and parsing
-- logic, the action to execute, and a help string.  The intermediate
-- argument type @e@ is existentially hidden.  The redundant empty
-- @deriving ()@ clause has been removed (it derives nothing).
data CommandProc m = forall e. CommandProc
    { cpName :: !Name
      -- ^ Name the command is invoked by.
    , cpArgumentPrepare :: !([Arg Value] -> [Arg Value])
      -- ^ Pre-processing applied to the raw arguments before parsing.
    , cpArgumentConsumer :: !(ArgumentConsumer e)
      -- ^ Parses the prepared arguments into an @e@.
    , cpExec :: !(e -> m Value)
      -- ^ Runs the command on the parsed arguments.
    , cpHelp :: !Text
      -- ^ Human-readable help text.
    }
-- | A command whose name is recognized but which cannot currently be
-- executed, together with a human-readable reason.
data UnavailableCommand = UnavailableCommand
    { ucName :: !Name     -- ^ Name of the unavailable command.
    , ucReason :: !Text   -- ^ Why the command cannot be used.
    }
| null | https://raw.githubusercontent.com/CryptoKami/cryptokami-core/12ca60a9ad167b6327397b3b2f928c19436ae114/auxx/src/Lang/Command.hs | haskell | # LANGUAGE ExistentialQuantification #
module Lang.Command
( CommandProc(..)
, UnavailableCommand(..)
) where
import Universum
import Lang.Argument (ArgumentConsumer)
import Lang.Name (Name)
import Lang.Value (Value)
import Lang.Syntax (Arg)
-- | A single REPL command: its name, argument pre-processing and parsing
-- logic, the action to execute, and a help string.  The intermediate
-- argument type @e@ is existentially hidden.  The redundant empty
-- @deriving ()@ clause has been removed (it derives nothing).
data CommandProc m = forall e. CommandProc
    { cpName :: !Name
      -- ^ Name the command is invoked by.
    , cpArgumentPrepare :: !([Arg Value] -> [Arg Value])
      -- ^ Pre-processing applied to the raw arguments before parsing.
    , cpArgumentConsumer :: !(ArgumentConsumer e)
      -- ^ Parses the prepared arguments into an @e@.
    , cpExec :: !(e -> m Value)
      -- ^ Runs the command on the parsed arguments.
    , cpHelp :: !Text
      -- ^ Human-readable help text.
    }
-- | A command whose name is recognized but which cannot currently be
-- executed, together with a human-readable reason.
data UnavailableCommand = UnavailableCommand
    { ucName :: !Name     -- ^ Name of the unavailable command.
    , ucReason :: !Text   -- ^ Why the command cannot be used.
    }
|
|
846dfcbe3fb0fb911a59eda4346553caeb21fd9d86f93f3799176de0bb22f9d8 | graninas/Functional-Design-and-Architecture | HardwareSpec.hs | module Andromeda.HardwareSpec where
import Test.Hspec
import Andromeda
import Andromeda.Assets (boostersDef, aaaController86Name)
import Andromeda.Assets.Vendors.AAA.HardwareService (aaaHardwareService)
import . Test . HardwareService ( mockedHardwareService )
import Andromeda.TestData.Components (thermometer1Passp, pressure1Passp)
import qualified Andromeda.Hardware.Impl.Device.Types as TImpl
import qualified Andromeda.Hardware.Impl.Service as SImpl
import qualified Andromeda.Hardware.Impl.Runtime as RImpl
import qualified Andromeda.Hardware.Impl.HdlInterpreter as HdlImpl
import qualified Andromeda.Hardware.Language.Hdl as L
import qualified Andromeda.Hardware.Domain as D
import qualified Data.Map as Map
-- | Read one measurement from the sensor handler and check that it is a
-- temperature reading with the expected value.
verifyTemperature :: Float -> SensorAPI -> IO ()
verifyTemperature temp handler =
  readMeasurement handler >>= (`shouldBe` Measurement Temperature temp)
-- | Look up the device registered for the given controller name, failing
-- the test when the controller is unknown.
getDevice :: RImpl.Devices -> ControllerName -> IO TImpl.Device
getDevice devices ctrlName =
  maybe (fail "Controller not found") (pure . snd)
        (Map.lookup (D.Controller ctrlName) devices)
-- | Fetch a device part by component index from the device owned by the
-- given controller.
getDevicePart'
  :: RImpl.Devices
  -> SImpl.HardwareService
  -> ComponentIndex
  -> ControllerName
  -> IO (Maybe TImpl.DevicePart)
getDevicePart' devices service idx ctrlName =
  getDevice devices ctrlName >>= SImpl.getDevicePart service idx
-- | HDL script used by the tests: two controllers ("left b ctrl" and
-- "right b ctrl"), each registering a temperature and a pressure component.
testBoostersDef :: Hdl
testBoostersDef =
  [ SetupController "left booster" "left b ctrl" aaaController86Passport
    ( \lCtrl ->
      [ RegisterComponent lCtrl "nozzle1-t" aaaTemperature25Passport
      , RegisterComponent lCtrl "nozzle1-p" aaaPressure02Passport
        -- the right booster is set up from within the left controller's script
      , SetupController "right booster" "right b ctrl" aaaController86Passport
        ( \rCtrl ->
          [ RegisterComponent rCtrl "nozzle2-t" aaaTemperature25Passport
          , RegisterComponent rCtrl "nozzle2-p" aaaPressure02Passport
          ]
        )
      ]
    )
  ]
-- | Hspec test tree for the hardware subsystem: interprets the HDL
-- definition with 'HdlImpl.runHdl' and checks component lookup and
-- component method calls.
spec :: Spec
spec =
  describe "Hardware tests" $ do
    it "Hardware device components check" $ do
      let devices = Map.empty
      devices' <- HdlImpl.runHdl devices aaaHardwareService testBoostersDef
      -- components registered by testBoostersDef must be discoverable
      mbThermometer1 <- getDevicePart' devices' aaaHardwareService "nozzle1-t" "left b ctrl"
      mbThermometer2 <- getDevicePart' devices' aaaHardwareService "nozzle2-t" "right b ctrl"
      -- unknown component indices must not resolve to any part
      mbNonExistentTherm1 <- getDevicePart' devices' aaaHardwareService "xxx-t" "left b ctrl"
      mbNonExistentTherm2 <- getDevicePart' devices' aaaHardwareService "xxx-t" "right b ctrl"
      case (mbNonExistentTherm1, mbNonExistentTherm2) of
        (Nothing, Nothing) -> pure ()
        _ -> fail "Found an unexpected thermometer"
      case (mbThermometer1, mbThermometer2) of
        (Just therm1, Just therm2) -> putStrLn "Component found."
        _ -> fail "There is no such component"
    it "Hardware device component method run" $ do
      let devices = Map.empty
      devices' <- HdlImpl.runHdl devices aaaHardwareService testBoostersDef
      mbThermometer <- getDevicePart' devices' aaaHardwareService "nozzle1-t" "left b ctrl"
      case mbThermometer of
        Nothing -> fail "There is no such component"
        -- the callback asserts the reported temperature equals 100.0
        Just thermometer -> TImpl.withHandler thermometer (verifyTemperature 100.0)
| null | https://raw.githubusercontent.com/graninas/Functional-Design-and-Architecture/faee58404e7d766c6c21f1ffdf9a2e792aebb4cb/Second-Edition-Manning-Publications/BookSamples/CH05/Section5p2p2/test/Andromeda/HardwareSpec.hs | haskell | module Andromeda.HardwareSpec where
import Test.Hspec
import Andromeda
import Andromeda.Assets (boostersDef, aaaController86Name)
import Andromeda.Assets.Vendors.AAA.HardwareService (aaaHardwareService)
import . Test . HardwareService ( mockedHardwareService )
import Andromeda.TestData.Components (thermometer1Passp, pressure1Passp)
import qualified Andromeda.Hardware.Impl.Device.Types as TImpl
import qualified Andromeda.Hardware.Impl.Service as SImpl
import qualified Andromeda.Hardware.Impl.Runtime as RImpl
import qualified Andromeda.Hardware.Impl.HdlInterpreter as HdlImpl
import qualified Andromeda.Hardware.Language.Hdl as L
import qualified Andromeda.Hardware.Domain as D
import qualified Data.Map as Map
-- | Read one measurement from the sensor handler and check that it is a
-- temperature reading with the expected value.
verifyTemperature :: Float -> SensorAPI -> IO ()
verifyTemperature temp handler =
  readMeasurement handler >>= (`shouldBe` Measurement Temperature temp)
-- | Look up the device registered for the given controller name, failing
-- the test when the controller is unknown.
getDevice :: RImpl.Devices -> ControllerName -> IO TImpl.Device
getDevice devices ctrlName =
  maybe (fail "Controller not found") (pure . snd)
        (Map.lookup (D.Controller ctrlName) devices)
-- | Fetch a device part by component index from the device owned by the
-- given controller.
getDevicePart'
  :: RImpl.Devices
  -> SImpl.HardwareService
  -> ComponentIndex
  -> ControllerName
  -> IO (Maybe TImpl.DevicePart)
getDevicePart' devices service idx ctrlName =
  getDevice devices ctrlName >>= SImpl.getDevicePart service idx
-- | HDL script used by the tests: two controllers ("left b ctrl" and
-- "right b ctrl"), each registering a temperature and a pressure component.
testBoostersDef :: Hdl
testBoostersDef =
  [ SetupController "left booster" "left b ctrl" aaaController86Passport
    ( \lCtrl ->
      [ RegisterComponent lCtrl "nozzle1-t" aaaTemperature25Passport
      , RegisterComponent lCtrl "nozzle1-p" aaaPressure02Passport
        -- the right booster is set up from within the left controller's script
      , SetupController "right booster" "right b ctrl" aaaController86Passport
        ( \rCtrl ->
          [ RegisterComponent rCtrl "nozzle2-t" aaaTemperature25Passport
          , RegisterComponent rCtrl "nozzle2-p" aaaPressure02Passport
          ]
        )
      ]
    )
  ]
-- | Hspec test tree for the hardware subsystem: interprets the HDL
-- definition with 'HdlImpl.runHdl' and checks component lookup and
-- component method calls.
spec :: Spec
spec =
  describe "Hardware tests" $ do
    it "Hardware device components check" $ do
      let devices = Map.empty
      devices' <- HdlImpl.runHdl devices aaaHardwareService testBoostersDef
      -- components registered by testBoostersDef must be discoverable
      mbThermometer1 <- getDevicePart' devices' aaaHardwareService "nozzle1-t" "left b ctrl"
      mbThermometer2 <- getDevicePart' devices' aaaHardwareService "nozzle2-t" "right b ctrl"
      -- unknown component indices must not resolve to any part
      mbNonExistentTherm1 <- getDevicePart' devices' aaaHardwareService "xxx-t" "left b ctrl"
      mbNonExistentTherm2 <- getDevicePart' devices' aaaHardwareService "xxx-t" "right b ctrl"
      case (mbNonExistentTherm1, mbNonExistentTherm2) of
        (Nothing, Nothing) -> pure ()
        _ -> fail "Found an unexpected thermometer"
      case (mbThermometer1, mbThermometer2) of
        (Just therm1, Just therm2) -> putStrLn "Component found."
        _ -> fail "There is no such component"
    it "Hardware device component method run" $ do
      let devices = Map.empty
      devices' <- HdlImpl.runHdl devices aaaHardwareService testBoostersDef
      mbThermometer <- getDevicePart' devices' aaaHardwareService "nozzle1-t" "left b ctrl"
      case mbThermometer of
        Nothing -> fail "There is no such component"
        -- the callback asserts the reported temperature equals 100.0
        Just thermometer -> TImpl.withHandler thermometer (verifyTemperature 100.0)
|
|
8f10edd7a2fe9d249585b4b5fe9829c111d7cb2601bab6042e4bcd971d4a643d | gildor478/ounit | oUnitTestData.ml | (**************************************************************************)
The OUnit library
(* *)
Copyright ( C ) 2002 - 2008 Maas - Maarten Zeeman .
Copyright ( C ) 2010 OCamlCore SARL
Copyright ( C ) 2013
(* *)
The package OUnit is copyright by Maas - Maarten Zeeman , OCamlCore SARL
and .
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining *)
a copy of this document and the OUnit software ( " the Software " ) , to
deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, *)
sublicense , and/or sell copies of the Software , and to permit persons
to whom the Software is furnished to do so , subject to the following
(* conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be *)
included in all copies or substantial portions of the Software .
(* *)
(* The Software is provided ``as is'', without warranty of any kind, *)
(* express or implied, including but not limited to the warranties of *)
(* merchantability, fitness for a particular purpose and noninfringement. *)
In no event shall be liable for any claim , damages
(* or other liability, whether in an action of contract, tort or *)
otherwise , arising from , out of or in connection with the Software or
(* the use or other dealings in the software. *)
(* *)
See LICENSE.txt for details .
(**************************************************************************)
let make_filename = List.fold_left Filename.concat
let testdata_default =
let pwd = Sys.getcwd () in
let is_dir lst =
let dn = make_filename pwd lst in
Sys.file_exists dn && Sys.is_directory dn
in
try
let path =
List.find is_dir
[
["test"; "data"];
["tests"; "data"];
["data"]
]
in
Some (make_filename pwd path)
with Not_found ->
None
let testdata_dir =
OUnitConf.make_string_opt
"testdata_dir"
testdata_default
"Location of the test data directory (absolute path)."
let in_testdata_dir conf path =
match testdata_dir conf with
| Some fn -> make_filename fn path
| None ->
failwith "Test data dir not defined."
| null | https://raw.githubusercontent.com/gildor478/ounit/faf4936b17507406c7592186dcaa3f25c6fc138a/src/lib/ounit2/advanced/oUnitTestData.ml | ocaml | ************************************************************************
Permission is hereby granted, free of charge, to any person obtaining
the rights to use, copy, modify, merge, publish, distribute,
conditions:
The above copyright notice and this permission notice shall be
The Software is provided ``as is'', without warranty of any kind,
express or implied, including but not limited to the warranties of
merchantability, fitness for a particular purpose and noninfringement.
or other liability, whether in an action of contract, tort or
the use or other dealings in the software.
************************************************************************ | The OUnit library
Copyright ( C ) 2002 - 2008 Maas - Maarten Zeeman .
Copyright ( C ) 2010 OCamlCore SARL
Copyright ( C ) 2013
The package OUnit is copyright by Maas - Maarten Zeeman , OCamlCore SARL
and .
a copy of this document and the OUnit software ( " the Software " ) , to
deal in the Software without restriction , including without limitation
sublicense , and/or sell copies of the Software , and to permit persons
to whom the Software is furnished to do so , subject to the following
included in all copies or substantial portions of the Software .
In no event shall be liable for any claim , damages
otherwise , arising from , out of or in connection with the Software or
See LICENSE.txt for details .
let make_filename = List.fold_left Filename.concat
let testdata_default =
let pwd = Sys.getcwd () in
let is_dir lst =
let dn = make_filename pwd lst in
Sys.file_exists dn && Sys.is_directory dn
in
try
let path =
List.find is_dir
[
["test"; "data"];
["tests"; "data"];
["data"]
]
in
Some (make_filename pwd path)
with Not_found ->
None
let testdata_dir =
OUnitConf.make_string_opt
"testdata_dir"
testdata_default
"Location of the test data directory (absolute path)."
let in_testdata_dir conf path =
match testdata_dir conf with
| Some fn -> make_filename fn path
| None ->
failwith "Test data dir not defined."
|
15c0c1c16adf2297139b352ca17149dfefe2b3ab8dba1b6ccab7d200f61baae8 | tweag/haskell-training | Questionnaire.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
module Domain.Questionnaire where
-- aeson
import Data.Aeson.Types
-- base
import GHC.Generics
-- openapi3
import Data.OpenApi
-- text
import Data.Text
data Questionnaire = Questionnaire
{ title :: Text
}
deriving (Generic, FromJSON, ToJSON, ToSchema)
| null | https://raw.githubusercontent.com/tweag/haskell-training/89d930f7854075d692dfb127d2c54e953dbf1519/src/Domain/Questionnaire.hs | haskell | # LANGUAGE DeriveAnyClass #
aeson
base
openapi3
text | # LANGUAGE DeriveGeneric #
module Domain.Questionnaire where
import Data.Aeson.Types
import GHC.Generics
import Data.OpenApi
import Data.Text
data Questionnaire = Questionnaire
{ title :: Text
}
deriving (Generic, FromJSON, ToJSON, ToSchema)
|
416716664db4ff8939aee93a87d7ed1e3f45043fc073d5c71ab1170d2b4a2941 | pbv/codex | Tester.hs | {-# LANGUAGE OverloadedStrings #-}
module Codex.Tester (
oneOf,
tester,
nullTester,
-- * module re-exports
Meta, Code(..),
lookupFromMeta,
module Codex.Tester.Monad,
module Codex.Tester.Result,
module Codex.Tester.Utils,
module Codex.Tester.Limits,
-- * generic stuff
module Control.Monad,
module Control.Monad.Trans,
module System.FilePath,
module System.Exit,
module Data.Monoid,
) where
import Codex.Types
import Codex.Page (lookupFromMeta)
import Text.Pandoc (Meta)
import Codex.Tester.Monad
import Codex.Tester.Limits
import Codex.Tester.Result
import Codex.Tester.Utils
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import System.FilePath
import System.Exit
import Data.Monoid
import Data.Text (Text)
| Try testers in order , return the first one that suceedds .
This is just ` asum ` from Control . Applicative . Alternative
-- renamed for readability
oneOf :: [Tester a] -> Tester a
oneOf = foldr (<|>) empty
-- | label a tester and ignore submissions that don't match
tester :: Text -> Tester a -> Tester a
tester name cont = do
meta <- testMetadata
guard (lookupFromMeta "tester" meta == Just name)
cont
-- | trivial tester (accepts all submissions)
nullTester :: Tester Result
nullTester = tester "accept" $ return $ accepted "Submission recorded"
| null | https://raw.githubusercontent.com/pbv/codex/1a5a81965b12f834b436e2165c07120360aded99/src/Codex/Tester.hs | haskell | # LANGUAGE OverloadedStrings #
* module re-exports
* generic stuff
renamed for readability
| label a tester and ignore submissions that don't match
| trivial tester (accepts all submissions) | module Codex.Tester (
oneOf,
tester,
nullTester,
Meta, Code(..),
lookupFromMeta,
module Codex.Tester.Monad,
module Codex.Tester.Result,
module Codex.Tester.Utils,
module Codex.Tester.Limits,
module Control.Monad,
module Control.Monad.Trans,
module System.FilePath,
module System.Exit,
module Data.Monoid,
) where
import Codex.Types
import Codex.Page (lookupFromMeta)
import Text.Pandoc (Meta)
import Codex.Tester.Monad
import Codex.Tester.Limits
import Codex.Tester.Result
import Codex.Tester.Utils
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import System.FilePath
import System.Exit
import Data.Monoid
import Data.Text (Text)
| Try testers in order , return the first one that suceedds .
This is just ` asum ` from Control . Applicative . Alternative
oneOf :: [Tester a] -> Tester a
oneOf = foldr (<|>) empty
tester :: Text -> Tester a -> Tester a
tester name cont = do
meta <- testMetadata
guard (lookupFromMeta "tester" meta == Just name)
cont
nullTester :: Tester Result
nullTester = tester "accept" $ return $ accepted "Submission recorded"
|
9ea5bef6250f04236038352f3a46d5ee402ac818b8b701745bdd6fa41722ad6a | YoshikuniJujo/test_haskell | VulkanBufferEnum.hs | # LANGUAGE QuasiQuotes #
# OPTIONS_GHC -Wall -fno - warn - tabs #
module VulkanBufferEnum where
import Text.Nowdoc
import MakeEnum
make :: IO ()
make = createFileWithDefault vulkanCore "Buffer.Enum"
["Data.Default", "Data.Bits", "Data.Word"] [
( Just "CreateFlagsZero", [("CreateFlagsZero", Int 0)],
( "CreateFlagBits", "VkBufferCreateFlagBits",
["Show", "Eq", "Storable", "Bits"] ) ),
( Just "UsageFlagsZero", [("UsageFlagsZero", Int 0)],
( "UsageFlagBits", "VkBufferUsageFlagBits",
["Show", "Eq", "Storable", "Bits"] ) ) ]
[nowdoc|
type CreateFlags = CreateFlagBits
type UsageFlags = UsageFlagBits|]
| null | https://raw.githubusercontent.com/YoshikuniJujo/test_haskell/6ea44c1048805a62979669c185ab32ba9f4d2e02/themes/gui/vulkan/try-vulkan-middle/tools/VulkanBufferEnum.hs | haskell | # LANGUAGE QuasiQuotes #
# OPTIONS_GHC -Wall -fno - warn - tabs #
module VulkanBufferEnum where
import Text.Nowdoc
import MakeEnum
make :: IO ()
make = createFileWithDefault vulkanCore "Buffer.Enum"
["Data.Default", "Data.Bits", "Data.Word"] [
( Just "CreateFlagsZero", [("CreateFlagsZero", Int 0)],
( "CreateFlagBits", "VkBufferCreateFlagBits",
["Show", "Eq", "Storable", "Bits"] ) ),
( Just "UsageFlagsZero", [("UsageFlagsZero", Int 0)],
( "UsageFlagBits", "VkBufferUsageFlagBits",
["Show", "Eq", "Storable", "Bits"] ) ) ]
[nowdoc|
type CreateFlags = CreateFlagBits
type UsageFlags = UsageFlagBits|]
|
|
ca68bfc18697cec3f002eec563e1f2036c9d91ea32249fa6a59393dfd0b8709b | juspay/atlas | TestSilentIOLogger.hs | # OPTIONS_GHC -Wno - orphans #
|
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : TestSilentIOLogger
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : TestSilentIOLogger
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module TestSilentIOLogger where
import Beckn.Types.Common
import EulerHS.Prelude
instance Log IO where
logOutput _logLevel _msg = pure ()
withLogTag _ a = a
| null | https://raw.githubusercontent.com/juspay/atlas/e64b227dc17887fb01c2554db21c08284d18a806/test/src/TestSilentIOLogger.hs | haskell | # OPTIONS_GHC -Wno - orphans #
|
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : TestSilentIOLogger
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : TestSilentIOLogger
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module TestSilentIOLogger where
import Beckn.Types.Common
import EulerHS.Prelude
instance Log IO where
logOutput _logLevel _msg = pure ()
withLogTag _ a = a
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.