defmodule ExPropriate.MarkedFunctions do
@moduledoc """
This module handles expropriation with function-level granularity.
It can be set up like this:
```elixir
defmodule MyModule do
use ExPropriate
# Function becomes public
@expropriate true
defp expropriated_function,
do: :am_expropriated
# Functions with multiple bodies only need to be tagged on the first body
@expropriate true
defp divide_by(number) when is_integer(number) and number > 0,
do: {:ok, div(100, number)}
defp divide_by(_other),
do: :error
# Untagged functions remain private
defp remains_private,
do: :am_private
end
MyModule.expropriated_function
# :am_expropriated
MyModule.divide_by(2)
# { :ok, 50 }
MyModule.divide_by(0)
# :error
MyModule.remains_private
# (UndefinedFunctionError) function MyModule.remains_private/0 is undefined or private.
```
The objective of this module was to be able to explicitly state which functions need to be
expropriated. The tradeoff is that this module is more "_intrusive_" than the module-level
granularity, since it overrides both `Kernel.def/2` and `Kernel.defp/2`.
Also, a friendly reminder that the functions and macros contained in this module are for the
library's internal use, and it's advised _against_ using them directly.
"""
@typedoc """
AST containing the definition of a function's body.
Typically it's a keyword list containing at least a `[do: expr]`. It also may be `nil` (if only
the function head is being declared), and it may also contain other keys like `:rescue`, `:catch`,
`:after`, etc.
```elixir
[do: {:+, [], [1, 2]}]
|> Macro.to_string
# "[do: 1 + 2]"
[
do: {
:/,
[],
[{:a, [], nil}, {:b, [], Elixir}]
},
rescue: [{
:->,
[],
[
[{:error, [], nil}],
{{:., [], [{:__aliases__, [], [:IO]}, :puts]}, [],
[{:error, [], nil}]}
]
}]
]
|> Macro.to_string
# "[do: a / b, rescue: (error -> IO.puts(error))]"
```
"""
@type fn_body :: Macro.t | nil
@typedoc """
AST containing the definition of a function's head.
Contains at least the function name and arguments, and possibly guard clauses.
Does not contain `def` or `defp`.
**Examples**:
```elixir
{:with_no_args, [], []}
|> Macro.to_string
# "with_no_args()"
{:with_two_args, [], [{:arg1, [], nil}, {:arg2, [], nil}]}
|> Macro.to_string
# "with_two_args(arg1, arg2)"
{
:when,
[],
[
{:with_when, [], [{:arg, [], nil}]},
{:>, [], [{:arg, [], nil}, 0]}
]
}
|> Macro.to_string
# "with_when(arg) when arg > 1"
```
"""
@type fn_head :: Macro.t
@typedoc """
Tuple containing a function's name and arity.
```elixir
{:my_function, 2}
```
"""
@type fn_name :: {name :: atom, arity :: non_neg_integer}
@doc """
Generates the AST necessary to expropriate only the tagged functions at compile time.
- Injects this module's `def/2` and `defp/2` macros in place of `Kernel.def/2` and `Kernel.defp/2`.
- Sets up the attributes that are later used by the `def/2` and `defp/2` macros.
"""
@spec generate_use_ast() :: Macro.t
def generate_use_ast do
quote do
import Kernel, except: [def: 1, def: 2, defp: 1, defp: 2]
import unquote(__MODULE__), only: [def: 1, def: 2, defp: 1, defp: 2]
@expropriate false
Module.register_attribute(__MODULE__, :expropriated_names, accumulate: true)
end
end
@doc """
Generates AST to prevent warnings for unused `@expropriate` attributes.
These warnings happen when the module has `@expropriate` attributes but ExPropriate is disabled
at the config level, e.g. in the `prod` environment.
"""
@spec generate_unused_ast() :: Macro.t
def generate_unused_ast do
quote do
@before_compile unquote(__MODULE__)
end
end
defmacro __before_compile__(_env) do
quote do
Module.delete_attribute(__MODULE__, :expropriate)
end
end
@doc """
An override of `Kernel.def/2`.
This macro checks whether the `@expropriate` attribute was set to `true` before defining a public
function, and outputs a warning if that's the case.
Regardless of the warning, it _always_ defines the function using `Kernel.def/2`.
"""
defmacro def(fn_head, fn_body \\ nil) do
quote do
if @expropriate do
@expropriate false
IO.warn("You set @expropriate before a public function.", Macro.Env.stacktrace(__ENV__))
end
unquote(__MODULE__.define_public(fn_head, fn_body))
end
end
@doc """
An override of `Kernel.defp/2`.
This macro decides whether or not to expropriate the function body based on the following
criteria:
* The `@expropriate` attribute is set to `true`
* The function's name (`t:fn_name/0`) was already expropriated. (For functions with multiple
bodies)
"""
defmacro defp(fn_head, fn_body \\ nil) do
case fn_head_to_name(fn_head) do
{:ok, fn_name} ->
quote do
cond do
@expropriate ->
unquote(define_first_public_body(fn_name, fn_head, fn_body))
unquote(fn_name) in @expropriated_names ->
unquote(define_public(fn_head, fn_body))
true ->
unquote(define_private(fn_head, fn_body))
end
end
_ ->
message = "ExPropriate: There was an error expropriating the function: #{Macro.to_string(fn_head)}"
quote do
IO.warn(unquote(message), Macro.Env.stacktrace(__ENV__))
unquote(define_private(fn_head, fn_body))
end
end
end
@doc """
Transforms a function's head AST into a tuple containing the name and arity of the function.
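**Example**:

    iex> ExPropriate.MarkedFunctions.fn_head_to_name({:divide_by, [], [{:number, [], nil}]})
    {:ok, {:divide_by, 1}}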
"""
@spec fn_head_to_name(fn_head) :: {:ok, fn_name} | :error
def fn_head_to_name({:when, _, [fn_head|_checks]}) do
fn_head_to_name(fn_head)
end
def fn_head_to_name({name, _, nil}) when is_atom(name) do
{:ok, {name, 0}}
end
def fn_head_to_name({name, _, args}) when is_atom(name) and is_list(args) do
{:ok, {name, Enum.count(args)}}
end
def fn_head_to_name(_other) do
:error
end
@doc """
Defines a public function via `Kernel.def/2`, and sets the necessary attributes for the following
bodies.
Called by expropriated functions when they define their first body. In addition to defining the
function as public, this function also sets the `@expropriate` attribute back to `false` and
registers the function's name to the `@expropriated_names` attribute.
If the function has multiple bodies, it will directly call `define_public/2` instead.
"""
@spec define_first_public_body(fn_name, fn_head, fn_body) :: Macro.t
def define_first_public_body(fn_name, fn_head, fn_body) do
quote do
@expropriate false
@expropriated_names unquote(fn_name)
unquote(define_public(fn_head, fn_body))
end
end
@doc """
Defines a public function via `Kernel.def/2`
Called by functions that are being expropriated.
"""
@spec define_public(fn_head, fn_body) :: Macro.t
def define_public(fn_head, fn_body) do
quote do
Kernel.def unquote(fn_head), unquote(fn_body)
end
end
@doc """
Defines a private function via `Kernel.defp/2`
Called by functions that are **not** being expropriated.
"""
@spec define_private(fn_head, fn_body) :: Macro.t
def define_private(fn_head, fn_body) do
quote do
Kernel.defp unquote(fn_head), unquote(fn_body)
end
end
end
# Source: lib/ex_propriate/marked_functions.ex
defmodule GGity.Scale.Y.Continuous do
@moduledoc false
alias GGity.Scale.Y
@base_axis_intervals [0.1, 0.2, 0.25, 0.4, 0.5, 0.75, 1.0, 2.0, 2.5, 4.0, 5.0, 7.5, 10]
@type t() :: %__MODULE__{}
@type mapping() :: map()
defstruct width: 200,
breaks: 5,
labels: :waivers,
tick_values: nil,
inverse: nil,
transform: nil
@spec new(keyword()) :: Y.Continuous.t()
def new(options \\ []), do: struct(Y.Continuous, options)
@spec train(Y.Continuous.t(), {number(), number()}) :: Y.Continuous.t()
def train(scale, {min, max}) do
range = max - min
struct(scale, transformations(range, min, max, scale))
end
defp transformations(0, min, _max, %Y.Continuous{} = scale) do
[
tick_values: min,
inverse: fn _value -> min end,
transform: fn _value -> scale.width / 2 end
]
end
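# Chooses a "nice" tick interval: scale the raw interval into roughly [1, 10], snap it up to a
# preset break value, then widen min/max to whole multiples of the chosen interval.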
defp transformations(range, min, max, %Y.Continuous{} = scale) do
raw_interval_size = range / (scale.breaks - 1)
order_of_magnitude = :math.ceil(:math.log10(raw_interval_size) - 1)
power_of_ten = :math.pow(10, order_of_magnitude)
adjusted_interval_size = axis_interval_lookup(raw_interval_size / power_of_ten) * power_of_ten
adjusted_min = adjusted_interval_size * Float.floor(min / adjusted_interval_size)
adjusted_max = adjusted_interval_size * Float.ceil(max / adjusted_interval_size)
adjusted_interval_count =
round(1.0001 * (adjusted_max - adjusted_min) / adjusted_interval_size)
tick_values =
Enum.map(
1..(adjusted_interval_count + 1),
&(adjusted_min + (&1 - 1) * adjusted_interval_size)
)
[
tick_values: tick_values,
inverse: fn value ->
floor((value - adjusted_min) / (adjusted_max - adjusted_min) * scale.width)
end,
transform: fn value ->
floor((value - adjusted_min) / (adjusted_max - adjusted_min) * scale.width)
end
]
end
defp axis_interval_lookup(value) do
Enum.find(@base_axis_intervals, &(&1 >= value))
end
end
# Source: lib/ggity/scale/y_continuous.ex
defmodule Ameritrade.OAuth do
use OAuth2.Strategy
@defaults [
strategy: __MODULE__,
site: "https://auth.tdameritrade.com",
authorize_url: "https://auth.tdameritrade.com/auth",
token_url: "https://api.tdameritrade.com/v1/oauth2/token"
]
def client(opts \\ []) do
config = Application.fetch_env!(:ameritrade, :client_id)
config = to_string(config)
client_id = config <> "@AMER.OAUTHAP"
client_id = [client_id: client_id]
opts = @defaults |> Keyword.merge(opts) |> Keyword.merge(client_id) |> resolve_values()
OAuth2.Client.new(opts)
|> OAuth2.Client.put_serializer("application/json", Jason)
end
@doc """
Provides the authorize URL.
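## Example

A sketch; the redirect URI is a placeholder:

    Ameritrade.OAuth.authorize_url!(redirect_uri: "https://localhost:4000/callback")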
"""
def authorize_url!(params \\ [], opts \\ []) do
opts
|> client
|> OAuth2.Client.authorize_url!(params)
end
def get(token, url, headers \\ [], opts \\ []) do
[token: token]
|> client
|> OAuth2.Client.get(url, headers, opts)
end
def post(token, url, body, headers \\ [], opts \\ []) do
[token: token]
|> client
|> OAuth2.Client.post(url, body, headers, opts)
end
def put(token, url, body, headers \\ [], opts \\ []) do
[token: token]
|> client
|> OAuth2.Client.put(url, body, headers, opts)
end
def delete(token, url, headers \\ [], opts \\ []) do
[token: token]
|> client
|> OAuth2.Client.delete(url, headers, opts)
end
def patch(token, url, headers \\ [], opts \\ []) do
[token: token]
|> client
|> OAuth2.Client.patch(url, headers, opts)
end
def get_token!(params \\ [], opts \\ []) do
client =
client(opts)
|> OAuth2.Client.get_token(params)
{_, token} =
case client do
{:error, %{body: %{"error" => description}, status_code: error}} ->
{:error,
%{
access_token: nil,
other_params: [
error: error,
error_description: description
]
}}
{:ok, %{token: token}} ->
{:ok, token}
{:ok, %{body: %{token: token}}} ->
{:ok, token}
end
token
end
# Strategy Callbacks
def authorize_url(client, params) do
OAuth2.Strategy.AuthCode.authorize_url(client, params)
end
def get_token(client, params, headers) do
{code, params} = Keyword.pop(params, :code, client.params["code"])
unless code do
raise OAuth2.Error, reason: "Missing required key `code` for `#{inspect(__MODULE__)}`"
end
client
|> put_header("Accept", "application/json")
|> put_header("Content-Type", "application/x-www-form-urlencoded")
|> put_param(:code, code)
|> put_param(:grant_type, "authorization_code")
|> put_param(:access_type, "offline")
|> put_param(:client_id, client.client_id)
|> put_param(:redirect_uri, client.redirect_uri)
|> merge_params(params)
|> put_headers(headers)
end
defp resolve_values(list) do
for {key, value} <- list do
{key, resolve_value(value)}
end
end
defp resolve_value({m, f, a}) when is_atom(m) and is_atom(f), do: apply(m, f, a)
defp resolve_value(v), do: v
@doc """
Adds `authorization` header for ameritrade auth.
"""
def client_header(%OAuth2.Client{client_id: id} = client) do
put_header(client, "authorization", "Basic " <> Base.encode64(id))
end
end
# Source: lib/oauth.ex
defmodule ExUnit.ClusteredCase.Node.Manager do
@moduledoc false
require Logger
alias ExUnit.ClusteredCaseError
alias ExUnit.ClusteredCase.Utils
alias ExUnit.ClusteredCase.Node.Agent, as: NodeAgent
alias ExUnit.ClusteredCase.Node.Ports
defstruct [
:name,
:cookie,
:manager_name,
:agent_name,
:heart,
:boot_timeout,
:init_timeout,
:post_start_functions,
:erl_flags,
:env,
:config,
:port,
:capture_log,
:stdout,
:alive?
]
@doc """
Converts a given node name into the name of the associated manager process
"""
def name_of(name), do: Utils.nodename(name)
@doc """
Starts a new node and its corresponding management process
"""
@spec start_link(ExUnit.ClusteredCase.Node.node_opts()) :: {:ok, pid} | {:error, term}
def start_link(opts) do
:proc_lib.start_link(__MODULE__, :init, [self(), opts])
end
@doc """
Same as `start_link/1`, but does not link the process
"""
@spec start_nolink(ExUnit.ClusteredCase.Node.node_opts()) :: {:ok, pid} | {:error, term}
def start_nolink(opts) do
:proc_lib.start(__MODULE__, :init, [self(), opts])
end
@doc """
Instructs the manager to terminate the given node
"""
def stop(name), do: server_call(name, :stop)
@doc """
Instructs the manager to terminate the given node brutally.
"""
def kill(name), do: server_call(name, :kill)
@doc """
Returns the name of the node managed by the given process
"""
def name(name) when is_pid(name), do: server_call(name, :get_name)
@doc """
Returns the captured log output of the given node.
If the node was not configured to capture logs, this will be an empty string.
"""
def log(name) do
port = server_call(name, :get_port_pid)
Ports.get_captured_log(port)
end
@doc """
Determines if the given node is alive or dead
"""
def alive?(name) do
server_call(name, :is_alive)
rescue
ArgumentError ->
false
end
@doc """
Runs the given function on a node by spawning a process remotely with it
"""
def call(name, fun, opts \\ [])
def call(name, fun, opts) when is_function(fun) do
server_call(name, {:spawn_fun, fun, opts})
end
def call(name, {m, f, a}, opts), do: call(name, m, f, a, opts)
@doc """
Applies the given module/function/args on the given node and returns the result
"""
def call(name, m, f, a, opts \\ []) when is_atom(m) and is_atom(f) and is_list(a) do
server_call(name, {:apply, m, f, a, opts})
end
@doc """
Connects a node to other nodes in the given list
"""
def connect(name, nodes) when is_list(nodes) do
nodes =
for n <- nodes do
if is_pid(n) do
name(n)
else
Utils.nodename(n)
end
end
server_call(name, {:connect, nodes})
end
@doc """
Disconnects a node from other nodes in the given list
"""
def disconnect(name, nodes) when is_list(nodes) do
nodes =
for n <- nodes do
if is_pid(n) do
name(n)
else
Utils.nodename(n)
end
end
server_call(name, {:disconnect, nodes})
end
@doc false
def init(parent, opts) do
Process.flag(:trap_exit, true)
opts = to_node_opts(opts)
case register_name(opts.manager_name) do
{:error, reason} = err ->
:proc_lib.init_ack(parent, reason)
err
:ok ->
# Spawn node
with {:ok, port} <- Ports.open_link(opts),
opts = %{opts | port: port, alive?: true},
debug = :sys.debug_options([]),
{:ok, agent_pid} <- init_node(parent, opts),
:ok <- configure_node(parent, agent_pid, opts) do
:proc_lib.init_ack(parent, {:ok, self()})
handle_node_initialized(parent, debug, agent_pid, opts)
else
{:error, reason} ->
:proc_lib.init_ack(parent, reason)
end
end
end
defp register_name(name) do
try do
Process.register(self(), name)
:ok
rescue
_ ->
{:error, {:already_registered, Process.whereis(name)}}
end
end
defp init_node(parent, %{port: port, name: agent_node} = opts) do
boot_timeout = opts.boot_timeout
receive do
# If our parent dies, just exit
{:EXIT, ^parent, reason} ->
exit(reason)
# If the port dies, just exit
{:EXIT, ^port, reason} ->
exit(reason)
{^agent_node, _agent_pid, {:init_failed, err}} ->
msg =
if is_binary(err) do
err
else
"#{inspect(err)}"
end
Logger.error("""
Failed to boot node #{inspect(agent_node)}! More detail below:
#{msg}
""")
{:error, {:init_failed, err}}
{^agent_node, agent_pid, :node_booted} ->
# If we booted, send config overrides
send(agent_pid, {self(), :configure, opts.config})
{:ok, agent_pid}
after
boot_timeout ->
{:error, :boot_timeout}
end
end
defp configure_node(parent, agent_pid, %{port: port, name: agent_node} = opts) do
init_timeout = opts.init_timeout
receive do
{:EXIT, ^parent, reason} ->
exit(reason)
{:EXIT, ^port, reason} ->
exit(reason)
{^agent_node, ^agent_pid, :node_configured} ->
# At this point the node is ready for use
:ok
after
init_timeout ->
{:error, :init_timeout}
end
end
defp handle_node_initialized(parent, debug, agent_pid, opts) do
# If we are running coverage, make sure the managed node is included
if Process.whereis(:cover_server) do
cover_main_node = :cover.get_main_node()
:rpc.call(cover_main_node, :cover, :start, [opts.name])
end
# Start monitoring the node
:net_kernel.monitor_nodes(true, node_type: :all)
# Invoke all of the post-start functions
for fun <- opts.post_start_functions do
case fun do
{m, f, a} ->
:rpc.call(opts.name, m, f, a)
fun when is_function(fun, 0) ->
Node.spawn(opts.name, fun)
end
end
# Enter main loop
loop(parent, debug, agent_pid, opts)
end
defp loop(parent, debug, agent_pid, %{port: port, name: agent_node} = opts) do
heart? = opts.heart
alive? = opts.alive?
receive do
{:system, from, req} ->
:sys.handle_system_msg(req, from, parent, __MODULE__, debug, {agent_pid, opts})
{:EXIT, ^parent, reason} ->
exit(reason)
{:EXIT, ^port, reason} ->
exit(reason)
{:EXIT, _, _} ->
# Some child process terminated
loop(parent, debug, agent_pid, opts)
{:nodedown, ^agent_node, _} ->
loop(parent, debug, agent_pid, %{opts | alive?: false})
{:nodedown, _, _} ->
# ignore..
loop(parent, debug, agent_pid, opts)
{:nodeup, ^agent_node, _} ->
# Node was restarted by us or by init
with {:ok, agent_pid} <- init_node(parent, opts),
:ok <- configure_node(parent, agent_pid, opts) do
handle_node_initialized(parent, debug, agent_pid, %{opts | alive?: true})
else
{:error, reason} ->
exit(reason)
end
{:nodeup, _, _} ->
# ignore..
loop(parent, debug, agent_pid, opts)
{from, :is_alive} ->
send(from, {self(), alive?})
loop(parent, debug, agent_pid, opts)
{from, :get_name} ->
send(from, {self(), opts.name})
loop(parent, debug, agent_pid, opts)
{from, :get_port_pid} ->
send(from, {self(), port})
loop(parent, debug, agent_pid, opts)
{from, {:spawn_fun, fun, fun_opts}} when alive? ->
send(agent_pid, {self(), :spawn_fun, fun, fun_opts})
wait_for_fun(parent, debug, agent_pid, opts, from)
{from, {:apply, m, f, a, mfa_opts}} when alive? ->
send(agent_pid, {self(), :apply_fun, {m,f,a}, mfa_opts})
wait_for_fun(parent, debug, agent_pid, opts, from)
{from, {:connect, nodes}} when alive? ->
send(agent_pid, {self(), :connect, nodes})
wait_for_connected(parent, debug, agent_pid, opts, from, nodes)
{from, {:disconnect, nodes}} when alive? ->
send(agent_pid, {self(), :disconnect, nodes})
wait_for_disconnected(parent, debug, agent_pid, opts, from, nodes)
{from, stop} when stop in [:stop, :kill] ->
result = terminate_node(agent_pid, opts, brutal: stop == :kill)
send(from, {self(), result})
if heart? do
loop(parent, debug, agent_pid, %{opts | :alive? => false})
else
:ok
end
{from, _msg} when not alive? ->
send(from, {self(), {:error, :nodedown}})
loop(parent, debug, agent_pid, opts)
msg ->
Logger.warn("Unexpected message in #{__MODULE__}: #{inspect(msg)}")
loop(parent, debug, agent_pid, opts)
end
end
defp wait_for_fun(parent, debug, agent_pid, opts, from) do
receive do
{:EXIT, ^parent, reason} ->
exit(reason)
{_, ^agent_pid, reply} ->
send(from, {self(), reply})
loop(parent, debug, agent_pid, opts)
end
end
defp wait_for_connected(parent, debug, agent_pid, opts, from, []) do
send(from, {self(), :ok})
loop(parent, debug, agent_pid, opts)
end
defp wait_for_connected(parent, debug, agent_pid, opts, from, nodes) do
receive do
{:EXIT, ^parent, reason} ->
exit(reason)
{_, ^agent_pid, {:connected, n}} ->
wait_for_connected(parent, debug, agent_pid, opts, from, Enum.reject(nodes, &(&1 == n)))
{_, ^agent_pid, {:connect_failed, n, _reason}} ->
wait_for_connected(parent, debug, agent_pid, opts, from, Enum.reject(nodes, &(&1 == n)))
end
end
defp wait_for_disconnected(parent, debug, agent_pid, opts, from, []) do
send(from, {self(), :ok})
loop(parent, debug, agent_pid, opts)
end
defp wait_for_disconnected(parent, debug, agent_pid, opts, from, nodes) do
receive do
{:EXIT, ^parent, reason} ->
exit(reason)
{_, ^agent_pid, {:disconnected, n}} ->
wait_for_disconnected(
parent,
debug,
agent_pid,
opts,
from,
Enum.reject(nodes, &(&1 == n))
)
{_, ^agent_pid, {:disconnect_failed, n, _reason}} ->
wait_for_disconnected(
parent,
debug,
agent_pid,
opts,
from,
Enum.reject(nodes, &(&1 == n))
)
end
end
defp terminate_node(agent_pid, opts, terminate_opts) do
agent_node = opts.name
{cover?, main_cover_node} =
if Process.whereis(:cover_server) do
main_cover_node = :cover.get_main_node()
:rpc.call(main_cover_node, :cover, :flush, [agent_node])
{true, main_cover_node}
else
{false, nil}
end
send(agent_pid, {:terminate, terminate_opts})
receive do
{:nodedown, ^agent_node, _} ->
if cover? do
:rpc.call(main_cover_node, :cover, :stop, [agent_node])
end
:ok
after
30_000 ->
{:error, :shutdown_timeout}
end
end
# :sys callbacks
@doc false
def system_continue(parent, debug, {agent_pid, opts}) do
loop(parent, debug, agent_pid, opts)
end
@doc false
def system_get_state(state), do: {:ok, state}
@doc false
def system_replace_state(fun, state) do
new_state = fun.(state)
{:ok, new_state, new_state}
end
@doc false
def system_code_change(state, _mod, _old, _extra) do
{:ok, state}
end
@doc false
def system_terminate(reason, _parent, _debug, {_agent_pid, _opts}) do
reason
end
# Private
defp to_node_opts(opts) when is_list(opts) do
name =
case Keyword.get(opts, :name) do
nil ->
Utils.generate_name()
n ->
Utils.nodename(n)
end
cookie =
case Keyword.get(opts, :cookie, Node.get_cookie()) do
c when is_binary(c) ->
String.to_atom(c)
c when is_atom(c) ->
c
other ->
raise ClusteredCaseError,
"must provide valid cookie (atom or string) to #{__MODULE__}.start/1, got: #{
inspect(other)
}"
end
config = Keyword.get(opts, :config, [])
%__MODULE__{
name: name,
cookie: cookie,
manager_name: name,
agent_name: NodeAgent.name_of(),
heart: Keyword.get(opts, :heart, false),
boot_timeout: Keyword.get(opts, :boot_timeout, 2_000),
init_timeout: Keyword.get(opts, :init_timeout, 10_000),
post_start_functions: Keyword.get(opts, :post_start_functions, []),
erl_flags: to_port_args(name, cookie, Keyword.get(opts, :erl_flags, [])),
env: to_port_env(Keyword.get(opts, :env, [])),
config: config,
alive?: false,
capture_log: Keyword.get(opts, :capture_log, false),
stdout: Keyword.get(opts, :stdout, false)
}
end
defp to_port_env(env) when is_list(env),
do: to_port_env(env, [])
defp to_port_env([], acc), do: acc
defp to_port_env([{name, val} | rest], acc) when is_binary(name) and is_binary(val) do
to_port_env(rest, [{String.to_charlist(name), String.to_charlist(val)} | acc])
end
defp to_port_env([invalid | _], _) do
raise ClusteredCaseError,
"invalid env configuration, expected tuple of strings (name, value), got: #{
inspect(invalid)
}"
end
defp to_port_args(name, cookie, args) when is_list(args) do
# ["-detached",
base_args = [
"-noinput",
"-#{Utils.name_type()}",
"#{name}",
"-setcookie",
"#{cookie}",
"-id",
"#{name}",
"-loader",
"inet",
"-hosts",
"127.0.0.1",
"-s",
"#{NodeAgent}",
"start",
"#{Node.self()}"
]
code_paths =
:code.get_path()
|> Enum.flat_map(fn path -> ["-pa", path] end)
final_args =
code_paths
|> Enum.concat(base_args)
|> Enum.concat(args)
for a <- final_args do
if is_binary(a) do
String.to_charlist(a)
else
a
end
end
end
# Standardizes calls to the manager
defp server_call(pid, msg) when is_pid(pid) do
ref = Process.monitor(pid)
send(pid, {self(), msg})
receive do
{:DOWN, ^ref, _type, _pid, _info} ->
exit({:noproc, {__MODULE__, :server_call, [pid, msg]}})
{^pid, result} ->
Process.demonitor(ref, [:flush])
result
end
end
defp server_call(name, msg) do
name = Utils.nodename(name)
case Process.whereis(name) do
nil ->
exit({:noproc, {__MODULE__, :server_call, [name, msg]}})
pid ->
server_call(pid, msg)
end
end
end
# Source: lib/node/manager.ex
defmodule Coxir.API do
@moduledoc """
Entry-point to the Discord REST API.
"""
use Tesla, only: [], docs: false
alias Tesla.Env
alias Coxir.{Gateway, Token}
alias Coxir.API.Error
@typedoc """
The options that can be passed to `perform/5`.
If the `:as` option is present, the token of the given gateway will be used.
If no token is provided, one is expected to be configured as `:token` under the `:coxir` app.
"""
@type options :: [
as: Gateway.gateway() | none,
token: Token.t() | none
]
@typedoc """
The possible outcomes of `perform/4`.
"""
@type result :: :ok | {:ok, map} | {:ok, list(map)} | {:error, Error.t()}
adapter(Tesla.Adapter.Gun)
plug(Coxir.API.Headers)
plug(Tesla.Middleware.BaseUrl, "https://discord.com/api/v9")
plug(Tesla.Middleware.JSON)
plug(Tesla.Middleware.Retry)
plug(Coxir.API.RateLimiter)
@doc """
Performs a request to the API.
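A minimal usage sketch (the route and token here are illustrative placeholders):

    Coxir.API.perform(:get, "/users/@me", [], nil, token: "<token>")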
"""
@spec perform(Env.method(), Env.url(), Env.query(), Env.body(), options) :: result
def perform(method, path, query, body, options) do
case request!(method: method, url: path, query: query, body: body, opts: options) do
%{status: 204} ->
:ok
%{status: status, body: body} when status in [200, 201, 304] ->
{:ok, body}
%{status: status, body: body} ->
error = Error.cast(status, body)
{:error, error}
end
end
@doc """
Delegates to `perform/5` with `method` set to `:get`.
"""
@spec get(Env.url(), Env.query(), options) :: result
def get(path, query \\ [], options) do
perform(:get, path, query, nil, options)
end
@doc """
Delegates to `perform/5` with `method` set to `:post`.
"""
@spec post(Env.url(), Env.body(), options) :: result
def post(path, body \\ %{}, options) do
perform(:post, path, [], body, options)
end
@doc """
Delegates to `perform/5` with `method` set to `:put`.
"""
@spec put(Env.url(), Env.body(), options) :: result
def put(path, body \\ %{}, options) do
perform(:put, path, [], body, options)
end
@doc """
Delegates to `perform/5` with `method` set to `:patch`.
"""
@spec patch(Env.url(), Env.body(), options) :: result
def patch(path, body, options) do
perform(:patch, path, [], body, options)
end
@doc """
Delegates to `perform/5` with `method` set to `:delete`.
"""
@spec delete(Env.url(), options) :: result
def delete(path, options) do
perform(:delete, path, [], nil, options)
end
end
# Source: lib/coxir/api.ex
defmodule RobotSimulator do
defstruct dir: nil, pos: nil
@directions [:north, :east, :south, :west]
defguardp is_valid_direction(dir) when dir in @directions
defguardp is_valid_position(x, y) when is_number(x) and is_number(y)
@doc """
Create a Robot Simulator given an initial direction and position.
Valid directions are: `:north`, `:east`, `:south`, `:west`
"""
@spec create(direction :: atom, position :: {integer, integer}) :: any
def create(dir \\ :north, pos \\ {0, 0})
def create(dir, {x, y} = pos) when is_valid_direction(dir) and is_valid_position(x, y) do
%__MODULE__{dir: dir, pos: pos}
end
def create(dir, _) when not is_valid_direction(dir), do: {:error, "invalid direction"}
def create(_, _), do: {:error, "invalid position"}
@doc """
Simulate the robot's movement given a string of instructions.
Valid instructions are: "R" (turn right), "L" (turn left), and "A" (advance)
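## Examples

    iex> robot = RobotSimulator.create(:north, {0, 0})
    iex> robot |> RobotSimulator.simulate("RAA") |> RobotSimulator.position()
    {2, 0}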
"""
@spec simulate(robot :: any, instructions :: String.t()) :: any
def simulate(robot, instructions) do
instructions
|> String.split("", trim: true)
|> Enum.map(fn
"R" -> :turn_right
"L" -> :turn_left
"A" -> :advance
_ -> throw({:error, "invalid instruction"})
end)
|> Enum.reduce(robot, &execute/2)
catch
error -> error
end
defp execute(:turn_right, %__MODULE__{dir: dir} = robot) do
index = @directions |> Enum.find_index(&(&1 == dir))
%__MODULE__{robot | dir: Enum.at(@directions, index + 1, :north)}
end
defp execute(:turn_left, %__MODULE__{dir: dir} = robot) do
index = @directions |> Enum.find_index(&(&1 == dir))
%__MODULE__{robot | dir: Enum.at(@directions, index - 1)}
end
defp execute(:advance, %__MODULE__{pos: {x, y}} = robot) do
case robot.dir do
:north -> %__MODULE__{robot | pos: {x, y + 1}}
:south -> %__MODULE__{robot | pos: {x, y - 1}}
:east -> %__MODULE__{robot | pos: {x + 1, y}}
:west -> %__MODULE__{robot | pos: {x - 1, y}}
end
end
@doc """
Return the robot's direction.
Valid directions are: `:north`, `:east`, `:south`, `:west`
"""
@spec direction(robot :: any) :: atom
def direction(%__MODULE__{dir: dir}), do: dir
@doc """
Return the robot's position.
"""
@spec position(robot :: any) :: {integer, integer}
def position(%__MODULE__{pos: pos}), do: pos
end
# Source: exercism/elixir/robot-simulator/lib/robot_simulator.ex
defmodule Day3 do
def gen_grid(x, y) do
zeroed_row = 0..y |> Enum.reduce([], fn _, acc -> [0 | acc] end)
Enum.reduce(0..x, %{}, fn r, acc ->
Map.put_new(acc, r, List.to_tuple(zeroed_row))
end)
end
@doc """
Finds the ID of the only claim that does not overlap any other claim.
## Examples
iex> claims = String.split("#1 @ 1,3: 4x4|#2 @ 3,1: 4x4|#3 @ 5,5: 2x2", "|")
iex> Day3.unique_id(Day3.gen_grid(8, 8), claims)
3
"""
def unique_id(grid \\ gen_grid(8, 8), claims) do
c = Enum.map(claims, &parse_claim/1)
g = Enum.reduce(c, grid, fn x, acc -> update_grid(x, acc) end)
t = Enum.find(c, fn x -> is_unique_id(x, g) end)
elem(t, 0)
end
defp is_unique_id({_, x, y, w, h}, g) do
r =
Enum.reduce(y..(y + (h - 1)), [], fn i, acc ->
tuple = Map.get(g, i)
list = Enum.reduce(x..(x + (w - 1)), [], fn t, a -> [elem(tuple, t) | a] end)
[list | acc]
end)
answer =
List.flatten(r)
|> Enum.any?(fn j -> j > 1 end)
!answer
end
@doc """
Over claimed inches
## Examples
iex> claims = String.split("#1 @ 1,3: 4x4|#2 @ 3,1: 4x4|#3 @ 5,5: 2x2", "|")
iex> Day3.over_claimed(Day3.gen_grid(8, 8), claims)
4
"""
def over_claimed(grid \\ gen_grid(8, 8), claims) do
claims
|> Enum.map(&parse_claim/1)
|> Enum.reduce(grid, fn x, acc -> update_grid(x, acc) end)
|> Enum.reduce(0, fn x, acc ->
acc + count_overclaimed(elem(x, 1))
end)
end
defp count_overclaimed(tuple) do
count_overclaimed(tuple, 0, tuple_size(tuple), 0)
end
defp count_overclaimed(_, _, 0, count) do
count
end
defp count_overclaimed(tuple, index, length, count) do
if elem(tuple, index) > 1 do
count_overclaimed(tuple, index + 1, length - 1, count + 1)
else
count_overclaimed(tuple, index + 1, length - 1, count)
end
end
defp update_grid({_, _, _, _, 0}, grid) do
grid
end
defp update_grid({id, x, y, w, h}, grid) do
update_grid(
{id, x, y + 1, w, h - 1},
Map.update!(grid, y, &update_tuple(&1, x, x + w))
)
end
defp update_tuple(tuple, start, stop) do
if start == stop do
tuple
else
p = Kernel.elem(tuple, start)
update_tuple(Kernel.put_elem(tuple, start, p + 1), start + 1, stop)
end
end
defp parse_claim(claim) do
[id, x, y, w, h] = :binary.split(claim, ["#", " @ ", ",", ": ", "x"], [:trim_all, :global])
{:erlang.binary_to_integer(id), :erlang.binary_to_integer(x), :erlang.binary_to_integer(y),
:erlang.binary_to_integer(w), :erlang.binary_to_integer(h)}
end
end
# Source: lib/day3.ex
defmodule Infer.Image do
@moduledoc """
Image type matchers based on the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming))
"""
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a jpeg.
## Examples
iex> binary = File.read!("test/images/sample.jpg")
iex> Infer.Image.jpeg?(binary)
true
iex> binary = File.read!("test/images/sample.jp2")
iex> Infer.Image.jpeg?(binary)
false
"""
@spec jpeg?(binary()) :: boolean()
def jpeg?(<<0xFF, 0xD8, 0xFF>> <> _), do: true
def jpeg?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a jpeg2000.
## Examples
iex> binary = File.read!("test/images/sample.jp2")
iex> Infer.Image.jpeg2000?(binary)
true
iex> binary = File.read!("test/images/sample.jpg")
iex> Infer.Image.jpeg2000?(binary)
false
"""
@spec jpeg2000?(binary()) :: boolean()
def jpeg2000?(<<0x0, 0x0, 0x0, 0xC, 0x6A, 0x50, 0x20, 0x20, 0xD, 0xA, 0x87, 0xA, 0x0, _rest::binary>>), do: true
def jpeg2000?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a png.
## Examples
iex> binary = File.read!("test/images/sample.png")
iex> Infer.Image.png?(binary)
true
iex> binary = File.read!("test/images/sample.jpg")
iex> Infer.Image.png?(binary)
false
"""
@spec png?(binary()) :: boolean()
def png?(<<0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, _rest::binary>>), do: true
def png?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a gif.
## Examples
iex> binary = File.read!("test/images/sample.gif")
iex> Infer.Image.gif?(binary)
true
iex> binary = File.read!("test/images/sample.jpg")
iex> Infer.Image.gif?(binary)
false
"""
@spec gif?(binary()) :: boolean()
def gif?(<<0x47, 0x49, 0x46, _rest::binary>>), do: true
def gif?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a webp.
## Examples
iex> binary = File.read!("test/images/sample.webp")
iex> Infer.Image.webp?(binary)
true
iex> binary = File.read!("test/images/sample.gif")
iex> Infer.Image.webp?(binary)
false
"""
@spec webp?(binary()) :: boolean()
def webp?(<<_head::binary-size(8), 0x57, 0x45, 0x42, 0x50, _rest::binary>>), do: true
def webp?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a cr2.
## Examples
iex> binary = File.read!("test/images/sample.cr2")
iex> Infer.Image.cr2?(binary)
true
iex> binary = File.read!("test/images/sample.tiff")
iex> Infer.Image.cr2?(binary)
false
"""
@spec cr2?(binary()) :: boolean()
def cr2?(<<0x49, 0x49, 0x2A, 0x0, _data::binary-size(4), 0x43, 0x52, 0x02, _rest::binary>>), do: true
def cr2?(<<0x4D, 0x4D, 0x0, 0x2A, _data::binary-size(4), 0x43, 0x52, 0x02, _rest::binary>>), do: true
def cr2?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a tiff.
## Examples
iex> binary = File.read!("test/images/sample.tiff")
iex> Infer.Image.tiff?(binary)
true
iex> binary = File.read!("test/images/sample.cr2")
iex> Infer.Image.tiff?(binary)
false
"""
@spec tiff?(binary()) :: boolean()
# A TIFF shares its leading magic bytes with CR2, so defer to cr2?/1 to tell them apart.
# (The original guard compared integers against binaries, which is vacuously true in a guard.)
def tiff?(<<0x49, 0x49, 0x2A, 0x0, _rest::binary>> = binary), do: !cr2?(binary)
def tiff?(<<0x4D, 0x4D, 0x0, 0x2A, _rest::binary>> = binary), do: !cr2?(binary)
def tiff?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a bmp.
## Examples
iex> binary = File.read!("test/images/sample.bmp")
iex> Infer.Image.bmp?(binary)
true
iex> binary = File.read!("test/images/sample.png")
iex> Infer.Image.bmp?(binary)
false
"""
@spec bmp?(binary()) :: boolean()
def bmp?(<<0x42, 0x4D, _rest::binary>>), do: true
def bmp?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a jxr.
## Examples
iex> binary = File.read!("test/images/sample.jxr")
iex> Infer.Image.jxr?(binary)
true
iex> binary = File.read!("test/images/sample.png")
iex> Infer.Image.jxr?(binary)
false
"""
@spec jxr?(binary()) :: boolean()
def jxr?(<<0x49, 0x49, 0xBC, _rest::binary>>), do: true
def jxr?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a psd.
## Examples
iex> binary = File.read!("test/images/sample.psd")
iex> Infer.Image.psd?(binary)
true
iex> binary = File.read!("test/images/sample.png")
iex> Infer.Image.psd?(binary)
false
"""
@spec psd?(binary()) :: boolean()
def psd?(<<0x38, 0x42, 0x50, 0x53, _rest::binary>>), do: true
def psd?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a ico.
## Examples
iex> binary = File.read!("test/images/sample.ico")
iex> Infer.Image.ico?(binary)
true
iex> binary = File.read!("test/images/sample.png")
iex> Infer.Image.ico?(binary)
false
"""
@spec ico?(binary()) :: boolean()
def ico?(<<0x00, 0x00, 0x01, 0x00, _rest::binary>>), do: true
def ico?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a heif.
## Examples
iex> binary = File.read!("test/images/sample.heif")
iex> Infer.Image.heif?(binary)
true
iex> binary = File.read!("test/images/sample.avif")
iex> Infer.Image.heif?(binary)
false
"""
@spec heif?(binary()) :: boolean()
def heif?(<<ftyp_length::unsigned-integer-size(32), "ftyp", _::binary>> = binary) when ftyp_length >= 16 do
  case get_ftyp(binary) do
    {"heic", _minor, _compatible} -> true
    {major, _minor, compatible} when major in ["mif1", "msf1"] -> 'heic' in compatible
_ -> false
end
end
def heif?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a avif.
## Examples
iex> binary = File.read!("test/images/sample.avif")
iex> Infer.Image.avif?(binary)
true
iex> binary = File.read!("test/images/sample.heif")
iex> Infer.Image.avif?(binary)
false
"""
@spec avif?(binary()) :: boolean()
def avif?(<<ftyp_length::unsigned-integer-size(32), "ftyp", _::binary>> = binary) when ftyp_length >= 16 do
  case get_ftyp(binary) do
    {major, _minor, _compatible} when major in ["avif", "avis"] -> true
{_major, _minor, compatible} -> 'avif' in compatible || 'avis' in compatible
_ -> false
end
end
def avif?(_binary), do: false
defp get_ftyp(<<ftyp_length::unsigned-integer-size(32), "ftyp", major::binary-size(4), minor::binary-size(4), rest::binary>>) do
  # The box length counts the 16 header bytes (size, "ftyp", major, minor), so the
  # compatible brands that follow occupy (length - 16) / 4 four-byte entries.
  compatible =
    rest
    |> :binary.bin_to_list()
    |> Stream.chunk_every(4)
    |> Enum.take(max(div(ftyp_length - 16, 4), 0))
{major, minor, compatible}
end
end
# Source: lib/matchers/image.ex
defmodule PassiveSupport.Logging do
@moduledoc """
Helper functions for logging and inspecting.
These functions serve two primary purposes and one subtle but kinda nice purpose:
1. To keep outputs colorized even when they're sent to Logger,
2. To keep `IO.inspect` and `Kernel.inspect` from truncating away data
you might need if you intend, e.g., to save a function's return
as fixture data for debugging purposes
3. Not needing to temporarily `require Logger` when you're debugging a module
"""
import Kernel, except: [inspect: 1, inspect: 2]
@doc """
Sensible defaults for seeing a value in its entirety through `IO.inspect` and `Kernel.inspect`
"""
@spec inspect_opts :: [printable_limit: :infinity, limit: :infinity, width: 170, pretty: true]
def inspect_opts do
[
printable_limit: :infinity,
limit: :infinity,
width: 170,
pretty: true
]
end
@doc """
Pretty good, pretty pretty, color choices for inspected data
"""
@spec coloration_opts ::
{:syntax_colors, [number: :yellow, string: :green, list: :light_magenta,
map: :light_cyan, atom: :light_blue, tuple: :"[:black_background, :white]",
regex: :"[:cyan_background, :light_yellow]"
]}
def coloration_opts do
{:syntax_colors, [number: :yellow, string: :green, list: :light_magenta,
map: :light_cyan, atom: :light_blue, tuple: [:black_background, :white],
regex: [:cyan_background, :light_yellow]
]}
end
@doc """
Sensible option defaults for logging information to stdout
"""
@spec logger_opts :: [
syntax_colors: [number: :yellow, string: :green, list: :light_magenta,
map: :light_cyan, atom: :light_blue, tuple: :"[:black_background, :white]",
regex: :"[:cyan_background, :light_yellow]"
],
printable_limit: :infinity, limit: :infinity, width: 170, pretty: true
]
def logger_opts do
[coloration_opts() | inspect_opts()]
end
@doc """
Calls `Kernel.inspect/2` on `item`, using `logger_opts/0` when `to_log` is `true` and
`inspect_opts/0` otherwise.
If you wish to overwrite some portion of the `opts` sent to `inspect`,
consider calling `Kernel.inspect` directly and passing `logger_opts() ++ overwrites` as your `opts`.
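For example:

    iex> PassiveSupport.Logging.inspect(%{a: 1})
    "%{a: 1}"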
"""
def inspect(item, to_log \\ false) do
Kernel.inspect(item, if(to_log, do: logger_opts(), else: inspect_opts()))
end
require Logger
def info(item \\ nil, label \\ nil) do
if label, do: Logger.info([label, ":"])
if item, do: Logger.info(inspect(item))
item
end
def debug(item \\ nil, label \\ nil) do
if label, do: Logger.debug([label, ":"])
if item, do: Logger.debug(inspect(item))
item
end
def warn(item \\ nil, label \\ nil) do
if label, do: Logger.warn([label, ":"])
if item, do: Logger.warn(inspect(item))
item
end
def alert(item \\ nil, label \\ nil) do
if label, do: Logger.alert([label, ":"])
if item, do: Logger.alert(inspect(item))
item
end
def critical(item \\ nil, label \\ nil) do
if label, do: Logger.critical([label, ":"])
if item, do: Logger.critical(inspect(item))
item
end
def emergency(item \\ nil, label \\ nil) do
if label, do: Logger.emergency([label, ":"])
if item, do: Logger.emergency(inspect(item))
item
end
def error(item \\ nil, label \\ nil) do
if label, do: Logger.error([label, ":"])
if item, do: Logger.error(inspect(item))
item
end
def notice(item \\ nil, label \\ nil) do
if label, do: Logger.notice([label, ":"])
if item, do: Logger.notice(inspect(item))
item
end
end
# Source: lib/passive_support/ext/logging.ex
defmodule Membrane.AudioMixerBin do
@moduledoc """
Bin element distributing a mixing job between multiple `Membrane.AudioMixer` elements.
A tree of AudioMixers is created according to `max_inputs_per_node` parameter:
- if number of input tracks is smaller than `max_inputs_per_node`, only one AudioMixer element is created for the entire job
- if there are more input tracks than `max_inputs_per_node`, there are created enough mixers so that each mixer has at most
`max_inputs_per_node` inputs - outputs from those mixers are then mixed again following the same rules -
another level of mixers is created having enough mixers so that each mixer on this level has at most
`max_inputs_per_node` inputs (those are now the outputs of the previous level mixers).
Levels are created until only one mixer in the level is needed - output from this mixer is the final mixed track.
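For example, with 5 input tracks and `max_inputs_per_node: 2`, level 0 gets `ceil(5 / 2) = 3` mixers,
level 1 gets `ceil(3 / 2) = 2` mixers, and level 2 holds the single final mixer whose output is the mixed track.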
The bin allows specifying options for `Membrane.AudioMixer`, which are applied to all created AudioMixers.
Recommended for mixing jobs with many inputs.
"""
use Membrane.Bin
use Bunch
require Membrane.Logger
alias Membrane.{Pad, ParentSpec, AudioMixer}
alias Membrane.Caps.Audio.Raw
alias Membrane.Caps.Matcher
@supported_caps {Raw,
format: Matcher.one_of([:s8, :s16le, :s16be, :s24le, :s24be, :s32le, :s32be])}
def_options max_inputs_per_node: [
type: :int,
description: """
The maximum number of inputs to a single mixer in the mixers tree. Must be at least 2.
""",
default: 10
],
mixer_options: [
spec: AudioMixer.t(),
description: """
The options that would be passed to each created AudioMixer.
""",
default: %AudioMixer{}
]
def_input_pad :input,
mode: :pull,
availability: :on_request,
demand_unit: :bytes,
caps: @supported_caps,
options: [
offset: [
spec: Time.t(),
default: 0,
description: "Offset of the input audio at the pad."
]
]
def_output_pad :output,
mode: :pull,
demand_unit: :bytes,
availability: :always,
caps: Raw
@impl true
def handle_init(options) do
state =
options
|> Map.from_struct()
|> Map.put(:inputs, 0)
{{:ok, spec: %ParentSpec{}}, state}
end
@impl true
def handle_pad_added(pad_ref, %{playback_state: :stopped} = ctx, %{inputs: inputs} = state) do
%Pad.Data{options: %{offset: offset}} = ctx.pads[pad_ref]
{children, links} = link_new_input(pad_ref, offset, state)
{{:ok, spec: %ParentSpec{children: children, links: links}}, %{state | inputs: inputs + 1}}
end
def handle_pad_added(_pad_ref, %{playback_state: playback_state}, _state)
when playback_state != :stopped do
raise("""
All pads should be added before starting the #{__MODULE__}.
Pad added event received in playback state #{playback_state}.
""")
end
@impl true
def handle_stopped_to_prepared(_context, state) do
{children, links} = create_mixers_tree(state)
{{:ok, spec: %ParentSpec{children: children, links: links}}, state}
end
# Link new input to the correct mixer. Creates the mixer if it doesn't exist.
defp link_new_input(pad_ref, offset, state) do
mixer_idx = div(state.inputs, state.max_inputs_per_node)
create_new_mixer = rem(state.inputs, state.max_inputs_per_node) == 0
children =
if create_new_mixer do
[{"mixer_0_#{mixer_idx}", state.mixer_options}]
else
[]
end
link =
link_bin_input(pad_ref)
|> via_in(:input, options: [offset: offset])
|> to("mixer_0_#{mixer_idx}")
{children, [link]}
end
# Create mixers and links between them. `levels` of the mixers' tree are labeled starting from 0
# and counted from the leaves to the root, where one final mixer (root) has the highest level.
# Level 0 mixers where created during adding input pads, so only mixers starting from level 1 are created now.
defp create_mixers_tree(state, level \\ 1, acc \\ {[], []}, current_level_inputs \\ nil)
defp create_mixers_tree(state, 1, {[], []}, nil) do
first_level_mixers = ceil(state.inputs / state.max_inputs_per_node)
create_mixers_tree(state, 1, {[], []}, first_level_mixers)
end
# end case - link one final mixer to bin output
defp create_mixers_tree(_state, level, {children, links}, 1) do
last_mixer_name = "mixer_#{level - 1}_#{0}"
links = [link(last_mixer_name) |> to_bin_output() | links]
{List.flatten(children), List.flatten(links)}
end
defp create_mixers_tree(state, level, {children, links}, current_level_inputs) do
n_mixers = ceil(current_level_inputs / state.max_inputs_per_node)
# create current level of mixers
new_children =
0..(n_mixers - 1)
|> Enum.map(fn i ->
{"mixer_#{level}_#{i}", state.mixer_options}
end)
# link current mixers with mixers from previous level
new_links =
0..(current_level_inputs - 1)
|> Enum.map(fn i ->
parent = div(i, state.max_inputs_per_node)
link("mixer_#{level - 1}_#{i}")
|> to("mixer_#{level}_#{parent}")
end)
create_mixers_tree(
state,
level + 1,
{[new_children | children], [new_links | links]},
n_mixers
)
end
end
# Source: lib/membrane_audio_mixer_bin.ex
defmodule Membrane.MP4.Container do
@moduledoc """
Module for parsing and serializing MP4 files.
Based on the MP4 structure specification from `#{inspect(__MODULE__)}.Schema`.
"""
use Bunch
alias __MODULE__.{ParseHelper, Schema, SerializeHelper}
@schema Schema.schema()
@type box_name_t :: atom
@type field_name_t :: atom
@type fields_t :: %{field_name_t => term | [term] | fields_t()}
@type t :: [{box_name_t, %{content: binary} | %{fields: fields_t, children: t}}]
@type parse_error_context_t :: [
{:box, box_name_t}
| {:field, field_name_t}
| {:data, bitstring}
| {:reason, :box_header | {:box_size, header: pos_integer, actual: pos_integer}}
]
@type serialize_error_context_t :: [{:box, box_name_t} | {:field, field_name_t}]
@doc """
Parses binary data to MP4 according to `#{inspect(Schema)}.schema/0`.
"""
@spec parse(binary) :: {:ok, t} | {:error, parse_error_context_t}
def parse(data) do
parse(data, @schema)
end
@doc """
Parses binary data to MP4 according to a custom schema.
"""
@spec parse(binary, Schema.t()) :: {:ok, t} | {:error, parse_error_context_t}
def parse(data, schema) do
ParseHelper.parse_boxes(data, schema, [])
end
@doc """
Same as `parse/1`, raises on error.
"""
@spec parse!(binary) :: t
def parse!(data) do
parse!(data, @schema)
end
@doc """
Same as `parse/2`, raises on error.
"""
@spec parse!(binary, Schema.t()) :: t
def parse!(data, schema) do
case ParseHelper.parse_boxes(data, schema, []) do
{:ok, mp4} ->
mp4
{:error, context} ->
raise """
Error parsing MP4
box: #{Keyword.get_values(context, :box) |> Enum.join(" / ")}
field: #{Keyword.get_values(context, :field) |> Enum.join(" / ")}
data: #{Keyword.get(context, :data) |> inspect()}
reason: #{Keyword.get(context, :reason) |> inspect(pretty: true)}
"""
end
end
@doc """
Serializes MP4 to a binary according to `#{inspect(Schema)}.schema/0`.
"""
@spec serialize(t) :: {:ok, binary} | {:error, serialize_error_context_t}
def serialize(mp4) do
serialize(mp4, @schema)
end
@doc """
Serializes MP4 to a binary according to a custom schema.
"""
@spec serialize(t, Schema.t()) :: {:ok, binary} | {:error, serialize_error_context_t}
def serialize(mp4, schema) do
SerializeHelper.serialize_boxes(mp4, schema)
end
@doc """
Same as `serialize/1`, raises on error
"""
@spec serialize!(t) :: binary
def serialize!(mp4) do
serialize!(mp4, @schema)
end
@doc """
Same as `serialize/2`, raises on error
"""
@spec serialize!(t, Schema.t()) :: binary
def serialize!(mp4, schema) do
case SerializeHelper.serialize_boxes(mp4, schema) do
{:ok, data} ->
data
{:error, context} ->
box = Keyword.get_values(context, :box)
raise """
Error serializing MP4
box: #{Enum.join(box, " / ")}
field: #{Keyword.get_values(context, :field) |> Enum.join(" / ")}
box contents:
#{get_box(mp4, box) |> inspect(pretty: true)}
"""
end
end
@doc """
Maps a path in the MP4 box tree into sequence of keys under which that
box resides in MP4.
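For example:

    iex> Membrane.MP4.Container.box_path([:moov, :trak])
    [:moov, :children, :trak]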
"""
@spec box_path(box_name_t | [box_name_t]) :: [atom]
def box_path(path) do
path |> Bunch.listify() |> Enum.flat_map(&[:children, &1]) |> Enum.drop(1)
end
@doc """
Gets a box from a given path in a parsed MP4.
"""
@spec get_box(t, box_name_t | [box_name_t]) :: t
def get_box(mp4, path) do
Bunch.Access.get_in(mp4, box_path(path))
end
@doc """
Updates a box at a given path in a parsed MP4.
If `parameter_path` is set, a parameter within a box is updated.
"""
@spec update_box(t, box_name_t | [box_name_t], [atom], (term -> term)) :: t
def update_box(mp4, path, parameter_path \\ [], f) do
Bunch.Access.update_in(mp4, box_path(path) ++ Bunch.listify(parameter_path), f)
end
end
# Source: lib/membrane_mp4/container.ex
defmodule Saucexages.Util.Binary do
@moduledoc false
## General functions for working with Elixir/Erlang binaries.
@doc """
Pads a binary with the provided `padding` at the end, repeating the padding until `count` bytes is reached.
If `count` is larger than the size of the existing binary, padding is applied until `count` bytes is reached.
If the binary is already longer than the count, no padding will be applied.
Padding will be applied 1 byte at a time, meaning that any padding provided greater than 1 byte may only be partially applied if the total size is reached first.
## Examples
iex> pad_trailing_bytes(<<1, 2, 3>>, 6, <<6>>)
<<1, 2, 3, 6, 6, 6>>
iex> pad_trailing_bytes(<<1, 2, 3>>, 2, <<6>>)
<<1, 2, 3>>
"""
@spec pad_trailing_bytes(binary(), pos_integer(), binary()) :: binary()
def pad_trailing_bytes(bin, count, padding) when is_binary(bin) and is_integer(count) and count >= 0 do
pad_bytes(bin, :trailing, count, padding)
end
@doc """
Pads a binary with the provided `padding` at the beginning, repeating the padding until `count` bytes is reached.
If `count` is larger than the size of the existing binary, padding is applied until `count` bytes is reached.
If the binary is already longer than the count, no padding will be applied.
Padding will be applied 1 byte at a time, meaning that any padding provided greater than 1 byte may only be partially applied if the total size is reached first.
## Examples
iex> pad_leading_bytes(<<1, 2, 3>>, 6, <<6>>)
<<6, 6, 6, 1, 2, 3>>
iex> pad_leading_bytes(<<1, 2, 3>>, 2, <<6>>)
<<1, 2, 3>>
"""
@spec pad_leading_bytes(binary(), pos_integer(), binary()) :: binary()
def pad_leading_bytes(bin, count, padding) when is_binary(bin) and is_integer(count) and count >= 0 do
pad_bytes(bin, :leading, count, padding)
end
@doc """
Creates a binary of a fixed size according to `count` padded with `padding`.
Pads a binary with the provided `padding` at the end, repeating the padding until `count` bytes is reached if padding is required.
If truncation is required, any bytes after `count` will be truncated.
## Examples
iex> pad_truncate(<<1, 2, 3>>, 6, <<6>>)
<<1, 2, 3, 6, 6, 6>>
iex> pad_truncate(<<1, 2, 3>>, 5, <<6, 6, 6>>)
<<1, 2, 3, 6, 6>>
iex> pad_truncate(<<1, 2, 3>>, 2, <<6, 6, 6>>)
<<1, 2>>
"""
@spec pad_truncate(binary(), pos_integer(), binary()) :: binary()
def pad_truncate(bin, count, padding) when is_binary(bin) and is_integer(count) and count >= 0 do
maybe_padded_bin = pad_trailing_bytes(bin, count, padding)
<<maybe_padded_bin :: binary - size(count)>>
end
#TODO: benchmark these alternates
#pad_trailing_bytes(bin, count, padding) |> :binary.part(0, count)
# bin_size = byte_size(string)
# cond do
# bin_size == count ->
# string
# bin_size < count ->
# pad_trailing_bytes(string, count, padding)
# bin_size > count ->
# <<string::binary-size(count)>>
# end
@doc """
Replaces a binary at a specific position within a binary with a new sub-binary.
The new sub-binary should not cause the binary to grow larger than the existing binary.
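## Examples

    iex> replace_binary_at(<<1, 2, 3, 4>>, 1, <<9, 9>>)
    <<1, 9, 9, 4>>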
"""
@spec replace_binary_at(binary(), non_neg_integer(), binary()) :: binary()
def replace_binary_at(bin, position, value) when is_binary(bin) and is_integer(position) and position >= 0 and is_binary(value) and position < byte_size(bin) and (byte_size(value) + position) <= byte_size(bin) do
bin_size = byte_size(bin)
value_size = byte_size(value)
remaining_size = bin_size - position - value_size
<<start :: binary - size(position), _ :: binary - size(value_size), rest :: binary - size(remaining_size)>> = bin
<<start :: binary - size(position), value :: binary - size(value_size), rest :: binary - size(remaining_size)>>
end
def replace_binary_at(_bin, _position, _value) do
raise ArgumentError, "The replacement binary size and position should not cause the binary to grow larger than the existing binary."
end
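# Builds the filler binary one byte at a time, cycling through `padding` until the
# requested number of bytes has been emitted.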
defp build_filler(bin, 0, _remaining_padding, _padding) when is_binary(bin) do
bin
end
defp build_filler(bin, remaining_count, <<pad_byte :: binary - size(1), rest :: binary>>, padding) when is_binary(bin) do
build_filler(<<bin :: binary, pad_byte :: binary - size(1)>>, remaining_count - 1, rest, padding)
end
defp build_filler(bin, remaining_count, <<>>, padding) do
build_filler(bin, remaining_count, padding, padding)
end
defp pad_bytes(bin, kind, count, padding) when is_binary(bin) and is_integer(count) and count >= 0 do
bin_size = byte_size(bin)
if bin_size >= count do
bin
else
filler = build_filler(<<>>, count - bin_size, padding, padding)
case kind do
:leading -> <<filler :: binary, bin :: binary>>
:trailing -> <<bin :: binary, filler :: binary>>
end
end
end
end
# Source: lib/saucexages/util/binary.ex
defmodule Crux.Structs.Channel do
@moduledoc """
Represents a Discord [Channel Object](https://discordapp.com/developers/docs/resources/channel#channel-object-channel-structure).
List of where every property can be present:
| Property | Text (0) | DM (1) | Voice (2) | Group (3) | Category (4) | News (5) |
| :-------------------: | :------: | :------------------: | :-------: | :-------: | :----------: | :------: |
| application_id | no | no | no | yes | no | no |
| bitrate | no | no | yes | no | no | no |
| guild_id | yes | no | yes | no | yes | yes |
| icon | no | no | no | yes | no | no |
| id | yes | yes | yes | yes | yes | yes |
| last_message_id | yes | yes | no | yes | no | yes |
| last_pin_timestamp | yes | yes | no | yes | no | yes |
| name | yes | no | yes | yes | yes | yes |
| nsfw | yes | no | no | no | no | yes |
| owner_id | no | no | no | yes | no | no |
| parent_id | yes | no | yes | no | no | yes |
| permission_overwrites | yes | no | yes | no | yes | yes |
| position | yes | no | yes | no | yes | yes |
| rate_limit_per_user | yes | no | no | no | no | no |
| recipients | no | yes<br>(One Element) | no | yes | no | no |
| topic | yes | no | yes | no | yes | yes |
| type | `0` | `1` | `2` | `3` | `4` | `5` |
| user_limit | no | no | yes | no | no | no |
Differences opposed to the Discord API Object:
- `:recipients` is a MapSet of user ids
"""
@behaviour Crux.Structs
alias Crux.Structs.{Overwrite, Util}
require Util
Util.modulesince("0.1.0")
defstruct(
application_id: nil,
bitrate: nil,
guild_id: nil,
icon: nil,
id: nil,
last_message_id: nil,
last_pin_timestamp: nil,
name: nil,
nsfw: nil,
owner_id: nil,
parent_id: nil,
permission_overwrites: %{},
position: nil,
rate_limit_per_user: nil,
recipients: %MapSet{},
topic: nil,
type: nil,
user_limit: nil
)
Util.typesince("0.1.0")
@type t :: %__MODULE__{
application_id: Crux.Rest.snowflake(),
bitrate: integer(),
guild_id: Crux.Rest.snowflake(),
icon: String.t(),
id: Crux.Rest.snowflake(),
last_message_id: Crux.Rest.snowflake(),
last_pin_timestamp: String.t(),
name: String.t(),
nsfw: boolean(),
owner_id: Crux.Rest.snowflake(),
parent_id: Crux.Rest.snowflake(),
permission_overwrites: %{optional(Crux.Rest.snowflake()) => Overwrite.t()},
position: integer(),
rate_limit_per_user: integer(),
recipients: MapSet.t(Crux.Rest.snowflake()),
topic: String.t(),
type: integer(),
user_limit: non_neg_integer()
}
@doc """
Creates a `Crux.Structs.Channel` struct from raw data.
> Automatically invoked by `Crux.Structs.create/2`
"""
@spec create(data :: map()) :: t()
Util.since("0.1.0")
def create(data) do
channel =
data
|> Util.atomify()
|> Map.update!(:id, &Util.id_to_int/1)
|> Map.update(:guild_id, nil, &Util.id_to_int/1)
|> Map.update(:owner_id, nil, &Util.id_to_int/1)
|> Map.update(:last_message_id, nil, &Util.id_to_int/1)
|> Map.update(:application_id, nil, &Util.id_to_int/1)
|> Map.update(:parent_id, nil, &Util.id_to_int/1)
|> Map.update(:permission_overwrites, %{}, &Util.raw_data_to_map(&1, Overwrite))
|> Map.update(:recipients, %MapSet{}, &MapSet.new(&1, Util.map_to_id()))
struct(__MODULE__, channel)
end
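# A hedged usage sketch (hypothetical raw payload, normally received from the API):
#
#     %{"id" => "316880197314019329", "type" => 0, "name" => "general"}
#     |> Crux.Structs.Channel.create()
#     #=> %Crux.Structs.Channel{id: 316880197314019329, type: 0, name: "general", ...}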
@doc ~S"""
Converts a `Crux.Structs.Channel` into its discord mention format.
## Example
```elixir
iex> %Crux.Structs.Channel{id: 316880197314019329}
...> |> Crux.Structs.Channel.to_mention()
"<#316880197314019329>"
```
"""
@spec to_mention(channel :: Crux.Structs.Channel.t()) :: String.t()
Util.since("0.1.1")
def to_mention(%__MODULE__{id: id}), do: "<##{id}>"
defimpl String.Chars, for: Crux.Structs.Channel do
alias Crux.Structs.Channel
@spec to_string(Channel.t()) :: String.t()
def to_string(%Channel{} = data), do: Channel.to_mention(data)
end
end
|
lib/structs/channel.ex
| 0.854915 | 0.732185 |
channel.ex
|
starcoder
|
defmodule Day7 do
@moduledoc """
Finds the alignment position with the minimal total movement cost for a
list of integer positions, with linear (`:first`) and triangular (`:second`)
cost variants.
"""
@doc """
Reads `input.txt`, solves both cost variants, and prints the results.
"""
def run() do
get_input()
|> process(:first)
|> present()
get_input()
|> process(:second)
|> present()
end
def present({n, nx, x, xx} = _answer) do
IO.puts("minimum cost = #{n}, position #{nx}")
IO.puts("Incidently, maximum cost = #{x}, position #{xx}")
end
def get_input() do
File.read!("input.txt")
|> transform_input()
end
def transform_input(s) do
s
|> String.trim()
|> String.split(",")
|> Enum.map(fn e -> String.to_integer(e) end)
|> Enum.sort()
end
def get_input(s) do
s
|> transform_input()
end
def process(list_of_coordinates, variant) do
p_vector = make_position_vector(list_of_coordinates)
c_vector = make_cost_vector(length(p_vector), variant)
costs = calculate_costs(p_vector, c_vector)
{min, max} = Enum.min_max(costs)
min_position = Enum.find_index(costs, fn v -> v == min end)
max_position = Enum.find_index(costs, fn v -> v == max end)
{min, min_position, max, max_position}
end
def make_position_vector(l) do
  # Asserts that the minimum position is 0, which holds for this puzzle's input.
  {0, max} = Enum.min_max(l)
  vector_length = 1 + max
  proto_vector = Enum.map(1..vector_length, fn _v -> 0 end)
  # Build a histogram: count how many entries sit at each position 0..max.
  Enum.reduce(l, proto_vector, fn e, v -> List.replace_at(v, e, 1 + Enum.at(v, e, 0)) end)
end
def make_cost_vector(size, :first) do
Enum.map(1..(2 * size - 1), fn v -> abs(size - v) end)
end
def make_cost_vector(size, :second) do
ff = fn n -> div(n*(n+1),2) end
Enum.map(1..(2 * size - 1), fn v -> ff.(abs(size - v)) end)
end
def calculate_costs(pv, ccv) do
  n = length(pv)
  Enum.map(0..(n - 1), fn dest -> dot_product(pv, sliced(dest, n, ccv)) end)
end
def sliced(offset, len, source) do
Enum.slice(source, len - 1 - offset, len)
end
def dot_product(a, b) do
Enum.zip(a,b)
|> Enum.reduce(0, fn {a0, b0}, dp -> dp + a0 * b0 end)
end
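# Worked illustration (size = 3, :first variant):
#
#   make_cost_vector(3, :first)     #=> [2, 1, 0, 1, 2]  (abs(3 - v) for v in 1..5)
#   sliced(0, 3, [2, 1, 0, 1, 2])   #=> [0, 1, 2]  (costs of moving positions 0..2 to 0)
#   sliced(2, 3, [2, 1, 0, 1, 2])   #=> [2, 1, 0]  (costs of moving positions 0..2 to 2)
#
# Each destination's total cost is the dot product of the position histogram
# with its cost window; the :second variant swaps in triangular costs n(n+1)/2.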
def example() do
get_input("16,1,2,0,4,2,7,1,2,14")
|> process(:first)
|> present()
get_input("16,1,2,0,4,2,7,1,2,14")
|> process(:second)
|> present()
end
def info() do
info = get_input()
IO.puts("Length = #{length(info)}")
{min, max} = Enum.min_max(info)
IO.puts("Min and max: #{min} #{max}")
IO.puts("Length of uniqued info: #{length(Enum.uniq(info))}")
end
end
|
apps/day7/lib/day7.ex
| 0.749821 | 0.40342 |
day7.ex
|
starcoder
|
defmodule DeckhubWeb.PrimerHelpers do
@moduledoc """
View helper functions for generating elements that work with
[GitHub's Primer](https://primer.github.io/) CSS framework.
All functions can be used either within a template or composed together in code. Each function
should always emit `t:Phoenix.HTML.safe/0` objects or throw an exception.
"""
use Phoenix.HTML
require DeckhubWeb.Gettext
import DeckhubWeb.Gettext, only: [gettext: 1]
@typedoc """
The application name as an atom.
"""
@type app_name :: atom
defmodule MissingConfigurationError do
@moduledoc """
Exception raised when there is an element of required application configuration missing.
"""
defexception [:missing_keys]
def exception(key) when is_atom(key), do: exception([key])
def exception(keys) when is_list(keys) do
%__MODULE__{missing_keys: keys}
end
def message(%{missing_keys: missing_keys}) do
"application configuration missing: #{inspect(missing_keys)}"
end
end
@doc """
Renders the GitHub-style `<> with ♥ by [author link]` footer item.
Retrieves the author's name and URL from the application configuration for the default application
for the current module. See `code_with_heart/2` for more information.
"""
@spec code_with_heart() :: Phoenix.HTML.safe()
def code_with_heart do
code_with_heart(Application.get_application(__MODULE__))
end
@doc """
Renders the GitHub-style `<> with ♥ by [author link]` footer item.
Retrieves the author's name and URL from the application configuration before passing to
`code_with_heart/3`. This information can be added to the application configuration by adding the
following to your `config.exs`:
```
config :app_name,
code_with_heart: [
name: "Author's name",
url: "https://example.com"
]
```
Raises a `DeckhubWeb.PrimerHelpers.MissingConfigurationError` if any of the required
application configuration information is not specified and this function is called.
If passed two strings instead of an atom and a keyword list, this function will assume that you
mean to call `code_with_heart/3` with no options and do so for you.
"""
@spec code_with_heart(atom, Keyword.t()) :: Phoenix.HTML.safe()
def code_with_heart(app_name, options \\ [])
def code_with_heart(app_name, options) when is_atom(app_name) and is_list(options) do
config = Application.get_env(app_name, :code_with_heart)
name = config[:name]
url = config[:url]
unless name && url, do: raise(MissingConfigurationError, :code_with_heart)
code_with_heart(name, url, options)
end
def code_with_heart(name, url) when is_binary(name) and is_binary(url),
do: code_with_heart(name, url, [])
@doc """
Renders the GitHub-style `<> with ♥ by [author link]` footer item.
The text in this element is intentionally left untranslated because the form of the element is
intended to be recognizable in its specific format.
## Options
All options are passed to the underlying HTML `a` element.
## Examples
```
Phoenix.HTML.safe_to_string(DeckhubWeb.PrimerHelpers.code_with_heart("Author's Name", "https://example.com"))
#=> "<svg .../> with <svg .../> by <a href=\"https://example.com\">Author's Name</a>"
```
"""
@spec code_with_heart(String.t(), String.t(), Keyword.t()) :: Phoenix.HTML.safe()
def code_with_heart(name, url, options) do
link_options = Keyword.merge([to: url, class: "link-gray-dark"], options)
html_escape([
PhoenixOcticons.octicon(:code),
" with ",
PhoenixOcticons.octicon(:heart),
" by ",
link(name, link_options)
])
end
@doc """
Renders a link to the project on GitHub.
Retrieves the project name or URL from the application configuration for the default application.
"""
@spec github_link(Keyword.t()) :: Phoenix.HTML.safe()
def github_link(options \\ [])
def github_link(options), do: github_link(options, [])
@doc """
Renders a link to the project on GitHub.
If the first parameter is an atom, it retrieves the project name or URL from the application
configuration. Otherwise, the project can be either the GitHub `owner/project` identifier or the
full GitHub URL.
This configuration information can be added to the application configuration by adding the
following to your `config.exs`:
```
config :app_name,
github_link: "owner/name"
```
If the configuration information is missing and the first parameter is an atom, a
`DeckhubWeb.PrimerHelpers.MissingConfigurationError` is raised.
## Options
All options are passed to the underlying HTML `a` element.
"""
@spec github_link(app_name | String.t(), Keyword.t()) :: Phoenix.HTML.safe()
def github_link(app_name_or_project, options)
def github_link(options, _no_options) when is_list(options) do
github_link(Application.get_application(__MODULE__), options)
end
def github_link(app_name, options) when is_atom(app_name) do
url = Application.get_env(app_name, :github_link)
unless url, do: raise(MissingConfigurationError, :github_link)
github_link(url, options)
end
def github_link(project, options) when is_binary(project) do
# Prepend the `https://github.com/` if only the name with owner is specified
url = if project =~ ~r{^[^/]+/[^/]+$}, do: "https://github.com/#{project}", else: project
link_options =
Keyword.merge(
[
to: url,
"aria-label": gettext("View this project on GitHub"),
class: "link-gray-dark tooltipped tooltipped-n"
],
options
)
link(link_options) do
PhoenixOcticons.octicon("mark-github")
end
end
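# A hedged usage sketch (hypothetical project identifier):
#
#     DeckhubWeb.PrimerHelpers.github_link("owner/name")
#     |> Phoenix.HTML.safe_to_string()
#     # Yields roughly: an <a> tag pointing at https://github.com/owner/name that
#     # wraps the "mark-github" octicon and carries the tooltip classes above.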
end
|
lib/deckhub_web/helpers/primer_helpers.ex
| 0.88922 | 0.853425 |
primer_helpers.ex
|
starcoder
|
defmodule Morse do
def test() do
signal = '... --- ... '
decode(signal)
end
def decode(signal) do
table = decode_table()
decode(signal, table, table)
end
# Walk the decoding tree: a dash (?-) descends left, a dot (?.) descends right,
# and a space emits the current node's character and restarts from the root.
def decode([], _, _) do [] end
def decode([?- | seq], {:node, _, left, _right}, tree) do
  decode(seq, left, tree)
end
def decode([?. | seq], {:node, _, _left, right}, tree) do
  decode(seq, right, tree)
end
def decode([?\s | seq], {:node, char, _, _}, tree) do
  [char | decode(seq, tree, tree)]
end
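# For example, decode('... --- ... ') follows dot -> right, dash -> left through
# the tree: '...' lands on ?s and '---' on ?o, so the result is 'sos' ([115, 111, 115]).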
# Some test samples to decode; a signal must end with a space ' '
def base(), do: '.- .-.. .-.. ..-- -.-- --- ..- .-. ..-- -... .- ... . ..-- .- .-. . ..-- -... . .-.. --- -. --. ..-- - --- ..-- ..- ... '
def rolled(), do: '.... - - .--. ... ---... .----- .----- .-- .-- .-- .-.-.- -.-- --- ..- - ..- -... . .-.-.- -.-. --- -- .----- .-- .- - -.-. .... ..--.. ...- .----. -.. .--.-- ..... .---- .-- ....- .-- ----. .--.-- ..... --... --. .--.-- ..... ---.. -.-. .--.-- ..... .---- '
# Morse decoding tree as {:node, char, long, short}
defp decode_table do
{:node, :na,
{:node, 116,
{:node, 109,
{:node, 111,
{:node, :na, {:node, 48, nil, nil}, {:node, 57, nil, nil}},
{:node, :na, nil, {:node, 56, nil, {:node, 58, nil, nil}}}},
{:node, 103,
{:node, 113, nil, nil},
{:node, 122,
{:node, :na, {:node, 44, nil, nil}, nil},
{:node, 55, nil, nil}}}},
{:node, 110,
{:node, 107, {:node, 121, nil, nil}, {:node, 99, nil, nil}},
{:node, 100,
{:node, 120, nil, nil},
{:node, 98, nil, {:node, 54, {:node, 45, nil, nil}, nil}}}}},
{:node, 101,
{:node, 97,
{:node, 119,
{:node, 106,
{:node, 49, {:node, 47, nil, nil}, {:node, 61, nil, nil}},
nil},
{:node, 112,
{:node, :na, {:node, 37, nil, nil}, {:node, 64, nil, nil}},
nil}},
{:node, 114,
{:node, :na, nil, {:node, :na, {:node, 46, nil, nil}, nil}},
{:node, 108, nil, nil}}},
{:node, 105,
{:node, 117,
{:node, 32,
{:node, 50, nil, nil},
{:node, :na, nil, {:node, 63, nil, nil}}},
{:node, 102, nil, nil}},
{:node, 115,
{:node, 118, {:node, 51, nil, nil}, nil},
{:node, 104, {:node, 52, nil, nil}, {:node, 53, nil, nil}}}}}}
end
end
|
morse.ex
| 0.721056 | 0.469399 |
morse.ex
|
starcoder
|
defmodule Dayron do
@moduledoc ~S"""
Dayron is split into two main components:
* `Dayron.Repo` - repositories are wrappers around HTTP clients.
Via the repository, we can send requests to external REST APIs,
performing actions to get, create, update or destroy resources.
A repository needs an adapter and the api URL. HTTPoison is the default
built-in adapter.
* `Dayron.Model` - models allow developers to define structures
that map the external resources into local data. It also implements
callbacks to handle specific configuration, such as the resource name
used by the request and the data mapping rules.
In the following sections, we will provide an overview of those components
and how they interact with each other. Feel free to access their respective
module documentation for more specific examples, options and configuration.
If you want to quickly check a sample application using Dayron, please check
https://github.com/inaka/dayron/tree/master/examples/simple_blog.
## Repositories
`Dayron.Repo` is a wrapper around a rest client. We can define a
repository as follows:
defmodule RestRepo do
use Dayron.Repo, otp_app: :my_app
end
Where the configuration for the Repo must be in your application
environment, usually defined in your `config/config.exs`:
config :my_app, MyApp.RestRepo,
url: "https://api.example.com",
headers: [access_token: "token"]
## Model
A Model provides a set of functionalities around mapping the external data
into local structures.
Let's see an User model example:
defmodule User do
use Dayron.Model, resource: "users"
defstruct id: nil, name: "", age: 0
end
The model allows us to interact with the REST API using our repository:
# Inserting a new user
iex> user = %User{name: "User Name", age: 23}
iex> RestRepo.insert!(User, user)
{:ok, %User{...}}
# Get the resource data back
iex> user = RestRepo.get User, "user-id"
%User{id: "user-id", ...}
# Delete it
iex> RestRepo.delete!(User, "user-id")
{:ok, %User{...}}
As an example, let's see how we could use the User Model above in
a web application that needs to update users:
def update(id, %{"user" => user_params}) do
case RestRepo.update(User, id, user_params) do
{:ok, user} ->
send_resp conn, 200, "Ok"
{:error, error} ->
send_resp conn, 400, "Bad request"
end
end
The Repo methods also accept extra options. For example, to send a list of
query parameters when retrieving a list of users:
iex> RestRepo.all(User, params: [name: "a user name"])
[%User{...}, %User{...}]
If you check the application logs, you'll see the complete request/response
information:
[debug] GET https://api.example.com/users
Options:
Params: name="a user name"
Body: -
Headers:
access_token: "token"
[debug] Response: 200 in 718ms
For a complete list of available options, please check `Dayron.Adapter`
module.
"""
use Application
alias Mix.Project
@version Project.config[:version]
def version, do: @version
def start(_, _), do: {:ok, self()}
end
|
lib/dayron.ex
| 0.860911 | 0.534491 |
dayron.ex
|
starcoder
|
defmodule AWS.KMS do
@moduledoc """
Key Management Service
Key Management Service (KMS) is an encryption and key management web service.
This guide describes the KMS operations that you can call programmatically. For
general information about KMS, see the [ *Key Management Service Developer Guide* ](https://docs.aws.amazon.com/kms/latest/developerguide/).
KMS is replacing the term *customer master key (CMK)* with *KMS key* and *KMS
keys*. The concept has not changed. To prevent breaking changes, KMS is keeping
some variations of this term.
Amazon Web Services provides SDKs that consist of libraries and sample code for
various programming languages and platforms (Java, Ruby, .Net, macOS, Android,
etc.). The SDKs provide a convenient way to create programmatic access to KMS
and other Amazon Web Services services. For example, the SDKs take care of tasks
such as signing requests (see below), managing errors, and retrying requests
automatically. For more information about the Amazon Web Services SDKs,
including how to download and install them, see [Tools for Amazon Web Services](http://aws.amazon.com/tools/).
We recommend that you use the Amazon Web Services SDKs to make programmatic API
calls to KMS.
Clients must support TLS (Transport Layer Security) 1.0. We recommend TLS 1.2.
Clients must also support cipher suites with Perfect Forward Secrecy (PFS) such
as Ephemeral Diffie-Hellman (DHE) or Elliptic Curve Ephemeral Diffie-Hellman
(ECDHE). Most modern systems such as Java 7 and later support these modes.
## Signing Requests
Requests must be signed by using an access key ID and a secret access key. We
strongly recommend that you *do not* use your Amazon Web Services account (root)
access key ID and secret key for everyday work with KMS. Instead, use the access
key ID and secret access key for an IAM user. You can also use the Amazon Web
Services Security Token Service to generate temporary security credentials that
you can use to sign requests.
All KMS operations require [Signature Version 4](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
## Logging API Requests
KMS supports CloudTrail, a service that logs Amazon Web Services API calls and
related events for your Amazon Web Services account and delivers them to an
Amazon S3 bucket that you specify. By using the information collected by
CloudTrail, you can determine what requests were made to KMS, who made the
request, when it was made, and so on. To learn more about CloudTrail, including
how to turn it on and find your log files, see the [CloudTrail User Guide](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/).
## Additional Resources
For more information about credentials and request signing, see the following:
* [Amazon Web Services Security Credentials](https://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)
- This topic provides general information about the types of credentials used to
access Amazon Web Services.
* [Temporary Security Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html)
- This section of the *IAM User Guide* describes how to create and use temporary
security credentials.
* [Signature Version 4 Signing Process](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html)
- This set of topics walks you through the process of signing a request using an
access key ID and a secret access key.
## Commonly Used API Operations
Of the API operations discussed in this guide, the following will prove the most
useful for most applications. You will likely perform operations other than
these, such as creating keys and assigning policies, by using the console.
* `Encrypt`
* `Decrypt`
* `GenerateDataKey`
* `GenerateDataKeyWithoutPlaintext`
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "KMS",
api_version: "2014-11-01",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "kms",
global?: false,
protocol: "json",
service_id: "KMS",
signature_version: "v4",
signing_name: "kms",
target_prefix: "TrentService"
}
end
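# A hedged usage sketch (hypothetical credentials; assumes `AWS.Client.create/3`
# from the aws-elixir package and its `{:ok, body, http_response}` return shape):
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#     {:ok, body, _response} = AWS.KMS.describe_key(client, %{"KeyId" => "alias/my-key"})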
@doc """
Cancels the deletion of a KMS key.
When this operation succeeds, the key state of the KMS key is `Disabled`. To
enable the KMS key, use `EnableKey`.
For more information about scheduling and canceling deletion of a KMS key, see
[Deleting KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/deleting-keys.html)
in the *Key Management Service Developer Guide*.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:CancelKeyDeletion](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `ScheduleKeyDeletion`
"""
def cancel_key_deletion(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CancelKeyDeletion", input, options)
end
@doc """
Connects or reconnects a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
to its associated CloudHSM cluster.
The custom key store must be connected before you can create KMS keys in the key
store or use the KMS keys it contains. You can disconnect and reconnect a custom
key store at any time.
To connect a custom key store, its associated CloudHSM cluster must have at
least one active HSM. To get the number of active HSMs in a cluster, use the
[DescribeClusters](https://docs.aws.amazon.com/cloudhsm/latest/APIReference/API_DescribeClusters.html) operation. To add HSMs to the cluster, use the
[CreateHsm](https://docs.aws.amazon.com/cloudhsm/latest/APIReference/API_CreateHsm.html)
operation. Also, the [ `kmsuser` crypto user](https://docs.aws.amazon.com/kms/latest/developerguide/key-store-concepts.html#concept-kmsuser)
(CU) must not be logged into the cluster. This prevents KMS from using this
account to log in.
The connection process can take an extended amount of time to complete, up to 20
minutes. This operation starts the connection process, but it does not wait for
it to complete. When it succeeds, this operation quickly returns an HTTP 200
response and a JSON object with no properties. However, this response does not
indicate that the custom key store is connected. To get the connection state of
the custom key store, use the `DescribeCustomKeyStores` operation.
During the connection process, KMS finds the CloudHSM cluster that is associated
with the custom key store, creates the connection infrastructure, connects to
the cluster, logs into the CloudHSM client as the `kmsuser` CU, and rotates its
password.
The `ConnectCustomKeyStore` operation might fail for various reasons. To find
the reason, use the `DescribeCustomKeyStores` operation and see the
`ConnectionErrorCode` in the response. For help interpreting the
`ConnectionErrorCode`, see `CustomKeyStoresListEntry`.
To fix the failure, use the `DisconnectCustomKeyStore` operation to disconnect
the custom key store, correct the error, use the `UpdateCustomKeyStore`
operation if necessary, and then use `ConnectCustomKeyStore` again.
If you are having trouble connecting or disconnecting a custom key store, see
[Troubleshooting a Custom Key Store](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
in the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different Amazon Web Services account.
**Required permissions**:
[kms:ConnectCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations
* `CreateCustomKeyStore`
* `DeleteCustomKeyStore`
* `DescribeCustomKeyStores`
* `DisconnectCustomKeyStore`
* `UpdateCustomKeyStore`
"""
def connect_custom_key_store(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ConnectCustomKeyStore", input, options)
end
@doc """
Creates a friendly name for a KMS key.
Adding, deleting, or updating an alias can allow or deny permission to the KMS
key. For details, see [Using ABAC in KMS](https://docs.aws.amazon.com/kms/latest/developerguide/abac.html) in the
*Key Management Service Developer Guide*.
You can use an alias to identify a KMS key in the KMS console, in the
`DescribeKey` operation and in [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations),
such as `Encrypt` and `GenerateDataKey`. You can also change the KMS key that's
associated with the alias (`UpdateAlias`) or delete the alias (`DeleteAlias`) at
any time. These operations don't affect the underlying KMS key.
You can associate the alias with any customer managed key in the same Amazon Web
Services Region. Each alias is associated with only one KMS key at a time, but a
KMS key can have multiple aliases. A valid KMS key is required. You can't create
an alias without a KMS key.
The alias must be unique in the account and Region, but you can have aliases
with the same name in different Regions. For detailed information about aliases,
see [Using aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html)
in the *Key Management Service Developer Guide*.
This operation does not return a response. To get the alias that you created,
use the `ListAliases` operation.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on an alias in a
different Amazon Web Services account.
## Required permissions
*
[kms:CreateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the alias (IAM policy).
*
[kms:CreateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
on the KMS key (key policy).
For details, see [Controlling access to aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *Key Management Service Developer Guide*.
## Related operations:
* `DeleteAlias`
* `ListAliases`
* `UpdateAlias`
"""
def create_alias(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateAlias", input, options)
end
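# For example (hypothetical key ID; given a `client` from `AWS.Client.create/3`),
# using the CreateAlias request fields:
#
#     AWS.KMS.create_alias(client, %{
#       "AliasName" => "alias/my-app",
#       "TargetKeyId" => "1234abcd-12ab-34cd-56ef-1234567890ab"
#     })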
@doc """
Creates a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
that is associated with an [CloudHSM cluster](https://docs.aws.amazon.com/cloudhsm/latest/userguide/clusters.html)
that you own and manage.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in KMS, which combines the convenience and extensive integration of KMS
with the isolation and control of a single-tenant key store.
Before you create the custom key store, you must assemble the required elements,
including an CloudHSM cluster that fulfills the requirements for a custom key
store. For details about the required elements, see [Assemble the Prerequisites](https://docs.aws.amazon.com/kms/latest/developerguide/create-keystore.html#before-keystore)
in the *Key Management Service Developer Guide*.
When the operation completes successfully, it returns the ID of the new custom
key store. Before you can use your new custom key store, you need to use the
`ConnectCustomKeyStore` operation to connect the new key store to its CloudHSM
cluster. Even if you are not going to use your custom key store immediately, you
might want to connect it to verify that all settings are correct and then
disconnect it until you are ready to use it.
For help with failures, see [Troubleshooting a Custom Key Store](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
in the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different Amazon Web Services account.
**Required permissions**:
[kms:CreateCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy).
## Related operations:
* `ConnectCustomKeyStore`
* `DeleteCustomKeyStore`
* `DescribeCustomKeyStores`
* `DisconnectCustomKeyStore`
* `UpdateCustomKeyStore`
"""
def create_custom_key_store(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateCustomKeyStore", input, options)
end
@doc """
Adds a grant to a KMS key.
A *grant* is a policy instrument that allows Amazon Web Services principals to
use KMS keys in cryptographic operations. It also can allow them to view a KMS
key (`DescribeKey`) and create and manage grants. When authorizing access to a
KMS key, grants are considered along with key policies and IAM policies. Grants
are often used for temporary permissions because you can create one, use its
permissions, and delete it without changing your key policies or IAM policies.
For detailed information about grants, including grant terminology, see [Using grants](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html) in
the * *Key Management Service Developer Guide* *. For examples of working with
grants in several programming languages, see [Programming grants](https://docs.aws.amazon.com/kms/latest/developerguide/programming-grants.html).
The `CreateGrant` operation returns a `GrantToken` and a `GrantId`.
* When you create, retire, or revoke a grant, there might be a brief
delay, usually less than five minutes, until the grant is available throughout
KMS. This state is known as *eventual consistency*. Once the grant has achieved
eventual consistency, the grantee principal can use the permissions in the grant
without identifying the grant.
However, to use the permissions in the grant immediately, use the `GrantToken`
that `CreateGrant` returns. For details, see [Using a grant token](https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#using-grant-token)
in the * *Key Management Service Developer Guide* *.
* The `CreateGrant` operation also returns a `GrantId`. You can use
the `GrantId` and a key identifier to identify the grant in the `RetireGrant`
and `RevokeGrant` operations. To find the grant ID, use the `ListGrants` or
`ListRetirableGrants` operations.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation on a KMS key in a
different Amazon Web Services account, specify the key ARN in the value of the
`KeyId` parameter.
**Required permissions**:
[kms:CreateGrant](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `ListGrants`
* `ListRetirableGrants`
* `RetireGrant`
* `RevokeGrant`
"""
def create_grant(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateGrant", input, options)
end
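# For example (hypothetical ARNs), granting a role permission to call Decrypt:
#
#     AWS.KMS.create_grant(client, %{
#       "KeyId" => "arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab",
#       "GranteePrincipal" => "arn:aws:iam::444455556666:role/app-role",
#       "Operations" => ["Decrypt"]
#     })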
@doc """
Creates a unique customer managed [KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#kms-keys)
in your Amazon Web Services account and Region.
KMS is replacing the term *customer master key (CMK)* with *KMS key* and *KMS
keys*. The concept has not changed. To prevent breaking changes, KMS is keeping
some variations of this term.
You can use the `CreateKey` operation to create symmetric or asymmetric KMS
keys.
* **Symmetric KMS keys** contain a 256-bit symmetric key that never
leaves KMS unencrypted. To use the KMS key, you must call KMS. You can use a
symmetric KMS key to encrypt and decrypt small amounts of data, but they are
typically used to generate [data keys](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys)
and [data key pairs](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-key-pairs).
For details, see `GenerateDataKey` and `GenerateDataKeyPair`.
* **Asymmetric KMS keys** can contain an RSA key pair or an Elliptic
Curve (ECC) key pair. The private key in an asymmetric KMS key never leaves KMS
unencrypted. However, you can use the `GetPublicKey` operation to download the
public key so it can be used outside of KMS. KMS keys with RSA key pairs can be
used to encrypt or decrypt data or sign and verify messages (but not both). KMS
keys with ECC key pairs can be used only to sign and verify messages.
For information about symmetric and asymmetric KMS keys, see [Using Symmetric and Asymmetric KMS
keys](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *Key Management Service Developer Guide*.
To create different types of KMS keys, use the following guidance:
## Definitions
### Asymmetric KMS keys
To create an asymmetric KMS key, use the `KeySpec` parameter to specify the type
of key material in the KMS key. Then, use the `KeyUsage` parameter to determine
whether the KMS key will be used to encrypt and decrypt or sign and verify. You
can't change these properties after the KMS key is created.
### Symmetric KMS keys
When creating a symmetric KMS key, you don't need to specify the `KeySpec` or
`KeyUsage` parameters. The default value for `KeySpec`, `SYMMETRIC_DEFAULT`, and
the default value for `KeyUsage`, `ENCRYPT_DECRYPT`, are the only valid values
for symmetric KMS keys.
### Multi-Region primary keys
To create a multi-Region *primary key* in the local Amazon Web Services Region,
use the `MultiRegion` parameter with a value of `True`. To create a multi-Region
*replica key*, that is, a KMS key with the same key ID and key material as a
primary key, but in a different Amazon Web Services Region, use the
`ReplicateKey` operation. To change a replica key to a primary key, and its
primary key to a replica key, use the `UpdatePrimaryRegion` operation.
This operation supports *multi-Region keys*, a KMS feature that lets you create
multiple interoperable KMS keys in different Amazon Web Services Regions.
Because these KMS keys have the same key ID, key material, and other metadata,
you can use them interchangeably to encrypt data in one Amazon Web Services
Region and decrypt it in a different Amazon Web Services Region without
re-encrypting the data or making a cross-Region call. For more information about
multi-Region keys, see [Using multi-Region keys](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-overview.html)
in the *Key Management Service Developer Guide*.
You can create symmetric and asymmetric multi-Region keys and multi-Region keys
with imported key material. You cannot create multi-Region keys in a custom key
store.
### Imported key material
To import your own key material, begin by creating a symmetric KMS key with no
key material. To do this, use the `Origin` parameter of `CreateKey` with a value
of `EXTERNAL`. Next, use `GetParametersForImport` operation to get a public key
and import token, and use the public key to encrypt your key material. Then, use
`ImportKeyMaterial` with your import token to import the key material. For
step-by-step instructions, see [Importing Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the * *Key Management Service Developer Guide* *. You cannot import the key
material into an asymmetric KMS key.
To create a multi-Region primary key with imported key material, use the
`Origin` parameter of `CreateKey` with a value of `EXTERNAL` and the
`MultiRegion` parameter with a value of `True`. To create replicas of the
multi-Region primary key, use the `ReplicateKey` operation. For more information
about multi-Region keys, see [Using multi-Region keys](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-overview.html)
in the *Key Management Service Developer Guide*.
### Custom key store
To create a symmetric KMS key in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html),
use the `CustomKeyStoreId` parameter to specify the custom key store. You must
also use the `Origin` parameter with a value of `AWS_CLOUDHSM`. The CloudHSM
cluster that is associated with the custom key store must have at least two
active HSMs in different Availability Zones in the Amazon Web Services Region.
You cannot create an asymmetric KMS key in a custom key store. For information
about custom key stores in KMS see [Using Custom Key Stores](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in the * *Key Management Service Developer Guide* *.
**Cross-account use**: No. You cannot use this operation to create a KMS key in
a different Amazon Web Services account.
**Required permissions**:
[kms:CreateKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) (IAM policy). To use the `Tags` parameter,
[kms:TagResource](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy). For examples and information about related permissions, see [Allow a user to create KMS
keys](https://docs.aws.amazon.com/kms/latest/developerguide/iam-policies.html#iam-policy-example-create-key)
in the *Key Management Service Developer Guide*.
## Related operations:
* `DescribeKey`
* `ListKeys`
* `ScheduleKeyDeletion`
"""
def create_key(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateKey", input, options)
end
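# For example, creating a symmetric encryption key with the defaults made explicit:
#
#     AWS.KMS.create_key(client, %{
#       "KeySpec" => "SYMMETRIC_DEFAULT",
#       "KeyUsage" => "ENCRYPT_DECRYPT"
#     })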
@doc """
Decrypts ciphertext that was encrypted by a KMS key using any of the following
operations:
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
* `GenerateDataKeyWithoutPlaintext`
* `GenerateDataKeyPairWithoutPlaintext`
You can use this operation to decrypt ciphertext that was encrypted under a
symmetric or asymmetric KMS key.
When the KMS key is asymmetric, you must specify the KMS key and the encryption
algorithm that was used to encrypt the ciphertext. For information about
symmetric and asymmetric KMS keys, see [Using Symmetric and Asymmetric KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *Key Management Service Developer Guide*.
The Decrypt operation also decrypts ciphertext that was encrypted outside of KMS
by the public key in an asymmetric KMS key. However, it cannot decrypt
ciphertext produced by other libraries, such as the [Amazon Web Services Encryption
SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/) or
[Amazon S3 client-side encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html).
These libraries return a ciphertext format that is incompatible with KMS.
If the ciphertext was encrypted under a symmetric KMS key, the `KeyId` parameter
is optional. KMS can get this information from metadata that it adds to the
symmetric ciphertext blob. This feature adds durability to your implementation
by ensuring that authorized users can decrypt ciphertext decades after it was
encrypted, even if they've lost track of the key ID. However, specifying the KMS
key is always recommended as a best practice. When you use the `KeyId` parameter
to specify a KMS key, KMS only uses the KMS key you specify. If the ciphertext
was encrypted under a different KMS key, the `Decrypt` operation fails. This
practice ensures that you use the KMS key that you intend.
Whenever possible, use key policies to give users permission to call the
`Decrypt` operation on a particular KMS key, instead of using IAM policies.
Otherwise, you might create an IAM user policy that gives the user `Decrypt`
permission on all KMS keys. This user could decrypt ciphertext that was
encrypted by KMS keys in other accounts if the key policy for the cross-account
KMS key permits it. If you must use an IAM policy for `Decrypt` permissions,
limit the user to particular KMS keys or particular trusted accounts. For
details, see [Best practices for IAM policies](https://docs.aws.amazon.com/kms/latest/developerguide/iam-policies.html#iam-policies-best-practices)
in the *Key Management Service Developer Guide*.
Applications in Amazon Web Services Nitro Enclaves can call this operation by
using the [Amazon Web Services Nitro Enclaves Development Kit](https://github.com/aws/aws-nitro-enclaves-sdk-c). For information about the
supporting parameters, see [How Amazon Web Services Nitro Enclaves use KMS](https://docs.aws.amazon.com/kms/latest/developerguide/services-nitro-enclaves.html)
in the *Key Management Service Developer Guide*.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a KMS key in a
different Amazon Web Services account, specify the key ARN or alias ARN in the
value of the `KeyId` parameter.
**Required permissions**:
[kms:Decrypt](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
* `ReEncrypt`
"""
def decrypt(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "Decrypt", input, options)
end
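# For example (hypothetical ciphertext; blobs are base64-encoded for the JSON
# protocol), given a `ciphertext` binary previously returned by `Encrypt`:
#
#     AWS.KMS.decrypt(client, %{
#       "CiphertextBlob" => Base.encode64(ciphertext),
#       # Optional for symmetric KMS keys, but recommended as a best practice:
#       "KeyId" => "alias/my-key"
#     })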
@doc """
Deletes the specified alias.
Adding, deleting, or updating an alias can allow or deny permission to the KMS
key. For details, see [Using ABAC in KMS](https://docs.aws.amazon.com/kms/latest/developerguide/abac.html) in the
*Key Management Service Developer Guide*.
Because an alias is not a property of a KMS key, you can delete and change the
aliases of a KMS key without affecting the KMS key. Also, aliases do not appear
in the response from the `DescribeKey` operation. To get the aliases of all KMS
keys, use the `ListAliases` operation.
Each KMS key can have multiple aliases. To change the alias of a KMS key, use
`DeleteAlias` to delete the current alias and `CreateAlias` to create a new
alias. To associate an existing alias with a different KMS key, call
`UpdateAlias`.
**Cross-account use**: No. You cannot perform this operation on an alias in a
different Amazon Web Services account.
## Required permissions
*
[kms:DeleteAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the alias (IAM policy).
*
[kms:DeleteAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
on the KMS key (key policy).
For details, see [Controlling access to aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *Key Management Service Developer Guide*.
## Related operations:
* `CreateAlias`
* `ListAliases`
* `UpdateAlias`
"""
def delete_alias(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAlias", input, options)
end
@doc """
Deletes a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
This operation does not delete the CloudHSM cluster that is associated with the
custom key store, or affect any users or keys in the cluster.
The custom key store that you delete cannot contain any [KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#kms_keys).
Before deleting the key store, verify that you will never need to use any of the
KMS keys in the key store for any [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations).
Then, use `ScheduleKeyDeletion` to delete the KMS keys from the key store. When
the scheduled waiting period expires, the `ScheduleKeyDeletion` operation
deletes the KMS keys. Then it makes a best effort to delete the key material
from the associated cluster. However, you might need to manually [delete the orphaned key
material](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-orphaned-key)
from the cluster and its backups.
After all KMS keys are deleted from KMS, use `DisconnectCustomKeyStore` to
disconnect the key store from KMS. Then, you can delete the custom key store.
Instead of deleting the custom key store, consider using
`DisconnectCustomKeyStore` to disconnect it from KMS. While the key store is
disconnected, you cannot create or use the KMS keys in the key store. But, you
do not need to delete KMS keys and you can reconnect a disconnected custom key
store at any time.
If the operation succeeds, it returns a JSON object with no properties.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in KMS, which combines the convenience and extensive integration of KMS
with the isolation and control of a single-tenant key store.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different Amazon Web Services account.
**Required permissions**:
[kms:DeleteCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
* `ConnectCustomKeyStore`
* `CreateCustomKeyStore`
* `DescribeCustomKeyStores`
* `DisconnectCustomKeyStore`
* `UpdateCustomKeyStore`
"""
def delete_custom_key_store(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteCustomKeyStore", input, options)
end
@doc """
Deletes key material that you previously imported.
This operation makes the specified KMS key unusable. For more information about
importing key material into KMS, see [Importing Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *Key Management Service Developer Guide*.
When the specified KMS key is in the `PendingDeletion` state, this operation
does not change the KMS key's state. Otherwise, it changes the KMS key's state
to `PendingImport`.
After you delete key material, you can use `ImportKeyMaterial` to reimport the
same key material into the KMS key.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:DeleteImportedKeyMaterial](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `GetParametersForImport`
* `ImportKeyMaterial`
"""
def delete_imported_key_material(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteImportedKeyMaterial", input, options)
end
@doc """
Gets information about [custom key stores](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in the account and Region.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in KMS, which combines the convenience and extensive integration of KMS
with the isolation and control of a single-tenant key store.
By default, this operation returns information about all custom key stores in
the account and Region. To get only information about a particular custom key
store, use either the `CustomKeyStoreName` or `CustomKeyStoreId` parameter (but
not both).
To determine whether the custom key store is connected to its CloudHSM cluster,
use the `ConnectionState` element in the response. If an attempt to connect the
custom key store failed, the `ConnectionState` value is `FAILED` and the
`ConnectionErrorCode` element in the response indicates the cause of the
failure. For help interpreting the `ConnectionErrorCode`, see
`CustomKeyStoresListEntry`.
Custom key stores have a `DISCONNECTED` connection state if the key store has
never been connected or you use the `DisconnectCustomKeyStore` operation to
disconnect it. If your custom key store state is `CONNECTED` but you are having
trouble using it, make sure that its associated CloudHSM cluster is active and
contains the minimum number of HSMs required for the operation, if any.
For help repairing your custom key store, see the [Troubleshooting Custom Key Stores](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
topic in the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different Amazon Web Services account.
**Required permissions**:
[kms:DescribeCustomKeyStores](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
* `ConnectCustomKeyStore`
* `CreateCustomKeyStore`
* `DeleteCustomKeyStore`
* `DisconnectCustomKeyStore`
* `UpdateCustomKeyStore`
"""
def describe_custom_key_stores(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeCustomKeyStores", input, options)
end
@doc """
Provides detailed information about a KMS key.
You can run `DescribeKey` on a [customer managed key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk)
or an [Amazon Web Services managed key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk).
This detailed information includes the key ARN, creation date (and deletion
date, if applicable), the key state, and the origin and expiration date (if any)
of the key material. It includes fields, like `KeySpec`, that help you
distinguish symmetric from asymmetric KMS keys. It also provides information
that is particularly important to asymmetric keys, such as the key usage
(encryption or signing) and the encryption algorithms or signing algorithms that
the KMS key supports. For KMS keys in custom key stores, it includes information
about the custom key store, such as the key store ID and the CloudHSM cluster
ID. For multi-Region keys, it displays the primary key and all related replica
keys.
`DescribeKey` does not return the following information:
* Aliases associated with the KMS key. To get this information, use
`ListAliases`.
* Whether automatic key rotation is enabled on the KMS key. To get
this information, use `GetKeyRotationStatus`. Also, some key states prevent a
KMS key from being automatically rotated. For details, see [How Automatic Key Rotation
Works](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html#rotate-keys-how-it-works)
in *Key Management Service Developer Guide*.
* Tags on the KMS key. To get this information, use
`ListResourceTags`.
* Key policies and grants on the KMS key. To get this information,
use `GetKeyPolicy` and `ListGrants`.
If you call the `DescribeKey` operation on a *predefined Amazon Web Services
alias*, that is, an Amazon Web Services alias with no key ID, KMS creates an
[Amazon Web Services managed key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk).
Then, it associates the alias with the new KMS key, and returns the `KeyId` and
`Arn` of the new KMS key in the response.
**Cross-account use**: Yes. To perform this operation with a KMS key in a
different Amazon Web Services account, specify the key ARN or alias ARN in the
value of the `KeyId` parameter.
**Required permissions**:
[kms:DescribeKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `GetKeyPolicy`
* `GetKeyRotationStatus`
* `ListAliases`
* `ListGrants`
* `ListKeys`
* `ListResourceTags`
* `ListRetirableGrants`
"""
def describe_key(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeKey", input, options)
end
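# For example (hypothetical alias; response fields follow the DescribeKey API,
# assuming the library's `{:ok, body, http_response}` return shape):
#
#     {:ok, %{"KeyMetadata" => key_metadata}, _response} =
#       AWS.KMS.describe_key(client, %{"KeyId" => "alias/my-key"})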
@doc """
Sets the state of a KMS key to disabled.
This change temporarily prevents use of the KMS key for [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations).
For more information about how key state affects the use of a KMS key, see [Key state: Effect on your KMS
key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the * *Key Management Service Developer Guide* *.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:DisableKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `EnableKey`
"""
def disable_key(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisableKey", input, options)
end
@doc """
Disables [automatic rotation of the key material](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
for the specified symmetric KMS key.
You cannot enable automatic rotation of [asymmetric KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/symm-asymm-concepts.html#asymmetric-cmks),
KMS keys with [imported key material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html),
or KMS keys in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
To enable or disable automatic rotation of a set of related [multi-Region keys](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-overview.html#mrk-replica-key),
set the property on the primary key.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:DisableKeyRotation](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `EnableKeyRotation`
* `GetKeyRotationStatus`
"""
def disable_key_rotation(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisableKeyRotation", input, options)
end
@doc """
Disconnects the [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
from its associated CloudHSM cluster.
While a custom key store is disconnected, you can manage the custom key store
and its KMS keys, but you cannot create or use KMS keys in the custom key store.
You can reconnect the custom key store at any time.
While a custom key store is disconnected, all attempts to create KMS keys in the
custom key store or to use existing KMS keys in [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations)
will fail. This action can prevent users from storing and accessing sensitive
data.
To find the connection state of a custom key store, use the
`DescribeCustomKeyStores` operation. To reconnect a custom key store, use the
`ConnectCustomKeyStore` operation.
If the operation succeeds, it returns a JSON object with no properties.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in KMS, which combines the convenience and extensive integration of KMS
with the isolation and control of a single-tenant key store.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different Amazon Web Services account.
**Required permissions**:
[kms:DisconnectCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
* `ConnectCustomKeyStore`
* `CreateCustomKeyStore`
* `DeleteCustomKeyStore`
* `DescribeCustomKeyStores`
* `UpdateCustomKeyStore`
"""
def disconnect_custom_key_store(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisconnectCustomKeyStore", input, options)
end
@doc """
Sets the key state of a KMS key to enabled.
This allows you to use the KMS key for [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations).
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:EnableKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `DisableKey`
"""
def enable_key(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "EnableKey", input, options)
end
@doc """
Enables [automatic rotation of the key material](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
for the specified symmetric KMS key.
You cannot enable automatic rotation of [asymmetric KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/symm-asymm-concepts.html#asymmetric-cmks),
KMS keys with [imported key material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html),
or KMS keys in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
To enable or disable automatic rotation of a set of related [multi-Region keys](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-overview.html#mrk-replica-key),
set the property on the primary key.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:EnableKeyRotation](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `DisableKeyRotation`
* `GetKeyRotationStatus`
"""
def enable_key_rotation(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "EnableKeyRotation", input, options)
end
@doc """
Encrypts plaintext into ciphertext by using a KMS key.
The `Encrypt` operation has two primary use cases:
* You can encrypt small amounts of arbitrary data, such as a
personal identifier or database password, or other sensitive information.
* You can use the `Encrypt` operation to move encrypted data from
one Amazon Web Services Region to another. For example, in Region A, generate a
data key and use the plaintext key to encrypt your data. Then, in Region A, use
the `Encrypt` operation to encrypt the plaintext data key under a KMS key in
Region B. Now, you can move the encrypted data and the encrypted data key to
Region B. When necessary, you can decrypt the encrypted data key and the
encrypted data entirely within Region B.
You don't need to use the `Encrypt` operation to encrypt a data key. The
`GenerateDataKey` and `GenerateDataKeyPair` operations return a plaintext data
key and an encrypted copy of that data key.
When you encrypt data, you must specify a symmetric or asymmetric KMS key to use
in the encryption operation. The KMS key must have a `KeyUsage` value of
`ENCRYPT_DECRYPT`. To find the `KeyUsage` of a KMS key, use the `DescribeKey`
operation.
If you use a symmetric KMS key, you can use an encryption context to add
additional security to your encryption operation. If you specify an
`EncryptionContext` when encrypting data, you must specify the same encryption
context (a case-sensitive exact match) when decrypting the data. Otherwise, the
request to decrypt fails with an `InvalidCiphertextException`. For more
information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *Key Management Service Developer Guide*.
If you specify an asymmetric KMS key, you must also specify the encryption
algorithm. The algorithm must be compatible with the KMS key type.
When you use an asymmetric KMS key to encrypt or reencrypt data, be sure to
record the KMS key and encryption algorithm that you choose. You will be
required to provide the same KMS key and encryption algorithm when you decrypt
the data. If the KMS key and algorithm do not match the values used to encrypt
the data, the decrypt operation fails.
You are not required to supply the key ID and encryption algorithm when you
decrypt with symmetric KMS keys because KMS stores this information in the
ciphertext blob. KMS cannot store metadata in ciphertext generated with
asymmetric keys. The standard format for asymmetric key ciphertext does not
include configurable fields.
The maximum size of the data that you can encrypt varies with the type of KMS
key and the encryption algorithm that you choose.
* Symmetric KMS keys
  * `SYMMETRIC_DEFAULT`: 4096 bytes
* `RSA_2048`
  * `RSAES_OAEP_SHA_1`: 214 bytes
  * `RSAES_OAEP_SHA_256`: 190 bytes
* `RSA_3072`
  * `RSAES_OAEP_SHA_1`: 342 bytes
  * `RSAES_OAEP_SHA_256`: 318 bytes
* `RSA_4096`
  * `RSAES_OAEP_SHA_1`: 470 bytes
  * `RSAES_OAEP_SHA_256`: 446 bytes
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a KMS key in a
different Amazon Web Services account, specify the key ARN or alias ARN in the
value of the `KeyId` parameter.
**Required permissions**:
[kms:Encrypt](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
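## Example

A minimal sketch of encrypting a small secret under a symmetric KMS key with an
encryption context. The `AWS.KMS` module name, the `AWS.Client.create/3`
client, and the key alias are illustrative assumptions; binary fields travel
base64-encoded in the JSON protocol:

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} =
  AWS.KMS.encrypt(client, %{
    "KeyId" => "alias/my-app-key",
    # Plaintext must be base64-encoded in the request
    "Plaintext" => Base.encode64("database password"),
    # Must be supplied again, exactly, when calling Decrypt
    "EncryptionContext" => %{"purpose" => "demo"}
  })

_ciphertext = Base.decode64!(result["CiphertextBlob"])
```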
"""
def encrypt(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "Encrypt", input, options)
end
@doc """
Generates a unique symmetric data key for client-side encryption.
This operation returns a plaintext copy of the data key and a copy that is
encrypted under a KMS key that you specify. You can use the plaintext key to
encrypt your data outside of KMS and store the encrypted data key with the
encrypted data.
`GenerateDataKey` returns a unique data key for each request. The bytes in the
plaintext key are not related to the caller or the KMS key.
To generate a data key, specify the symmetric KMS key that will be used to
encrypt the data key. You cannot use an asymmetric KMS key to generate data
keys. To get the type of your KMS key, use the `DescribeKey` operation. You must
also specify the length of the data key. Use either the `KeySpec` or
`NumberOfBytes` parameters (but not both). For 128-bit and 256-bit data keys,
use the `KeySpec` parameter.
To get only an encrypted copy of the data key, use
`GenerateDataKeyWithoutPlaintext`. To generate an asymmetric data key pair, use
the `GenerateDataKeyPair` or `GenerateDataKeyPairWithoutPlaintext` operation. To
get a cryptographically secure random byte string, use `GenerateRandom`.
You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *Key Management Service Developer Guide*.
Applications in Amazon Web Services Nitro Enclaves can call this operation by
using the [Amazon Web Services Nitro Enclaves Development Kit](https://github.com/aws/aws-nitro-enclaves-sdk-c). For information about the
supporting parameters, see [How Amazon Web Services Nitro Enclaves use KMS](https://docs.aws.amazon.com/kms/latest/developerguide/services-nitro-enclaves.html)
in the *Key Management Service Developer Guide*.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
## How to use your data key
We recommend that you use the following pattern to encrypt data locally in your
application. You can write your own code or use a client-side encryption
library, such as the [Amazon Web Services Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/), the
[Amazon DynamoDB Encryption Client](https://docs.aws.amazon.com/dynamodb-encryption-client/latest/devguide/),
or [Amazon S3 client-side encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html)
to do these tasks for you.
To encrypt data outside of KMS:
1. Use the `GenerateDataKey` operation to get a data key.
2. Use the plaintext data key (in the `Plaintext` field of the
response) to encrypt your data outside of KMS. Then erase the plaintext data key
from memory.
3. Store the encrypted data key (in the `CiphertextBlob` field of
the response) with the encrypted data.
To decrypt data outside of KMS:
1. Use the `Decrypt` operation to decrypt the encrypted data key.
The operation returns a plaintext copy of the data key.
2. Use the plaintext data key to decrypt data outside of KMS, then
erase the plaintext data key from memory.
**Cross-account use**: Yes. To perform this operation with a KMS key in a
different Amazon Web Services account, specify the key ARN or alias ARN in the
value of the `KeyId` parameter.
**Required permissions**:
[kms:GenerateDataKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKeyPair`
* `GenerateDataKeyPairWithoutPlaintext`
* `GenerateDataKeyWithoutPlaintext`
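## Example

A sketch of the envelope-encryption pattern described above, using OTP's
`:crypto` for the local AES-256-GCM step. The `AWS.KMS` module name, client
setup, and key alias are illustrative assumptions:

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} =
  AWS.KMS.generate_data_key(client, %{
    "KeyId" => "alias/my-app-key",
    "KeySpec" => "AES_256"
  })

plaintext_key = Base.decode64!(result["Plaintext"])
encrypted_key = result["CiphertextBlob"]

# Encrypt locally, then discard the plaintext key from memory
iv = :crypto.strong_rand_bytes(12)

{ciphertext, tag} =
  :crypto.crypto_one_time_aead(:aes_256_gcm, plaintext_key, iv, "my data", "", true)

# Store `encrypted_key`, `iv`, `tag`, and `ciphertext` together; to decrypt,
# call `Decrypt` on `encrypted_key` and reverse the local step
```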
"""
def generate_data_key(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GenerateDataKey", input, options)
end
@doc """
Generates a unique asymmetric data key pair.
The `GenerateDataKeyPair` operation returns a plaintext public key, a plaintext
private key, and a copy of the private key that is encrypted under the symmetric
KMS key you specify. You can use the data key pair to perform asymmetric
cryptography and implement digital signatures outside of KMS.
You can use the public key that `GenerateDataKeyPair` returns to encrypt data or
verify a signature outside of KMS. Then, store the encrypted private key with
the data. When you are ready to decrypt data or sign a message, you can use the
`Decrypt` operation to decrypt the encrypted private key.
To generate a data key pair, you must specify a symmetric KMS key to encrypt the
private key in a data key pair. You cannot use an asymmetric KMS key or a KMS
key in a custom key store. To get the type and origin of your KMS key, use the
`DescribeKey` operation.
Use the `KeyPairSpec` parameter to choose an RSA or Elliptic Curve (ECC) data
key pair. KMS recommends that you use ECC key pairs for signing, and RSA
key pairs for either encryption or signing, but not both. However, KMS cannot
enforce any restrictions on the use of data key pairs outside of KMS.
If you are using the data key pair to encrypt data, or for any operation where
you don't immediately need a private key, consider using the
`GenerateDataKeyPairWithoutPlaintext` operation.
`GenerateDataKeyPairWithoutPlaintext` returns a plaintext public key and an
encrypted private key, but omits the plaintext private key that you need only to
decrypt ciphertext or sign a message. Later, when you need to decrypt the data
or sign a message, use the `Decrypt` operation to decrypt the encrypted private
key in the data key pair.
`GenerateDataKeyPair` returns a unique data key pair for each request. The bytes
in the keys are not related to the caller or the KMS key that is used to encrypt
the private key. The public key is a DER-encoded X.509 SubjectPublicKeyInfo, as
specified in [RFC 5280](https://tools.ietf.org/html/rfc5280). The private key is a DER-encoded PKCS8 PrivateKeyInfo, as specified in [RFC
5958](https://tools.ietf.org/html/rfc5958).
You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *Key Management Service Developer Guide*.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a KMS key in a
different Amazon Web Services account, specify the key ARN or alias ARN in the
value of the `KeyId` parameter.
**Required permissions**:
[kms:GenerateDataKeyPair](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPairWithoutPlaintext`
* `GenerateDataKeyWithoutPlaintext`
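## Example

A minimal sketch of generating an ECC data key pair (same illustrative
assumptions as the other examples: `AWS.KMS` module name, `AWS.Client.create/3`
client, key alias):

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} =
  AWS.KMS.generate_data_key_pair(client, %{
    "KeyId" => "alias/my-app-key",
    "KeyPairSpec" => "ECC_NIST_P256"
  })

# DER-encoded SubjectPublicKeyInfo, usable outside of KMS
_public_key = Base.decode64!(result["PublicKey"])

# Store this with your data; decrypt it later with `Decrypt`
_encrypted_private_key = result["PrivateKeyCiphertextBlob"]
```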
"""
def generate_data_key_pair(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GenerateDataKeyPair", input, options)
end
@doc """
Generates a unique asymmetric data key pair.
The `GenerateDataKeyPairWithoutPlaintext` operation returns a plaintext public
key and a copy of the private key that is encrypted under the symmetric KMS key
you specify. Unlike `GenerateDataKeyPair`, this operation does not return a
plaintext private key.
You can use the public key that `GenerateDataKeyPairWithoutPlaintext` returns to
encrypt data or verify a signature outside of KMS. Then, store the encrypted
private key with the data. When you are ready to decrypt data or sign a message,
you can use the `Decrypt` operation to decrypt the encrypted private key.
To generate a data key pair, you must specify a symmetric KMS key to encrypt the
private key in a data key pair. You cannot use an asymmetric KMS key or a KMS
key in a custom key store. To get the type and origin of your KMS key, use the
`DescribeKey` operation.
Use the `KeyPairSpec` parameter to choose an RSA or Elliptic Curve (ECC) data
key pair. KMS recommends that you use ECC key pairs for signing, and RSA
key pairs for either encryption or signing, but not both. However, KMS cannot
enforce any restrictions on the use of data key pairs outside of KMS.
`GenerateDataKeyPairWithoutPlaintext` returns a unique data key pair for each
request. The bytes in the key are not related to the caller or KMS key that is
used to encrypt the private key. The public key is a DER-encoded X.509
SubjectPublicKeyInfo, as specified in [RFC 5280](https://tools.ietf.org/html/rfc5280).
You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *Key Management Service Developer Guide*.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a KMS key in a
different Amazon Web Services account, specify the key ARN or alias ARN in the
value of the `KeyId` parameter.
**Required permissions**:
[kms:GenerateDataKeyPairWithoutPlaintext](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
* `GenerateDataKeyWithoutPlaintext`
"""
def generate_data_key_pair_without_plaintext(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GenerateDataKeyPairWithoutPlaintext",
input,
options
)
end
@doc """
Generates a unique symmetric data key.
This operation returns a data key that is encrypted under a KMS key that you
specify. To request an asymmetric data key pair, use the `GenerateDataKeyPair`
or `GenerateDataKeyPairWithoutPlaintext` operations.
`GenerateDataKeyWithoutPlaintext` is identical to the `GenerateDataKey`
operation except that it returns only the encrypted copy of the data key. This
operation is useful for systems that need to encrypt data at some point, but not
immediately. When you need to encrypt the data, you call the `Decrypt` operation
on the encrypted copy of the key.
It's also useful in distributed systems with different levels of trust. For
example, you might store encrypted data in containers. One component of your
system creates new containers and stores an encrypted data key with each
container. Then, a different component puts the data into the containers. That
component first decrypts the data key, uses the plaintext data key to encrypt
data, puts the encrypted data into the container, and then destroys the
plaintext data key. In this system, the component that creates the containers
never sees the plaintext data key.
`GenerateDataKeyWithoutPlaintext` returns a unique data key for each request.
The bytes in the keys are not related to the caller or KMS key that is used to
encrypt the data key.
To generate a data key, you must specify the symmetric KMS key that is used to
encrypt the data key. You cannot use an asymmetric KMS key to generate a data
key. To get the type of your KMS key, use the `DescribeKey` operation.
If the operation succeeds, you will find the encrypted copy of the data key in
the `CiphertextBlob` field.
You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *Key Management Service Developer Guide*.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a KMS key in a
different Amazon Web Services account, specify the key ARN or alias ARN in the
value of the `KeyId` parameter.
**Required permissions**:
[kms:GenerateDataKeyWithoutPlaintext](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
* `GenerateDataKeyPairWithoutPlaintext`
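## Example

A minimal sketch for the container scenario above: the component that creates
containers requests only the encrypted data key (same illustrative assumptions
as the other examples):

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} =
  AWS.KMS.generate_data_key_without_plaintext(client, %{
    "KeyId" => "alias/my-app-key",
    "KeySpec" => "AES_256"
  })

# No plaintext copy is returned; call `Decrypt` on this blob when a
# component actually needs to use the key
_encrypted_key = result["CiphertextBlob"]
```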
"""
def generate_data_key_without_plaintext(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GenerateDataKeyWithoutPlaintext", input, options)
end
@doc """
Returns a random byte string that is cryptographically secure.
By default, the random byte string is generated in KMS. To generate the byte
string in the CloudHSM cluster that is associated with a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html),
specify the custom key store ID.
Applications in Amazon Web Services Nitro Enclaves can call this operation by
using the [Amazon Web Services Nitro Enclaves Development Kit](https://github.com/aws/aws-nitro-enclaves-sdk-c). For information about the
supporting parameters, see [How Amazon Web Services Nitro Enclaves use KMS](https://docs.aws.amazon.com/kms/latest/developerguide/services-nitro-enclaves.html)
in the *Key Management Service Developer Guide*.
For more information about entropy and random number generation, see [Key Management Service Cryptographic
Details](https://docs.aws.amazon.com/kms/latest/cryptographic-details/).
**Required permissions**:
[kms:GenerateRandom](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
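## Example

A minimal sketch of requesting 32 cryptographically secure random bytes (same
illustrative assumptions as the other examples):

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} =
  AWS.KMS.generate_random(client, %{"NumberOfBytes" => 32})

random_bytes = Base.decode64!(result["Plaintext"])
byte_size(random_bytes)
# => 32
```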
"""
def generate_random(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GenerateRandom", input, options)
end
@doc """
Gets a key policy attached to the specified KMS key.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:GetKeyPolicy](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `PutKeyPolicy`
"""
def get_key_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetKeyPolicy", input, options)
end
@doc """
Gets a Boolean value that indicates whether [automatic rotation of the key material](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
is enabled for the specified KMS key.
You cannot enable automatic rotation of [asymmetric KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/symm-asymm-concepts.html#asymmetric-cmks),
KMS keys with [imported key material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html),
or KMS keys in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
To enable or disable automatic rotation of a set of related [multi-Region keys](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-overview.html#mrk-replica-key),
set the property on the primary key. The key rotation status for these KMS keys
is always `false`.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
* Disabled: The key rotation status does not change when you disable
a KMS key. However, while the KMS key is disabled, KMS does not rotate the key
material.
* Pending deletion: While a KMS key is pending deletion, its key
rotation status is `false` and KMS does not rotate the key material. If you
cancel the deletion, the original key rotation status is restored.
**Cross-account use**: Yes. To perform this operation on a KMS key in a
different Amazon Web Services account, specify the key ARN in the value of the
`KeyId` parameter.
**Required permissions**:
[kms:GetKeyRotationStatus](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `DisableKeyRotation`
* `EnableKeyRotation`
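## Example

A minimal sketch of checking the rotation status of a KMS key (same
illustrative assumptions as the other examples):

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} =
  AWS.KMS.get_key_rotation_status(client, %{
    "KeyId" => "1234abcd-12ab-34cd-56ef-1234567890ab"
  })

result["KeyRotationEnabled"]
# => true or false
```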
"""
def get_key_rotation_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetKeyRotationStatus", input, options)
end
@doc """
Returns the items you need to import key material into a symmetric, customer
managed KMS key.
For more information about importing key material into KMS, see [Importing Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *Key Management Service Developer Guide*.
This operation returns a public key and an import token. Use the public key to
encrypt the symmetric key material. Store the import token to send with a
subsequent `ImportKeyMaterial` request.
You must specify the key ID of the symmetric KMS key into which you will import
key material. This KMS key's `Origin` must be `EXTERNAL`. You must also specify
the wrapping algorithm and type of wrapping key (public key) that you will use
to encrypt the key material. You cannot perform this operation on an asymmetric
KMS key or on any KMS key in a different Amazon Web Services account.
To import key material, you must use the public key and import token from the
same response. These items are valid for 24 hours. The expiration date and time
appear in the `GetParametersForImport` response. You cannot use an expired token
in an `ImportKeyMaterial` request. If your key and token expire, send another
`GetParametersForImport` request.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:GetParametersForImport](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `ImportKeyMaterial`
* `DeleteImportedKeyMaterial`
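## Example

A minimal sketch of fetching the wrapping public key and import token (same
illustrative assumptions as the other examples). Remember that both items
expire 24 hours after the response:

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} =
  AWS.KMS.get_parameters_for_import(client, %{
    "KeyId" => "1234abcd-12ab-34cd-56ef-1234567890ab",
    "WrappingAlgorithm" => "RSAES_OAEP_SHA_256",
    "WrappingKeySpec" => "RSA_2048"
  })

# Wrap your key material with this public key, then send the token and the
# wrapped material in the same `ImportKeyMaterial` request
_wrapping_public_key = Base.decode64!(result["PublicKey"])
_import_token = result["ImportToken"]
```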
"""
def get_parameters_for_import(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetParametersForImport", input, options)
end
@doc """
Returns the public key of an asymmetric KMS key.
Unlike the private key of an asymmetric KMS key, which never leaves KMS
unencrypted, callers with `kms:GetPublicKey` permission can download the public
key of an asymmetric KMS key. You can share the public key to allow others to
encrypt messages and verify signatures outside of KMS. For information about
symmetric and asymmetric KMS keys, see [Using Symmetric and Asymmetric KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *Key Management Service Developer Guide*.
You do not need to download the public key. Instead, you can use the public key
within KMS by calling the `Encrypt`, `ReEncrypt`, or `Verify` operations with
the identifier of an asymmetric KMS key. When you use the public key within KMS,
you benefit from the authentication, authorization, and logging that are part of
every KMS operation. You also reduce the risk of encrypting data that cannot be
decrypted. These features are not effective outside of KMS. For details, see
[Special Considerations for Downloading Public Keys](https://docs.aws.amazon.com/kms/latest/developerguide/download-public-key.html#download-public-key-considerations).
To help you use the public key safely outside of KMS, `GetPublicKey` returns
important information about the public key in the response, including:
* [KeySpec](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-KeySpec): The type of key material in the public key, such as `RSA_4096` or `ECC_NIST_P521`.
* [KeyUsage](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-KeyUsage): Whether the key is used for encryption or signing.
* [EncryptionAlgorithms](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-EncryptionAlgorithms) or [SigningAlgorithms](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-SigningAlgorithms): A list of the encryption algorithms or the signing algorithms for the key.
Although KMS cannot enforce these restrictions on external operations, it is
crucial that you use this information to prevent the public key from being used
improperly. For example, you can prevent a public signing key from being used
to encrypt data, or prevent a public key from being used with an encryption
algorithm that is not supported by KMS. You can also avoid errors, such as using
the wrong signing algorithm in a verification operation.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a KMS key in a
different Amazon Web Services account, specify the key ARN or alias ARN in the
value of the `KeyId` parameter.
**Required permissions**:
[kms:GetPublicKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `CreateKey`
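## Example

A minimal sketch of downloading a public key along with the metadata needed to
use it safely outside of KMS (same illustrative assumptions as the other
examples):

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} =
  AWS.KMS.get_public_key(client, %{"KeyId" => "alias/my-signing-key"})

# DER-encoded X.509 SubjectPublicKeyInfo
_der = Base.decode64!(result["PublicKey"])

# Record these to prevent the key from being used improperly
{result["KeyUsage"], result["SigningAlgorithms"]}
```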
"""
def get_public_key(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetPublicKey", input, options)
end
@doc """
Imports key material into an existing symmetric KMS key that was created
without key material.
After you successfully import key material into a KMS key, you can [reimport the same key
material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html#reimport-key-material)
into that KMS key, but you cannot import different key material.
You cannot perform this operation on an asymmetric KMS key or on any KMS key in
a different Amazon Web Services account. For more information about creating KMS
keys with no key material and then importing key material, see [Importing Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *Key Management Service Developer Guide*.
Before using this operation, call `GetParametersForImport`. Its response
includes a public key and an import token. Use the public key to encrypt the key
material. Then, submit the import token from the same `GetParametersForImport`
response.
When calling this operation, you must specify the following values:
* The key ID or key ARN of a KMS key with no key material. Its
`Origin` must be `EXTERNAL`.
To create a KMS key with no key material, call `CreateKey` and set the value of
its `Origin` parameter to `EXTERNAL`. To get the `Origin` of a KMS key, call
`DescribeKey`.
* The encrypted key material. To get the public key to encrypt the
key material, call `GetParametersForImport`.
* The import token that `GetParametersForImport` returned. You must
use a public key and token from the same `GetParametersForImport` response.
* Whether the key material expires and if so, when. If you set an
expiration date, KMS deletes the key material from the KMS key on the specified
date, and the KMS key becomes unusable. To use the KMS key again, you must
reimport the same key material. The only way to change an expiration date is by
reimporting the same key material and specifying a new expiration date.
When this operation is successful, the key state of the KMS key changes from
`PendingImport` to `Enabled`, and you can use the KMS key.
If this operation fails, use the exception to help determine the problem. If the
error is related to the key material, the import token, or wrapping key, use
`GetParametersForImport` to get a new public key and import token for the KMS
key and repeat the import procedure. For help, see [How To Import Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html#importing-keys-overview)
in the *Key Management Service Developer Guide*.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:ImportKeyMaterial](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `DeleteImportedKeyMaterial`
* `GetParametersForImport`
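## Example

A sketch of the second half of the import flow. `import_token` and
`wrapped_key_material` are hypothetical bindings carried over from a prior
`GetParametersForImport` response and your local wrapping step; the other
assumptions match the other examples:

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, _empty, _response} =
  AWS.KMS.import_key_material(client, %{
    "KeyId" => "1234abcd-12ab-34cd-56ef-1234567890ab",
    # Token and wrapped material must come from the same
    # `GetParametersForImport` response
    "ImportToken" => import_token,
    "EncryptedKeyMaterial" => wrapped_key_material,
    "ExpirationModel" => "KEY_MATERIAL_DOES_NOT_EXPIRE"
  })
```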
"""
def import_key_material(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ImportKeyMaterial", input, options)
end
@doc """
Gets a list of aliases in the caller's Amazon Web Services account and region.
For more information about aliases, see `CreateAlias`.
By default, the `ListAliases` operation returns all aliases in the account and
region. To get only the aliases associated with a particular KMS key, use the
`KeyId` parameter.
The `ListAliases` response can include aliases that you created and associated
with your customer managed keys, and aliases that Amazon Web Services created
and associated with Amazon Web Services managed keys in your account. You can
recognize Amazon Web Services aliases because their names have the format
`aws/<service-name>`, such as `aws/dynamodb`.
The response might also include aliases that have no `TargetKeyId` field. These
are predefined aliases that Amazon Web Services has created but has not yet
associated with a KMS key. Aliases that Amazon Web Services creates in your
account, including predefined aliases, do not count against your [KMS aliases quota](https://docs.aws.amazon.com/kms/latest/developerguide/limits.html#aliases-limit).
**Cross-account use**: No. `ListAliases` does not return aliases in other Amazon
Web Services accounts.
**Required permissions**:
[kms:ListAliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) (IAM policy)
For details, see [Controlling access to
aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *Key Management Service Developer Guide*.
## Related operations:
* `CreateAlias`
* `DeleteAlias`
* `UpdateAlias`
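## Example

A minimal sketch of listing aliases and following pagination (same illustrative
assumptions as the other examples):

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} = AWS.KMS.list_aliases(client, %{"Limit" => 100})

for alias_entry <- result["Aliases"] do
  {alias_entry["AliasName"], alias_entry["TargetKeyId"]}
end

# When `result["Truncated"]` is true, pass `result["NextMarker"]` as the
# `"Marker"` value of the next request to fetch the following page
```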
"""
def list_aliases(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListAliases", input, options)
end
@doc """
Gets a list of all grants for the specified KMS key.
You must specify the KMS key in all requests. You can filter the grant list by
grant ID or grantee principal.
For detailed information about grants, including grant terminology, see [Using grants](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html) in
the *Key Management Service Developer Guide*. For examples of working with
grants in several programming languages, see [Programming grants](https://docs.aws.amazon.com/kms/latest/developerguide/programming-grants.html).
The `GranteePrincipal` field in the `ListGrants` response usually contains the
user or role designated as the grantee principal in the grant. However, when the
grantee principal in the grant is an Amazon Web Services service, the
`GranteePrincipal` field contains the [service principal](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html#principal-services),
which might represent several different grantee principals.
**Cross-account use**: Yes. To perform this operation on a KMS key in a
different Amazon Web Services account, specify the key ARN in the value of the
`KeyId` parameter.
**Required permissions**:
[kms:ListGrants](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `CreateGrant`
* `ListRetirableGrants`
* `RetireGrant`
* `RevokeGrant`
"""
def list_grants(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListGrants", input, options)
end
@doc """
Gets the names of the key policies that are attached to a KMS key.
This operation is designed to get policy names that you can use in a
`GetKeyPolicy` operation. However, the only valid policy name is `default`.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:ListKeyPolicies](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `GetKeyPolicy`
* `PutKeyPolicy`
"""
def list_key_policies(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListKeyPolicies", input, options)
end
@doc """
Gets a list of all KMS keys in the caller's Amazon Web Services account and
Region.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:ListKeys](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
* `CreateKey`
* `DescribeKey`
* `ListAliases`
* `ListResourceTags`
"""
def list_keys(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListKeys", input, options)
end
@doc """
Returns all tags on the specified KMS key.
For general information about tags, including the format and syntax, see
[Tagging Amazon Web Services resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html) in
the *Amazon Web Services General Reference*. For information about using tags in
KMS, see [Tagging keys](https://docs.aws.amazon.com/kms/latest/developerguide/tagging-keys.html).
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:ListResourceTags](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `CreateKey`
* `ReplicateKey`
* `TagResource`
* `UntagResource`
"""
def list_resource_tags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListResourceTags", input, options)
end
@doc """
Returns information about all grants in the Amazon Web Services account and
Region that have the specified retiring principal.
You can specify any principal in your Amazon Web Services account. The grants
that are returned include grants for KMS keys in your Amazon Web Services
account and other Amazon Web Services accounts. You might use this operation to
determine which grants you may retire. To retire a grant, use the `RetireGrant`
operation.
For detailed information about grants, including grant terminology, see [Using grants](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html) in
the *Key Management Service Developer Guide*. For examples of working with
grants in several programming languages, see [Programming grants](https://docs.aws.amazon.com/kms/latest/developerguide/programming-grants.html).
**Cross-account use**: You must specify a principal in your Amazon Web Services
account. However, this operation can return grants in any Amazon Web Services
account. You do not need `kms:ListRetirableGrants` permission (or any other
additional permission) in any Amazon Web Services account other than your own.
**Required permissions**:
[kms:ListRetirableGrants](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy) in your Amazon Web Services account.
## Related operations:
* `CreateGrant`
* `ListGrants`
* `RetireGrant`
* `RevokeGrant`
"""
def list_retirable_grants(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListRetirableGrants", input, options)
end
@doc """
Attaches a key policy to the specified KMS key.
For more information about key policies, see [Key Policies](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html)
in the *Key Management Service Developer Guide*. For help writing and formatting
a JSON policy document, see the [IAM JSON Policy Reference](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies.html)
in the *Identity and Access Management User Guide*. For examples of adding a
key policy in multiple programming languages, see [Setting a key policy](https://docs.aws.amazon.com/kms/latest/developerguide/programming-key-policies.html#put-policy)
in the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:PutKeyPolicy](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `GetKeyPolicy`
"""
def put_key_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutKeyPolicy", input, options)
end
@doc """
Decrypts ciphertext and then reencrypts it entirely within KMS.
You can use this operation to change the KMS key under which data is encrypted,
such as when you [manually rotate](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html#rotate-keys-manually)
a KMS key or change the KMS key that protects a ciphertext. You can also use it
to reencrypt ciphertext under the same KMS key, such as to change the
[encryption context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
of a ciphertext.
The `ReEncrypt` operation can decrypt ciphertext that was encrypted by using a
KMS key in a KMS operation, such as `Encrypt` or `GenerateDataKey`. It can
also decrypt ciphertext that was encrypted by using the public key of an
[asymmetric KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/symm-asymm-concepts.html#asymmetric-cmks)
outside of KMS. However, it cannot decrypt ciphertext produced by other
libraries, such as the [Amazon Web Services Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/) or
[Amazon S3 client-side encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html).
These libraries return a ciphertext format that is incompatible with KMS.
When you use the `ReEncrypt` operation, you need to provide information for the
decrypt operation and the subsequent encrypt operation.
* If your ciphertext was encrypted under an asymmetric KMS key, you
must use the `SourceKeyId` parameter to identify the KMS key that encrypted the
ciphertext. You must also supply the encryption algorithm that was used. This
information is required to decrypt the data.
* If your ciphertext was encrypted under a symmetric KMS key, the
`SourceKeyId` parameter is optional. KMS can get this information from metadata
that it adds to the symmetric ciphertext blob. This feature adds durability to
your implementation by ensuring that authorized users can decrypt ciphertext
decades after it was encrypted, even if they've lost track of the key ID.
However, specifying the source KMS key is always recommended as a best practice.
When you use the `SourceKeyId` parameter to specify a KMS key, KMS uses only the
KMS key you specify. If the ciphertext was encrypted under a different KMS key,
the `ReEncrypt` operation fails. This practice ensures that you use the KMS key
that you intend.
* To reencrypt the data, you must use the `DestinationKeyId`
parameter to specify the KMS key that re-encrypts the data after it is decrypted.
You can select a symmetric or asymmetric KMS key. If the destination KMS key is
an asymmetric KMS key, you must also provide the encryption algorithm. The
algorithm that you choose must be compatible with the KMS key.
When you use an asymmetric KMS key to encrypt or reencrypt data, be sure to
record the KMS key and encryption algorithm that you choose. You will be
required to provide the same KMS key and encryption algorithm when you decrypt
the data. If the KMS key and algorithm do not match the values used to encrypt
the data, the decrypt operation fails.
You are not required to supply the key ID and encryption algorithm when you
decrypt with symmetric KMS keys because KMS stores this information in the
ciphertext blob. KMS cannot store metadata in ciphertext generated with
asymmetric keys. The standard format for asymmetric key ciphertext does not
include configurable fields.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: Yes. The source KMS key and destination KMS key can be in
different Amazon Web Services accounts. Either or both KMS keys can be in a
different account than the caller. To specify a KMS key in a different account,
you must use its key ARN or alias ARN.
**Required permissions**:
* [kms:ReEncryptFrom](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) permission on the source KMS key (key policy)
* [kms:ReEncryptTo](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) permission on the destination KMS key (key policy)
To permit reencryption from or to a KMS key, include the `"kms:ReEncrypt*"`
permission in your [key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html).
This permission is automatically included in the key policy when you use the
console to create a KMS key. But you must include it manually when you create a
KMS key programmatically or when you use the `PutKeyPolicy` operation to set a
key policy.
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
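## Example

A sketch of moving a ciphertext from one KMS key to another without exposing
the plaintext. `old_ciphertext_blob` is a hypothetical binding holding output
from a previous `Encrypt` or `GenerateDataKey` call; other assumptions match
the other examples:

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} =
  AWS.KMS.re_encrypt(client, %{
    "CiphertextBlob" => old_ciphertext_blob,
    # Recommended best practice: pin the key that protects the source
    "SourceKeyId" => "alias/old-key",
    "DestinationKeyId" => "alias/new-key"
  })

_new_ciphertext = result["CiphertextBlob"]
```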
"""
def re_encrypt(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ReEncrypt", input, options)
end
@doc """
Replicates a multi-Region key into the specified Region.
This operation creates a multi-Region replica key based on a multi-Region
primary key in a different Region of the same Amazon Web Services partition. You
can create multiple replicas of a primary key, but each must be in a different
Region. To create a multi-Region primary key, use the `CreateKey` operation.
This operation supports *multi-Region keys*, a KMS feature that lets you create
multiple interoperable KMS keys in different Amazon Web Services Regions.
Because these KMS keys have the same key ID, key material, and other metadata,
you can use them interchangeably to encrypt data in one Amazon Web Services
Region and decrypt it in a different Amazon Web Services Region without
re-encrypting the data or making a cross-Region call. For more information about
multi-Region keys, see [Using multi-Region keys](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-overview.html)
in the *Key Management Service Developer Guide*.
A *replica key* is a fully-functional KMS key that can be used independently of
its primary and peer replica keys. A primary key and its replica keys share
properties that make them interoperable. They have the same [key ID](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-id-key-id)
and key material. They also have the same [key spec](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-spec),
[key usage](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-usage),
[key material origin](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-origin),
and [automatic key rotation status](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html).
KMS automatically synchronizes these shared properties among related
multi-Region keys. All other properties of a replica key can differ, including
its [key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html),
[tags](https://docs.aws.amazon.com/kms/latest/developerguide/tagging-keys.html), [aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html),
and [key state](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html).
KMS pricing and quotas for KMS keys apply to each primary key and replica key.
When this operation completes, the new replica key has a transient key state of
`Creating`. This key state changes to `Enabled` (or `PendingImport`) after a few
seconds when the process of creating the new replica key is complete. While the
key state is `Creating`, you can manage the KMS key, but you cannot yet use it in
cryptographic operations. If you are creating and using the replica key
programmatically, retry on `KMSInvalidStateException` or call `DescribeKey` to
check its `KeyState` value before using it. For details about the `Creating` key
state, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in the *Key Management Service
Developer Guide*.
The CloudTrail log of a `ReplicateKey` operation records a `ReplicateKey`
operation in the primary key's Region and a `CreateKey` operation in the replica
key's Region.
If you replicate a multi-Region primary key with imported key material, the
replica key is created with no key material. You must import the same key
material that you imported into the primary key. For details, see [Importing key material into multi-Region
keys](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-import.html) in the *Key
Management Service Developer Guide*.
To convert a replica key to a primary key, use the `UpdatePrimaryRegion`
operation.
`ReplicateKey` uses different default values for the `KeyPolicy` and `Tags`
parameters than those used in the KMS console. For details, see the parameter
descriptions.
**Cross-account use**: No. You cannot use this operation to create a replica key
in a different Amazon Web Services account.
**Required permissions**:
* `kms:ReplicateKey` on the primary key (in the primary key's
Region). Include this permission in the primary key's key policy.
* `kms:CreateKey` in an IAM policy in the replica Region.
* To use the `Tags` parameter, `kms:TagResource` in an IAM policy in
the replica Region.
## Related operations
* `CreateKey`
* `UpdatePrimaryRegion`
"""
def replicate_key(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ReplicateKey", input, options)
end
@doc """
Deletes a grant.
Typically, you retire a grant when you no longer need its permissions. To
identify the grant to retire, use a [grant token](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#grant_token),
or both the grant ID and a key identifier (key ID or key ARN) of the KMS key.
The `CreateGrant` operation returns both values.
This operation can be called by the *retiring principal* for a grant, by the
*grantee principal* if the grant allows the `RetireGrant` operation, and by the
Amazon Web Services account (root user) in which the grant is created. It can
also be called by principals to whom permission for retiring a grant is
delegated. For details, see [Retiring and revoking grants](https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#grant-delete)
in the *Key Management Service Developer Guide*.
For detailed information about grants, including grant terminology, see [Using grants](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html) in
the *Key Management Service Developer Guide*. For examples of working with
grants in several programming languages, see [Programming grants](https://docs.aws.amazon.com/kms/latest/developerguide/programming-grants.html).
**Cross-account use**: Yes. You can retire a grant on a KMS key in a different
Amazon Web Services account.
**Required permissions**: Permission to retire a grant is determined primarily
by the grant. For details, see [Retiring and revoking grants](https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#grant-delete)
in the *Key Management Service Developer Guide*.
## Related operations:
* `CreateGrant`
* `ListGrants`
* `ListRetirableGrants`
* `RevokeGrant`
"""
def retire_grant(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RetireGrant", input, options)
end
@doc """
Deletes the specified grant.
You revoke a grant to terminate the permissions that the grant allows. For more
information, see [Retiring and revoking grants](https://docs.aws.amazon.com/kms/latest/developerguide/managing-grants.html#grant-delete)
in the *Key Management Service Developer Guide*.
When you create, retire, or revoke a grant, there might be a brief delay,
usually less than five minutes, until the grant is available throughout KMS.
This state is known as *eventual consistency*. For details, see [Eventual consistency](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#terms-eventual-consistency)
in the *Key Management Service Developer Guide*.
For detailed information about grants, including grant terminology, see [Using grants](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html) in
the *Key Management Service Developer Guide*. For examples of working with
grants in several programming languages, see [Programming grants](https://docs.aws.amazon.com/kms/latest/developerguide/programming-grants.html).
**Cross-account use**: Yes. To perform this operation on a KMS key in a
different Amazon Web Services account, specify the key ARN in the value of the
`KeyId` parameter.
**Required permissions**:
[kms:RevokeGrant](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy).
## Related operations:
* `CreateGrant`
* `ListGrants`
* `ListRetirableGrants`
* `RetireGrant`
"""
def revoke_grant(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RevokeGrant", input, options)
end
@doc """
Schedules the deletion of a KMS key.
By default, KMS applies a waiting period of 30 days, but you can specify a
waiting period of 7-30 days. When this operation is successful, the key state of
the KMS key changes to `PendingDeletion` and the key can't be used in any
cryptographic operations. It remains in this state for the duration of the
waiting period. Before the waiting period ends, you can use `CancelKeyDeletion`
to cancel the deletion of the KMS key. After the waiting period ends, KMS
deletes the KMS key, its key material, and all KMS data associated with it,
including all aliases that refer to it.
Deleting a KMS key is a destructive and potentially dangerous operation. When a
KMS key is deleted, all data that was encrypted under the KMS key is
unrecoverable. (The only exception is a multi-Region replica key.) To prevent
the use of a KMS key without deleting it, use `DisableKey`.
If you schedule deletion of a KMS key from a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html),
when the waiting period expires, `ScheduleKeyDeletion` deletes the KMS key from
KMS. Then KMS makes a best effort to delete the key material from the associated
CloudHSM cluster. However, you might need to manually [delete the orphaned key material](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-orphaned-key)
from the cluster and its backups.
You can schedule the deletion of a multi-Region primary key and its replica keys
at any time. However, KMS will not delete a multi-Region primary key with
existing replica keys. If you schedule the deletion of a primary key with
replicas, its key state changes to `PendingReplicaDeletion` and it cannot be
replicated or used in cryptographic operations. This status can continue
indefinitely. When the last of its replica keys is deleted (not just
scheduled), the key state of the primary key changes to `PendingDeletion` and
its waiting period (`PendingWindowInDays`) begins. For details, see [Deleting multi-Region
keys](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-delete.html)
in the *Key Management Service Developer Guide*.
For more information about scheduling a KMS key for deletion, see [Deleting KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/deleting-keys.html)
in the *Key Management Service Developer Guide*.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**: `kms:ScheduleKeyDeletion` (key policy)
## Related operations
* `CancelKeyDeletion`
* `DisableKey`
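## Example

A minimal sketch of scheduling deletion with the shortest allowed waiting
period (same illustrative assumptions as the other examples):

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, result, _response} =
  AWS.KMS.schedule_key_deletion(client, %{
    "KeyId" => "1234abcd-12ab-34cd-56ef-1234567890ab",
    "PendingWindowInDays" => 7
  })

# The key state is now "PendingDeletion"; `CancelKeyDeletion` can still
# reverse this until the returned deletion date
{result["KeyState"], result["DeletionDate"]}
```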
"""
def schedule_key_deletion(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ScheduleKeyDeletion", input, options)
end
@doc """
Creates a [digital signature](https://en.wikipedia.org/wiki/Digital_signature) for a message or message digest by using the private key in an asymmetric KMS
key.
To verify the signature, use the `Verify` operation, or use the public key in
the same asymmetric KMS key outside of KMS. For information about symmetric and
asymmetric KMS keys, see [Using Symmetric and Asymmetric KMS
keys](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *Key Management Service Developer Guide*.
Digital signatures are generated and verified by using an asymmetric key pair, such
as an RSA or ECC pair that is represented by an asymmetric KMS key. The key
owner (or an authorized user) uses their private key to sign a message. Anyone
with the public key can verify that the message was signed with that particular
private key and that the message hasn't changed since it was signed.
To use the `Sign` operation, provide the following information:
* Use the `KeyId` parameter to identify an asymmetric KMS key with a
`KeyUsage` value of `SIGN_VERIFY`. To get the `KeyUsage` value of a KMS key, use
the `DescribeKey` operation. The caller must have `kms:Sign` permission on the
KMS key.
* Use the `Message` parameter to specify the message or message
digest to sign. You can submit messages of up to 4096 bytes. To sign a larger
message, generate a hash digest of the message, and then provide the hash digest
in the `Message` parameter. To indicate whether the message is a full message or
a digest, use the `MessageType` parameter.
* Choose a signing algorithm that is compatible with the KMS key.
When signing a message, be sure to record the KMS key and the signing algorithm.
This information is required to verify the signature.
To verify the signature that this operation generates, use the `Verify`
operation. Or use the `GetPublicKey` operation to download the public key and
then use the public key to verify the signature outside of KMS.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a KMS key in a
different Amazon Web Services account, specify the key ARN or alias ARN in the
value of the `KeyId` parameter.
**Required permissions**:
[kms:Sign](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `Verify`
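## Example

A sketch of signing a message larger than 4096 bytes by hashing it locally and
signing the digest. `large_message` is a hypothetical binding; other
assumptions match the other examples:

```elixir
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

digest = :crypto.hash(:sha256, large_message)

{:ok, result, _response} =
  AWS.KMS.sign(client, %{
    "KeyId" => "alias/my-signing-key",
    "Message" => Base.encode64(digest),
    "MessageType" => "DIGEST",
    # Record the key and algorithm; both are required to verify
    "SigningAlgorithm" => "ECDSA_SHA_256"
  })

_signature = Base.decode64!(result["Signature"])
```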
"""
def sign(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "Sign", input, options)
end
@doc """
Adds or edits tags on a [customer managed key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk).
Tagging or untagging a KMS key can allow or deny permission to the KMS key. For
details, see [Using ABAC in KMS](https://docs.aws.amazon.com/kms/latest/developerguide/abac.html) in the
*Key Management Service Developer Guide*.
Each tag consists of a tag key and a tag value, both of which are case-sensitive
strings. The tag value can be an empty (null) string. To add a tag, specify a
new tag key and a tag value. To edit a tag, specify an existing tag key and a
new tag value.
You can use this operation to tag a [customer managed key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk),
but you cannot tag an [Amazon Web Services managed key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk),
an [Amazon Web Services owned key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-owned-cmk),
a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#keystore-concept),
or an
[alias](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#alias-concept). You can also add tags to a KMS key while creating it (`CreateKey`) or
replicating it (`ReplicateKey`).
For information about using tags in KMS, see [Tagging
keys](https://docs.aws.amazon.com/kms/latest/developerguide/tagging-keys.html).
For general information about tags, including the format and syntax, see
[Tagging Amazon Web Services resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html) in
the *Amazon Web Services General Reference*.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:TagResource](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `CreateKey`
* `ListResourceTags`
* `ReplicateKey`
* `UntagResource`
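For example, a sketch of adding one tag (the `client` value and `AWS.KMS`
module name are assumptions based on aws-elixir conventions; the key ID is
hypothetical):

```elixir
input = %{
  "KeyId" => "1234abcd-12ab-34cd-56ef-1234567890ab",
  "Tags" => [%{"TagKey" => "Environment", "TagValue" => "production"}]
}

{:ok, _result, _response} = AWS.KMS.tag_resource(client, input)
```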
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Deletes tags from a [customer managed key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk).
To delete a tag, specify the tag key and the KMS key.
Tagging or untagging a KMS key can allow or deny permission to the KMS key. For
details, see [Using ABAC in KMS](https://docs.aws.amazon.com/kms/latest/developerguide/abac.html) in the
*Key Management Service Developer Guide*.
When it succeeds, the `UntagResource` operation doesn't return any output. Also,
if the specified tag key isn't found on the KMS key, it doesn't throw an
exception or return a response. To confirm that the operation worked, use the
`ListResourceTags` operation.
For information about using tags in KMS, see [Tagging keys](https://docs.aws.amazon.com/kms/latest/developerguide/tagging-keys.html).
For general information about tags, including the format and syntax, see
[Tagging Amazon Web Services resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html) in
the *Amazon Web Services General Reference*.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:UntagResource](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `CreateKey`
* `ListResourceTags`
* `ReplicateKey`
* `TagResource`
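A sketch of removing a tag by its key (same aws-elixir client/module
assumptions as elsewhere in this module; the key ID is hypothetical):

```elixir
input = %{
  "KeyId" => "1234abcd-12ab-34cd-56ef-1234567890ab",
  "TagKeys" => ["Environment"]
}

{:ok, _result, _response} = AWS.KMS.untag_resource(client, input)
```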
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Associates an existing KMS alias with a different KMS key.
Each alias is associated with only one KMS key at a time, although a KMS key can
have multiple aliases. The alias and the KMS key must be in the same Amazon Web
Services account and Region.
Adding, deleting, or updating an alias can allow or deny permission to the KMS
key. For details, see [Using ABAC in KMS](https://docs.aws.amazon.com/kms/latest/developerguide/abac.html) in the
*Key Management Service Developer Guide*.
The current and new KMS key must be the same type (both symmetric or both
asymmetric), and they must have the same key usage (`ENCRYPT_DECRYPT` or
`SIGN_VERIFY`). This restriction prevents errors in code that uses aliases. If
you must assign an alias to a different type of KMS key, use `DeleteAlias` to
delete the old alias and `CreateAlias` to create a new alias.
You cannot use `UpdateAlias` to change an alias name. To change an alias name,
use `DeleteAlias` to delete the old alias and `CreateAlias` to create a new
alias.
Because an alias is not a property of a KMS key, you can create, update, and
delete the aliases of a KMS key without affecting the KMS key. Also, aliases do
not appear in the response from the `DescribeKey` operation. To get the aliases
of all KMS keys in the account, use the `ListAliases` operation.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
## Required permissions
* [kms:UpdateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the alias (IAM policy).
* [kms:UpdateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the current KMS key (key policy).
* [kms:UpdateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the new KMS key (key policy).
For details, see [Controlling access to
aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *Key Management Service Developer Guide*.
## Related operations
* `CreateAlias`
* `DeleteAlias`
* `ListAliases`
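For example, a sketch of re-pointing an alias (the `client` value and module
name are aws-elixir assumptions; both identifiers are hypothetical):

```elixir
input = %{
  "AliasName" => "alias/my-app-key",
  "TargetKeyId" => "0987dcba-09fe-87dc-65ba-ab0987654321"
}

{:ok, _result, _response} = AWS.KMS.update_alias(client, input)
```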
"""
def update_alias(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateAlias", input, options)
end
@doc """
Changes the properties of a custom key store.
Use the `CustomKeyStoreId` parameter to identify the custom key store you want
to edit. Use the remaining parameters to change the properties of the custom key
store.
You can only update a custom key store that is disconnected. To disconnect the
custom key store, use `DisconnectCustomKeyStore`. To reconnect the custom key
store after the update completes, use `ConnectCustomKeyStore`. To find the
connection state of a custom key store, use the `DescribeCustomKeyStores`
operation.
The `CustomKeyStoreId` parameter is required in all commands. Use the other
parameters of `UpdateCustomKeyStore` to edit your key store settings.
* Use the `NewCustomKeyStoreName` parameter to change the friendly
name of the custom key store to the value that you specify.
* Use the `KeyStorePassword` parameter to tell KMS the current password
of the [ `kmsuser` crypto user (CU)](https://docs.aws.amazon.com/kms/latest/developerguide/key-store-concepts.html#concept-kmsuser)
in the associated CloudHSM cluster. You can use this parameter to [fix connection
failures](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-password)
that occur when KMS cannot log into the associated cluster because the `kmsuser`
password has changed. This value does not change the password in the CloudHSM
cluster.
* Use the `CloudHsmClusterId` parameter to associate the custom key
store with a different, but related, CloudHSM cluster. You can use this
parameter to repair a custom key store if its CloudHSM cluster becomes corrupted
or is deleted, or when you need to create or restore a cluster from a backup.
If the operation succeeds, it returns a JSON object with no properties.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in KMS, which combines the convenience and extensive integration of KMS
with the isolation and control of a single-tenant key store.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different Amazon Web Services account.
**Required permissions**:
[kms:UpdateCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations
* `ConnectCustomKeyStore`
* `CreateCustomKeyStore`
* `DeleteCustomKeyStore`
* `DescribeCustomKeyStores`
* `DisconnectCustomKeyStore`
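For example, a sketch of renaming a (disconnected) custom key store (aws-elixir
client/module assumptions as elsewhere; the store ID is hypothetical):

```elixir
input = %{
  "CustomKeyStoreId" => "cks-1234567890abcdef0",
  "NewCustomKeyStoreName" => "DevelopmentKeyStore"
}

{:ok, _result, _response} = AWS.KMS.update_custom_key_store(client, input)
```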
"""
def update_custom_key_store(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateCustomKeyStore", input, options)
end
@doc """
Updates the description of a KMS key.
To see the description of a KMS key, use `DescribeKey`.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a KMS key in a
different Amazon Web Services account.
**Required permissions**:
[kms:UpdateKeyDescription](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `CreateKey`
* `DescribeKey`
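For example (aws-elixir client/module assumptions as elsewhere; the key ID is
hypothetical):

```elixir
input = %{
  "KeyId" => "1234abcd-12ab-34cd-56ef-1234567890ab",
  "Description" => "Key for signing deployment artifacts"
}

{:ok, _result, _response} = AWS.KMS.update_key_description(client, input)
```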
"""
def update_key_description(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateKeyDescription", input, options)
end
@doc """
Changes the primary key of a multi-Region key.
This operation changes the replica key in the specified Region to a primary key
and changes the former primary key to a replica key. For example, suppose you
have a primary key in `us-east-1` and a replica key in `eu-west-2`. If you run
`UpdatePrimaryRegion` with a `PrimaryRegion` value of `eu-west-2`, the primary
key is now the key in `eu-west-2`, and the key in `us-east-1` becomes a replica
key. For details, see [Updating the primary Region](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-manage.html#multi-region-update)
in the *Key Management Service Developer Guide*.
This operation supports *multi-Region keys*, a KMS feature that lets you create
multiple interoperable KMS keys in different Amazon Web Services Regions.
Because these KMS keys have the same key ID, key material, and other metadata,
you can use them interchangeably to encrypt data in one Amazon Web Services
Region and decrypt it in a different Amazon Web Services Region without
re-encrypting the data or making a cross-Region call. For more information about
multi-Region keys, see [Using multi-Region keys](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-overview.html)
in the *Key Management Service Developer Guide*.
The *primary key* of a multi-Region key is the source for properties that are
always shared by primary and replica keys, including the key material, [key ID](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-id-key-id),
[key spec](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-spec),
[key usage](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-usage),
[key material origin](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-origin),
and [automatic key rotation](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html).
It's the only key that can be replicated. You cannot [delete the primary key](https://docs.aws.amazon.com/kms/latest/APIReference/API_ScheduleKeyDeletion.html)
until all replica keys are deleted.
The key ID and primary Region that you specify uniquely identify the replica key
that will become the primary key. The primary Region must already have a replica
key. This operation does not create a KMS key in the specified Region. To find
the replica keys, use the `DescribeKey` operation on the primary key or any
replica key. To create a replica key, use the `ReplicateKey` operation.
You can run this operation while using the affected multi-Region keys in
cryptographic operations. This operation should not delay, interrupt, or cause
failures in cryptographic operations.
Even after this operation completes, the process of updating the primary Region
might still be in progress for a few more seconds. Operations such as
`DescribeKey` might display both the old and new primary keys as replicas. The
old and new primary keys have a transient key state of `Updating`. The original
key state is restored when the update is complete. While the key state is
`Updating`, you can use the keys in cryptographic operations, but you cannot
replicate the new primary key or perform certain management operations, such as
enabling or disabling these keys. For details about the `Updating` key state,
see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in the *Key Management Service
Developer Guide*.
This operation does not return any output. To verify that the primary key is
changed, use the `DescribeKey` operation.
**Cross-account use**: No. You cannot use this operation in a different Amazon
Web Services account.
**Required permissions**:
* `kms:UpdatePrimaryRegion` on the current primary key (in the
primary key's Region). Include this permission in the primary key's key policy.
* `kms:UpdatePrimaryRegion` on the current replica key (in the
replica key's Region). Include this permission in the replica key's key policy.
## Related operations
* `CreateKey`
* `ReplicateKey`
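Continuing the `us-east-1`/`eu-west-2` example above, a sketch of promoting the
replica (aws-elixir client/module assumptions as elsewhere; the `mrk-` key ID
is hypothetical):

```elixir
input = %{
  "KeyId" => "mrk-1234abcd12ab34cd56ef1234567890ab",
  "PrimaryRegion" => "eu-west-2"
}

{:ok, _result, _response} = AWS.KMS.update_primary_region(client, input)
```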
"""
def update_primary_region(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdatePrimaryRegion", input, options)
end
@doc """
Verifies a digital signature that was generated by the `Sign` operation.
Verification confirms that an authorized user signed the message with the
specified KMS key and signing algorithm, and the message hasn't changed since it
was signed. If the signature is verified, the value of the `SignatureValid`
field in the response is `True`. If the signature verification fails, the
`Verify` operation fails with a `KMSInvalidSignatureException`.
A digital signature is generated by using the private key in an asymmetric KMS
key. The signature is verified by using the public key in the same asymmetric
KMS key. For information about symmetric and asymmetric KMS keys, see [Using Symmetric and Asymmetric KMS
keys](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *Key Management Service Developer Guide*.
To verify a digital signature, you can use the `Verify` operation. Specify the
same asymmetric KMS key, message, and signing algorithm that were used to
produce the signature.
You can also verify the digital signature by using the public key of the KMS key
outside of KMS. Use the `GetPublicKey` operation to download the public key in
the asymmetric KMS key and then use the public key to verify the signature
outside of KMS. The advantage of using the `Verify` operation is that it is
performed within KMS. As a result, it's easy to call, the operation is performed
within the FIPS boundary, it is logged in CloudTrail, and you can use key policy
and IAM policy to determine who is authorized to use the KMS key to verify
signatures.
The KMS key that you use for this operation must be in a compatible key state.
For details, see [Key state: Effect on your KMS key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a KMS key in a
different Amazon Web Services account, specify the key ARN or alias ARN in the
value of the `KeyId` parameter.
**Required permissions**:
[kms:Verify](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `Sign`
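A sketch of verifying a signature produced by `Sign`, reusing the `digest` and
`signature` bindings from the `Sign` example (aws-elixir client/module and
result-shape assumptions as elsewhere):

```elixir
input = %{
  "KeyId" => "alias/my-signing-key",
  "Message" => Base.encode64(digest),
  "MessageType" => "DIGEST",
  "Signature" => signature,
  "SigningAlgorithm" => "RSASSA_PKCS1_V1_5_SHA_256"
}

{:ok, %{"SignatureValid" => true}, _response} = AWS.KMS.verify(client, input)
```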
"""
def verify(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "Verify", input, options)
end
end
defmodule Songmate.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use Songmate.DataCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Songmate.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Songmate.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Songmate.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Songmate.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "<PASSWORD>"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["<PASSWORD>"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(
changeset,
fn {message, opts} ->
Regex.replace(
~r"%{(\w+)}",
message,
fn _, key ->
opts
|> Keyword.get(String.to_existing_atom(key), key)
|> to_string()
end
)
end
)
end
# -----------------------------------------------------------------------
# Accounts
def valid_user_attrs do
%{
name: "<NAME>",
bio: "Some nights I stay up cashing in my bad luck",
avatar: "some-link-to-an-image",
credential: %{
provider: :spotify,
email: "<EMAIL>",
username: "hisongmate"
}
}
end
def valid_2nd_user_attrs do
%{
name: "Spotify Rocks",
bio: "ugh",
credential: %{
provider: :spotify,
email: "<EMAIL>",
username: "spotify-rocks"
}
}
end
# -----------------------------------------------------------------------
# Music
def valid_track_attrs do
%{
isrc: "USMRG0467010",
name: "Rebellion (Lies)",
popularity: 65,
spotify_id: "0xOeB16JDbBJBJKSdHbElT"
}
end
def valid_artist_attrs do
%{
name: "9m88",
popularity: 53,
spotify_id: "4PjY2961rc0MHE9zHYWEnH"
}
end
def valid_genre_attrs do
%{name: "Modern Rock"}
end
# -----------------------------------------------------------------------
# MusicProfile
def valid_music_profile_attrs do
%{
user: valid_user_attrs(),
artist_preferences: [%{
rank: 1,
artist: valid_artist_attrs()
}]
}
end
end
defmodule Unicode.CanonicalCombiningClass do
@moduledoc """
Functions to introspect Unicode
canonical combining classes for binaries
(Strings) and codepoints.
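For example, a base letter has class 0, while U+0301 (the combining acute
accent) has class 230:

```elixir
Unicode.CanonicalCombiningClass.combining_class("e\u0301")
# => [0, 230]
```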
"""
@behaviour Unicode.Property.Behaviour
alias Unicode.Utils
@combining_classes Utils.combining_classes()
|> Utils.remove_annotations()
@doc """
Returns the map of Unicode
canonical combining classes.
The class name is the map
key and a list of codepoint
ranges, as tuples, is the value.
"""
def combining_classes do
@combining_classes
end
@doc """
Returns a list of known Unicode
canonical combining class names.
This function does not return the
names of any class aliases.
"""
@known_combining_classes Map.keys(@combining_classes)
def known_combining_classes do
@known_combining_classes
end
@combining_class_alias Utils.property_value_alias()
|> Map.get("ccc")
|> Enum.map(fn {k, v} -> {k, String.to_integer(v)} end)
|> Map.new()
|> Utils.downcase_keys_and_remove_whitespace()
|> Utils.add_canonical_alias()
@doc """
Returns a map of aliases for
Unicode canonical combining classes.
An alias is an alternative name
for referring to a class. Aliases
are resolved by the `fetch/1` and
`get/1` functions.
"""
@impl Unicode.Property.Behaviour
def aliases do
@combining_class_alias
end
@doc """
Returns the Unicode ranges for
a given canonical combining class
as a list of ranges as 2-tuples.
Aliases are resolved by this function.
Returns either `{:ok, range_list}` or
`:error`.
"""
@impl Unicode.Property.Behaviour
def fetch(combining_class) when is_atom(combining_class) do
Map.fetch(combining_classes(), combining_class)
end
def fetch(combining_class) when is_binary(combining_class) do
combining_class = Utils.downcase_and_remove_whitespace(combining_class)
combining_class = Map.get(aliases(), combining_class, combining_class)
Map.fetch(combining_classes(), combining_class)
end
def fetch(combining_class) when is_integer(combining_class) do
Map.fetch(combining_classes(), combining_class)
end
@doc """
Returns the Unicode ranges for
a given canonical combining class
as a list of ranges as 2-tuples.
Aliases are resolved by this function.
Returns either `range_list` or
`nil`.
"""
@impl Unicode.Property.Behaviour
def get(combining_class) do
case fetch(combining_class) do
{:ok, combining_class} -> combining_class
_ -> nil
end
end
@doc """
Returns the count of the number of characters
for a given canonical combining class.
## Example
iex> Unicode.CanonicalCombiningClass.count(230)
484
"""
@impl Unicode.Property.Behaviour
def count(class) do
with {:ok, class} <- fetch(class) do
Enum.reduce(class, 0, fn {from, to}, acc -> acc + to - from + 1 end)
end
end
@doc """
Returns the canonical combining class
name(s) for the given binary or codepoint.
In the case of a codepoint, a single
class name is returned.
For a binary a list of distinct class
names represented by the graphemes in
the binary is returned.
"""
def combining_class(string) when is_binary(string) do
string
|> String.to_charlist()
|> Enum.map(&combining_class/1)
|> Enum.uniq()
end
for {combining_class, ranges} <- @combining_classes do
def combining_class(codepoint) when unquote(Utils.ranges_to_guard_clause(ranges)) do
unquote(combining_class)
end
end
def combining_class(codepoint) when is_integer(codepoint) and codepoint in 0..0x10FFFF do
0
end
end
defmodule Fuzzyurl do
## N.B. when this moduledoc changes, it should be copy/pasted into README.md
@moduledoc ~S"""
Fuzzyurl provides two related functions: non-strict parsing of URLs or
URL-like strings into their component pieces (protocol, username, password,
hostname, port, path, query, and fragment), and fuzzy matching of URLs
and URL patterns.
Specifically, URLs that look like this:
[protocol ://] [username [: password] @] [hostname] [: port] [/ path] [? query] [# fragment]
Fuzzyurls can be constructed using some or all of the above
fields, optionally replacing some or all of those fields with a `*`
wildcard if you wish to use the Fuzzyurl as a URL mask.
## Parsing URLs
iex> f = Fuzzyurl.from_string("https://api.example.com/users/123?full=true")
%Fuzzyurl{fragment: nil, hostname: "api.example.com", password: <PASSWORD>, path: "/users/123", port: nil, protocol: "https", query: "full=true", username: nil}
iex> f.protocol
"https"
iex> f.hostname
"api.example.com"
iex> f.query
"full=true"
## Constructing URLs
iex> f = Fuzzyurl.new(hostname: "example.com", protocol: "http", port: "8080")
%Fuzzyurl{fragment: nil, hostname: "example.com", password: nil, path: nil, port: "8080", protocol: "http", query: nil, username: nil}
iex> Fuzzyurl.to_string(f)
"http://example.com:8080"
## Matching URLs
Fuzzyurl supports wildcard matching:
* `*` matches anything, including `nil`.
* `foo*` matches `foo`, `foobar`, `foo/bar`, etc.
* `*bar` matches `bar`, `foobar`, `foo/bar`, etc.
Path and hostname matching allows the use of a greedier wildcard `**` in
addition to the naive wildcard `*`:
* `*.example.com` matches `filsrv-01.corp.example.com` but not `example.com`.
* `**.example.com` matches `filsrv-01.corp.example.com` and `example.com`.
* `/some/path/*` matches `/some/path/foo/bar` and `/some/path/`
but not `/some/path`
* `/some/path/**` matches `/some/path/foo/bar` and `/some/path/`
and `/some/path`
The `Fuzzyurl.mask/0` and `Fuzzyurl.mask/1` functions aid in the
creation of URL masks.
iex> m = Fuzzyurl.mask
%Fuzzyurl{fragment: "*", hostname: "*", password: "*", path: "*", port: "*", protocol: "*", query: "*", username: "*"}
iex> Fuzzyurl.matches?(m, "http://example.com/a/b/c")
true
iex> m2 = Fuzzyurl.mask(path: "/a/b/**")
%Fuzzyurl{fragment: "*", hostname: "*", password: "*", path: "/a/b/**", port: "*", protocol: "*", query: "*", username: "*"}
iex> Fuzzyurl.matches?(m2, "https://example.com/a/b/")
true
iex> Fuzzyurl.matches?(m2, "git+ssh://[email protected]/a/b")
true
iex> Fuzzyurl.matches?(m2, "https://example.com/a/bar")
false
`Fuzzyurl.best_match`, given a list of URL masks and a URL, will return
the mask which most closely matches the URL:
iex> masks = ["/foo/*", "/foo/bar", Fuzzyurl.mask]
iex> Fuzzyurl.best_match(masks, "http://example.com/foo/bar")
"/foo/bar"
If you'd prefer the list index of the best-matching URL mask, use
`Fuzzyurl.best_match_index` instead:
iex> masks = ["/foo/*", "/foo/bar", Fuzzyurl.mask]
iex> Fuzzyurl.best_match_index(masks, "http://example.com/foo/bar")
1
"""
@type t :: %Fuzzyurl{}
@type string_or_fuzzyurl :: String.t() | Fuzzyurl.t()
alias Fuzzyurl.Match
alias Fuzzyurl.Strings
@default [
protocol: nil,
username: nil,
password: nil,
hostname: nil,
port: nil,
path: nil,
query: nil,
fragment: nil
]
defstruct @default
@doc ~S"""
Creates an empty Fuzzyurl.
iex> Fuzzyurl.new()
%Fuzzyurl{fragment: nil, hostname: nil, password: nil, path: nil, port: nil, protocol: nil, query: nil, username: nil}
"""
@spec new() :: Fuzzyurl.t()
def new(), do: %Fuzzyurl{}
@doc ~S"""
Creates a new Fuzzyurl with the given parameters.
iex> Fuzzyurl.new("http", "user", "pass", "example.com", "80", "/", "query=true", "123")
%Fuzzyurl{fragment: "123", hostname: "example.com", password: "<PASSWORD>", path: "/", port: "80", protocol: "http", query: "query=true", username: "user"}
"""
@spec new(
String.t(),
String.t(),
String.t(),
String.t(),
String.t(),
String.t(),
String.t(),
String.t()
) :: Fuzzyurl.t()
def new(protocol, username, password, hostname, port, path, query, fragment) do
%Fuzzyurl{
protocol: protocol,
username: username,
password: password,
hostname: hostname,
port: port,
path: path,
query: query,
fragment: fragment
}
end
@doc ~S"""
Creates a new Fuzzyurl with the given parameters.
`params` may be a map or a keyword list.
iex> Fuzzyurl.new(hostname: "example.com", protocol: "http")
%Fuzzyurl{fragment: nil, hostname: "example.com", password: nil, path: nil, port: nil, protocol: "http", query: nil, username: nil}
"""
@spec new(Keyword.t() | map) :: Fuzzyurl.t()
def new(params), do: new() |> Fuzzyurl.with(params)
@doc ~S"""
Returns a new Fuzzyurl based on `fuzzy_url`, with the given arguments
changed.
`params` may be a map or a keyword list.
iex> fuzzy_url = Fuzzyurl.new(hostname: "example.com", protocol: "http")
iex> fuzzy_url |> Fuzzyurl.with(protocol: "https", path: "/index.html")
%Fuzzyurl{fragment: nil, hostname: "example.com", password: nil, path: "/index.html", port: nil, protocol: "https", query: nil, username: nil}
"""
@spec with(Fuzzyurl.t(), map) :: Fuzzyurl.t()
def with(fuzzy_url, %{} = params) do
Fuzzyurl.with(fuzzy_url, Map.to_list(params))
end
@spec with(Fuzzyurl.t(), Keyword.t()) :: Fuzzyurl.t()
def with(fuzzy_url, params) do
Enum.reduce(params, fuzzy_url, fn {k, v}, acc ->
## prevent struct damage by checking keys
if Keyword.has_key?(@default, k), do: Map.put(acc, k, v), else: acc
end)
end
@doc ~S"""
Returns a Fuzzyurl containing all wildcard matches, that will match any
Fuzzyurl.
iex> Fuzzyurl.mask()
%Fuzzyurl{fragment: "*", hostname: "*", password: "*", path: "*", port: "*", protocol: "*", query: "*", username: "*"}
"""
@spec mask() :: Fuzzyurl.t()
def mask(), do: new("*", "*", "*", "*", "*", "*", "*", "*")
@doc ~S"""
Returns a Fuzzyurl mask with the given parameters set.
`params` may be a map or a keyword list.
iex> Fuzzyurl.mask(hostname: "example.com")
%Fuzzyurl{fragment: "*", hostname: "example.com", password: "*", path: "*", port: "*", protocol: "*", query: "*", username: "*"}
"""
@spec mask(map | Keyword.t()) :: Fuzzyurl.t()
def mask(params), do: mask() |> Fuzzyurl.with(params)
@doc ~S"""
Returns an integer representing how closely `mask` (which may have
wildcards) resembles `url` (which may not), or `nil` in the
case of a conflict.
`mask` and `url` may each be a Fuzzyurl or a string.
iex> Fuzzyurl.match("http://example.com", "http://example.com")
2
iex> Fuzzyurl.match("example.com", "http://example.com")
1
iex> Fuzzyurl.match("**.example.com", "http://example.com")
0
iex> Fuzzyurl.match("*.example.com", "http://example.com")
nil
"""
@spec match(string_or_fuzzyurl, string_or_fuzzyurl) :: non_neg_integer | nil
def match(mask, url) do
m = if is_binary(mask), do: from_string(mask, default: "*"), else: mask
u = if is_binary(url), do: from_string(url), else: url
Match.match(m, u)
end
@doc ~S"""
Returns true if `mask` matches `url`, false otherwise.
`mask` and `url` may each be a Fuzzyurl or a string.
iex> mask = Fuzzyurl.mask(hostname: "example.com")
iex> Fuzzyurl.matches?(mask, "http://example.com")
true
iex> Fuzzyurl.matches?(mask, "http://nope.example.com")
false
"""
@spec matches?(string_or_fuzzyurl, string_or_fuzzyurl) :: false | true
def matches?(mask, url) do
m = if is_binary(mask), do: from_string(mask, default: "*"), else: mask
u = if is_binary(url), do: from_string(url), else: url
Match.matches?(m, u)
end
@doc ~S"""
Returns a Fuzzyurl struct containing values indicating match quality:
0 for a wildcard match, 1 for exact match, and nil otherwise.
`mask` and `url` may each be a Fuzzyurl or a string.
iex> mask = Fuzzyurl.mask(hostname: "example.com")
iex> Fuzzyurl.match_scores(mask, "http://example.com")
%Fuzzyurl{fragment: 0, hostname: 1, password: 0, path: 0, port: 0, protocol: 0, query: 0, username: 0}
"""
@spec match_scores(string_or_fuzzyurl, string_or_fuzzyurl) :: Fuzzyurl.t()
def match_scores(mask, url) do
m = if is_binary(mask), do: from_string(mask, default: "*"), else: mask
u = if is_binary(url), do: from_string(url), else: url
Match.match_scores(m, u)
end
@doc ~S"""
From a list of Fuzzyurl masks, returns the one which best matches `url`.
Returns nil if none of `masks` match.
`url` and each mask may be a Fuzzyurl or a string.
iex> masks = ["/foo/*", "/foo/bar", Fuzzyurl.mask]
iex> Fuzzyurl.best_match(masks, "http://example.com/foo/bar")
"/foo/bar"
"""
@spec best_match([string_or_fuzzyurl], string_or_fuzzyurl) :: string_or_fuzzyurl | nil
def best_match(masks, url) do
case best_match_index(masks, url) do
nil -> nil
i -> Enum.at(masks, i)
end
end
@doc ~S"""
From a list of Fuzzyurl masks, returns the list index of the one which
best matches `url`. Returns nil if none of `masks` match.
`url` and each mask may be a Fuzzyurl or a string.
iex> masks = ["/foo/*", "/foo/bar", Fuzzyurl.mask]
iex> Fuzzyurl.best_match_index(masks, "http://example.com/foo/bar")
1
"""
@spec best_match_index([string_or_fuzzyurl], string_or_fuzzyurl) :: non_neg_integer | nil
def best_match_index(masks, url) do
masks
|> Enum.map(fn m -> if is_binary(m), do: from_string(m, default: "*"), else: m end)
|> Match.best_match_index(if is_binary(url), do: from_string(url), else: url)
end
@doc ~S"""
Returns a String representation of `fuzzy_url`.
iex> fuzzy_url = Fuzzyurl.new(hostname: "example.com", protocol: "http")
iex> Fuzzyurl.to_string(fuzzy_url)
"http://example.com"
"""
@spec to_string(Fuzzyurl.t()) :: String.t()
def to_string(%Fuzzyurl{} = fuzzy_url) do
Strings.to_string(fuzzy_url)
end
@doc ~S"""
Creates a new Fuzzyurl from the given URL string. Provide `default: "*"`
when creating a URL mask. Raises `ArgumentError` if input string is
not a parseable URL.
iex> Fuzzyurl.from_string("http://example.com")
%Fuzzyurl{fragment: nil, hostname: "example.com", password: <PASSWORD>, path: nil, port: nil, protocol: "http", query: nil, username: nil}
iex> Fuzzyurl.from_string("*.example.com:443", default: "*")
%Fuzzyurl{fragment: "*", hostname: "*.example.com", password: "*", path: "*", port: "443", protocol: "*", query: "*", username: "*"}
"""
@spec from_string(String.t(), Keyword.t()) :: Fuzzyurl.t() | no_return
def from_string(string, opts \\ []) when is_binary(string) do
case Strings.from_string(string, opts) do
{:ok, fuzzy_url} ->
fuzzy_url
{:error, msg} ->
raise ArgumentError, msg
end
end
end
defmodule AdventOfCode.Solutions.Day09 do
@moduledoc """
Solution for day 9 exercise.
### Exercise
https://adventofcode.com/2021/day/9
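Usage sketch (the input file path is hypothetical):

```elixir
AdventOfCode.Solutions.Day09.calculate_risk("priv/inputs/day09.txt")
AdventOfCode.Solutions.Day09.calculate_basins("priv/inputs/day09.txt")
```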
"""
require Logger
def calculate_risk(filename) do
map =
filename
|> File.read!()
|> parse_map()
result = do_calculate_risk(map)
IO.puts("Risk associated is #{result}")
end
def calculate_basins(filename) do
map =
filename
|> File.read!()
|> parse_map()
result = do_calculate_basins(map)
IO.puts("Result of basins calculation is #{result}")
end
defp parse_map(file_content) do
file_content
|> String.replace("\r\n", "\n")
|> String.split("\n", trim: true)
|> Enum.map(fn row ->
row
|> String.graphemes()
|> Enum.map(&String.to_integer/1)
end)
end
defp do_calculate_risk(map) do
get_low_points(map)
|> Enum.map(fn {x, y} -> get_value_from_map(map, {x, y}) + 1 end)
|> Enum.sum()
end
defp do_calculate_basins(map) do
basins =
get_low_points(map)
|> Enum.map(fn {x, y} ->
{basin_coords, _checked} = get_basin_coords(map, {x, y})
basin_coords
|> Enum.uniq()
|> length()
end)
|> Enum.sort(&(&1 >= &2))
[top_1 | [top_2 | [top_3 | _tail]]] = basins
top_1 * top_2 * top_3
end
defp get_basin_coords(map, {x, y}, checked_coords \\ []) do
around_coords =
[
{x + 1, y},
{x - 1, y},
{x, y + 1},
{x, y - 1}
]
|> Enum.reject(&(&1 in checked_coords))
|> Enum.reject(fn {x, y} ->
value = get_value_from_map(map, {x, y})
value in [nil, 9]
end)
updated_checked_coords = [{x, y} | checked_coords]
# We need a reduce here to keep the list of checked coords up to date and
# avoid visiting the same coords many times.
{basin_coords, checked_coords} =
Enum.reduce(around_coords, {[{x, y}], updated_checked_coords}, fn
{cx, cy}, {acc_basin_coords, acc_checked_coords} ->
{basin_coords, checked_coords} = get_basin_coords(map, {cx, cy}, acc_checked_coords)
acc_checked_coords = Enum.uniq(checked_coords ++ acc_checked_coords)
acc_basin_coords = Enum.uniq(basin_coords ++ acc_basin_coords)
{acc_basin_coords, acc_checked_coords}
end)
{basin_coords, checked_coords}
end
defp get_low_points(map) do
coords = calculate_available_coords(map)
Enum.reduce(coords, [], fn {x, y}, acc ->
value = get_value_from_map(map, {x, y})
around_values =
[
get_value_from_map(map, {x + 1, y}),
get_value_from_map(map, {x - 1, y}),
get_value_from_map(map, {x, y + 1}),
get_value_from_map(map, {x, y - 1})
]
|> Enum.reject(&is_nil/1)
if Enum.all?(around_values, &(&1 > value)) do
[{x, y} | acc]
else
acc
end
end)
end
defp calculate_available_coords(map) do
max_y = length(map)
max_x =
map
|> hd()
|> length()
for x <- 0..(max_x - 1) do
for y <- 0..(max_y - 1) do
{x, y}
end
end
|> List.flatten()
end
defp get_value_from_map(_map, {x, _y}) when x < 0, do: nil
defp get_value_from_map(_map, {_x, y}) when y < 0, do: nil
defp get_value_from_map(map, {x, y}) do
map
|> Enum.at(y, [])
|> Enum.at(x)
end
end
defmodule MvOpentelemetry.LiveView do
use MvOpentelemetry.SpanTracer,
name: :live_view,
prefix: :phoenix,
events: [
[:phoenix, :live_view, :mount, :start],
[:phoenix, :live_view, :mount, :stop],
[:phoenix, :live_view, :mount, :exception],
[:phoenix, :live_view, :handle_params, :start],
[:phoenix, :live_view, :handle_params, :stop],
[:phoenix, :live_view, :handle_params, :exception],
[:phoenix, :live_view, :handle_event, :start],
[:phoenix, :live_view, :handle_event, :stop],
[:phoenix, :live_view, :handle_event, :exception],
[:phoenix, :live_component, :handle_event, :start],
[:phoenix, :live_component, :handle_event, :stop],
[:phoenix, :live_component, :handle_event, :exception]
]
defp get_name([:phoenix, component, action, _], opts) do
list = [opts[:prefix]] ++ [component, action]
Enum.join(list, ".")
end
@spec handle_event([atom()], map(), map(), Access.t()) :: :ok
def handle_event([:phoenix, :live_view, :mount, :start] = event, _measurements, meta, opts) do
attributes = [{"live_view.view", meta.socket.view}]
params_attributes =
meta
|> get_params()
|> filter_list(opts[:query_params_whitelist])
|> Enum.map(&prefix_key_with(&1, "live_view.params"))
attributes = attributes ++ params_attributes ++ opts[:default_attributes]
name = get_name(event, opts)
OpentelemetryTelemetry.start_telemetry_span(opts[:tracer_id], name, meta, %{})
|> Span.set_attributes(attributes)
:ok
end
def handle_event(
[:phoenix, :live_view, :handle_params, :start] = event,
_measurements,
meta,
opts
) do
attributes = [{"live_view.view", meta.socket.view}, {"live_view.uri", meta[:uri]}]
params_attributes =
meta
|> get_params()
|> filter_list(opts[:query_params_whitelist])
|> Enum.map(&prefix_key_with(&1, "live_view.params"))
attributes = attributes ++ params_attributes ++ opts[:default_attributes]
name = get_name(event, opts)
OpentelemetryTelemetry.start_telemetry_span(opts[:tracer_id], name, meta, %{})
|> Span.set_attributes(attributes)
:ok
end
def handle_event(
[:phoenix, :live_view, :handle_event, :start] = event,
_measurements,
meta,
opts
) do
attributes = [
{"live_view.view", meta.socket.view},
{"live_view.uri", meta[:uri]},
{"live_view.event", meta.event}
]
name = get_name(event, opts)
params_attributes =
meta
|> get_params()
|> filter_list(opts[:query_params_whitelist])
|> Enum.map(&prefix_key_with(&1, "live_view.params"))
attributes = attributes ++ params_attributes ++ opts[:default_attributes]
OpentelemetryTelemetry.start_telemetry_span(opts[:tracer_id], name, meta, %{})
|> Span.set_attributes(attributes)
:ok
end
def handle_event(
[:phoenix, :live_component, :handle_event, :start] = event,
_measurements,
meta,
opts
) do
attributes = [
{"live_component.view", meta.socket.view},
{"live_component.event", meta.event},
{"live_component.component", meta.component},
{"live_component.host_uri", meta.socket.host_uri},
{"live_component.uri", meta[:uri]}
]
name = get_name(event, opts)
attributes = attributes ++ opts[:default_attributes]
OpentelemetryTelemetry.start_telemetry_span(opts[:tracer_id], name, meta, %{})
|> Span.set_attributes(attributes)
:ok
end
def handle_event([:phoenix, :live_view, _, :exception], _measurements, meta, opts) do
ctx = OpentelemetryTelemetry.set_current_telemetry_span(opts[:tracer_id], meta)
Span.set_status(ctx, OpenTelemetry.status(:error, ""))
attributes = [
{"live_view.kind", meta.kind},
{"live_view.reason", meta.reason},
{"error", true}
]
Span.set_attributes(ctx, attributes)
OpentelemetryTelemetry.end_telemetry_span(opts[:tracer_id], meta)
:ok
end
def handle_event([:phoenix, :live_component, _, :exception], _measurements, meta, opts) do
ctx = OpentelemetryTelemetry.set_current_telemetry_span(opts[:tracer_id], meta)
Span.set_status(ctx, OpenTelemetry.status(:error, ""))
attributes = [
{"live_component.kind", meta.kind},
{"live_component.reason", meta.reason},
{"error", true}
]
Span.set_attributes(ctx, attributes)
OpentelemetryTelemetry.end_telemetry_span(opts[:tracer_id], meta)
:ok
end
def handle_event([:phoenix, _, _, :stop], _measurements, meta, opts) do
_ctx = OpentelemetryTelemetry.set_current_telemetry_span(opts[:tracer_id], meta)
OpentelemetryTelemetry.end_telemetry_span(opts[:tracer_id], meta)
:ok
end
defp filter_list(params, nil), do: params
defp filter_list(params, whitelist) do
Enum.filter(params, fn {k, _v} -> Enum.member?(whitelist, k) end)
end
defp prefix_key_with({key, value}, prefix) when is_binary(key) do
complete_key = prefix <> "." <> key
{complete_key, value}
end
defp get_params(%{params: params}) when is_map(params), do: params
defp get_params(_), do: %{}
end
defmodule Commanded.Aggregates.AggregateLifespan do
@moduledoc """
The `Commanded.Aggregates.AggregateLifespan` behaviour is used to control the
aggregate `GenServer` process lifespan.
By default an aggregate instance process will run indefinitely once started.
You can change this default by implementing the
`Commanded.Aggregates.AggregateLifespan` behaviour in a module and configuring
it in your router.
After a command successfully executes, and creates at least one domain event,
the `c:after_event/1` function is called passing the last created event.
When a command is successfully handled but results in no domain events (by
returning `nil` or an empty list `[]`), the command struct is passed to the
`c:after_command/1` function.
Finally, if there is an error executing the command, the error reason is
passed to the `c:after_error/1` function.
For all the above, the returned inactivity timeout value is used to shutdown
the aggregate process if no other messages are received.
## Supported return values
- Non-negative integer - specify an inactivity timeout, in milliseconds.
- `:infinity` - prevent the aggregate instance from shutting down.
- `:hibernate` - send the process into hibernation.
- `:stop` - immediately shutdown the aggregate process.
### Hibernation
A hibernated process will continue its loop once a message is in its message
queue. Hibernating an aggregate causes garbage collection and minimises the
memory used by the process. Hibernating should not be used aggressively as too
much time could be spent garbage collecting.
## Example
Define a module that implements the `Commanded.Aggregates.AggregateLifespan`
behaviour:
defmodule BankAccountLifespan do
@behaviour Commanded.Aggregates.AggregateLifespan
def after_event(%MoneyDeposited{}), do: :timer.hours(1)
def after_event(%BankAccountClosed{}), do: :stop
def after_event(_event), do: :infinity
def after_command(%CloseAccount{}), do: :stop
def after_command(_command), do: :infinity
def after_error(:invalid_initial_balance), do: :timer.minutes(5)
def after_error(_error), do: :stop
end
Then specify the module as the `lifespan` option when registering the
applicable commands in your router:
defmodule BankRouter do
use Commanded.Commands.Router
dispatch [OpenAccount, CloseAccount],
to: BankAccount,
lifespan: BankAccountLifespan,
identity: :account_number
end
"""
@type lifespan :: timeout | :hibernate | :stop
@doc """
Aggregate process will be stopped after specified inactivity timeout unless
`:infinity`, `:hibernate`, or `:stop` are returned.
"""
@callback after_event(event :: struct) :: lifespan
@doc """
Aggregate process will be stopped after specified inactivity timeout unless
`:infinity`, `:hibernate`, or `:stop` are returned.
"""
@callback after_command(command :: struct) :: lifespan
@doc """
Aggregate process will be stopped after specified inactivity timeout unless
`:infinity`, `:hibernate`, or `:stop` are returned.
"""
@callback after_error(any) :: lifespan
end
defmodule Bonny.Server.Scheduler do
@moduledoc """
Kubernetes custom scheduler interface. Built on top of `Reconciler`.
The only function that needs to be implemented is `select_node_for_pod/2`. All others defined by behaviour have default implementations.
## Examples
Will schedule each unscheduled pod with `spec.schedulerName=cheap-node` to a node with a label `cheap=true`.
`nodes` is a stream that can be lazily filtered:
defmodule CheapNodeScheduler do
use Bonny.Server.Scheduler, name: "cheap-node"
@impl Bonny.Server.Scheduler
def select_node_for_pod(_pod, nodes) do
nodes
|> Stream.filter(fn(node) ->
is_cheap = K8s.Resource.label(node, "cheap")
is_cheap == "true"
end)
|> Enum.take(1)
|> List.first
end
end
CheapNodeScheduler.start_link()
Will schedule each unscheduled pod with `spec.schedulerName=random-node` to a random node:
defmodule RandomNodeScheduler do
use Bonny.Server.Scheduler, name: "random-node"
@impl Bonny.Server.Scheduler
def select_node_for_pod(_pod, nodes) do
Enum.random(nodes)
end
end
RandomNodeScheduler.start_link()
Override the `nodes/1` default implementation (`field_selector/0` can be overridden too).
Schedules pod on a random GPU node:
defmodule GpuScheduler do
use Bonny.Server.Scheduler, name: "gpu-node"
@impl Bonny.Server.Scheduler
def select_node_for_pod(_pod, nodes) do
Enum.random(nodes)
end
@impl Bonny.Server.Scheduler
def nodes(conn) do
label = "my.label.on.gpu.instances"
op = K8s.Client.list("v1", :nodes)
K8s.Client.stream(conn, op, params: %{labelSelector: label})
end
end
GpuScheduler.start_link()
"""
require Logger
@doc """
Name of the scheduler.
"""
@callback name() :: binary()
@doc """
List of nodes available to this scheduler.
Default implementation is all nodes in cluster.
"""
@callback nodes(K8s.Conn.t()) :: {:ok, Enumerable.t()} | {:error, any()}
@doc """
Field selector for selecting unscheduled pods waiting to be scheduled by this scheduler.
Default implementation is all unscheduled pods assigned to this scheduler.
"""
@callback field_selector() :: binary()
@callback conn() :: K8s.Conn.t()
@doc """
Selects the best node for the current `pod`.
Takes the current unscheduled pod and a `Stream` of nodes. `pod` is provided in the event that `taints` or `affinities` would need to be respected by the scheduler.
Returns the node to schedule on.
"""
@callback select_node_for_pod(map, list(map)) :: map
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@behaviour Bonny.Server.Scheduler
@behaviour Bonny.Server.Reconciler
@name opts[:name] || Macro.to_string(__MODULE__)
@doc "Scheduler name"
@impl Bonny.Server.Scheduler
def name(), do: @name
@doc "Kubernetes HTTP API `fieldSelector`."
@impl Bonny.Server.Scheduler
def field_selector(), do: Bonny.Server.Scheduler.field_selector(@name)
@doc "List of nodes available to this scheduler."
@impl Bonny.Server.Scheduler
def nodes(conn), do: Bonny.Server.Scheduler.nodes(conn)
@spec child_spec(keyword()) :: Supervisor.child_spec()
def child_spec(args \\ []) do
list_operation =
K8s.Client.list("v1", :pods, namespace: :all)
|> Map.put(:query_params, fieldSelector: field_selector())
conn = conn()
args
|> Keyword.put(
:stream,
Bonny.Server.Reconciler.get_stream(__MODULE__, conn, list_operation)
)
|> Keyword.put(:termination_delay, 5_000)
|> Bonny.Server.AsyncStreamRunner.child_spec()
end
defdelegate conn(), to: Bonny.Config
defoverridable nodes: 1, field_selector: 0, conn: 0
@impl Bonny.Server.Reconciler
def reconcile(pod), do: Bonny.Server.Scheduler.reconcile(__MODULE__, pod)
end
end
@spec reconcile(module(), map()) :: :ok
def reconcile(scheduler, pod) do
conn = scheduler.conn()
with {:ok, nodes} <- nodes(conn),
node <- scheduler.select_node_for_pod(pod, nodes),
{:ok, _} <- Bonny.Server.Scheduler.bind(scheduler.conn(), pod, node) do
:ok
end
end
@doc "Kubernetes API `fieldSelector` value for unbound pods waiting on the given scheduler."
@spec field_selector(binary) :: binary
def field_selector(scheduler_name) do
"spec.schedulerName=#{scheduler_name},spec.nodeName="
end
@doc "Binds a pod to a node"
@spec bind(K8s.Conn.t(), map(), map()) :: {:ok, map} | {:error, atom}
def bind(conn, pod, node) do
pod =
pod
|> Map.put("apiVersion", "v1")
|> Map.put("kind", "pod")
Bonny.Server.Scheduler.Binding.create(conn, pod, node)
end
@doc "Returns a list of all nodes in the cluster."
@spec nodes(K8s.Conn.t()) :: {:ok, list(map())} | {:error, any()}
def nodes(conn) do
op = K8s.Client.list("v1", :nodes)
response = K8s.Client.stream(conn, op)
metadata = %{operation: op}
case response do
{:ok, stream} ->
Logger.debug("Scheduler fetching nodes succeeded", metadata)
{:ok, Enum.into(stream, [])}
{:error, error} ->
Logger.error("Scheduler fetching nodes failed", Map.put(metadata, :error, error))
{:error, error}
end
end
end
defmodule Farmbot do
@moduledoc """
Supervises the individual modules that make up the Farmbot Application.
"""
require Logger
use Supervisor
alias Farmbot.Sync.Database
alias Farmbot.System.Supervisor, as: FBSYS
@spec init(map) :: [{:ok, pid}]
def init(%{target: target,
compat_version: compat_version,
version: version,
commit: commit})
do
children = [
# system specifics
supervisor(FBSYS, [target: target], restart: :permanent),
# auth services
worker(Farmbot.Auth, [], restart: :permanent),
# web app
supervisor(Farmbot.Configurator, [], restart: :permanent),
# Generic counter.
worker(Counter, [], restart: :permanent),
# The worker for diffing db entries.
worker(Farmbot.Sync.Supervisor, [], restart: :permanent),
# Handles tracking of various parts of the bots state.
supervisor(Farmbot.BotState.Supervisor,
[%{target: target,
compat_version: compat_version,
version: version,
commit: commit}], restart: :permanent),
# Handles FarmEvents
supervisor(FarmEvent.Supervisor, [], restart: :permanent),
# Handles the passing of messages from one part of the system to another.
supervisor(Farmbot.Transport.Supervisor, [], restart: :permanent),
# Handles external scripts and what not
supervisor(Farmware.Supervisor, [], restart: :permanent),
# handles communications between bot and arduino
supervisor(Farmbot.Serial.Supervisor, [], restart: :permanent),
worker(Farmbot.ImageWatcher, [], restart: :permanent)
]
opts = [strategy: :one_for_one]
supervise(children, opts)
end
@doc """
Entry Point to Farmbot
"""
@spec start(atom, [any]) :: {:ok, pid}
def start(type, args)
def start(_, [args]) do
Logger.info ">> init!"
Amnesia.start
Database.create! Keyword.put([], :memory, [node()])
Database.wait(15_000)
Supervisor.start_link(__MODULE__, args, name: Farmbot.Supervisor)
end
end
defmodule Engine.Ethereum.RootChain.Abi do
@moduledoc """
Functions that provide Ethereum log decoding
"""
alias Engine.Ethereum.RootChain.AbiEventSelector
alias Engine.Ethereum.RootChain.AbiFunctionSelector
alias Engine.Ethereum.RootChain.Event
alias Engine.Ethereum.RootChain.Fields
alias ExPlasma.Crypto
alias ExPlasma.Encoding
def decode_function(enriched_data, signature) do
"0x" <> data = enriched_data
<<method_id::binary-size(4), _::binary>> = Crypto.keccak_hash(signature)
method_id |> Encoding.to_hex() |> Kernel.<>(data) |> Encoding.to_binary!() |> decode_function()
end
def decode_function(enriched_data) do
function_specs =
Enum.reduce(AbiFunctionSelector.module_info(:exports), [], fn
{:module_info, 0}, acc -> acc
{function, 0}, acc -> [apply(AbiFunctionSelector, function, []) | acc]
_, acc -> acc
end)
{function_spec, data} = ABI.find_and_decode(function_specs, enriched_data)
decode_function_call_result(function_spec, data)
end
@spec decode_log(map(), map()) :: Event.t()
def decode_log(log, keccak_signatures_pair) do
event_specs =
Enum.reduce(AbiEventSelector.module_info(:exports), [], fn
{:module_info, 0}, acc -> acc
{function, 0}, acc -> [apply(AbiEventSelector, function, []) | acc]
_, acc -> acc
end)
topics =
Enum.map(log["topics"], fn
nil -> nil
topic -> Encoding.to_binary!(topic)
end)
{_event_spec, data} =
ABI.Event.find_and_decode(
event_specs,
Enum.at(topics, 0),
Enum.at(topics, 1),
Enum.at(topics, 2),
Enum.at(topics, 3),
Encoding.to_binary!(log["data"])
)
data
|> Enum.into(%{}, fn {key, _type, _indexed, value} -> {key, value} end)
|> common_parse_event(log, keccak_signatures_pair)
end
def common_parse_event(event, log, keccak_signatures_pair) do
topic = log |> Map.get("topics") |> Enum.at(0)
event_signature = Map.get(keccak_signatures_pair, topic)
%Event{
data: event,
eth_height: Encoding.to_int(log["blockNumber"]),
root_chain_tx_hash: Encoding.to_binary!(log["transactionHash"]),
log_index: Encoding.to_int(log["logIndex"]),
event_signature: event_signature
}
end
defp decode_function_call_result(function_spec, [values]) when is_tuple(values) do
function_spec.input_names
|> Enum.zip(Tuple.to_list(values))
|> Enum.into(%{})
|> Fields.rename(function_spec)
end
defp decode_function_call_result(function_spec, values) do
function_spec.input_names
|> Enum.zip(Enum.map(values, &to_hex/1))
|> Enum.into(%{})
end
defp to_hex(value) when is_binary(value) do
case String.valid?(value) do
false ->
Encoding.to_hex(value)
true ->
value
end
end
defp to_hex(value) do
value
end
end
defmodule Cloudevents.Format.V_0_1.Decoder.JSON do
@moduledoc false
@behaviour Cloudevents.Format.Decoder.JSON
alias Cloudevents.Format.Decoder.DecodeError
alias Cloudevents.Format.ParseError
alias Cloudevents.Format.V_0_1.Event
@doc """
Turns a JSON string into a Cloudevent 0.1 struct.
## Examples
### Successful case
iex> json = ~S({
...> "cloudEventsVersion": "0.1",
...> "eventType": "com.github.pull.create",
...> "source": "https://github.com/cloudevents/spec/pull",
...> "eventID": "A234-1234-1234",
...> "eventTime": "2018-04-05T17:31:00Z",
...> "comexampleextension1": "value",
...> "comexampleothervalue": 5,
...> "contentType": "text/xml",
...> "data": "<much wow=\\"xml\\"/>"
...> })
iex> {:ok, event} = Cloudevents.Format.V_0_1.Decoder.JSON.decode(json)
iex> with %Cloudevents.Format.V_0_1.Event{
...> eventType: "com.github.pull.create",
...> source: "https://github.com/cloudevents/spec/pull",
...> eventID: "A234-1234-1234",
...> eventTime: "2018-04-05T17:31:00Z",
...> extensions: %{
...> "comexampleextension1" => "value",
...> "comexampleothervalue" => 5
...> },
...> contentType: "text/xml",
...> data: ~S(<much wow="xml"/>)
...> } <- event, do: :passed
:passed
### Not a JSON at all
iex> not_a_json = "..."
iex> Cloudevents.Format.V_0_1.Decoder.JSON.decode(not_a_json)
{:error, %Cloudevents.Format.Decoder.DecodeError{
cause: %Jason.DecodeError{data: "...", position: 0, token: nil}}}
### Missing required fields
iex> json = ~S({
...> "cloudEventsVersion": "0.1",
...> "eventType": "com.github.pull.create"
...> })
iex> Cloudevents.Format.V_0_1.Decoder.JSON.decode(json)
{:error, %Cloudevents.Format.Decoder.DecodeError{
cause: %Cloudevents.Format.ParseError{message: "missing source"}}}
### Invalid extension attribute name
iex> json = ~S({
...> "cloudEventsVersion": "0.1",
...> "eventType": "com.github.pull.create",
...> "source": "https://github.com/cloudevents/spec/pull",
...> "eventID": "A234-1234-1234",
...> "an extension attribute that contains spaces": "is not allowed"
...> })
iex> Cloudevents.Format.V_0_1.Decoder.JSON.decode(json)
{:error, %Cloudevents.Format.Decoder.DecodeError{
cause: %Cloudevents.Format.ParseError{message: "invalid extension attributes: \\"an extension attribute that contains spaces\\""}}}
"""
def decode(json) do
with {:ok, orig_map} <- Jason.decode(json),
{:ok, event} <- Event.from_map(orig_map) do
# If contenttype is application/json and data is a string, data could be an encoded JSON structure :/
data = decode_json_if_possible(event.contentType, event.data)
event = Map.put(event, :data, data)
{:ok, event}
else
{:error, %ParseError{} = error} -> {:error, %DecodeError{cause: error}}
{:error, %Jason.DecodeError{} = error} -> {:error, %DecodeError{cause: error}}
end
end
defp decode_json_if_possible(contentType, data) when byte_size(data) > 0 do
# This is likely good enough but perhaps we should do proper mime type handling here.
case contentType do
"application/json" <> _ ->
case Jason.decode(data) do
{:ok, decoded} -> decoded
_ -> data
end
_ ->
data
end
end
defp decode_json_if_possible(_, data), do: data
end
defmodule Faker.Yoga.En do
import Faker, only: [sampler: 2]
@moduledoc """
Functions for Yoga studios, poses in English
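Output is random; one possible value is shown per call:

```elixir
Faker.Yoga.En.studio()
# => "Grateful Turtle"

Faker.Yoga.En.class()
# => "Hot Power Vinyasa"
```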
"""
@spec adjective() :: String.t()
sampler(:adjective, [
"Grateful",
"Thankful",
"Beholden",
"Contented",
"Gratified",
"Serene",
"Cheerful",
"Satisfied",
"Peaceful",
"Placid",
"Tranquil",
"Sedate",
"Comfortable",
"Pastoral",
"Bucolic",
"Supple",
"Rhythmic",
"Elegant",
"Graceful",
"Polished",
"Exquisite",
"Airy",
"Buoyant",
"Ethereal",
"Fluffy",
"Silken",
"Celestial",
"Sublime"
])
@spec noun() :: String.t()
sampler(:noun, [
"Turtle",
"Warrior",
"Yoga",
"Works",
"Motion",
"Flow",
"Quokka",
"Bonobo",
"Sloth",
"Lotus",
"Ostrich",
"Mole",
"Panda",
"Elephant",
"Bear",
"Tapir",
"Otter",
"Crane",
"Flamingo",
"Wallaby",
"Penguin",
"Fox",
"Oryx",
"Goat",
"Gnu",
"Hare",
"Rabbit",
"Ibex",
"Impala",
"Jaguar",
"Kangaroo",
"Koala",
"Lark",
"Lemur",
"Macaque",
"Manatee",
"Meerkat",
"Marmot",
"Ocelot",
"Owl",
"Partridge",
"Peacock",
"Pelican",
"Pheasant",
"Platypus",
"Plover",
"Puffin",
"Puka",
"Puma",
"Serval",
"Swan",
"Lykoi",
"Tern",
"Tortoise",
"Turkey",
"Vervet",
"Yak",
"Zebra"
])
@spec class_adj() :: String.t()
sampler(:class_adj, [
"Hot Power",
"Hot",
"Slow",
"Soulful",
"Slow Flow",
"Ashtanga",
"Alignment",
"Vinyasa",
"Power",
"Pure",
"Zen",
"Pure Zen",
"Yin",
"Figure 4",
"Ashtanga",
"Ashtanga",
"Prenatal",
"Handstand",
"Vigorous",
"Gentle",
"Restorative",
"Intense",
"",
"Meditative",
])
sampler(:class_type, [
"Iyengar",
"Yoga",
"Flow",
"Nidra",
"Basics",
"Barre",
"Ashtanga",
"Vinyasa",
"Breath",
"Motion",
"Mindfulness",
"Ashtanga",
"Fundamentals",
"Meditation",
"",
"Workshop",
])
  def studio, do: "#{adjective()} #{noun()}"

  # The samplers above intentionally include empty strings, so trim any stray
  # whitespace from the assembled name.
  def random_class, do: String.trim("#{random_adjectives()} #{class_type()}")

  def class, do: String.trim("#{class_adj()} #{class_type()}")

  defp random_adjectives,
    do: get_random_adjectives("", Faker.random_between(0, 2))

  defp get_random_adjectives(adj, 0), do: String.trim(adj)

  defp get_random_adjectives(adj, cnt),
    do: get_random_adjectives("#{adj} #{class_adj()}", cnt - 1)
end
|
lib/faker/yoga/en.ex
| 0.585575 | 0.491517 |
en.ex
|
starcoder
|
defmodule Membrane.Sink do
@moduledoc """
Module defining behaviour for sinks - elements consuming data.
  Besides this module, behaviours for sinks are specified in
  `Membrane.Element.Base` and `Membrane.Element.WithInputPads`.
  Sink elements can define only input pads. The job of a typical sink is to receive
  data on such a pad and consume it (write it to a sound card, send it over TCP, etc.).
  If the pad works in pull mode, which is the most common case, the element is
  also responsible for requesting demand when it is able and willing to consume
  data (for more details, see `t:Membrane.Element.Action.demand_t/0`).
Sinks, like all elements, can of course have multiple pads if needed to
provide more complex solutions.
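  A minimal sketch of a sink (illustrative only — pad declaration details vary
  across Membrane versions, and demand handling is omitted):

      defmodule MySink do
        use Membrane.Sink

        def_input_pad :input, caps: :any, demand_unit: :buffers

        @impl true
        def handle_write(_pad, buffer, _ctx, state) do
          IO.inspect(byte_size(buffer.payload), label: "consumed bytes")
          {:ok, state}
        end
      end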
"""
alias Membrane.{Buffer, Element, Pad}
alias Membrane.Element.CallbackContext
@doc """
  Callback invoked when a list of buffers should be written by the sink.
  The default implementation calls `c:handle_write/4` for each buffer.
  For pads in pull mode it is called when buffers have been demanded (by returning
  a `:demand` action from any callback).
  For pads in push mode it is invoked when buffers arrive.
"""
@callback handle_write_list(
pad :: Pad.ref_t(),
buffers :: list(Buffer.t()),
context :: CallbackContext.Write.t(),
state :: Element.state_t()
) :: Membrane.Element.Base.callback_return_t()
@doc """
Callback that is called when buffer should be written by the sink. In contrast
to `c:handle_write_list/4`, it is passed only a single buffer.
Called by default implementation of `c:handle_write_list/4`.
"""
@callback handle_write(
pad :: Pad.ref_t(),
buffer :: Buffer.t(),
context :: CallbackContext.Write.t(),
state :: Element.state_t()
) :: Membrane.Element.Base.callback_return_t()
@doc """
Brings all the stuff necessary to implement a sink element.
Options:
- `:bring_pad?` - if true (default) requires and aliases `Membrane.Pad`
"""
defmacro __using__(options) do
quote location: :keep do
use Membrane.Element.Base, unquote(options)
use Membrane.Element.WithInputPads
@behaviour unquote(__MODULE__)
@impl true
def membrane_element_type, do: :sink
@impl true
def handle_write(_pad, _buffer, _context, state),
do: {{:error, :handle_write_not_implemented}, state}
@impl true
def handle_write_list(pad, buffers, _context, state) do
args_list = buffers |> Enum.map(&[pad, &1])
{{:ok, split: {:handle_write, args_list}}, state}
end
defoverridable handle_write_list: 4,
handle_write: 4
end
end
end
|
lib/membrane/sink.ex
| 0.836788 | 0.437944 |
sink.ex
|
starcoder
|
defmodule Swiss.String do
@moduledoc """
A few extra functions to deal with Strings. Heavily inspired by lodash.
"""
@word_regex ~r/[^\x00-\x2f\x3a-\x40\x5b-\x60\x7b-\x7f]+/
@upper_word_regex ~r/(^[A-Z]+$)|[A-Z][a-z0-9]*/
@doc """
Deburrs a string from unicode to its ascii equivalent.
## Examples
iex> Swiss.String.deburr "hola señor!"
"hola senor!"
"""
@spec deburr(String.t()) :: String.t()
def deburr(string) do
string
|> :unicode.characters_to_nfd_binary()
|> String.replace(~r/[^\x00-\x7F]/u, "")
end
@doc """
Decomposes a string into an array of its words.
## Examples
iex> Swiss.String.words "FredBarney"
["Fred", "Barney"]
iex> Swiss.String.words "fred, barney, & pebbles"
["fred", "barney", "pebbles"]
iex> Swiss.String.words "fred, barney, & pebbles", ~r/[^, ]+/
["fred", "barney", "&", "pebbles"]
"""
@spec words(String.t(), Regex.t()) :: [String.t()]
def words(string, pattern \\ @word_regex) do
string
|> String.split(pattern, trim: true, include_captures: true)
|> Enum.filter(&String.match?(&1, pattern))
|> Enum.flat_map(&String.split(&1, @upper_word_regex, trim: true, include_captures: true))
end
@doc """
Converts a string into kebab-case.
## Examples
iex> Swiss.String.kebab_case "Foo Bar"
"foo-bar"
iex> Swiss.String.kebab_case "--foo-bar--"
"foo-bar"
iex> Swiss.String.kebab_case "__FOO_BAR__"
"foo-bar"
iex> Swiss.String.kebab_case "FooBar"
"foo-bar"
"""
@spec kebab_case(String.t()) :: String.t()
def kebab_case(string) do
string
|> deburr()
|> words()
|> Stream.map(&String.downcase/1)
|> Enum.join("-")
end
@doc """
Converts a string into snake_case.
## Examples
iex> Swiss.String.snake_case "Foo Bar"
"foo_bar"
iex> Swiss.String.snake_case "--foo-bar--"
"foo_bar"
iex> Swiss.String.snake_case "__FOO_BAR__"
"foo_bar"
iex> Swiss.String.snake_case "FooBar"
"foo_bar"
"""
@spec snake_case(String.t()) :: String.t()
def snake_case(string) do
string
|> deburr()
|> words()
|> Stream.map(&String.downcase/1)
|> Enum.join("_")
end
@doc """
  Converts a string to Start Case (capitalizing each word).
## Options
* `:deburr`: whether to deburr (remove accents, etc.) the given string.
`true` by default, for consistency with the other functions in this module.
## Examples
iex> Swiss.String.start_case "Foo Bar"
"Foo Bar"
iex> Swiss.String.start_case "--foo-bar--"
"Foo Bar"
iex> Swiss.String.start_case "__FOO_BAR__"
"Foo Bar"
iex> Swiss.String.start_case "FooBar"
"Foo Bar"
iex> Swiss.String.start_case "hola señor"
"Hola Senor"
iex> Swiss.String.start_case "hola señor", deburr: false
"Hola Señor"
"""
@spec start_case(String.t(), keyword()) :: String.t()
def start_case(string, opts \\ []) do
string
|> Swiss.apply_if(&deburr/1, Keyword.get(opts, :deburr, true))
|> words()
|> Stream.map(&String.capitalize/1)
|> Enum.join(" ")
end
@doc """
Inserts a substring into another string at the given position.
## Examples
iex> Swiss.String.insert_at "Banas", 2, "na"
"Bananas"
iex> Swiss.String.insert_at "800", -2, "."
"8.00"
"""
@spec insert_at(String.t(), integer(), String.t()) :: String.t()
def insert_at(string, pos, substr) do
{left, right} = String.split_at(string, pos)
left <> substr <> right
end
end
|
lib/swiss/string.ex
| 0.780579 | 0.442335 |
string.ex
|
starcoder
|
defmodule GitRekt do
@moduledoc false
alias GitRekt.Git
defmodule GitCommit do
@moduledoc """
Represents a Git commit.
"""
defstruct [:oid, :commit]
@type t :: %__MODULE__{oid: Git.oid, commit: Git.commit}
defimpl Inspect do
def inspect(commit, _opts), do: "<GitCommit:#{Git.oid_fmt_short(commit.oid)}>"
end
end
defmodule GitRef do
@moduledoc """
Represents a Git reference.
"""
defstruct [:oid, :name, :prefix, :type]
@type t :: %__MODULE__{oid: Git.oid, name: binary, prefix: binary, type: :branch | :tag}
defimpl Inspect do
def inspect(ref, _opts), do: "<GitRef:#{ref.prefix}#{ref.name}>"
end
end
defmodule GitTag do
@moduledoc """
Represents a Git tag.
"""
defstruct [:oid, :name, :tag]
    @type t :: %__MODULE__{oid: Git.oid, name: binary, tag: Git.tag}
defimpl Inspect do
def inspect(tag, _opts), do: "<GitTag:#{tag.name}>"
end
end
defmodule GitBlob do
@moduledoc """
Represents a Git blob.
"""
defstruct [:oid, :blob]
@type t :: %__MODULE__{oid: Git.oid, blob: Git.blob}
defimpl Inspect do
def inspect(blob, _opts), do: "<GitBlob:#{Git.oid_fmt_short(blob.oid)}>"
end
end
defmodule GitTree do
@moduledoc """
Represents a Git tree.
"""
defstruct [:oid, :tree]
    @type t :: %__MODULE__{oid: Git.oid, tree: Git.tree}
defimpl Inspect do
def inspect(tree, _opts), do: "<GitTree:#{Git.oid_fmt_short(tree.oid)}>"
end
end
defmodule GitTreeEntry do
@moduledoc """
Represents a Git tree entry.
"""
defstruct [:oid, :name, :mode, :type]
@type t :: %__MODULE__{oid: Git.oid, name: binary, mode: integer, type: :blob | :tree}
defimpl Inspect do
def inspect(tree_entry, _opts), do: "<GitTreeEntry:#{tree_entry.name}>"
end
end
defmodule GitDiff do
@moduledoc """
Represents a Git diff.
"""
defstruct [:diff]
@type t :: %__MODULE__{diff: Git.diff}
defimpl Inspect do
def inspect(diff, _opts), do: "<GitDiff:#{inspect diff.diff}>"
end
end
end
|
apps/gitrekt/lib/gitrekt.ex
| 0.814496 | 0.494446 |
gitrekt.ex
|
starcoder
|
defmodule Solana.SystemProgram do
@moduledoc """
Functions for interacting with Solana's [System
Program](https://docs.solana.com/developing/runtime-facilities/programs#system-program)
"""
alias Solana.{Instruction, Account}
import Solana.Helpers
@doc """
The System Program's program ID.
"""
def id(), do: Solana.pubkey!("11111111111111111111111111111111")
@create_account_schema [
lamports: [
type: :pos_integer,
required: true,
doc: "Amount of lamports to transfer to the created account"
],
space: [
type: :non_neg_integer,
required: true,
doc: "Amount of space in bytes to allocate to the created account"
],
from: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The account that will transfer lamports to the created account"
],
new: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Public key of the created account"
],
program_id: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Public key of the program which will own the created account"
],
base: [
type: {:custom, Solana.Key, :check, []},
doc: "Base public key to use to derive the created account's address"
],
seed: [
type: :string,
doc: "Seed to use to derive the created account's address"
]
]
@doc """
Generates instructions to create a new account.
Accepts a `new` address generated via `Solana.Key.with_seed/3`, as long as the
`base` key and `seed` used to generate that address are provided.
## Options
#{NimbleOptions.docs(@create_account_schema)}
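  ## Example

  A sketch, where `payer` and `new_account` are assumed to be public keys
  (`Solana.Key` values) you already hold:

      Solana.SystemProgram.create_account(
        lamports: 1_000_000,
        space: 0,
        from: payer,
        new: new_account,
        program_id: Solana.SystemProgram.id()
      )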
"""
def create_account(opts) do
case validate(opts, @create_account_schema) do
{:ok, params} ->
maybe_with_seed(
params,
&create_account_ix/1,
&create_account_with_seed_ix/1,
[:base, :seed]
)
error ->
error
end
end
@transfer_schema [
lamports: [
type: :pos_integer,
required: true,
doc: "Amount of lamports to transfer"
],
from: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Account that will transfer lamports"
],
to: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Account that will receive the transferred lamports"
],
base: [
type: {:custom, Solana.Key, :check, []},
doc: "Base public key to use to derive the funding account address"
],
seed: [
type: :string,
doc: "Seed to use to derive the funding account address"
],
program_id: [
type: {:custom, Solana.Key, :check, []},
doc: "Program ID to use to derive the funding account address"
]
]
@doc """
Generates instructions to transfer lamports from one account to another.
Accepts a `from` address generated via `Solana.Key.with_seed/3`, as long as the
`base` key, `program_id`, and `seed` used to generate that address are
provided.
## Options
#{NimbleOptions.docs(@transfer_schema)}
"""
def transfer(opts) do
case validate(opts, @transfer_schema) do
{:ok, params} ->
maybe_with_seed(
params,
&transfer_ix/1,
&transfer_with_seed_ix/1
)
error ->
error
end
end
@assign_schema [
account: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Public key for the account which will receive a new owner"
],
program_id: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Program ID to assign as the owner"
],
base: [
type: {:custom, Solana.Key, :check, []},
doc: "Base public key to use to derive the assigned account address"
],
seed: [
type: :string,
doc: "Seed to use to derive the assigned account address"
]
]
@doc """
Generates instructions to assign account ownership to a program.
Accepts an `account` address generated via `Solana.Key.with_seed/3`, as long
as the `base` key and `seed` used to generate that address are provided.
## Options
#{NimbleOptions.docs(@assign_schema)}
"""
def assign(opts) do
case validate(opts, @assign_schema) do
{:ok, params} ->
maybe_with_seed(
params,
&assign_ix/1,
&assign_with_seed_ix/1,
[:base, :seed]
)
error ->
error
end
end
@allocate_schema [
account: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Public key for the account to allocate"
],
space: [
type: :non_neg_integer,
required: true,
doc: "Amount of space in bytes to allocate"
],
program_id: [
type: {:custom, Solana.Key, :check, []},
doc: "Program ID to assign as the owner of the allocated account"
],
base: [
type: {:custom, Solana.Key, :check, []},
doc: "Base public key to use to derive the allocated account address"
],
seed: [
type: :string,
doc: "Seed to use to derive the allocated account address"
]
]
@doc """
Generates instructions to allocate space to an account.
Accepts an `account` address generated via `Solana.Key.with_seed/3`, as long
as the `base` key, `program_id`, and `seed` used to generate that address are
provided.
## Options
#{NimbleOptions.docs(@allocate_schema)}
"""
def allocate(opts) do
case validate(opts, @allocate_schema) do
{:ok, params} ->
maybe_with_seed(
params,
&allocate_ix/1,
&allocate_with_seed_ix/1,
[:base, :seed]
)
error ->
error
end
end
defp maybe_with_seed(opts, ix_fn, ix_seed_fn, keys \\ [:base, :seed, :program_id]) do
key_check = Enum.map(keys, &Map.has_key?(opts, &1))
cond do
Enum.all?(key_check) -> ix_seed_fn.(opts)
!Enum.any?(key_check) -> ix_fn.(opts)
true -> {:error, :missing_seed_params}
end
end
defp create_account_ix(params) do
%Instruction{
program: id(),
accounts: [
%Account{key: params.from, signer?: true, writable?: true},
%Account{key: params.new, signer?: true, writable?: true}
],
data:
Instruction.encode_data([
{0, 32},
{params.lamports, 64},
{params.space, 64},
params.program_id
])
}
end
defp create_account_with_seed_ix(params) do
%Instruction{
program: id(),
accounts: create_account_with_seed_accounts(params),
data:
Instruction.encode_data([
{3, 32},
params.base,
{params.seed, "str"},
{params.lamports, 64},
{params.space, 64},
params.program_id
])
}
end
defp create_account_with_seed_accounts(params = %{from: from, base: from}) do
[
%Account{key: from, signer?: true, writable?: true},
%Account{key: params.new, writable?: true}
]
end
defp create_account_with_seed_accounts(params) do
[
%Account{key: params.from, signer?: true, writable?: true},
%Account{key: params.new, writable?: true},
%Account{key: params.base, signer?: true}
]
end
defp transfer_ix(params) do
%Instruction{
program: id(),
accounts: [
%Account{key: params.from, signer?: true, writable?: true},
%Account{key: params.to, writable?: true}
],
data: Instruction.encode_data([{2, 32}, {params.lamports, 64}])
}
end
defp transfer_with_seed_ix(params) do
%Instruction{
program: id(),
accounts: [
%Account{key: params.from, writable?: true},
%Account{key: params.base, signer?: true},
%Account{key: params.to, writable?: true}
],
data:
Instruction.encode_data([
{11, 32},
{params.lamports, 64},
{params.seed, "str"},
params.program_id
])
}
end
defp assign_ix(params) do
%Instruction{
program: id(),
accounts: [
%Account{key: params.account, signer?: true, writable?: true}
],
data: Instruction.encode_data([{1, 32}, params.program_id])
}
end
defp assign_with_seed_ix(params) do
%Instruction{
program: id(),
accounts: [
%Account{key: params.account, writable?: true},
%Account{key: params.base, signer?: true}
],
data:
Instruction.encode_data([
{10, 32},
params.base,
{params.seed, "str"},
params.program_id
])
}
end
defp allocate_ix(params) do
%Instruction{
program: id(),
accounts: [
%Account{key: params.account, signer?: true, writable?: true}
],
data: Instruction.encode_data([{8, 32}, {params.space, 64}])
}
end
defp allocate_with_seed_ix(params) do
%Instruction{
program: id(),
accounts: [
%Account{key: params.account, writable?: true},
%Account{key: params.base, signer?: true}
],
data:
Instruction.encode_data([
{9, 32},
params.base,
{params.seed, "str"},
{params.space, 64},
params.program_id
])
}
end
end
|
lib/solana/system_program.ex
| 0.866698 | 0.546496 |
system_program.ex
|
starcoder
|
defmodule ShEx.Shape do
@moduledoc false
defstruct [
# shapeExprLabel?
:id,
# tripleExpr?
:expression,
# BOOL?
:closed,
# [IRI]?
:extra,
# [SemAct]?
:sem_acts,
# [Annotation+]?
:annotations
]
import ShEx.GraphUtils
def satisfies(shape, graph, schema, association, state) do
node = association.node
arcs_out = arcs_out(graph, node)
    # Searching for arcs_in is computationally very expensive, since RDF.ex
    # currently has no index on triple objects, so we do it only when
    # necessary, i.e. when inverse triple expressions exist.
arcs_in =
unless shape.expression &&
ShEx.TripleExpression.required_arcs(shape.expression, state) == {:arcs_out} do
arcs_in(graph, node)
end
with {:ok, _matched, {_, outs}} <-
matches(shape.expression, {arcs_in, arcs_out}, graph, schema, association, state),
{matchables, unmatchables} <-
matchables(shape.expression, outs, state),
:ok <-
check_unmatched(shape.expression, matchables, graph, schema, association, state),
:ok <-
check_extra(List.wrap(shape.extra), matchables, shape.expression),
:ok <-
check_closed(shape.closed, unmatchables, shape) do
ShEx.ShapeMap.Association.conform(association)
else
{:error, violation} ->
ShEx.ShapeMap.Association.violation(association, violation)
end
end
defp matches(nil, triples, _, _, _, _) do
{:ok, [], triples}
end
defp matches(triple_constraint, triples, graph, schema, association, state) do
ShEx.TripleExpression.matches(triple_constraint, triples, graph, schema, association, state)
end
# Let `matchables` be the triples in `outs` whose predicate appears in a `TripleConstraint` in `expression`. If `expression` is absent, `matchables = Ø` (the empty set).
# Let `unmatchables` be the triples in `outs` which are not in `matchables`.
defp matchables(nil, outs, _), do: {[], outs}
defp matchables(triple_constraint, outs, state) do
predicates = ShEx.TripleExpression.predicates(triple_constraint, state)
Enum.split_with(outs, fn {_, predicate, _} -> predicate in predicates end)
end
# No matchable can be matched by any TripleConstraint in expression
defp check_unmatched(nil, _, _, _, _, _), do: :ok
defp check_unmatched(triple_constraint, matchables, graph, schema, association, state) do
if triple_constraint
|> matching_unmatched(matchables, graph, schema, association, state)
|> Enum.empty?() do
:ok
else
{:error, %ShEx.Violation.MaxCardinality{triple_expression: triple_constraint}}
end
end
defp matching_unmatched(triple_constraint, matchables, graph, schema, association, state) do
triple_constraints =
triple_constraint
|> ShEx.TripleExpression.triple_constraints(state)
# We'll reset the cardinality here, because one match is sufficient ...
|> Enum.map(fn expression -> expression |> Map.put(:min, nil) |> Map.put(:max, nil) end)
Enum.filter(matchables, fn {_, predicate, _} = statement ->
Enum.any?(triple_constraints, fn triple_constraint ->
triple_constraint.predicate == predicate and
match?(
{:ok, _, _},
matches(triple_constraint, {[], [statement]}, graph, schema, association, state)
)
end)
end)
end
# There is no triple in matchables whose predicate does not appear in extra.
defp check_extra(extra, matchables, triple_expressions) do
if Enum.all?(matchables, fn {_, predicate, _} -> predicate in extra end) do
:ok
else
{:error, %ShEx.Violation.MaxCardinality{triple_expression: triple_expressions}}
end
end
# closed is false or unmatchables is empty.
defp check_closed(closed, unmatchables, shape) do
if !closed || Enum.empty?(unmatchables) do
:ok
else
{:error, %ShEx.Violation.ClosedShape{shape: shape, unmatchables: unmatchables}}
end
end
defimpl ShEx.ShapeExpression do
def satisfies(shape, graph, schema, association, state) do
ShEx.Shape.satisfies(shape, graph, schema, association, state)
end
end
defimpl ShEx.Operator do
def children(shape) do
cond do
is_nil(shape.expression) ->
[]
RDF.term?(shape.expression) ->
[{:triple_expression_label, shape.expression}]
true ->
[shape.expression]
end
end
def triple_expression_label_and_operands(shape), do: {nil, List.wrap(shape.expression)}
end
end
|
lib/shex/shape_expressions/shape.ex
| 0.717111 | 0.659655 |
shape.ex
|
starcoder
|
defmodule Spandex.Tracer do
@moduledoc """
A module that can be used to build your own tracer.
Example:
```
defmodule MyApp.Tracer do
use Spandex.Tracer, otp_app: :my_app
end
```
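  Then configure the tracer and trace a block of code. A sketch — the adapter
  shown, `SpandexDatadog.Adapter`, ships in the separate `spandex_datadog`
  package and is an assumption here:
  ```
  # config/config.exs
  config :my_app, MyApp.Tracer,
    adapter: SpandexDatadog.Adapter,
    service: :my_app,
    env: "prod"
  ```
  ```
  MyApp.Tracer.trace "web.request" do
    # traced work goes here
  end
  ```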
"""
alias Spandex.{
Span,
SpanContext,
Trace
}
@type tagged_tuple(arg) :: {:ok, arg} | {:error, term()}
@type span_name() :: String.t()
@type opts :: Keyword.t() | :disabled
@callback configure(opts) :: :ok
@callback start_trace(span_name, opts) :: tagged_tuple(Trace.t())
@callback start_span(span_name, opts) :: tagged_tuple(Span.t())
@callback update_span(opts) :: tagged_tuple(Span.t())
@callback update_top_span(opts) :: tagged_tuple(Span.t())
@callback finish_trace(opts) :: tagged_tuple(Trace.t())
@callback finish_span(opts) :: tagged_tuple(Span.t())
@callback span_error(error :: Exception.t(), stacktrace :: [term], opts) :: tagged_tuple(Span.t())
@callback continue_trace(span_name :: String.t(), trace_context :: SpanContext.t(), opts) :: tagged_tuple(Trace.t())
@callback continue_trace_from_span(span_name, span :: term, opts) :: tagged_tuple(Trace.t())
@callback current_trace_id(opts) :: nil | Spandex.id()
@callback current_span_id(opts) :: nil | Spandex.id()
@callback current_span(opts) :: nil | Span.t()
@callback current_context(opts) ::
{:ok, SpanContext.t()}
| {:error, :disabled}
| {:error, :no_span_context}
| {:error, :no_trace_context}
| {:error, [Optimal.error()]}
@callback distributed_context(Plug.Conn.t(), opts) :: tagged_tuple(map)
@callback inject_context(Spandex.headers(), opts) :: Spandex.headers()
@macrocallback span(span_name, opts, do: Macro.t()) :: Macro.t()
@macrocallback trace(span_name, opts, do: Macro.t()) :: Macro.t()
@tracer_opts Optimal.schema(
opts: [
adapter: :atom,
service: :atom,
disabled?: :boolean,
env: :string,
services: {:keyword, :atom},
strategy: :atom,
sender: :atom,
trace_key: :atom
],
required: [:adapter, :service],
defaults: [
disabled?: false,
services: [],
strategy: Spandex.Strategy.Pdict
],
describe: [
adapter: "The third party adapter to use",
trace_key: "Don't set manually. This option is passed automatically.",
sender:
"Once a trace is complete, it is sent using this module. Defaults to the `default_sender/0` of the selected adapter",
service: "The default service name to use for spans declared without a service",
disabled?: "Allows for wholesale disabling a tracer",
env: "A name used to identify the environment name, e.g `prod` or `development`",
services: "A mapping of service name to the default span types.",
strategy: "The storage and tracing strategy. Currently only supports local process dictionary."
]
)
@all_tracer_opts @tracer_opts
|> Optimal.merge(
Span.span_opts(),
annotate: "Span Creation",
add_required?: false
)
|> Map.put(:extra_keys?, false)
@doc """
A schema for the opts that a tracer accepts.
#{Optimal.Doc.document(@all_tracer_opts)}
All tracer functions that take opts use this schema.
This also accepts defaults for any value that can
be given to a span.
"""
def tracer_opts(), do: @all_tracer_opts
defmacro __using__(opts) do
# credo:disable-for-next-line Credo.Check.Refactor.LongQuoteBlocks
quote do
@otp_app unquote(opts)[:otp_app] || raise("Must provide `otp_app` to `use Spandex.Tracer`")
@behaviour Spandex.Tracer
@opts Spandex.Tracer.tracer_opts()
@doc """
Use to create and configure a tracer.
"""
@impl Spandex.Tracer
@spec configure(Spandex.Tracer.opts()) :: :ok
def configure(opts) do
case config(opts, @otp_app) do
:disabled ->
Application.put_env(@otp_app, __MODULE__, merge_config(opts, @otp_app))
config ->
Application.put_env(@otp_app, __MODULE__, config)
end
end
@impl Spandex.Tracer
defmacro trace(name, opts \\ [], do: body) when is_binary(name) do
quote do
opts = unquote(opts)
name = unquote(name)
_ = unquote(__MODULE__).start_trace(name, opts)
try do
unquote(body)
rescue
exception ->
stacktrace = __STACKTRACE__
_ = unquote(__MODULE__).span_error(exception, stacktrace, opts)
reraise exception, stacktrace
after
_ = unquote(__MODULE__).finish_trace()
end
end
end
@impl Spandex.Tracer
defmacro span(name, opts \\ [], do: body) when is_binary(name) do
quote do
opts = unquote(opts)
name = unquote(name)
_ = unquote(__MODULE__).start_span(name, opts)
try do
unquote(body)
rescue
exception ->
stacktrace = __STACKTRACE__
_ = unquote(__MODULE__).span_error(exception, stacktrace, opts)
reraise exception, stacktrace
after
_ = unquote(__MODULE__).finish_span()
end
end
end
@impl Spandex.Tracer
def start_trace(name, opts \\ []) do
Spandex.start_trace(name, config(opts, @otp_app))
end
@impl Spandex.Tracer
def start_span(name, opts \\ []) do
Spandex.start_span(name, config(opts, @otp_app))
end
@impl Spandex.Tracer
def update_span(opts) do
Spandex.update_span(validate_update_config(opts, @otp_app))
end
@impl Spandex.Tracer
def update_top_span(opts) do
Spandex.update_top_span(validate_update_config(opts, @otp_app))
end
@impl Spandex.Tracer
def finish_trace(opts \\ []) do
opts
|> validate_update_config(@otp_app)
|> Spandex.finish_trace()
end
@impl Spandex.Tracer
def finish_span(opts \\ []) do
opts
|> validate_update_config(@otp_app)
|> Spandex.finish_span()
end
@impl Spandex.Tracer
def span_error(error, stacktrace, opts \\ []) do
Spandex.span_error(error, stacktrace, config(opts, @otp_app))
end
@impl Spandex.Tracer
def continue_trace(span_name, span_context, opts \\ [])
def continue_trace(span_name, %SpanContext{} = span_context, opts) do
Spandex.continue_trace(span_name, span_context, config(opts, @otp_app))
end
# This is just to get around the ambiguous defaults until we fully remove this API
def continue_trace(span_name, trace_id, span_id) do
continue_trace(span_name, trace_id, span_id, [])
end
@deprecated "please use continue_trace/3 instead."
def continue_trace(span_name, trace_id, span_id, opts) do
continue_trace(span_name, %SpanContext{trace_id: trace_id, parent_id: span_id}, opts)
end
@impl Spandex.Tracer
def continue_trace_from_span(span_name, span, opts \\ []) do
Spandex.continue_trace_from_span(span_name, span, config(opts, @otp_app))
end
@impl Spandex.Tracer
def current_trace_id(opts \\ []) do
Spandex.current_trace_id(config(opts, @otp_app))
end
@impl Spandex.Tracer
def current_span_id(opts \\ []) do
Spandex.current_span_id(config(opts, @otp_app))
end
@impl Spandex.Tracer
def current_span(opts \\ []) do
Spandex.current_span(config(opts, @otp_app))
end
@impl Spandex.Tracer
def current_context(opts \\ []) do
Spandex.current_context(config(opts, @otp_app))
end
@impl Spandex.Tracer
def distributed_context(conn, opts \\ []) do
Spandex.distributed_context(conn, config(opts, @otp_app))
end
@impl Spandex.Tracer
def inject_context(headers, opts \\ []) do
opts
|> current_context()
|> case do
{:ok, span_context} ->
Spandex.inject_context(headers, span_context, config(opts, @otp_app))
_ ->
headers
end
end
defp merge_config(opts, otp_app) do
otp_app
|> Application.get_env(__MODULE__)
|> Kernel.||([])
|> Keyword.merge(opts || [])
|> Optimal.validate!(@opts)
|> Keyword.put(:trace_key, __MODULE__)
end
defp config(opts, otp_app) do
config = merge_config(opts, otp_app)
if config[:disabled?] do
:disabled
else
config
end
end
defp validate_update_config(opts, otp_app) do
env = Application.get_env(otp_app, __MODULE__)
if env[:disabled?] do
:disabled
else
schema = %{@opts | defaults: [], required: []}
# TODO: We may want to have some concept of "the quintessential tracer configs"
# So that we can take those here, instead of embedding that knowledge here.
opts
|> Optimal.validate!(schema)
|> Keyword.put(:trace_key, __MODULE__)
|> Keyword.put(:strategy, env[:strategy] || Spandex.Strategy.Pdict)
|> Keyword.put(:adapter, env[:adapter])
|> Keyword.put(:sender, env[:sender])
end
end
end
end
end
|
lib/tracer.ex
| 0.821939 | 0.685529 |
tracer.ex
|
starcoder
|
defmodule PcanlogParse.Examples.ExportToCsv do
require PcanlogParser
require CSV
alias PcanlogParser, as: Parser
@moduledoc """
Documentation for `PcanlogParser.Examples.ExportToCsv`.
This is an example of usage for PcanlogParser. It parses a P-CAN log file and exports the Payload as CSV data
"""
# converts a P-CAN log into a CSV file, it process the CAN payload for this specific example
def convert(file_path) when is_bitstring(file_path) do
if not File.exists?(file_path) do
{:error, "The file #{file_path} does not exist"}
else
file_path
|> File.read!()
|> Parser.parse_log()
|> Enum.map(fn entry -> entry.data_bytes end)
|> convert_to_csv()
|> Enum.each(fn dic -> write_file(dic) end)
end
end
  def write_file(dic) when is_map(dic) do
    if map_size(dic) != 1 do
      {:error, "unexpected dictionary with more than one key"}
    else
      [pin] = Map.keys(dic)
      %{^pin => data} = dic
      file_name = "#{pin}.csv"
      file = File.open!(file_name, [:write, :utf8])

      data
      |> CSV.encode()
      |> Enum.each(&IO.write(file, &1))
    end
  end
def convert_to_csv(data) when is_list(data) do
data
|> Enum.map(&process_databyte/1)
|> Enum.group_by(fn dic -> dic.pin end)
|> Enum.map(fn {pin, entries} -> %{pin => serialize(entries)} end)
end
# converts the maps into a list of values
def serialize(data_bytes) when is_list(data_bytes) do
list =
data_bytes
|> Enum.map(fn d -> [d.pin, d.time, d.duty_cycle, d.current] end)
[["Pin", "Time", "Duty cycle", "Current"]] ++ list
end
# Expects data of the type "1400000000340E00" and returns a Map with the parsed field
# this is the CAN payload, the values and fields are just for testing and are not part of any standard.
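  # Illustrative example:
  #   process_databyte("1400000000340E00")
  #   #=> %{pin: "14", duty_cycle: "0", current: "0", time: "13326"}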
def process_databyte(data) when is_bitstring(data) do
bytes =
data
|> String.split("", trim: true)
|> Enum.chunk_every(2)
|> Enum.map(fn x -> Enum.join(x) end)
[pin, dc_high, dc_low, curr_high, curr_low, time_high, time_low, _rest] = bytes
%{
pin: pin,
duty_cycle: scale_duty_cycle(dc_high <> dc_low),
current: hex_to_int(curr_high <> curr_low),
time: hex_to_int(time_high <> time_low)
}
end
# Expects a hexadecimal number as string and returns its integer value as string
# i.e.: "F0" -> "240"
defp hex_to_int(hex) when is_bitstring(hex) do
hex
|> String.to_integer(16)
|> Integer.to_string()
end
# scales the 16 bits value of the duty cycle.
# FFFFh to 100% and 7FFFh to 50% and all the values in between
def scale_duty_cycle(hex_duty) do
int_duty = String.to_integer(hex_duty, 16)
[duty_int, _duty_dec] =
(int_duty * 100 / 0xFFFF)
|> Float.ceil()
|> Float.to_string()
|> String.split(".")
duty_int
end
end
|
lib/examples/export_to_csv.ex
| 0.852076 | 0.446857 |
export_to_csv.ex
|
starcoder
|
defmodule Tai.Config do
@moduledoc """
Global configuration for a `tai` instance. This module provides a utility
function to hydrate a struct from the OTP `Application` environment.
It can be configured with the following options:
```
# [default: 10_000] [optional] Adapter start timeout in milliseconds
config :tai, adapter_timeout: 60_000
# [default: nil] [optional] Handler to call after all venues & advisors have successfully started on boot
config :tai, after_boot: {Mod, :func_name, []}
# [default: nil] [optional] Handler to call after any venues or advisors have failed to start on boot
config :tai, after_boot_error: {Mod, :func_name, []}
# [default: false] [optional] Flag which enables the forwarding of each order book change set to the system bus
config :tai, broadcast_change_set: true
# [default: 5] [optional] Maximum pool size
config :tai, order_workers: 5
# [default: 2] [optional] Maximum number of workers created if pool is empty
config :tai, order_workers_max_overflow: 2
# [default: false] [optional] Flag which enables the sending of orders to the venue. When this is `false`, it
# acts a safety net by enqueueing and skipping the order transmission to the venue. This is useful in
# development to prevent accidently sending live orders.
config :tai, send_orders: true
  # [default: System.schedulers_online] [optional] Number of processes that can forward internal pubsub messages.
  # Defaults to the number of CPUs available in the Erlang VM, `System.schedulers_online/0`.
config :tai, system_bus_registry_partitions: 2
# [default: %{}] [optional] Map of configured venues. See below for more details.
config :tai, venues: %{}
# [default: %{}] [optional] Map of configured fleets. See below for more details.
config :tai, fleets: %{}
```
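  Hydrate the struct from the application environment (a sketch):
  ```
  config = Tai.Config.parse()
  config.adapter_timeout
  # => 10_000 (the default, unless configured otherwise)
  ```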
"""
@type env :: [{Application.key(), Application.value()}]
@type handler :: module
@type func_name :: atom
@type boot_args :: term
@type t :: %Tai.Config{
adapter_timeout: pos_integer,
fleets: map,
after_boot: {handler, func_name} | {handler, func_name, boot_args} | nil,
after_boot_error: {handler, func_name} | {handler, func_name, boot_args} | nil,
broadcast_change_set: boolean,
logger: module,
order_workers: pos_integer,
order_workers_max_overflow: non_neg_integer,
order_transition_workers: pos_integer,
send_orders: boolean,
system_bus_registry_partitions: pos_integer,
venues: map
}
@enforce_keys ~w[
adapter_timeout
fleets
order_workers
order_workers_max_overflow
send_orders
system_bus_registry_partitions
venues
]a
defstruct ~w[
adapter_timeout
fleets
after_boot
after_boot_error
broadcast_change_set
logger
order_workers
order_workers_max_overflow
order_transition_workers
send_orders
system_bus_registry_partitions
venues
]a
@spec parse() :: t
@spec parse(env) :: t
def parse(env \\ Application.get_all_env(:tai)) do
%Tai.Config{
adapter_timeout: get(env, :adapter_timeout),
fleets: get(env, :fleets),
after_boot: get(env, :after_boot),
after_boot_error: get(env, :after_boot_error),
broadcast_change_set: !!get(env, :broadcast_change_set),
logger: get(env, :logger),
order_workers: get(env, :order_workers),
order_transition_workers: get(env, :order_transition_workers),
order_workers_max_overflow: get(env, :order_workers_max_overflow),
send_orders: !!get(env, :send_orders),
system_bus_registry_partitions: get(env, :system_bus_registry_partitions),
venues: get(env, :venues, %{})
}
end
@spec get(atom) :: term
def get(key), do: get(Application.get_all_env(:tai), key)
@spec get(env, atom) :: term
def get(env, :adapter_timeout = key), do: get(env, key, 10_000)
def get(env, :fleets = key), do: get(env, key, %{})
def get(env, :order_transition_workers = key), do: get(env, key, 5)
def get(env, :order_workers = key), do: get(env, key, 5)
def get(env, :order_workers_max_overflow = key), do: get(env, key, 2)
def get(env, :system_bus_registry_partitions = key), do: get(env, key, System.schedulers_online())
def get(env, :venues = key), do: Keyword.get(env, key, %{})
def get(env, key), do: Keyword.get(env, key)
@spec get(env, atom, term) :: term
def get(env, key, default), do: Keyword.get(env, key, default)
end
|
apps/tai/lib/tai/config.ex
| 0.863392 | 0.774199 |
config.ex
|
starcoder
|
defmodule Wallet do
alias UltraDark.Transaction
alias UltraDark.Utilities
alias UltraDark.UtxoStore
alias UltraDark.KeyPair
def new_transaction(address, amount, desired_fee) do
inputs = find_suitable_inputs(amount + desired_fee)
designations = [%{amount: amount, addr: address}]
designations = if Transaction.sum_inputs(inputs) > amount + desired_fee do
# Since a UTXO is fully used up when we put it in a new transaction, we must create a new output
# that credits us with the change
[%{amount: Transaction.sum_inputs(inputs) - (amount + desired_fee), addr: "MY OWN ADDR"} | designations]
else
designations
end
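    # e.g. inputs summing to 12 with amount 10 and fee 1 leave change of 1,
    # credited back to us via the extra designation above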
tx =
%Transaction{
designations: designations,
inputs: inputs,
timestamp: DateTime.utc_now |> DateTime.to_string
}
# The transaction ID is just the merkle root of all the inputs, concatenated with the timestamp
    id =
      (Transaction.calculate_hash(tx) <> tx.timestamp)
      |> Utilities.sha_base16()
tx = %{tx | id: id}
Map.merge(tx, Transaction.calculate_outputs(tx))
end
@doc """
Return all UTXOs that are owned by the given public key
"""
@spec find_pubkey_utxos(String.t) :: list
def find_pubkey_utxos(public_key) do
UtxoStore.find_by_address(public_key)
end
def find_wallet_utxos do
{:ok, keyfiles} = File.ls(".keys")
keyfiles
|> Enum.flat_map(fn file ->
{pub, priv} = KeyPair.get_from_file(".keys/#{file}")
      hex = Base.encode16(pub)

      hex
      |> find_pubkey_utxos()
      |> Enum.map(&Map.merge(&1, %{signature: Base.encode16(KeyPair.sign(priv, &1.txoid))}))
end)
end
@doc """
Take all the inputs that we have the necessary credentials to utilize, and then return
the most possible utxos whos amounts add up to the amount passed in
"""
@spec find_suitable_inputs(number) :: list
def find_suitable_inputs(amount) do
find_wallet_utxos()
|> Enum.sort(&(&1.amount < &2.amount))
|> take_necessary_utxos(amount)
end
defp take_necessary_utxos(utxos, amount), do: take_necessary_utxos(utxos, [], amount)
defp take_necessary_utxos(_utxos, chosen, amount) when amount <= 0, do: chosen
defp take_necessary_utxos(utxos, chosen, amount) do
[utxo | remaining] = utxos
take_necessary_utxos(remaining, [utxo | chosen], amount - utxo.amount)
end
end
|
lib/wallet.ex
| 0.791378 | 0.41401 |
wallet.ex
|
starcoder
|
defmodule Correios.CEP do
@moduledoc """
  Find Brazilian addresses by zip code, directly from the Correios API. No HTML parsing involved.
"""
alias Correios.CEP.{Address, Client, Error, Parser}
@type t :: {:ok, Address.t()} | {:error, Error.t()}
@zipcode_regex ~r/^\d{5}-?\d{3}$/
@doc """
Finds address by the given `zipcode`.
Zip codes with and without "-" separator are accepted.
## Options
* `connection_timeout`: timeout for establishing a connection, in milliseconds. Default is 5000.
* `request_timeout`: timeout for receiving the HTTP response, in milliseconds. Default is 5000.
* `proxy`: proxy to be used for the request: `{host, port}` tuple, where `port` is an integer.
* `proxy_auth`: proxy authentication: `{user, password}` tuple.
* `url`: Correios API full URL. Default is `https://apps.correios.com.br/SigepMasterJPA/AtendeClienteService/AtendeCliente`.
## Examples
iex> #{inspect(__MODULE__)}.find_address("54250610")
{:ok,
%#{inspect(Address)}{
city: "Jaboatão dos Guararapes",
complement: "",
neighborhood: "Cavaleiro",
state: "PE",
street: "Rua Fernando Amorim",
zipcode: "54250610"
}}
iex> #{inspect(__MODULE__)}.find_address("54250-610")
{:ok,
%#{inspect(Address)}{
city: "Jaboatão dos Guararapes",
complement: "",
neighborhood: "Cavaleiro",
state: "PE",
street: "Rua Fernando Amorim",
zipcode: "54250610"
}}
iex> #{inspect(__MODULE__)}.find_address("54250-610", connection_timeout: 1000, request_timeout: 1000)
{:ok,
%#{inspect(Address)}{
city: "Jaboatão dos Guararapes",
complement: "",
neighborhood: "Cavaleiro",
state: "PE",
street: "Rua Fernando Amorim",
zipcode: "54250610"
}}
iex> #{inspect(__MODULE__)}.find_address("54250-610", proxy: {"localhost", 8888})
{:ok,
%#{inspect(Address)}{
city: "Jaboatão dos Guararapes",
complement: "",
neighborhood: "Cavaleiro",
state: "PE",
street: "Rua Fernando Amorim",
zipcode: "54250610"
}}
iex> #{inspect(__MODULE__)}.find_address(
...> "54250-610",
...> proxy: {"localhost", 8888},
...> proxy_auth: {"myuser", "<PASSWORD>"}
...> )
{:ok,
%#{inspect(Address)}{
city: "Jaboatão dos Guararapes",
complement: "",
neighborhood: "Cavaleiro",
state: "PE",
street: "Rua Fernando Amorim",
zipcode: "54250610"
}}
iex> #{inspect(__MODULE__)}.find_address("00000-000")
{:error, %#{inspect(Error)}{reason: "CEP NAO ENCONTRADO"}}
iex> #{inspect(__MODULE__)}.find_address("1234567")
{:error, %#{inspect(Error)}{reason: "zipcode in invalid format"}}
iex> #{inspect(__MODULE__)}.find_address("")
{:error, %#{inspect(Error)}{reason: "zipcode is required"}}
"""
@spec find_address(String.t(), keyword()) :: t()
def find_address(zipcode, options \\ [])
def find_address("", _options), do: {:error, Error.new("zipcode is required")}
def find_address(zipcode, options) when is_binary(zipcode) and is_list(options) do
if valid_zipcode?(zipcode) do
zipcode
|> client().request(options)
|> parse()
else
{:error, Error.new("zipcode in invalid format")}
end
end
@spec valid_zipcode?(String.t()) :: boolean()
defp valid_zipcode?(zipcode), do: zipcode =~ @zipcode_regex
@spec client :: module()
defp client, do: Application.get_env(:correios_cep, :client) || Client
@spec parse(Client.t()) :: t()
defp parse({:ok, response}) do
response
|> Parser.parse_ok()
|> case do
%Address{} = address -> {:ok, address}
%Error{} = error -> {:error, error}
end
end
defp parse({:error, error}), do: {:error, Parser.parse_error(error)}
@doc """
Finds address by a given zip code.
Similar to `find_address/2` except it will unwrap the error tuple and raise in case of errors.
## Examples
iex> #{inspect(__MODULE__)}.find_address!("54250610")
%#{inspect(Address)}{
city: "Jaboatão dos Guararapes",
complement: "",
neighborhood: "Cavaleiro",
state: "PE",
street: "Rua Fernando Amorim",
zipcode: "54250610"
}
iex> #{inspect(__MODULE__)}.find_address!("00000-000")
** (#{inspect(Error)}) CEP NAO ENCONTRADO
"""
@spec find_address!(String.t(), keyword()) :: Address.t()
def find_address!(zipcode, options \\ []) when is_binary(zipcode) and is_list(options) do
zipcode
|> find_address(options)
|> case do
{:ok, response} -> response
{:error, error} -> raise(error)
end
end
end
|
lib/correios/cep.ex
| 0.875168 | 0.457197 |
cep.ex
|
starcoder
|
defmodule Plug.Static do
@moduledoc """
A plug for serving static assets.
It requires two options:
* `:at` - the request path to reach for static assets.
It must be a string.
* `:from` - the file system path to read static assets from.
It can be either: a string containing a file system path, an
atom representing the application name (where assets will
be served from `priv/static`), a tuple containing the
application name and the directory to serve assets from (besides
`priv/static`), or an MFA tuple.
The preferred form is to use `:from` with an atom or tuple, since
it will make your application independent from the starting directory.
For example, if you pass:
plug Plug.Static, from: "priv/app/path"
Plug.Static will be unable to serve assets if you build releases
or if you change the current directory. Instead do:
plug Plug.Static, from: {:app_name, "priv/app/path"}
If a static asset cannot be found, `Plug.Static` simply forwards
the connection to the rest of the pipeline.
## Cache mechanisms
`Plug.Static` uses etags for HTTP caching. This means browsers/clients
should cache assets on the first request and validate the cache on
following requests, not downloading the static asset once again if it
has not changed. The cache-control for etags is specified by the
`cache_control_for_etags` option and defaults to `"public"`.
However, `Plug.Static` also supports direct cache control by using
versioned query strings. If the request query string starts with
"?vsn=", `Plug.Static` assumes the application is versioning assets
and does not set the `ETag` header, meaning the cache behaviour will
be specified solely by the `cache_control_for_vsn_requests` config,
which defaults to `"public, max-age=31536000"`.
## Options
* `:encodings` - list of 2-ary tuples where first value is value of
the `Accept-Encoding` header and second is extension of the file to
be served if given encoding is accepted by client. Entries will be tested
in order in list, so entries higher in list will be preferred. Defaults
to: `[]`.
In addition to setting this value directly it supports 2 additional
options for compatibility reasons:
+ `:brotli` - will append `{"br", ".br"}` to the encodings list.
+ `:gzip` - will append `{"gzip", ".gz"}` to the encodings list.
Additional options will be added in the above order (Brotli takes
preference over Gzip) to reflect older behaviour which was set due
to fact that Brotli in general provides better compression ratio than
Gzip.
* `:cache_control_for_etags` - sets the cache header for requests
that use etags. Defaults to `"public"`.
* `:etag_generation` - specify a `{module, function, args}` to be used
to generate an etag. The `path` of the resource will be passed to
the function, as well as the `args`. If this option is not supplied,
etags will be generated based off of file size and modification time.
Note it is [recommended for the etag value to be quoted](https://tools.ietf.org/html/rfc7232#section-2.3),
which Plug won't do automatically.
* `:cache_control_for_vsn_requests` - sets the cache header for
requests starting with "?vsn=" in the query string. Defaults to
`"public, max-age=31536000"`.
* `:only` - filters which requests to serve. This is useful to avoid
file system access on every request when this plug is mounted
at `"/"`. For example, if `only: ["images", "favicon.ico"]` is
specified, only files in the "images" directory and the
"favicon.ico" file will be served by `Plug.Static`.
Note that `Plug.Static` matches these filters against request
uri and not against the filesystem. When requesting
a file with name containing non-ascii or special characters,
you should use urlencoded form. For example, you should write
`only: ["file%20name"]` instead of `only: ["file name"]`.
Defaults to `nil` (no filtering).
* `:only_matching` - a relaxed version of `:only` that will
serve any request as long as one of the given values matches the
given path. For example, `only_matching: ["images", "favicon"]`
will match any request that starts at "images" or "favicon",
be it "/images/foo.png", "/images-high/foo.png", "/favicon.ico"
or "/favicon-high.ico". Such matches are useful when serving
digested files at the root. Defaults to `nil` (no filtering).
* `:headers` - other headers to be set when serving static assets. Specify either
an enum of key-value pairs or a `{module, function, args}` to return an enum. The
`conn` will be passed to the function, as well as the `args`.
* `:content_types` - custom MIME type mapping. As a map with filename as key
and content type as value. For example:
`content_types: %{"apple-app-site-association" => "application/json"}`.
## Examples
This plug can be mounted in a `Plug.Builder` pipeline as follows:
defmodule MyPlug do
use Plug.Builder
plug Plug.Static,
at: "/public",
from: :my_app,
only: ~w(images robots.txt)
plug :not_found
def not_found(conn, _) do
send_resp(conn, 404, "not found")
end
end
"""
@behaviour Plug
@allowed_methods ~w(GET HEAD)
import Plug.Conn
alias Plug.Conn
# In this module, the `:prim_file` Erlang module along with the `:file_info`
# record are used instead of the more common and Elixir-y `File` module and
# `File.Stat` struct, respectively. The reason behind this is performance: all
# the `File` operations pass through a single process in order to support node
# operations that we simply don't need when serving assets.
require Record
Record.defrecordp(:file_info, Record.extract(:file_info, from_lib: "kernel/include/file.hrl"))
defmodule InvalidPathError do
defexception message: "invalid path for static asset", plug_status: 400
end
@impl true
def init(opts) do
from =
case Keyword.fetch!(opts, :from) do
{_, _} = from -> from
{_, _, _} = from -> from
from when is_atom(from) -> {from, "priv/static"}
from when is_binary(from) -> from
_ -> raise ArgumentError, ":from must be an atom, a binary or a tuple"
end
encodings =
opts
|> Keyword.get(:encodings, [])
|> maybe_add("br", ".br", Keyword.get(opts, :brotli, false))
|> maybe_add("gzip", ".gz", Keyword.get(opts, :gzip, false))
%{
encodings: encodings,
only_rules: {Keyword.get(opts, :only, []), Keyword.get(opts, :only_matching, [])},
qs_cache: Keyword.get(opts, :cache_control_for_vsn_requests, "public, max-age=31536000"),
et_cache: Keyword.get(opts, :cache_control_for_etags, "public"),
et_generation: Keyword.get(opts, :etag_generation, nil),
headers: Keyword.get(opts, :headers, %{}),
content_types: Keyword.get(opts, :content_types, %{}),
from: from,
at: opts |> Keyword.fetch!(:at) |> Plug.Router.Utils.split()
}
end
@impl true
def call(
conn = %Conn{method: meth},
%{at: at, only_rules: only_rules, from: from, encodings: encodings} = options
)
when meth in @allowed_methods do
segments = subset(at, conn.path_info)
if allowed?(only_rules, segments) do
segments = Enum.map(segments, &uri_decode/1)
if invalid_path?(segments) do
raise InvalidPathError, "invalid path for static asset: #{conn.request_path}"
end
path = path(from, segments)
range = get_req_header(conn, "range")
encoding = file_encoding(conn, path, range, encodings)
serve_static(encoding, conn, segments, range, options)
else
conn
end
end
def call(conn, _options) do
conn
end
defp uri_decode(path) do
# TODO: Remove rescue as this can't fail from Elixir v1.13
try do
URI.decode(path)
rescue
ArgumentError ->
raise InvalidPathError
end
end
defp allowed?(_only_rules, []), do: false
defp allowed?({[], []}, _list), do: true
defp allowed?({full, prefix}, [h | _]) do
h in full or (prefix != [] and match?({0, _}, :binary.match(h, prefix)))
end
defp serve_static({content_encoding, file_info, path}, conn, segments, range, options) do
%{
qs_cache: qs_cache,
et_cache: et_cache,
et_generation: et_generation,
headers: headers,
content_types: types
} = options
case put_cache_header(conn, qs_cache, et_cache, et_generation, file_info, path) do
{:stale, conn} ->
filename = List.last(segments)
content_type = Map.get(types, filename) || MIME.from_path(filename)
conn
|> put_resp_header("content-type", content_type)
|> put_resp_header("accept-ranges", "bytes")
|> maybe_add_encoding(content_encoding)
|> merge_headers(headers)
|> serve_range(file_info, path, range, options)
{:fresh, conn} ->
conn
|> maybe_add_vary(options)
|> send_resp(304, "")
|> halt()
end
end
defp serve_static(:error, conn, _segments, _range, _options) do
conn
end
defp serve_range(conn, file_info, path, [range], options) do
file_info(size: file_size) = file_info
with %{"bytes" => bytes} <- Plug.Conn.Utils.params(range),
{range_start, range_end} <- start_and_end(bytes, file_size) do
send_range(conn, path, range_start, range_end, file_size, options)
else
_ -> send_entire_file(conn, path, options)
end
end
defp serve_range(conn, _file_info, path, _range, options) do
send_entire_file(conn, path, options)
end
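  # Byte-range parsing below, e.g. for a 100-byte file:
  #   "0-49" -> {0, 49}   first 50 bytes
  #   "50-"  -> {50, 99}  from offset 50 to the end
  #   "-20"  -> {80, 99}  the last 20 bytes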
defp start_and_end("-" <> rest, file_size) do
case Integer.parse(rest) do
{last, ""} when last > 0 and last <= file_size -> {file_size - last, file_size - 1}
_ -> :error
end
end
defp start_and_end(range, file_size) do
case Integer.parse(range) do
{first, "-"} when first >= 0 ->
{first, file_size - 1}
{first, "-" <> rest} when first >= 0 ->
case Integer.parse(rest) do
{last, ""} when last >= first -> {first, min(last, file_size - 1)}
_ -> :error
end
_ ->
:error
end
end
defp send_range(conn, path, 0, range_end, file_size, options) when range_end == file_size - 1 do
send_entire_file(conn, path, options)
end
defp send_range(conn, path, range_start, range_end, file_size, _options) do
length = range_end - range_start + 1
conn
|> put_resp_header("content-range", "bytes #{range_start}-#{range_end}/#{file_size}")
|> send_file(206, path, range_start, length)
|> halt()
end
defp send_entire_file(conn, path, options) do
conn
|> maybe_add_vary(options)
|> send_file(200, path)
|> halt()
end
defp maybe_add_encoding(conn, nil), do: conn
defp maybe_add_encoding(conn, ce), do: put_resp_header(conn, "content-encoding", ce)
defp maybe_add_vary(conn, %{encodings: encodings}) do
# If we serve gzip or brotli at any moment, we need to set the proper vary
# header regardless of whether we are serving gzip content right now.
# See: http://www.fastly.com/blog/best-practices-for-using-the-vary-header/
if encodings != [] do
update_in(conn.resp_headers, &[{"vary", "Accept-Encoding"} | &1])
else
conn
end
end
defp put_cache_header(
%Conn{query_string: "vsn=" <> _} = conn,
qs_cache,
_et_cache,
_et_generation,
_file_info,
_path
)
when is_binary(qs_cache) do
{:stale, put_resp_header(conn, "cache-control", qs_cache)}
end
defp put_cache_header(conn, _qs_cache, et_cache, et_generation, file_info, path)
when is_binary(et_cache) do
etag = etag_for_path(file_info, et_generation, path)
conn =
conn
|> put_resp_header("cache-control", et_cache)
|> put_resp_header("etag", etag)
if etag in get_req_header(conn, "if-none-match") do
{:fresh, conn}
else
{:stale, conn}
end
end
defp put_cache_header(conn, _, _, _, _, _) do
{:stale, conn}
end
defp etag_for_path(file_info, et_generation, path) do
case et_generation do
{module, function, args} ->
apply(module, function, [path | args])
nil ->
file_info(size: size, mtime: mtime) = file_info
<<?", {size, mtime} |> :erlang.phash2() |> Integer.to_string(16)::binary, ?">>
end
end
defp file_encoding(conn, path, [_range], _encodings) do
# We do not support compression for range queries.
file_encoding(conn, path, nil, [])
end
defp file_encoding(conn, path, _range, encodings) do
encoded =
Enum.find_value(encodings, fn {encoding, ext} ->
if file_info = accept_encoding?(conn, encoding) && regular_file_info(path <> ext) do
{encoding, file_info, path <> ext}
end
end)
cond do
not is_nil(encoded) ->
encoded
file_info = regular_file_info(path) ->
{nil, file_info, path}
true ->
:error
end
end
defp regular_file_info(path) do
case :prim_file.read_file_info(path) do
{:ok, file_info(type: :regular) = file_info} ->
file_info
_ ->
nil
end
end
defp accept_encoding?(conn, encoding) do
encoding? = &String.contains?(&1, [encoding, "*"])
Enum.any?(get_req_header(conn, "accept-encoding"), fn accept ->
accept |> Plug.Conn.Utils.list() |> Enum.any?(encoding?)
end)
end
defp maybe_add(list, key, value, true), do: list ++ [{key, value}]
defp maybe_add(list, _key, _value, false), do: list
defp path({module, function, arguments}, segments)
when is_atom(module) and is_atom(function) and is_list(arguments),
do: Enum.join([apply(module, function, arguments) | segments], "/")
defp path({app, from}, segments) when is_atom(app) and is_binary(from),
do: Enum.join([Application.app_dir(app), from | segments], "/")
defp path(from, segments),
do: Enum.join([from | segments], "/")
defp subset([h | expected], [h | actual]), do: subset(expected, actual)
defp subset([], actual), do: actual
defp subset(_, _), do: []
defp invalid_path?(list) do
invalid_path?(list, :binary.compile_pattern(["/", "\\", ":", "\0"]))
end
defp invalid_path?([h | _], _match) when h in [".", "..", ""], do: true
defp invalid_path?([h | t], match), do: String.contains?(h, match) or invalid_path?(t)
defp invalid_path?([], _match), do: false
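  # For example, ["css", "app.css"] passes, while ["..", "secret"],
  # ["", "x"], or ["a/b"] are rejected.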
defp merge_headers(conn, {module, function, args}) do
merge_headers(conn, apply(module, function, [conn | args]))
end
defp merge_headers(conn, headers) do
merge_resp_headers(conn, headers)
end
end
|
lib/plug/static.ex
| 0.909829 | 0.628507 |
static.ex
|
starcoder
|
defmodule Grizzly.ZWave.NodeId do
@moduledoc false
# helper module for encoding and parsing node ids
alias Grizzly.ZWave
# When encoding for 16 bit node ids in the context of the node remove family of
# command (node ids > 255) the 8 bit node id byte of the binary needs to be set
# to 0xFF as per the specification.
# In ZWA_Z-Wave Network Protocol Command Class Specification 12.0.0.pdf:
# For example in sections 172.16.31.10 and 172.16.58.3:
# "This field MUST be set to 0xFF if the removed NodeID is greater than 255."
# This only is used for version 4 parsing and encoding.
@node_id_is_16_bit 0xFF
@doc """
Encode the node id
"""
@spec encode(ZWave.node_id()) :: binary()
def encode(node_id), do: <<node_id>>
@typedoc """
Options for extended format encoding
* `:delimiter` - when the 8 bit node id and 16 bit node id bytes do not follow
in sequence next to each other, this option allows the information between
the two formats to be passed.
"""
@type extended_encoding_opt() :: {:delimiter, binary()}
@doc """
Encode the node id using the extended node id format
The format for command classes that support extended node ids is:
```
<<0xFF, 16_bit_node_id>>
```
Or
```
<<8_bit_node_id, 16_bits_that_match_8_bit_node_id>>
```
  When using the extended format encoding and passing an 8 bit node id, the
  specification often states that the remaining 2 bytes at the end of the
  binary must match the node id passed in the 8 bit node id byte. For example:
```elixir
iex> encode_extended(0x05)
<<0x05, 0x00, 0x05>>
```
Since the node id fits within the 8 bit node id field that is the first byte.
The next two bytes (16 bits) is that node id repeated within the 2 byte space
required to fill the total number of bytes for the extended format encoding.
When the 8 bit node id and the 16 bit node id bytes are not in sequence and
there other information between them you can pass the `:delimiter` option to
ensure that binary is in between the 8 bit node and the 16 bit node id bytes.
```elixir
iex> encode_extended(0x10, delimiter: <<0xA0, 0xB0>>)
<<0x10, 0xA0, 0xB0, 0x00, 0x10>>
iex> encode_extended(0x1010, delimiter: <<0xA0, 0xB0>>)
<<0xFF, 0xA0, 0xB0, 0x10, 0x10>>
```
"""
@spec encode_extended(ZWave.node_id(), [extended_encoding_opt()]) :: binary()
def encode_extended(node_ids, opts \\ [])
def encode_extended(node_id, opts) when node_id < 233 do
case Keyword.get(opts, :delimiter) do
nil ->
<<node_id, node_id::16>>
padding ->
<<node_id, padding::binary, node_id::16>>
end
end
def encode_extended(node_id, opts) when node_id > 255 and node_id <= 65535 do
case Keyword.get(opts, :delimiter) do
nil ->
<<@node_id_is_16_bit, node_id::16>>
padding ->
<<@node_id_is_16_bit, padding::binary, node_id::16>>
end
end
@typedoc """
Parsing options
* `:delimiter_size` - when the 8 bit node id and the 16 bit node id bytes are
not in sequence and are separated by bytes in between, this option specifies
the byte size of the delimiter.
"""
@type parse_opt() :: {:delimiter_size, non_neg_integer()}
@doc """
Parse the binary that contains the node id
For node id binaries that support the extended node id format but contain
bytes in between the 8 bit node id byte and the 16 bit node id bytes, the
`:delimiter_size` option can be passed to account for these in-between
bytes when parsing.
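For example (illustrative values, following the encoding examples above):
```elixir
iex> parse(<<0x05>>)
0x05
iex> parse(<<0xFF, 0xA0, 0xB0, 0x10, 0x10>>, delimiter_size: 2)
0x1010
```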
"""
@spec parse(binary(), [parse_opt()]) :: ZWave.node_id()
def parse(node_id_binary, opts \\ [])
def parse(<<node_id>>, _opts), do: node_id
def parse(node_id_bin, opts) do
case Keyword.get(opts, :delimiter_size, 0) do
0 ->
do_parse(node_id_bin)
del_size ->
<<node_id_8, _delimiter::size(del_size)-unit(8), node_id_16::binary>> = node_id_bin
do_parse(<<node_id_8, node_id_16::binary>>)
end
end
defp do_parse(<<node_id>>), do: node_id
# sometimes packets will have extra bytes after the 16 bit node id, we can
# ignore those
defp do_parse(<<@node_id_is_16_bit, node_id::16, _ignore::binary>>), do: node_id
defp do_parse(<<node_id, _ignored::binary>>), do: node_id
end
| lib/grizzly/zwave/node_id.ex |
defmodule Foldable do
@typedoc """
Foldable dictionary
intuitive type: foldr : (a -> b -> b) -> b -> t a -> b
* `foldr`: (t a, b, (a, b) -> b) -> b # params are swapped to facilitate piping
* `foldMap`: (t a, a -> m, Monoid m) -> m # either foldr or foldMap is mandatory
"""
@type t :: %__MODULE__{
}
def __struct__, do: %{
__struct__: __MODULE__,
fold: fn _, _ -> raise("Foldable: missing definition for fold") end,
foldMap: fn _, _, _ -> raise("Foldable: missing definition for foldMap") end,
foldr: fn _, _, _ -> raise("Foldable: missing definition for foldr") end,
foldl: fn _, _, _ -> raise("Foldable: missing definition for foldl") end,
foldr1: fn _, _ -> raise("Foldable: missing definition for foldr1") end,
foldl1: fn _, _ -> raise("Foldable: missing definition for foldl1") end,
toList: fn _ -> raise("Foldable: missing definition for toList") end,
null: fn _ -> raise("Foldable: missing definition for null") end,
length: fn _ -> raise("Foldable: missing definition for length") end,
elem: fn _, _, _ -> raise("Foldable: missing definition for elem") end,
maximum: fn _, _ -> raise("Foldable: missing definition for maximum") end,
minimum: fn _, _ -> raise("Foldable: missing definition for minimum") end,
sum: fn _, _ -> raise("Foldable: missing definition for sum") end,
product: fn _, _ -> raise("Foldable: missing definition for product") end,
}
def __struct__(kv) do
required_keys = [
:fold,
:foldMap,
:foldr,
:foldl,
:foldr1,
:foldl1,
:toList,
:null,
:length,
:elem,
:maximum,
:minimum,
:sum,
:product,
]
{map, keys} =
Enum.reduce(kv, {__struct__(), required_keys}, fn {key, val}, {map, keys} ->
{Map.replace!(map, key, val), List.delete(keys, key)}
end)
case keys do
[] ->
map
_ ->
raise ArgumentError,
"the following keys must also be given when building " <>
"struct #{inspect(__MODULE__)}: #{inspect(keys)}"
end
end
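@doc """
Builds a complete Foldable dictionary from a minimal definition containing
either `foldMap` or `foldr`; every other operation is derived from those.
An illustrative sketch (not part of the original source), assuming the
`Semigroup`/`Monoid` helper modules referenced below are compiled alongside:
```elixir
list_foldable = Foldable.define(foldr: fn t, z, f -> List.foldr(t, z, f) end)
list_foldable.toList.([1, 2, 3])
# [1, 2, 3]
list_foldable.null.([])
# true
```
"""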
def define(base_dict) do
base_dict = Map.new(base_dict)
{foldMap, foldr} = case base_dict do
%{foldMap: foldMap, foldr: foldr} -> {foldMap, foldr}
%{foldMap: foldMap} ->
foldr = fn t, z, f -> appEndo(foldMap.(t, fn x -> endo(fn y -> f.(x, y) end) end, monoid_endo())).(z) end
{foldMap, foldr}
#%{foldMap: foldMap} ->
# foldr = fn f, z, t ->
# foldMap.(monoid_endo(), fn x -> {:Endo, fn y -> f.(x, y) end} end, t)
# |> case do {:Endo, g} -> g.(z) end
# end
# {foldMap, foldr}
%{foldr: foldr} ->
foldMap = fn t, f, monoid_dict -> foldr.(t, monoid_dict.mempty, fn x, acc -> monoid_dict.mappend.(f.(x), acc) end) end
{foldMap, foldr}
%{} -> raise("#{__MODULE__} minimal definition requires either foldMap or foldr")
end
fold = Map.get(base_dict, :fold, fn t, monoid_dict -> foldMap.(t, fn x -> x end, monoid_dict) end)
foldl = Map.get(base_dict, :foldl, fn t, z, f -> appEndo(getDual(foldMap.(t, fn x -> dual(endo(fn y -> f.(y, x) end)) end, monoid_dual(monoid_endo())))).(z) end)
foldr1 = Map.get(base_dict, :foldr1, fn t, f ->
mf = fn x, m ->
result = case m do
:Nothing -> x
{:Just, y} -> f.(x, y)
end
{:Just, result}
end
case foldr.(t, :Nothing, mf) do
:Nothing -> raise("foldr1: empty structure")
{:Just, x} -> x
end
end)
foldl1 = Map.get(base_dict, :foldl1, fn t, f ->
mf = fn m, y ->
result = case m do
:Nothing -> y
{:Just, x} -> f.(x, y)
end
{:Just, result}
end
case foldl.(t, :Nothing, mf) do
:Nothing -> raise("foldr1: empty structure")
{:Just, x} -> x
end
end)
toList = Map.get(base_dict, :toList, fn t -> foldr.(t, [], fn x, acc -> [x | acc] end) end)
null = Map.get(base_dict, :null, fn t -> foldr.(t, true, fn _, _ -> false end) end)
length = Map.get(base_dict, :length, fn t -> foldl.(t, 0, fn acc, _ -> acc + 1 end) end)
elem = Map.get(base_dict, :elem, fn t, x, eq_dict -> foldMap.(t, fn y -> eq_dict.==.(x, y) end, monoid_any()) end)
maximum = Map.get(base_dict, :maximum, fn t, ord_dict -> case getMax(foldMap.(t, fn x -> max({:Just, x}) end, monoid_max(ord_dict))) do
:Nothing -> raise("maximum: empty structure")
{:Just, x} -> x
end end)
minimum = Map.get(base_dict, :minimum, fn t, ord_dict -> case getMin(foldMap.(t, fn x -> min({:Just, x}) end, monoid_min(ord_dict))) do
:Nothing -> raise("minimum: empty structure")
{:Just, x} -> x
end end)
sum = Map.get(base_dict, :sum, fn t, num_dict -> fold.(t, monoid_sum(num_dict)) end)
product = Map.get(base_dict, :product, fn t, num_dict -> fold.(t, monoid_product(num_dict)) end)
%__MODULE__{
fold: fold,
foldMap: foldMap,
foldr: foldr,
foldl: foldl,
foldr1: foldr1,
foldl1: foldl1,
toList: toList,
null: null,
length: length,
elem: elem,
maximum: maximum,
minimum: minimum,
sum: sum,
product: product,
}
end
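# Endo wraps a function a -> a; as a monoid it composes under <> with the
# identity function as mempty. It is used above to derive foldr from foldMap.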
def endo(f), do: {:Endo, f}
def appEndo({:Endo, f}), do: f
def semigroup_endo do
Semigroup.define(<>: fn {:Endo, f}, {:Endo, g} -> {:Endo, fn x -> f.(g.(x)) end} end)
end
def monoid_endo do
Monoid.define(
semigroup: semigroup_endo(),
mempty: {:Endo, fn x -> x end}
)
end
def dual(x), do: {:Dual, x}
def getDual({:Dual, x}), do: x
def semigroup_dual(semigroup_a) do
Semigroup.define(
<>: fn {:Dual, x}, {:Dual, y} -> {:Dual, semigroup_a.<>.(y, x)} end,
stimes: fn {:Dual, x}, n -> {:Dual, semigroup_a.stimes.(x, n)} end
)
end
def monoid_dual(monoid_a) do
Monoid.define(
semigroup: semigroup_dual(monoid_a.semigroup),
mempty: {:Dual, monoid_a.mempty}
)
end
def monoid_any do
Monoid.define(
semigroup: Semigroup.define(<>: &or/2),
mempty: false,
mconcat: fn xs -> Enum.reduce_while(xs, false, fn x, acc -> if x or acc, do: {:halt, true}, else: {:cont, false} end) end
)
end
def max(x), do: {:Max, x}
def getMax({:Max, x}), do: x
def monoid_max(ord_dict) do
Monoid.define(
semigroup: Semigroup.define(<>: fn
l , {:Max, :Nothing} -> l
{:Max, :Nothing} , r -> r
{:Max, {:Just, x}} = l, {:Max, {:Just, y}} = r -> if ord_dict.>=.(x, y), do: l, else: r
end),
mempty: {:Max, :Nothing}
)
end
def min(x), do: {:Min, x}
def getMin({:Min, x}), do: x
def monoid_min(ord_dict) do
Monoid.define(
semigroup: Semigroup.define(<>: fn
l , {:Min, :Nothing} -> l
{:Min, :Nothing} , r -> r
{:Min, {:Just, x}} = l, {:Min, {:Just, y}} = r -> if ord_dict.<=.(x, y), do: l, else: r
end),
mempty: {:Min, :Nothing}
)
end
def monoid_sum(num_dict) do
Monoid.define(
semigroup: Semigroup.define(<>: num_dict.+),
mempty: 0
)
end
def monoid_product(num_dict) do
Monoid.define(
semigroup: Semigroup.define(<>: num_dict.*),
mempty: 1
)
end
end
| typeclassopedia/lib/foldable.ex |
defmodule Grizzly do
@moduledoc """
Send commands to Z-Wave devices
Grizzly provides the `send_command` function as the way to send a command to
Z-Wave devices.
The `send_command` function takes the node id that you are trying to send a
command to, the command name, and optionally command arguments and command
options.
A basic command that has no options or arguments looks like this:
```elixir
Grizzly.send_command(node_id, :switch_binary_get)
```
A command with command arguments:
```elixir
Grizzly.send_command(node_id, :switch_binary_set, value: :off)
```
Also, a command can have options.
```elixir
Grizzly.send_command(node_id, :switch_binary_get, [], timeout: 10_000, retries: 5)
```
Some possible return values from `send_command` are:
1. `{:ok, Grizzly.Report.t()}` - the command was sent and the Z-Wave device
responded with a report. See `Grizzly.Report` for more information.
1. `{:error, :including}` - currently the Z-Wave controller is adding or
removing a device and commands cannot be processed right now
1. `{:error, :updating_firmware}` - currently the Z-Wave controller is updating firmware and commands cannot be processed right now
1. `{:error, reason}` - there was some other reason for an error; a common
one is `:nack_response`
For a more detailed explanation of the responses from a `send_command` call
see the typedoc for `Grizzly.send_command_response()`.
# Events from Z-Wave
Events generating from a Z-Wave device, for example a motion detected event,
can be handled via the `Grizzly.subscribe_command/1` and
`Grizzly.subscribe_commands/1` functions. This will allow you to subscribe
to specific commands. When the command is received from the Z-Wave network
it will be placed in a `Grizzly.Report` and sent to the subscribing process. The
node that generated the report can be accessed with the `:node_id` field in
the report.
```elixir
iex> Grizzly.subscribe_command(:battery_report)
# sometime later
iex> flush
{:grizzly, :event, %Grizzly.Report{command: %Grizzly.ZWave.Command{name: :battery_report}}}
```
"""
alias Grizzly.{Connection, Inclusions, FirmwareUpdates, Report}
alias Grizzly.Commands.Table
alias Grizzly.UnsolicitedServer.Messages
alias Grizzly.ZWave
@typedoc """
The response from sending a Z-Wave command
When everything is okay the response will be `{:ok, Grizzly.Report{}}`. For
documentation about a report see `Grizzly.Report` module.
When there are errors the response will be in the pattern of
`{:error, reason}`.
Three reasons that Grizzly supports for all commands are `:nack_response`,
`:updating_firmware`, and `:including`.
A `:nack_response` normally means that the Z-Wave node that you were trying
to send a command to is unreachable and did not receive your command at all.
This could mean that the Z-Wave network is overloaded and you should reissue
the command, the device is too far from the controller, or the device is no
longer part of the Z-Wave network.
Grizzly by default will try a command 3 times before returning a
`:nack_response`. This is configurable via the `:retries` command option in
the `Grizzly.send_command/4` function. This is useful if you are going to
have a known spike in Z-Wave traffic.
If you receive `:including` as the error reason, that means the
controller is in an inclusion state and your command will be dropped if we
tried to send it. So we won't allow sending a Z-Wave command during an
inclusion. It's best to wait and try again once your application is done
trying to include.
"""
@type send_command_response() ::
{:ok, Report.t()}
| {:error, :including | :updating_firmware | :nack_response | any()}
@type seq_number() :: non_neg_integer()
@type node_id() :: non_neg_integer()
@typedoc """
A custom handler for the command.
See `Grizzly.CommandHandler` behaviour for more documentation.
"""
@type handler() :: module() | {module(), args :: any()}
@type command_opt() ::
{:timeout, non_neg_integer()}
| {:retries, non_neg_integer()}
| {:handler, handler()}
| {:transmission_stats, boolean()}
@type command :: atom()
@doc """
Send a command to the node via the node id
"""
@spec send_command(ZWave.node_id(), command(), args :: list(), [command_opt()]) ::
send_command_response()
def send_command(node_id, command_name, args \\ [], opts \\ []) do
# always open a connection. If the connection is already opened this
# will not establish a new connection
including? = Inclusions.inclusion_running?()
updating_firmware? = FirmwareUpdates.firmware_update_running?()
with false <- including? or updating_firmware?,
{command_module, default_opts} <- Table.lookup(command_name),
{:ok, command} <- command_module.new(args),
{:ok, _} <- Connection.open(node_id) do
Connection.send_command(node_id, command, Keyword.merge(default_opts, opts))
else
true ->
reason = if including?, do: :including, else: :updating_firmware
{:error, reason}
{:error, _} = error ->
error
end
end
@doc """
Subscribe to a command event from a Z-Wave device
"""
@spec subscribe_command(command()) :: :ok
def subscribe_command(command_name) do
Messages.subscribe(command_name)
end
@doc """
Subscribe to many events from a Z-Wave device
"""
@spec subscribe_commands([command()]) :: :ok
def subscribe_commands(command_names) do
Enum.each(command_names, &subscribe_command/1)
end
@doc """
Unsubscribe from an event
"""
@spec unsubscribe_command(command()) :: :ok
def unsubscribe_command(command_name) do
Messages.unsubscribe(command_name)
end
@doc """
List the supported commands
"""
@spec list_commands() :: [atom()]
def list_commands() do
Enum.map(Table.dump(), fn {command, _} -> command end)
end
@doc """
List the commands for a particular command class
"""
@spec commands_for_command_class(atom()) :: [atom()]
def commands_for_command_class(command_class_name) do
Table.dump()
|> Enum.filter(fn {_command, {command_module, _}} ->
{:ok, command} = command_module.new([])
command.command_class.name() == command_class_name
end)
|> Enum.map(fn {command, _} -> command end)
end
end
| lib/grizzly.ex |
defimpl Timex.Comparable, for: Timex.DateTime do
alias Timex.Time
alias Timex.DateTime
alias Timex.AmbiguousDateTime
alias Timex.Comparable
alias Timex.Convertable
alias Timex.Types
import Timex.Macros
@units [:years, :months, :weeks, :calendar_weeks, :days, :hours, :minutes, :seconds, :timestamp]
@doc """
See docs for `Timex.compare/3`
"""
@spec compare(DateTime.t, Comparable.comparable, Comparable.granularity) :: Comparable.compare_result
def compare(a, :epoch, granularity), do: compare(a, DateTime.epoch(), granularity)
def compare(a, :zero, granularity), do: compare(a, DateTime.zero(), granularity)
def compare(_, :distant_past, _granularity), do: +1
def compare(_, :distant_future, _granularity), do: -1
def compare(a, a, _granularity), do: 0
def compare(_, %AmbiguousDateTime{} = b, _granularity),
do: {:error, {:ambiguous_comparison, b}}
def compare(%DateTime{} = this, %DateTime{} = other, granularity) when granularity in @units do
case {ok!(DateTime.to_seconds(this, :zero)), ok!(DateTime.to_seconds(other, :zero))} do
{{:error, _} = err, _} ->
err
{_, {:error, _} = err} ->
err
{{:ok, this_secs}, {:ok, other_secs}} ->
case ok!(diff(this, other, granularity)) do
{:error, _} = err ->
err
{:ok, delta} ->
occurs_after? = cond do
other_secs < this_secs -> true
:else -> false
end
cond do
delta == 0 -> 0
delta > 0 && occurs_after? -> 1
:else -> -1
end
end
end
end
def compare(_, _, granularity) when not granularity in @units,
do: {:error, {:invalid_granularity, granularity}}
def compare(a, b, granularity) do
case Convertable.to_datetime(b) do
{:error, _} = err ->
err
%DateTime{} = datetime ->
compare(a, datetime, granularity)
%AmbiguousDateTime{} = ambiguous ->
{:error, {:ambiguous_comparison, ambiguous}}
end
end
@doc """
See docs for `Timex.diff/3`
"""
@spec diff(DateTime.t, Comparable.comparable, Comparable.granularity) :: Types.timestamp | integer | {:error, term}
def diff(a, b, :secs) do
IO.write :stderr, "warning: :secs is a deprecated unit name, use :seconds instead\n"
diff(a, b, :seconds)
end
def diff(a, b, :mins) do
IO.write :stderr, "warning: :mins is a deprecated unit name, use :minutes instead\n"
diff(a, b, :minutes)
end
def diff(_, %AmbiguousDateTime{} = b, _granularity),
do: {:error, {:ambiguous_comparison, b}}
def diff(%DateTime{} = this, %DateTime{} = other, type) do
case {ok!(DateTime.to_seconds(this, :zero)), ok!(DateTime.to_seconds(other, :zero))} do
{{:error, _} = err, _} ->
err
{_, {:error, _} = err} ->
err
{{:ok, this_secs}, {:ok, other_secs}} ->
diff_secs = this_secs - other_secs
cond do
diff_secs == 0 -> zero(type)
diff_secs > 0 -> do_diff(this, this_secs, other, other_secs, type)
diff_secs < 0 -> do_diff(other, other_secs, this, this_secs, type)
end
end
end
# Handle custom conversions
def diff(a, b, granularity) do
case Convertable.to_datetime(b) do
{:error, _} = err ->
err
%DateTime{} = datetime ->
diff(a, datetime, granularity)
%AmbiguousDateTime{} = ambiguous ->
{:error, {:ambiguous_comparison, ambiguous}}
end
end
defp do_diff(_, a, _, a, type), do: zero(type)
defp do_diff(_adate, a, _bdate, b, :timestamp) do
seconds = a - b
case ok!(Time.from(seconds, :seconds)) do
{:error, _} = err -> err
{:ok, timestamp} -> timestamp
end
end
defp do_diff(_adate, a, _bdate, b, :seconds), do: a - b
defp do_diff(_adate, a, _bdate, b, :minutes), do: div(a - b, 60)
defp do_diff(adate, a, bdate, b, :hours) do
minutes = do_diff(adate, a, bdate, b, :minutes)
div(minutes, 60)
end
defp do_diff(%DateTime{:year => ay, :month => am, :day => ad}, _, %DateTime{:year => by, :month => bm, :day => bd}, _, :days) do
a_days = :calendar.date_to_gregorian_days({ay,am,ad})
b_days = :calendar.date_to_gregorian_days({by,bm,bd})
a_days - b_days
end
defp do_diff(adate, a, bdate, b, :weeks) do
days = do_diff(adate, a, bdate, b, :days)
weeks = div(days, 7)
extra_days = rem(days, 7)
actual_weeks = (if extra_days == 0, do: weeks, else: weeks + 1)
cond do
actual_weeks == 1 && extra_days < 7 -> 0
:else -> actual_weeks
end
end
defp do_diff(adate, _, bdate, _, :calendar_weeks) do
case {ok!(Timex.end_of_week(adate)), ok!(Timex.beginning_of_week(bdate))} do
{{:error, _} = err, _} -> err
{_, {:error, _} = err} -> err
{{:ok, ending}, {:ok, start}} ->
end_secs = DateTime.to_seconds(ending, :zero)
start_secs = DateTime.to_seconds(start, :zero)
days = do_diff(ending, end_secs, start, start_secs, :days)
weeks = div(days, 7)
extra_days = rem(days, 7)
actual_weeks = (if extra_days == 0, do: weeks, else: weeks + 1)
result = cond do
actual_weeks == 1 && extra_days < 7 -> 0
:else -> actual_weeks
end
result
end
end
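# Month difference: whole months between the dates, minus one when the
# later date's day-of-month has not yet reached the earlier date's.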
defp do_diff(%DateTime{:year => ly, :month => lm, :day => ld}, _, %DateTime{:year => ey, :month => em, :day => ed}, _, :months) do
x = cond do
ld >= ed -> 0
:else -> -1
end
y = ly - ey
z = lm - em
x+y*12+z
end
defp do_diff(adate, a, bdate, b, :years) do
months = do_diff(adate, a, bdate, b, :months)
years = div(months, 12)
years
end
defp do_diff(_, _, _, _, unit) when not unit in @units,
do: {:error, {:invalid_granularity, unit}}
defp zero(:timestamp), do: Time.zero
defp zero(_type), do: 0
end
| lib/comparable/datetime.ex |
defmodule Artemis.Helpers do
require Logger
@doc """
Generate a random string
"""
def random_string(string_length) do
string_length
|> :crypto.strong_rand_bytes()
|> Base.url_encode64()
|> binary_part(0, string_length)
end
@doc """
Detect if value is truthy
"""
def present?(nil), do: false
def present?(0), do: false
def present?(value) when is_bitstring(value), do: String.trim(value) != ""
def present?(_value), do: true
@doc """
Detect if a key's value is truthy
"""
def present?(entry, key) when is_list(entry) do
entry
|> Keyword.get(key)
|> present?
end
def present?(entry, key) when is_map(entry) do
entry
|> Map.get(key)
|> present?
end
@doc """
Detect if value is empty
"""
def empty?(nil), do: true
def empty?(value) when is_bitstring(value), do: String.length(String.trim(value)) == 0
def empty?(value) when is_map(value), do: map_size(value) == 0
def empty?(value) when is_list(value), do: length(value) == 0
@doc """
Detect if the first map is a subset of the second
Input: %{one: 1}, %{one: 1, two: 2}
Output: true
"""
def subset?(first, %_{} = second), do: subset?(first, Map.from_struct(second))
def subset?(first, second), do: Enum.all?(first, &(&1 in second))
@doc """
Renames a key in a map. If the key does not exist, original map is returned.
"""
def rename_key(map, current_key, new_key) when is_map(map) do
case Map.has_key?(map, current_key) do
true -> Map.put(map, new_key, Map.get(map, current_key))
false -> map
end
end
@doc """
Takes the result of a `group_by` statement, applying the passed function
to each grouping's values. Returns a map.
"""
def reduce_group_by(grouped_data, function) do
Enum.reduce(grouped_data, %{}, fn {key, values}, acc ->
Map.put(acc, key, function.(values))
end)
end
@doc """
Takes a collection of values and an attribute and returns the max value for that attribute.
"""
def max_by_attribute(values, attribute, fun \\ fn x -> x end)
def max_by_attribute([], _, _), do: nil
def max_by_attribute(values, attribute, fun) do
values
|> Enum.max_by(&fun.(Map.get(&1, attribute)))
|> Map.get(attribute)
end
@doc """
Takes a collection of values and an attribute and returns the min value for that attribute.
"""
def min_by_attribute(values, attribute, fun \\ fn x -> x end)
def min_by_attribute([], _, _), do: []
def min_by_attribute(values, attribute, fun) do
values
|> Enum.min_by(&fun.(Map.get(&1, attribute)))
|> Map.get(attribute)
end
@doc """
Returns a titlecased string. Example:
Input: hello world
Output: Hello World
"""
def titlecase(value) when is_nil(value), do: ""
def titlecase(value) do
value
|> String.split(" ")
|> Enum.map(&String.capitalize(&1))
|> Enum.join(" ")
end
@doc """
Returns a snakecase string. Example:
Input: Artemis.HelloWorld
Ouput: "hello_world"
"""
def snakecase(value) when is_atom(value) do
value
|> Atom.to_string()
|> String.split(".")
|> List.last()
|> snakecase()
end
def snakecase(value) when is_bitstring(value) do
Macro.underscore(value)
end
@doc """
Returns a dashcase string. Example:
Input: Artemis.HelloWorld
Ouput: "hello-world"
"""
def dashcase(value) do
value
|> snakecase()
|> String.replace("_", "-")
end
@doc """
Returns a modulecase string. Example:
Input: "hello_world"
Output: HelloWorld
"""
def modulecase(value) do
value
|> snakecase()
|> String.split("_")
|> Enum.map(&String.capitalize(&1))
|> Enum.join("")
end
@doc """
Returns a simplified module name. Example:
Input: Elixir.MyApp.MyModule
Output: MyModule
"""
def module_name(module) do
module
|> Atom.to_string()
|> String.split(".")
|> List.last()
|> String.to_atom()
end
@doc """
Arbitrary-precision addition using Decimal, returning a Float
"""
def decimal_add(first, second) when is_float(first), do: decimal_add(Decimal.from_float(first), second)
def decimal_add(first, second) when is_float(second), do: decimal_add(first, Decimal.from_float(second))
def decimal_add(first, second) do
first
|> Decimal.add(second)
|> Decimal.to_float()
end
@doc """
Converts an atom or string to an integer
"""
def to_integer(value) when is_float(value), do: Kernel.trunc(value)
def to_integer(value) when is_atom(value), do: to_integer(Atom.to_string(value))
def to_integer(value) when is_bitstring(value), do: String.to_integer(value)
def to_integer(value), do: value
@doc """
Converts an atom or integer to a bitstring
"""
def to_string(value) when is_nil(value), do: ""
def to_string(value) when is_atom(value), do: Atom.to_string(value)
def to_string(value) when is_integer(value), do: Integer.to_string(value)
def to_string(value) when is_float(value), do: Float.to_string(value)
def to_string(value), do: value
@doc """
Converts a bitstring or integer to an atom
"""
def to_atom(value) when is_nil(value), do: ""
def to_atom(value) when is_bitstring(value), do: String.to_atom(value)
def to_atom(value) when is_integer(value), do: to_atom(Integer.to_string(value))
def to_atom(value) when is_float(value), do: to_atom(Float.to_string(value))
def to_atom(value), do: value
@doc """
Converts a nested list to a nested map. Example:
Input: [[:one, :two, 2], [:one, :three, 3]]
Output: %{one: %{two: 2, three: 3}}
"""
def nested_list_to_map(nested_list) do
Enum.reduce(nested_list, %{}, fn item, acc ->
deep_merge(acc, list_to_map(item))
end)
end
@doc """
Converts a simple list to a nested map. Example:
Input: [:one, :two, 3]
Output: %{one: %{two: 3}}
"""
def list_to_map([head | tail]) when tail == [], do: head
def list_to_map([head | tail]) when is_integer(head), do: list_to_map([Integer.to_string(head) | tail])
def list_to_map([head | tail]), do: Map.put(%{}, head, list_to_map(tail))
@doc """
Deep merges two maps
See: https://stackoverflow.com/questions/38864001/elixir-how-to-deep-merge-maps/38865647#38865647
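Example:
deep_merge(%{a: %{b: 1}}, %{a: %{c: 2}, d: 3})
Returns:
%{a: %{b: 1, c: 2}, d: 3}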
"""
def deep_merge(left, right) do
Map.merge(left, right, &deep_resolve/3)
end
defp deep_resolve(_key, left = %{}, right = %{}) do
# Key exists in both maps, and both values are maps as well.
# These can be merged recursively.
deep_merge(left, right)
end
defp deep_resolve(_key, _left, right) do
# Key exists in both maps, but at least one of the values is
# NOT a map. We fall back to standard merge behavior, preferring
# the value on the right.
right
end
# Tasks
@doc """
Runs a list of tasks in parallel. Example:
async_await_many([&task_one/0, &task_two/0])
Returns:
["task_one/0 result", "task_two/0 result"]
## Maps
Also accepts a map:
async_await_many(%{
one: &task_one/0,
two: &task_two/0
})
Returns:
%{
one: "task_one/0 result",
two: "task_two/0 result"
}
"""
def async_await_many(tasks, options \\ [])
def async_await_many(tasks, options) when is_map(tasks) do
values =
tasks
|> Map.values()
|> async_await_many(options)
tasks
|> Map.keys()
|> Enum.zip(values)
|> Enum.into(%{})
end
def async_await_many(tasks, options) when is_list(options) do
timeout = Keyword.get(options, :timeout, :timer.hours(1))
tasks
|> Enum.map(&Task.async(&1))
|> Enum.map(&Task.await(&1, timeout))
end
@doc """
Convert a list of bitstrings to atoms. If passed the `allow` option with a
list of atoms, only converted values matching that list will be returned.
Options:
`:allow` -> List of allowed atoms. When passed, any converted values not in the list will be removed
Example:
list_to_atoms([:hello, "world"])
Returns:
[:hello, :world]
"""
def list_to_atoms(values, options \\ [])
def list_to_atoms(values, options) when is_list(values) do
allow = Keyword.get(options, :allow)
convert_values_to_atoms(values, allow)
end
def list_to_atoms(value, options) do
[value]
|> list_to_atoms(options)
|> List.first()
end
defp convert_values_to_atoms(values, allow) do
values
|> Enum.reduce([], fn value, acc ->
case convert_value_to_atom(value, allow) do
nil -> acc
value -> [value | acc]
end
end)
|> Enum.reverse()
end
defp convert_value_to_atom(value, allow) when is_atom(value) and is_list(allow) do
case Enum.member?(allow, value) do
true -> value
false -> nil
end
end
defp convert_value_to_atom(value, allow) when is_bitstring(value) and is_list(allow) do
allow_strings = Enum.map(allow, &Artemis.Helpers.to_string(&1))
case Enum.member?(allow_strings, value) do
true -> String.to_atom(value)
false -> nil
end
end
defp convert_value_to_atom(value, _allow) when is_atom(value), do: value
defp convert_value_to_atom(value, _allow) when is_bitstring(value), do: String.to_atom(value)
defp convert_value_to_atom(_value, _allow), do: nil
@doc """
Recursively converts the keys of a map into an atom.
Options:
`:whitelist` -> List of strings to convert to atoms. When passed, only
strings in whitelist will be converted.
`:recursive` -> Boolean, default: true. When true, keys are updated
recursively. When false, only the top level keys are converted.
Example:
keys_to_atoms(%{"nested" => %{"example" => "value"}})
Returns:
%{nested: %{example: "value"}}
"""
def keys_to_atoms(map, options \\ [])
def keys_to_atoms(%_{} = struct, _options), do: struct
def keys_to_atoms(map, options) when is_map(map) do
for {key, value} <- map, into: %{} do
key =
case is_bitstring(key) do
false ->
key
true ->
case Keyword.get(options, :whitelist) do
nil ->
String.to_atom(key)
whitelist ->
case Enum.member?(whitelist, key) do
false -> key
true -> String.to_atom(key)
end
end
end
case Keyword.get(options, :recursive, true) do
true -> {key, keys_to_atoms(value, options)}
_ -> {key, value}
end
end
end
def keys_to_atoms(value, _), do: value
@doc """
Recursively converts the keys of a map into a string.
Options:
`:recursive` -> Boolean, default: true. When true, keys are updated
recursively. When false, only the top level keys are converted.
Example:
keys_to_strings(%{nested: %{example: "value"}})
Returns:
%{"nested" => %{"example" => "value"}}
"""
def keys_to_strings(map, options \\ [])
def keys_to_strings(%_{} = struct, _options), do: struct
def keys_to_strings(map, options) when is_map(map) do
for {key, value} <- map, into: %{} do
key =
case is_atom(key) do
false -> key
true -> Atom.to_string(key)
end
case Keyword.get(options, :recursive, true) do
true -> {key, keys_to_strings(value, options)}
_ -> {key, value}
end
end
end
def keys_to_strings(value, _), do: value
@doc """
Searches a map for a matching atom or bitstring key, then returns the value.
Warning! This is an unsafe action and assumes the map does not have the same
key as a bitstring and an atom. Raises an exception when this case is found.
Example:
indifferent_get(%{"hello" => "world"}, :hello)
Returns:
"world"
Example:
indifferent_get(%{hello: "world"}, "other key", "fallback")
Returns:
"fallback value"
Example:
indifferent_get(%{"hello" => "world", hello: "world"}, :hello)
Returns:
<no return - raises an exception>
"""
def indifferent_get(map, field, fallback \\ nil) when is_map(map) do
field_as_atom = Artemis.Helpers.to_atom(field)
field_as_string = Artemis.Helpers.to_string(field)
atom? = Map.has_key?(map, field_as_atom)
string? = Map.has_key?(map, field_as_string)
error_message = "Indifferent get cannot be used on a map with both atom and string keys"
cond do
atom? && string? -> raise(ArgumentError, error_message)
atom? -> Map.get(map, field_as_atom, fallback)
true -> Map.get(map, field_as_string, fallback)
end
end
@doc """
Searches a map for a matching atom or bitstring key, then updates the value.
Warning! This is an unsafe action and assumes the map does not have the same
key as a bitstring and an atom. Raises an exception when this case is found.
Example:
indifferent_put(%{"hello" => "world"}, :hello, "updated!")
Returns:
%{"hello" => "updated!"}
Example:
indifferent_put(%{hello: "world"}, "new key", "new value")
Returns:
%{hello: "world", "new key" => "new value"}
Example:
indifferent_put(%{"hello" => "world", hello: "world"}, :hello, "updated!")
Returns:
<no return - raises an exception>
"""
def indifferent_put(map, field, value) when is_map(map) do
field_as_atom = Artemis.Helpers.to_atom(field)
field_as_string = Artemis.Helpers.to_string(field)
atom? = Map.has_key?(map, field_as_atom)
string? = Map.has_key?(map, field_as_string)
error_message = "Indifferent put cannot be used on a map with both atom and string keys"
cond do
atom? && string? -> raise(ArgumentError, error_message)
atom? -> Map.put(map, field_as_atom, value)
string? -> Map.put(map, field_as_string, value)
true -> Map.put(map, field, value)
end
end
@doc """
Serialize process id (pid) number to string
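Example (identifiers vary by runtime):
serialize_pid(self())
Returns:
"<0.105.0>"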
"""
def serialize_pid(pid) when is_pid(pid) do
pid
|> :erlang.pid_to_list()
|> :erlang.list_to_binary()
end
@doc """
Deserialize process id (pid) string to pid
"""
def deserialize_pid("#PID" <> string), do: deserialize_pid(string)
def deserialize_pid(string) do
string
|> :erlang.binary_to_list()
|> :erlang.list_to_pid()
end
@doc """
Recursive version of `Map.delete/2`. Deletes all instances of the given key.
Adds support for nested values:
Example:
map = %{
hello: "world",
nested: %{example: "value", hello: "world"}
}
deep_delete(map, :hello)
Returns:
%{
nested: %{example: "value"}
}
"""
def deep_delete(data, delete_key) when is_map(data) do
data
|> Map.delete(delete_key)
|> Enum.reduce(%{}, fn {key, value}, acc ->
Map.put(acc, key, deep_delete(value, delete_key))
end)
end
def deep_delete(data, _), do: data
@doc """
Recursive version of `Map.drop/2`. Adds support for nested values:
Example:
map = %{
simple: "simple",
nested: %{example: "value", other: "value"}
}
deep_drop(map, [nested: [:example]])
Returns:
map = %{
simple: "simple",
nested: %{other: "value"}
}
"""
def deep_drop(map, keys) when is_map(map) do
{nested_keys, simple_keys} = Enum.split_with(keys, &is_tuple/1)
simple = Map.drop(map, simple_keys)
nested =
Enum.reduce(nested_keys, %{}, fn {key, keys}, acc ->
value =
map
|> Map.get(key)
|> deep_drop(keys)
Map.put(acc, key, value)
end)
Map.merge(simple, nested)
end
@doc """
Recursively drops all instances of the given value.
Example:
map = %{
hello: "world",
nested: %{example: "value", hello: "world"}
}
deep_drop_by_value(map, "world")
Returns:
%{
nested: %{example: "value"}
}
"""
def deep_drop_by_value(data, match) when is_map(data) do
matcher = get_deep_drop_by_value_match_function(match)
Enum.reduce(data, %{}, fn {key, value}, acc ->
case matcher.(value) do
true -> acc
false -> Map.put(acc, key, deep_drop_by_value(value, match))
end
end)
end
def deep_drop_by_value(data, _), do: data
defp get_deep_drop_by_value_match_function(match) when is_function(match), do: match
defp get_deep_drop_by_value_match_function(match), do: &(&1 == match)
@doc """
Recursive version of `Access.get/2`. Adds support for nested values:
Example:
map = %{
simple: "simple",
nested: %{example: "value", other: "value"}
}
deep_get(map, [:nested, :example])
Returns:
"value"
"""
def deep_get(data, keys, default \\ nil)
def deep_get(data, [current_key | remaining_keys], default) when is_map(data) or is_list(data) do
value =
cond do
is_map(data) -> Map.get(data, current_key)
is_list(data) -> Keyword.get(data, current_key)
true -> Access.get(data, current_key)
end
case remaining_keys do
[] -> value
_ -> deep_get(value, remaining_keys, default)
end
end
def deep_get(_data, _, default), do: default
@doc """
Recursive version of `Map.fetch/2`. Adds support for nested values:
Example:
map = %{
simple: "simple",
nested: %{example: "value", other: "value"}
}
deep_fetch(map, [:nested, :example])
Returns:
"value"
"""
def deep_fetch(data, keys, default \\ nil)
def deep_fetch(data, [current_key | remaining_keys], default) when is_map(data) do
value = Map.get(data, current_key)
case remaining_keys do
[] -> value
_ -> deep_fetch(value, remaining_keys, default)
end
end
def deep_fetch(_data, _, default), do: default
@doc """
Recursive version of `Kernel.put_in`. Adds support for nested values that do
not already exist:
Example:
current = %{one: 1}
keys = [:two, :three, :four]
deep_put(current, keys, "hello")
Returns:
%{one: 1, two: %{three: %{four: "hello"}}}
From: https://elixirforum.com/t/put-update-deep-inside-nested-maps-and-auto-create-intermediate-keys/7993/8
"""
def deep_put(current \\ %{}, keys, value) do
put_in(current, Enum.map(keys, &Access.key(&1, %{})), value)
end
@doc """
Recursive version of `map_size/1`. Returns the total number of keys in
Maps and Keyword Lists.
All other values, including Lists, return 0.
Example:
map = %{
hello: "world",
nested: %{example: "value", hello: "world"},
keywords: [one: 1, two: 2],
list: [1, 2, 3]
}
deep_size(map)
Returns:
8
"""
def deep_size(data) when is_map(data) do
Enum.reduce(data, 0, fn {_, value}, acc ->
1 + deep_size(value) + acc
end)
end
def deep_size(data) when is_list(data) do
case Keyword.keyword?(data) do
false ->
0
true ->
Enum.reduce(data, 0, fn {_, value}, acc ->
1 + deep_size(value) + acc
end)
end
end
def deep_size(_), do: 0
@doc """
Recursive version of `Map.take/2`. Adds support for nested values:
Example:
map = %{
simple: "simple",
nested: %{example: "value", other: "value"}
}
deep_take(map, [:simple, nested: [:example]])
Returns:
map = %{
simple: "simple",
nested: %{example: "value"}
}
"""
def deep_take(map, keys) when is_map(map) do
{nested_keys, simple_keys} = Enum.split_with(keys, &is_tuple/1)
simple = Map.take(map, simple_keys)
nested =
Enum.reduce(nested_keys, %{}, fn {key, keys}, acc ->
value =
map
|> Map.get(key)
|> deep_take(keys)
Map.put(acc, key, value)
end)
Map.merge(simple, nested)
end
@doc """
Generate a slug value from bitstring
"""
def generate_slug(value, limit \\ 80)
def generate_slug(nil, _limit), do: nil
def generate_slug(value, limit) do
slug = Slugger.slugify_downcase(value)
case is_number(limit) do
true -> Slugger.truncate_slug(slug, limit)
false -> slug
end
end
@doc """
Print entire value without truncation
"""
def print(value) do
IO.inspect(value, limit: :infinity, printable_limit: :infinity)
end
@doc """
Benchmark execution time
Options:
log_level -> when not set, uses default value set in an env variable
Example:
Artemis.Helpers.benchmark("Sleep Performance", fn ->
:timer.sleep(5_000)
end, log_level: :info)
"""
def benchmark(callback), do: benchmark(nil, callback)
def benchmark(callback, options) when is_list(options), do: benchmark(nil, callback, options)
def benchmark(key, callback, options \\ []) do
start_time = Timex.now()
result = callback.()
end_time = Timex.now()
duration = Timex.diff(end_time, start_time, :milliseconds)
default_log_level = Artemis.Helpers.AppConfig.fetch!(:artemis, :benchmark, :default_log_level)
options = Keyword.put_new(options, :log_level, default_log_level)
message = [
type: "Benchmark",
key: key,
duration: "#{duration}ms"
]
log(message, options)
result
end
@doc """
Send values to Logger
"""
def log(values, options \\ [])
def log(values, options) when is_list(values) do
message = format_log_message(values)
log(message, options)
end
def log(message, options) do
log_level = get_log_level(options)
Logger.log(log_level, message)
end
defp format_log_message(values) do
values
|> Enum.map(fn {key, value} ->
case is_nil(value) do
true -> nil
false -> "[#{key}: #{value}]"
end
end)
|> Enum.reject(&is_nil/1)
|> Enum.join(" ")
end
defp get_log_level(options) do
default_log_level = :info
log_level =
options
|> Keyword.get(:log_level, Keyword.get(options, :level))
|> Kernel.||(default_log_level)
|> Artemis.Helpers.to_string()
case log_level do
"emergency" -> :emergency
"alert" -> :alert
"critical" -> :critical
"error" -> :error
"warning" -> :warning
"notice" -> :notice
"info" -> :info
_ -> :debug
end
end
@doc """
Log application start
"""
def log_application_start(name) do
type = "ApplicationStart"
log(type: type, key: name, start: Timex.now())
end
@doc """
Log rescued errors
"""
def rescue_log(stacktrace \\ nil, caller, error) do
default_values = [
caller: serialize_caller(caller),
error: Map.get(error, :__struct__),
message: Map.get(error, :message, inspect(error)),
stacktrace: serialize_stacktrace(stacktrace)
]
log_message = format_log_message(default_values)
Logger.error(log_message)
end
defp serialize_caller(caller) when is_map(caller), do: Map.get(caller, :__struct__)
defp serialize_caller(caller), do: caller
defp serialize_stacktrace(nil), do: nil
defp serialize_stacktrace(stacktrace) do
stacktrace =
stacktrace
|> Enum.map(&inspect(&1))
|> Enum.join("\n ")
"\n " <> stacktrace
end
@doc """
Send values to Error
"""
def error(values) when is_list(values) do
message = format_log_message(values)
Logger.error(message)
end
def error(message), do: Logger.error(message: message)
@doc """
Convert an Ecto Query into SQL
Example:
Customer
|> distinct_query(params, default: false)
|> order_query(params)
|> Artemis.Helpers.print_to_sql(Artemis.Repo)
|> Repo.all()
"""
def print_to_sql(query, repo) do
IO.inspect(Ecto.Adapters.SQL.to_sql(:all, repo, query))
query
end
end
| apps/artemis/lib/artemis/helpers.ex |
defmodule SecureServer do
@moduledoc """
SecureServer provides the encoder and decoder for secure Phoenix web servers.
While not all the functions defined here are used in Phoenix or Plug, they are
important to have to implement a 'complete' encoder/decoder.
The encoding and decoding functions in this file MUST match those in
`:secure_client`, or else they will not be able to communicate.
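A round-trip sketch (illustrative, assuming `Cloak` is configured with a key):
{:ok, encoded} = SecureServer.encode(%{"hello" => "world"})
SecureServer.decode(encoded)
# {:ok, %{"hello" => "world"}}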
"""
@type encoded :: binary
@type safe_encoded :: {:ok, encoded} | {:error, any}
@type decoded :: map | struct | list
@type safe_decoded :: {:ok, decoded} | {:error, any}
@doc """
Encodes any valid elixir structure as an encrypted, base64 encoded binary.
"""
@spec encode(decoded) :: safe_encoded
def encode(data) do
with {:ok, json} <- Poison.encode(data),
do: {:ok, json |> Cloak.encrypt |> :base64.encode}
end
@doc """
Decodes any encrypted, base64 encoded binary into a valid elixir structure.
"""
@spec decode(encoded) :: safe_decoded
def decode(data) do
data
|> :base64.decode
|> Cloak.decrypt
|> Poison.decode
end
@doc """
Calls `encode/1`. Since the data ends up encoded with base64, the iodata is
the same as the binary.
"""
@spec encode_to_iodata(decoded) :: safe_encoded
def encode_to_iodata(data), do: encode(data)
@doc """
Encodes any valid elixir structure as an encrypted, base64 encoded binary.
"""
@spec encode!(decoded) :: encoded
def encode!(data) do
data
|> Poison.encode!
|> Cloak.encrypt
|> :base64.encode
end
@doc """
Decodes any encrypted, base64 encoded binary into a valid elixir structure.
This method is used in Plug.Parsers.ENCRYPTED.
"""
@spec decode!(encoded) :: decoded
def decode!(data) do
data
|> :base64.decode
|> Cloak.decrypt
|> Poison.decode!
end
@doc """
Calls `encode!/1`. Since the data ends up encoded with base64, the iodata is
the same as the binary.
This function is called when rendering encrypted data in Phoenix.
"""
@spec encode_to_iodata!(decoded) :: encoded
def encode_to_iodata!(data), do: encode!(data)
end
| lib/secure_server.ex |
defmodule Pager do
@moduledoc """
Pager is a library for adding cursor-based pagination to Ecto. It
provides an efficient means of paginating through a resultset, but
it requires some buy-in to take advantage of.
In the cursor model, each record in the resultset has an associated
'cursor', a value that represents its position within the ordered
resultset. By using an object's cursor, we can ask for results
before or after a given result.
## Use
Let us say we wish to paginate a list of users. To make it easy, we
shall have a serial primary key, only a single database node and be
querying by order of signup.
Our cursor format will be: `[user.id]`. Because it is a primary key,
it is unique and because it is a serial and we only have one database
node, it will naturally encode the order of signup.
```elixir
defmodule Example do
import Ecto.Query
def pager() do
%Pager{
cursor_generator: &[&1.id],
cursor_validator: &match?([id] when is_integer(id), &1), # validates the whole cursor, [user.id]
default_limit: 25, # When the user doesn't provide a limit
max_limit: 100, # Can't go higher than this
min_limit: 1, # Can't go lower than this
overflow: :saturate, # When you go higher, pin it at the maximum
underflow: :saturate, # When you go lower, pin it at the minimum
}
end
def list_users(options) do
with {:ok, opts} <- Pager.cast(options, pager()) do
# build and run the paginated query with `opts` (limit/before/after) here
opts
end
end
end
```
## Cursors in detail
A cursor is a list of field values for a record. The structure of
the cursor is a property of the page, that is to say cursors for a
page should be generated from the same fields used in the `order`
clause of your query, in the order they are used.
Cursors must be unique in a resultset in order to ensure pagination
works reliably. For example if you sort users by followers, it's
quite conceivable that many users will have the same number of
followers, particularly when that number is low. By sorting by an
additional unique column, you create a total ordering which allows
pagination to always work reliably.
"""
alias Pager.InvalidCursor
@enforce_keys [:cursor_generator, :cursor_validator]
defstruct [
:cursor_generator,
:cursor_validator,
:default_limit,
:max_limit,
:min_limit,
:overflow,
:underflow,
]
@type cursor :: [term]
@type page_opts :: %{
optional(:after) => cursor,
optional(:before) => cursor,
optional(:limit) => non_neg_integer,
}
@type processed_page_opts :: %{
required(:limit) => non_neg_integer,
optional(:after) => cursor,
optional(:before) => cursor,
}
@type cursor_generator :: (term -> cursor)
@type cursor_validator :: (cursor -> boolean)
@type t :: %Pager{
cursor_generator: cursor_generator,
cursor_validator: cursor_validator,
default_limit: pos_integer | nil,
max_limit: pos_integer | nil,
min_limit: pos_integer | nil,
overflow: :saturate | :default | nil,
underflow: :saturate | :default | nil,
}
@default_limit 25
@max_limit 100
@min_limit 1
@underflow :saturate
@overflow :saturate
@doc "Given a list of keys and a record, generate a cursor"
def generate_cursor(keys, data), do: Enum.map(keys, &Map.fetch!(data, &1))
@doc "Given a cursor and a list of predicates, does the cursor pass?"
def validate_cursor(cursor, tests)
def validate_cursor([], []), do: true
def validate_cursor([], _), do: false
def validate_cursor(_, []), do: false
def validate_cursor([c|cs], [t|ts]), do: t.(c) and validate_cursor(cs, ts)
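# e.g. validate_cursor([42, "bob"], [&is_integer/1, &is_binary/1]) #=> true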
@doc """
Casts paging options on pages where we respect both limits and
before/after cursors.
"""
@spec cast(page_opts, Pager.t) :: {:ok, processed_page_opts} | {:error, term}
def cast(opts, %Pager{cursor_validator: v}=pager) when is_function(v, 1) do
case opts do
%{after: a} -> cast_relative(opts, pager, :after, a)
%{before: b} -> cast_relative(opts, pager, :before, b)
%{} -> {:ok, cast_limit(opts, pager)}
end
end
defp cast_relative(opts, pager, key, val) do
if pager.cursor_validator.(val),
do: {:ok, Map.put(cast_limit(opts, pager), key, val)},
else: {:error, InvalidCursor.new(key)}
end
@doc """
Casts paging options on pages where we only respect limits, such
as in multi-parent batched graphql queries.
"""
@spec cast_limit(page_opts, Pager.t) :: %{limit: pos_integer}
def cast_limit(opts, %Pager{}=pager) do
default = option(pager, :default_limit, @default_limit)
limit = option(opts, :limit, default)
max = option(pager, :max_limit, @max_limit)
min = option(pager, :min_limit, @min_limit)
underflow = option(pager, :underflow, @underflow)
overflow = option(pager, :overflow, @overflow)
cond do
limit > max and overflow == :default -> %{limit: default}
limit > max -> %{limit: max}
limit < min and underflow == :default -> %{limit: default}
limit < min -> %{limit: min}
true -> %{limit: limit}
end
end
# retrieve a key from the options, or config, or use a fallback
defp option(options, key, fallback), do: options[key] || config(key, fallback)
# retrieve a key from this app's config
defp config(key, default), do: Application.get_env(:pager, key, default)
@doc """
The number of rows an ecto query should select. Expects a map as
given by either of `cast/2` or `cast_limit/2`.
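The extra row (or two, when an `:after`/`:before` cursor is involved) beyond
`:limit` presumably lets the caller detect whether adjacent pages exist
without issuing a second query.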
"""
@spec ecto_limit(processed_page_opts) :: non_neg_integer
def ecto_limit(%{limit: l, after: _}), do: l + 2
def ecto_limit(%{limit: l, before: _}), do: l + 2
def ecto_limit(%{limit: l}), do: l + 1
end
| lib/pager.ex |
defmodule Accent.Scopes.Translation do
import Ecto.Query
alias Ecto.Queryable
alias Accent.{Operation, Repo, Translation}
@doc """
Default ordering is by ascending key
## Examples
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, nil)
#Ecto.Query<from t0 in Accent.Translation, order_by: [asc: t0.key]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "key")
#Ecto.Query<from t0 in Accent.Translation, order_by: [asc: t0.key]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "-key")
#Ecto.Query<from t0 in Accent.Translation, order_by: [desc: t0.key]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "updated")
#Ecto.Query<from t0 in Accent.Translation, order_by: [asc: t0.updated_at]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "-updated")
#Ecto.Query<from t0 in Accent.Translation, order_by: [desc: t0.updated_at]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "index")
#Ecto.Query<from t0 in Accent.Translation, order_by: [asc: t0.file_index]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "-index")
#Ecto.Query<from t0 in Accent.Translation, order_by: [desc: t0.file_index]>
"""
@spec parse_order(Queryable.t(), any()) :: Queryable.t()
def parse_order(query, "index"), do: from(query, order_by: [asc: :file_index])
def parse_order(query, "-index"), do: from(query, order_by: [desc: :file_index])
def parse_order(query, "key"), do: from(query, order_by: [asc: :key])
def parse_order(query, "-key"), do: from(query, order_by: [desc: :key])
def parse_order(query, "updated"), do: from(query, order_by: [asc: :updated_at])
def parse_order(query, "-updated"), do: from(query, order_by: [desc: :updated_at])
def parse_order(query, "master"),
do:
from(translations in query,
inner_join: revisions in assoc(translations, :revision),
inner_join: languages in assoc(revisions, :language),
order_by: fragment("(case when ? then 0 else 2 end) ASC", revisions.master)
)
def parse_order(query, _), do: from(query, order_by: [asc: :key])
@doc """
## Examples
iex> Accent.Scopes.Translation.active(Accent.Translation)
#Ecto.Query<from t0 in Accent.Translation, where: t0.removed == false>
"""
@spec active(Queryable.t()) :: Queryable.t()
def active(query), do: from(query, where: [removed: false])
@doc """
## Examples
iex> Accent.Scopes.Translation.not_locked(Accent.Translation)
#Ecto.Query<from t0 in Accent.Translation, where: t0.locked == false>
"""
@spec not_locked(Queryable.t()) :: Queryable.t()
def not_locked(query), do: from(query, where: [locked: false])
@doc """
## Examples
iex> Accent.Scopes.Translation.parse_conflicted(Accent.Translation, nil)
Accent.Translation
iex> Accent.Scopes.Translation.parse_conflicted(Accent.Translation, false)
#Ecto.Query<from t0 in Accent.Translation, where: t0.conflicted == false>
iex> Accent.Scopes.Translation.parse_conflicted(Accent.Translation, true)
#Ecto.Query<from t0 in Accent.Translation, where: t0.conflicted == true>
"""
@spec parse_conflicted(Queryable.t(), nil | boolean()) :: Queryable.t()
def parse_conflicted(query, nil), do: query
def parse_conflicted(query, false), do: not_conflicted(query)
def parse_conflicted(query, true), do: conflicted(query)
@doc """
## Examples
iex> Accent.Scopes.Translation.parse_empty(Accent.Translation, nil)
Accent.Translation
iex> Accent.Scopes.Translation.parse_empty(Accent.Translation, true)
#Ecto.Query<from t0 in Accent.Translation, where: t0.value_type in ^["empty", "null"] or t0.corrected_text == "">
"""
@spec parse_empty(Queryable.t(), nil | boolean()) :: Queryable.t()
def parse_empty(query, nil), do: query
def parse_empty(query, true), do: from(translations in query, where: translations.value_type in ^["empty", "null"] or translations.corrected_text == "")
@doc """
## Examples
iex> Accent.Scopes.Translation.parse_not_empty(Accent.Translation, nil)
Accent.Translation
iex> Accent.Scopes.Translation.parse_not_empty(Accent.Translation, true)
#Ecto.Query<from t0 in Accent.Translation, where: t0.corrected_text != "">
"""
@spec parse_not_empty(Queryable.t(), nil | boolean()) :: Queryable.t()
def parse_not_empty(query, nil), do: query
def parse_not_empty(query, true), do: from(translations in query, where: translations.corrected_text != "")
@spec parse_added_last_sync(Queryable.t(), nil | boolean(), String.t()) :: Queryable.t()
def parse_added_last_sync(query, nil, _), do: query
def parse_added_last_sync(query, true, project_id) do
from(
operations in Operation,
where: operations.project_id == ^project_id,
where: operations.action == ^"sync",
select: operations.id,
limit: 1,
order_by: [desc: operations.inserted_at]
)
|> Repo.one()
|> case do
nil ->
query
last_sync_id ->
from(
translations in query,
inner_join: operations in assoc(translations, :operations),
where: operations.batch_operation_id == ^last_sync_id
)
end
end
@doc """
## Examples
iex> Accent.Scopes.Translation.parse_commented_on(Accent.Translation, nil)
Accent.Translation
iex> Accent.Scopes.Translation.parse_commented_on(Accent.Translation, true)
#Ecto.Query<from t0 in Accent.Translation, join: c1 in assoc(t0, :comments)>
"""
@spec parse_commented_on(Queryable.t(), nil | boolean()) :: Queryable.t()
def parse_commented_on(query, nil), do: query
def parse_commented_on(query, true) do
from(
translations in query,
inner_join: comments in assoc(translations, :comments)
)
end
@doc """
## Examples
iex> Accent.Scopes.Translation.conflicted(Accent.Translation)
#Ecto.Query<from t0 in Accent.Translation, where: t0.conflicted == true>
"""
@spec conflicted(Queryable.t()) :: Queryable.t()
def conflicted(query), do: from(query, where: [conflicted: true])
@doc """
## Examples
iex> Accent.Scopes.Translation.not_conflicted(Accent.Translation)
#Ecto.Query<from t0 in Accent.Translation, where: t0.conflicted == false>
"""
@spec not_conflicted(Queryable.t()) :: Queryable.t()
def not_conflicted(query), do: from(query, where: [conflicted: false])
@spec related_to(Queryable.t(), Translation.t()) :: Queryable.t()
def related_to(query, translation) do
query
|> from_key(translation.key)
|> from_document(translation.document_id)
|> from_version(translation.version_id)
|> distinct([translations], translations.revision_id)
|> subquery()
|> from()
|> parse_order("master")
end
@spec related_to_one(Queryable.t(), Translation.t()) :: Queryable.t()
def related_to_one(query, translation) do
query
|> related_to(translation)
|> limit(1)
end
@doc """
## Examples
iex> Accent.Scopes.Translation.no_version(Accent.Translation)
#Ecto.Query<from t0 in Accent.Translation, where: is_nil(t0.version_id)>
"""
@spec no_version(Queryable.t()) :: Queryable.t()
def no_version(query), do: from_version(query, nil)
@doc """
## Examples
iex> Accent.Scopes.Translation.from_version(Accent.Translation, nil)
#Ecto.Query<from t0 in Accent.Translation, where: is_nil(t0.version_id)>
iex> Accent.Scopes.Translation.from_version(Accent.Translation, "test")
#Ecto.Query<from t0 in Accent.Translation, where: t0.version_id == ^"test">
"""
@spec from_version(Queryable.t(), any()) :: Queryable.t()
def from_version(query, nil), do: from(t in query, where: is_nil(t.version_id))
def from_version(query, version_id), do: from(query, where: [version_id: ^version_id])
@doc """
## Examples
iex> Accent.Scopes.Translation.from_revision(Accent.Translation, "test")
#Ecto.Query<from t0 in Accent.Translation, where: t0.revision_id == ^"test">
"""
@spec from_revision(Queryable.t(), String.t()) :: Queryable.t()
def from_revision(query, revision_id), do: from(query, where: [revision_id: ^revision_id])
@doc """
## Examples
iex> Accent.Scopes.Translation.not_from_revision(Accent.Translation, "test")
#Ecto.Query<from t0 in Accent.Translation, where: t0.revision_id != ^"test">
"""
@spec not_from_revision(Queryable.t(), nil | String.t()) :: Queryable.t()
def not_from_revision(query, nil), do: query
def not_from_revision(query, revision_id), do: from(t in query, where: t.revision_id != ^revision_id)
@doc """
## Examples
iex> Accent.Scopes.Translation.from_revisions(Accent.Translation, ["test"])
#Ecto.Query<from t0 in Accent.Translation, where: t0.revision_id in ^["test"]>
"""
@spec from_revisions(Queryable.t(), list(String.t())) :: Queryable.t()
def from_revisions(query, revision_ids), do: from(t in query, where: t.revision_id in ^revision_ids)
@doc """
## Examples
iex> Accent.Scopes.Translation.from_project(Accent.Translation, "test")
#Ecto.Query<from t0 in Accent.Translation, join: p1 in assoc(t0, :project), where: p1.id == ^"test">
"""
@spec from_project(Queryable.t(), String.t()) :: Queryable.t()
def from_project(query, project_id) do
from(
translation in query,
inner_join: project in assoc(translation, :project),
where: project.id == ^project_id
)
end
@doc """
## Examples
iex> Accent.Scopes.Translation.from_document(Accent.Translation, nil)
#Ecto.Query<from t0 in Accent.Translation, where: is_nil(t0.document_id)>
iex> Accent.Scopes.Translation.from_document(Accent.Translation, :all)
Accent.Translation
iex> Accent.Scopes.Translation.from_document(Accent.Translation, "test")
#Ecto.Query<from t0 in Accent.Translation, where: t0.document_id == ^"test">
"""
@spec from_document(Queryable.t(), any()) :: Queryable.t()
def from_document(query, nil), do: from(t in query, where: is_nil(t.document_id))
def from_document(query, :all), do: query
def from_document(query, document_id), do: from(query, where: [document_id: ^document_id])
@doc """
## Examples
iex> Accent.Scopes.Translation.from_documents(Accent.Translation, ["test"])
#Ecto.Query<from t0 in Accent.Translation, where: t0.document_id in ^["test"]>
"""
@spec from_documents(Queryable.t(), list(String.t())) :: Queryable.t()
def from_documents(query, document_ids), do: from(t in query, where: t.document_id in ^document_ids)
@doc """
## Examples
iex> Accent.Scopes.Translation.from_key(Accent.Translation, "test")
#Ecto.Query<from t0 in Accent.Translation, where: t0.key == ^"test">
"""
@spec from_key(Queryable.t(), String.t()) :: Queryable.t()
def from_key(query, key), do: from(query, where: [key: ^key])
@doc """
## Examples
iex> Accent.Scopes.Translation.from_keys(Accent.Translation, ["test"])
#Ecto.Query<from t0 in Accent.Translation, where: t0.key in ^["test"]>
"""
@spec from_keys(Queryable.t(), list(String.t())) :: Queryable.t()
def from_keys(query, key_ids), do: from(t in query, where: t.key in ^key_ids)
@doc """
## Examples
iex> Accent.Scopes.Translation.from_search(Accent.Translation, "")
Accent.Translation
iex> Accent.Scopes.Translation.from_search(Accent.Translation, nil)
Accent.Translation
iex> Accent.Scopes.Translation.from_search(Accent.Translation, 1234)
Accent.Translation
iex> Accent.Scopes.Translation.from_search(Accent.Translation, "test")
#Ecto.Query<from t0 in Accent.Translation, where: ilike(t0.key, ^\"%test%\") or ilike(t0.corrected_text, ^\"%test%\")>
iex> Accent.Scopes.Translation.from_search(Accent.Translation, "030519c4-1d47-42bb-95ee-205880be01d9")
#Ecto.Query<from t0 in Accent.Translation, where: ilike(t0.key, ^\"%030519c4-1d47-42bb-95ee-205880be01d9%\") or ilike(t0.corrected_text, ^\"%030519c4-1d47-42bb-95ee-205880be01d9%\"), or_where: t0.id == ^\"030519c4-1d47-42bb-95ee-205880be01d9\">
"""
@spec from_search(Queryable.t(), any()) :: Queryable.t()
def from_search(query, nil), do: query
def from_search(query, term) when term === "", do: query
def from_search(query, term) when not is_binary(term), do: query
def from_search(query, search_term) do
term = "%" <> search_term <> "%"
from(
translation in query,
where: ilike(translation.key, ^term) or ilike(translation.corrected_text, ^term)
)
|> from_search_id(search_term)
end
defp from_search_id(query, key) do
case Ecto.UUID.cast(key) do
{:ok, uuid} -> from(t in query, or_where: [id: ^uuid])
_ -> query
end
end
@doc """
## Examples
iex> Accent.Scopes.Translation.select_key_text(Accent.Translation)
#Ecto.Query<from t0 in Accent.Translation, select: %{id: t0.id, key: t0.key, updated_at: t0.updated_at, corrected_text: t0.corrected_text}>
"""
@spec select_key_text(Queryable.t()) :: Queryable.t()
def select_key_text(query) do
from(
translation in query,
select: %{
id: translation.id,
key: translation.key,
updated_at: translation.updated_at,
corrected_text: translation.corrected_text
}
)
end
end
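# Usage sketch (not part of the library; `Accent.Translation` is assumed to be the
# schema module used in the doctests above): every scope takes and returns an
# `Ecto.Queryable`, so scopes compose by piping.
_query =
  Accent.Translation
  |> Accent.Scopes.Translation.from_project("project-id")
  |> Accent.Scopes.Translation.no_version()
  |> Accent.Scopes.Translation.from_search("welcome")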
lib/accent/scopes/translation.ex
defmodule Oban.Plugins.Reindexer do
@moduledoc """
Periodically rebuild indexes to minimize database bloat.
Over time various Oban indexes may grow without `VACUUM` cleaning them up properly. When this
happens, rebuilding the indexes will release bloat.
The plugin uses `REINDEX` with the `CONCURRENTLY` option to rebuild without taking any locks
that prevent concurrent inserts, updates, or deletes on the table.
Note: This plugin requires the `CONCURRENTLY` option, which is only available in Postgres 12 and
above.
## Using the Plugin
By default, the plugin will reindex once a day, at midnight UTC:
config :my_app, Oban,
plugins: [Oban.Plugins.Reindexer],
...
To run on a different schedule you can provide a cron expression. For example, you could use the
`"@weekly"` shorthand to run once a week on Sunday:
config :my_app, Oban,
plugins: [{Oban.Plugins.Reindexer, schedule: "@weekly"}],
...
## Options
* `:schedule` — a cron expression that controls when to reindex. Defaults to `"@midnight"`.
* `:timezone` — which timezone to use when evaluating the schedule. To use a timezone other than
the default of "Etc/UTC" you *must* have a timezone database like [tzdata][tzdata] installed
and configured.
[tzdata]: https://hexdocs.pm/tzdata
"""
@behaviour Oban.Plugin
use GenServer
alias Oban.Cron.Expression
alias Oban.Plugins.Cron
alias Oban.{Peer, Plugin, Repo, Validation}
@type option :: Plugin.option() | {:schedule, binary()} | {:timezone, binary()}
defmodule State do
@moduledoc false
defstruct [:conf, :name, :schedule, :timer, timezone: "Etc/UTC"]
end
@impl Plugin
@spec start_link([option()]) :: GenServer.on_start()
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: opts[:name])
end
@impl Plugin
def validate(opts) do
Validation.validate(opts, fn
{:conf, _} -> :ok
{:name, _} -> :ok
{:schedule, schedule} -> validate_schedule(schedule)
{:timezone, timezone} -> Validation.validate_timezone(:timezone, timezone)
option -> {:error, "unknown option provided: #{inspect(option)}"}
end)
end
@impl GenServer
def init(opts) do
Validation.validate!(opts, &validate/1)
Process.flag(:trap_exit, true)
opts =
opts
|> Keyword.put_new(:schedule, "@midnight")
|> Keyword.update!(:schedule, &Expression.parse!/1)
state =
State
|> struct!(opts)
|> schedule_reindex()
:telemetry.execute([:oban, :plugin, :init], %{}, %{conf: state.conf, plugin: __MODULE__})
{:ok, state}
end
@impl GenServer
def terminate(_reason, %State{timer: timer}) do
if is_reference(timer), do: Process.cancel_timer(timer)
:ok
end
@impl GenServer
def handle_info(:reindex, %State{} = state) do
meta = %{conf: state.conf, plugin: __MODULE__}
:telemetry.span([:oban, :plugin], meta, fn ->
case check_leadership_and_reindex(state) do
{:ok, _} ->
{:ok, meta}
error ->
{:error, Map.put(meta, :error, error)}
end
end)
{:noreply, schedule_reindex(state)}
end
# Validation
defp validate_schedule(schedule) do
Expression.parse!(schedule)
:ok
rescue
error in [ArgumentError] -> {:error, error}
end
# Scheduling
defp schedule_reindex(state) do
timer = Process.send_after(self(), :reindex, Cron.interval_to_next_minute())
%{state | timer: timer}
end
# Reindexing
defp check_leadership_and_reindex(state) do
  if Peer.leader?(state.conf) do
    {:ok, datetime} = DateTime.now(state.timezone)
    if Expression.now?(state.schedule, datetime) do
      table = "#{state.conf.prefix}.oban_jobs"
      Repo.query(state.conf, "REINDEX TABLE CONCURRENTLY #{table}", [])
    else
      # Outside the scheduled window: return a success tuple so the telemetry
      # span in handle_info/2 doesn't record a spurious error for a nil result.
      {:ok, []}
    end
  else
    {:ok, []}
  end
end
end
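# Usage sketch (hypothetical `config/config.exs`; `MyApp.Repo` is an assumed repo):
# reindex every Sunday at 03:00 in a non-UTC zone, which requires a tzdata setup.
import Config
config :my_app, Oban,
  repo: MyApp.Repo,
  plugins: [{Oban.Plugins.Reindexer, schedule: "0 3 * * 0", timezone: "America/Montreal"}]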
lib/oban/plugins/reindexer.ex
defmodule Romanex do
@moduledoc """
Encode, Decode, and Validate roman numerals.
Letter values are:
M = 1000, D = 500, C = 100, L = 50, X = 10, V = 5, I = 1
The Range of Values representable by roman numerals is:
1 - 4999
"""
@doc "Encode an Integer into a Roman Numeral."
@spec encode(integer) :: {atom(), String.t}
def encode(int) when is_integer int do
cond do
int >= 5000 -> {:error, "too big"}
int <= 0 -> {:error, "too small"}
true -> {:ok, do_encode(int)}
end
end
defp do_encode(int) do
cond do
int == 0 -> ""
int >= 1000 -> "M" <> do_encode(int-1000)
int >= 900 -> "CM" <> do_encode(int-900)
int >= 500 -> "D" <> do_encode(int-500)
int >= 400 -> "CD" <> do_encode(int-400)
int >= 100 -> "C" <> do_encode(int-100)
int >= 90 -> "XC" <> do_encode(int-90)
int >= 50 -> "L" <> do_encode(int-50)
int >= 40 -> "XL" <> do_encode(int-40)
int >= 10 -> "X" <> do_encode(int-10)
int == 9 -> "IX"
int >= 5 -> "V" <> do_encode(int-5)
int == 4 -> "IV"
int >= 1 -> "I" <> do_encode(int-1)
end
end
@doc ~S"""
Decode a Roman Numeral into an Integer
Returns {:ok, result} or {:error, position-of-error}
Lesser value letters that come after Higher value letters signify addition.
Only 1 letter may be subtracted from another letter.
EX: 8 is VIII and never IIX.
Subtraction can only occur if the result does not equal another letter.
EX: 50 is never LC, as L is already 50.
Lesser value letters that come before Higher value letters signify subtraction.
Letters that are repeated signify addition.
A letter is conventionally repeated at most 3 times.
EX: 4 is written IV rather than IIII (though, per the last rule below, the decoder tolerates a 4th repetition of I, X, C, or M).
Addition can only occur if the result does not equal another letter.
EX: 100 is always C and never LL
V, L, and D may appear only once.
I, X, C, and M may appear up to 4 times.
"""
@spec decode(String.t) :: {:ok | :error, non_neg_integer}
def decode(rnum) when is_binary rnum do
String.upcase(rnum)
|> do_decode(0, nil, 0, 1)
end
defp do_decode("", total, _, subtotal,_), do: {:ok, total + subtotal}
defp do_decode(<<rn::utf8, rns::binary>>, tot, prn, st, char) do
[tot, st] = case [prn, rn] do
[nil, ?M] -> [tot, 1000]
[?M, ?M] when st < 4000 -> [tot, st+1000]
[a, ?D] when a in [?M, nil] -> [tot+st, 500]
[?C, ?D] when st == 100 -> [tot+400, 0]
[a, ?C] when a in [?M,?D,nil] and rem(tot, 500) == 0 -> [tot+st, 100]
[?C, ?C] when st < 400 -> [tot, st+100]
[?X, ?C] when st == 10 -> [tot+90, 0]
[a, ?L] when a in [?M, ?D, ?C, nil] and rem(tot, 100) == 0 -> [tot+st, 50]
[?X, ?L] when st == 10 -> [tot+40, 0]
[a, ?X] when a in [?M, ?D, ?C, ?L, nil] and rem(tot, 50) == 0 -> [tot+st, 10]
[?X, ?X] when st < 40 -> [tot, st+10]
[?I, ?X] when st == 1 -> [tot+9, 0]
[a, ?V] when not a in [?I,?V] and rem(tot, 10) == 0 -> [tot+st, 5]
[?I, ?V] when st == 1 and rem(tot, 10) == 0 -> [tot+4, 0]
[a, ?I] when not a in [?I] and rem(tot, 5) == 0 -> [tot+st, 1]
[?I, ?I] when st < 4 -> [tot, st+1]
[_,_] -> [:error, char]
end
case [tot, st] do
[:error, st] -> {:error, st}
_ -> do_decode(rns, tot, rn, st, char+1)
end
end
@doc "Validates a Roman Numeral. Returns true or false"
@spec valid?(String.t) :: boolean
def valid?(rnum) when is_binary rnum do
case decode(rnum) do
{:ok, _} -> true
_ -> false
end
end
end
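# Usage sketch: encode and decode round-trip across the supported range.
{:ok, "MMXXIV"} = Romanex.encode(2024)
{:ok, 2024} = Romanex.decode("mmxxiv")     # decode/1 upcases its input first
{:error, "too big"} = Romanex.encode(5000)
false = Romanex.valid?("LC")               # 50 is already L, so LC is rejected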
lib/romanex.ex
defmodule GCM do
@moduledoc """
GCM push notifications to devices.
```
iex> GCM.push("api_key", ["registration_id"], %{notification: %{ title: "Hello!"} })
{:ok,
%{body: "...",
canonical_ids: [], failure: 0,
headers: [{"Content-Type", "application/json; charset=UTF-8"},
{"Vary", "Accept-Encoding"}, {"Transfer-Encoding", "chunked"}],
invalid_registration_ids: [], not_registered_ids: [], status_code: 200,
success: 1}}
```
"""
alias HTTPoison.Response
@base_url "https://gcm-http.googleapis.com/gcm"
@doc """
Push a notification to a list of `registration_ids` or a single `registration_id`
using the `api_key` as authorization.
```
iex> GCM.push(api_key, ["registration_id1", "registration_id2"])
{:ok,
%{body: "...",
canonical_ids: [], failure: 0,
headers: [{"Content-Type", "application/json; charset=UTF-8"},
{"Vary", "Accept-Encoding"}, {"Transfer-Encoding", "chunked"}],
invalid_registration_ids: [], not_registered_ids: [], status_code: 200,
success: 2}}
```
"""
@spec push(String.t, String.t | [String.t], map | Keyword.t) :: { :ok, map } | { :error, term }
def push(api_key, registration_ids, options \\ %{}) do
registration_ids = List.wrap(registration_ids)
body = case registration_ids do
[id] -> %{ to: id }
ids -> %{ registration_ids: ids }
end
|> Map.merge(options)
|> Poison.encode!
case HTTPoison.post(@base_url <> "/send", body, headers(api_key)) do
{ :ok, response } -> build_response(registration_ids, response)
error -> error
end
end
defp build_response(registration_ids, %Response{ headers: headers, status_code: 200, body: body }) do
response = body |> Poison.decode!
results = build_results(response, registration_ids)
|> Map.merge(%{ failure: response["failure"],
success: response["success"],
body: body, headers: headers,
status_code: 200 })
{ :ok, results }
end
defp build_response(_, %Response{ status_code: 400 }) do
{ :error, :bad_request }
end
defp build_response(_, %Response{ status_code: 401 }) do
{ :error, :unauthorized }
end
defp build_response(_, %Response{ status_code: 503 }) do
{ :error, :service_unavailable }
end
defp build_response(_, %Response{ status_code: code }) when code in 500..599 do
{ :error, :server_error }
end
@empty_results %{ not_registered_ids: [],
canonical_ids: [],
invalid_registration_ids: [],
to_be_retried_ids: [] }
defp build_results(%{ "failure" => 0, "canonical_ids" => 0 }, _), do: @empty_results
defp build_results(%{ "results" => results}, reg_ids) do
response = @empty_results
Enum.zip(reg_ids, results)
|> Enum.reduce(response, fn({reg_id, result}, response) ->
case result do
%{ "error" => "NotRegistered" } ->
prepend_in(response, :not_registered_ids, reg_id)
%{ "error" => "InvalidRegistration" } ->
prepend_in(response, :invalid_registration_ids, reg_id)
%{ "error" => error } when error in ["InternalServerError", "Unavailable"] ->
prepend_in(response, :to_be_retried_ids, reg_id)
%{ "registration_id" => new_reg_id } ->
update = %{ old: reg_id, new: new_reg_id}
prepend_in(response, :canonical_ids, update)
_ -> response
end
end)
end
defp build_results(_, _), do: @empty_results
defp prepend_in(map, key, val), do: update_in(map[key], &([val | &1]))
defp headers(api_key) do
[{ "Authorization", "key=#{api_key}" },
{ "Content-Type", "application/json" },
{ "Accept", "application/json"}]
end
end
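# Usage sketch (hypothetical key and token; the HTTPoison application must be
# started): stale and renamed registration ids come back already parsed.
case GCM.push("api_key", ["registration_id"], %{notification: %{title: "Hello!"}}) do
  {:ok, %{not_registered_ids: stale, canonical_ids: renamed}} -> {stale, renamed}
  {:error, :unauthorized} -> :check_api_key
  {:error, reason} -> reason
end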
lib/gcm.ex
defmodule Ratatouille.Runtime.Command do
@moduledoc """
Commands provide a way to start an expensive call in the background and get
the result back via `c:Ratatouille.App.update/2`.
Commands should be constructed via the functions below and not via the struct
directly, as this is internal and subject to change.
"""
alias __MODULE__
@enforce_keys [:type]
defstruct type: nil,
message: nil,
function: nil,
subcommands: []
@opaque t :: %__MODULE__{
type: :single | :batch,
message: term(),
function: (() -> term()) | nil,
subcommands: list(t())
}
@doc """
Returns a new command that can be returned in the `c:Ratatouille.App.update/2`
or `c:Ratatouille.App.init/1` callbacks.
Takes an anonymous function and a message. The message is used to send a
response back to your app along with the result. It can be any Erlang term, so
it's also possible to include identifiers (e.g., `{:finished, id}`).
"""
@spec new((() -> term()), term()) :: Command.t()
def new(func, message) when is_function(func) do
%Command{type: :single, message: message, function: func}
end
# TODO: Need an MFA-style form of new/2
@doc """
Returns a batch command given a list of commands. This simply provides a way
to return multiple commands as a single one. Batch commands should not depend
on one another---Ratatouille's runtime may run some or all of them in
parallel and doesn't guarantee any particular order of execution.
Dependencies should be expressed via a single command or a sequence of
commands orchestrated via the application model state.
"""
@spec batch([Command.t()]) :: Command.t()
def batch([%Command{} | _] = cmds) do
%Command{type: :batch, subcommands: cmds}
end
@doc false
def to_list(%Command{type: :batch, subcommands: [cmd | rest]}) do
to_list(cmd) ++ to_list(%Command{type: :batch, subcommands: rest})
end
def to_list(%Command{type: :batch, subcommands: []}), do: []
def to_list(%Command{type: :single} = cmd), do: [cmd]
end
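# Usage sketch (hypothetical app module): batch two independent background calls;
# each result is delivered separately to update/2, tagged with its message.
defmodule MyApp.Commands do
  alias Ratatouille.Runtime.Command
  def refresh do
    Command.batch([
      Command.new(fn -> :slow_call_one end, :jobs_loaded),
      Command.new(fn -> :slow_call_two end, {:stats_loaded, :dashboard})
    ])
  end
end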
lib/ratatouille/runtime/command.ex
defmodule Croma.Defun do
@moduledoc """
Module that provides `Croma.Defun.defun/2` macro.
"""
@doc """
Defines a function together with its typespec.
This provides a lighter-weight syntax for functions with type specifications and functions with multiple clauses.
## Example
The following examples assume that `Croma.Defun` is imported
(you can import it by `use Croma`).
defun f(a :: integer, b :: String.t) :: String.t do
"\#{a} \#{b}"
end
The code above is expanded to the following function definition.
@spec f(integer, String.t) :: String.t
def f(a, b) do
"\#{a} \#{b}"
end
Function with multiple clauses and/or pattern matching on parameters can be defined
in the same way as `case do ... end`:
defun dumbmap(as :: [a], f :: (a -> b)) :: [b] when a: term, b: term do
([] , _) -> []
([h | t], f) -> [f.(h) | dumbmap(t, f)]
end
is converted to
@spec dumbmap([a], (a -> b)) :: [b] when a: term, b: term
def dumbmap(as, f)
def dumbmap([], _) do
[]
end
def dumbmap([h | t], f) do
[f.(h) | dumbmap(t, f)]
end
## Pattern matching on function parameter and omitting parameter's type
If you omit parameter's type, its type is infered from the parameter's expression.
Suppose we have the following function:
defun f(%MyStruct{field1: field1, field2: field2}) :: String.t do
"\#{field1} \#{field2}"
end
then the parameter type becomes `MyStruct.t`.
@spec f(MyStruct.t) :: String.t
def f(a1)
def f(%MyStruct{field1: field1, field2: field2}) do
"\#{field1} \#{field2}"
end
## Generating guards from argument types
Simple guard expressions can be generated by `defun/2` using `g[type]` syntax.
For example,
defun f(s :: g[String.t], i :: g[integer]) :: String.t do
"\#{s} \#{i}"
end
is converted to the following function with `when is_integer(i)` guard.
@spec f(String.t, integer) :: String.t
def f(s, i)
def f(s, i) when is_binary(s) and is_integer(i) do
"\#{s} \#{i}"
end
For supported types of guard-generation please refer to the source code of `Croma.Guard.make/3`.
Guard generation can be disabled by setting application config at compile time.
For example, by putting the following into `config/config.exs`,
config :croma, [
defun_generate_guard: false
]
then `g[String.t]` becomes semantically the same as `String.t`.
## Validating arguments and return value based on their types
You can instrument check of pre/post conditions by specifying type as `v[type]`.
For instance,
defmodule MyString do
use Croma.SubtypeOfString, pattern: ~r/^foo|bar$/
end
defun f(s :: v[MyString.t]) :: atom do
String.to_atom(s)
end
becomes the following function definition that calls `valid?/1` at the top of its body:
@spec f(MyString.t) :: atom
def f(s)
def f(s) do
if !MyString.valid?(s) do
raise "..."
end
String.to_atom(s)
end
The generated code assumes that `valid?/1` function is defined in the type module of the specified type.
For primitive types croma defines their type modules and thus you can freely use e.g. `v[integer]`.
Generating validation of arguments and return values can be disabled by setting application config during compilation.
config :croma, [
defun_generate_validation: false
]
## Known limitations
- Overloaded typespecs are not supported.
- Guard generation and validation are not allowed to be used with multi-clause syntax.
- Using unquote fragment in parameter list is not fully supported.
- `try` block is not implicitly started in body of `defun`, in contrast to `def`.
"""
defmacro defun({:::, _, [fun, ret]}, [do: block]) do
defun_impl(:def, fun, ret, [], block, __CALLER__)
end
defmacro defun({:when, _, [{:::, _, [fun, ret]}, type_params]}, [do: block]) do
defun_impl(:def, fun, ret, type_params, block, __CALLER__)
end
defmacro defun(_, _) do
raise %SyntaxError{description: "syntax error in usage of defun", file: __CALLER__.file, line: __CALLER__.line}
end
@doc """
Defines a private function together with its typespec.
See `defun/2` for usage of this macro.
"""
defmacro defunp({:::, _, [fun, ret]}, [do: block]) do
defun_impl(:defp, fun, ret, [], block, __CALLER__)
end
defmacro defunp({:when, _, [{:::, _, [fun, ret]}, type_params]}, [do: block]) do
defun_impl(:defp, fun, ret, type_params, block, __CALLER__)
end
defmacro defunp(_, _) do
raise %SyntaxError{description: "syntax error in usage of defunp", file: __CALLER__.file, line: __CALLER__.line}
end
@doc """
Defines a unit-testable private function together with its typespec.
See `defun/2` for usage of this macro.
See also `Croma.Defpt.defpt/2`.
"""
defmacro defunpt({:::, _, [fun, ret]}, [do: block]) do
defun_impl(:defpt, fun, ret, [], block, __CALLER__)
end
defmacro defunpt({:when, _, [{:::, _, [fun, ret]}, type_params]}, [do: block]) do
defun_impl(:defpt, fun, ret, type_params, block, __CALLER__)
end
defmacro defunpt(_, _) do
raise %SyntaxError{description: "syntax error in usage of defunpt", file: __CALLER__.file, line: __CALLER__.line}
end
defmodule Arg do
@moduledoc false
defstruct [:arg_expr, :type, :default, :guard?, :validate?, :index]
def new({:\\, _, [inner_expr, default]}, index) do
%__MODULE__{new(inner_expr, index) | default: {:some, default}}
end
def new({:::, _, [arg_expr, type_expr]}, index) do
{type_expr2, g_used?, v_used?} = extract_guard_and_validate(type_expr)
guard? = g_used? and Application.get_env(:croma, :defun_generate_guard , true)
validate? = v_used? and Application.get_env(:croma, :defun_generate_validation, true)
%__MODULE__{arg_expr: arg_expr, type: type_expr2, default: :none, guard?: guard?, validate?: validate?, index: index}
end
def new(arg_expr, index) do
%__MODULE__{arg_expr: arg_expr, type: infer_type(arg_expr), default: :none, guard?: false, validate?: false, index: index}
end
defp extract_guard_and_validate({{:., _, [Access, :get]}, _, [{:g, _, _}, inner_expr]}), do: {inner_expr, true , false}
defp extract_guard_and_validate({{:., _, [Access, :get]}, _, [{:v, _, _}, inner_expr]}), do: {inner_expr, false, true }
defp extract_guard_and_validate(type_expr ), do: {type_expr , false, false}
defp infer_type(arg_expr) do
case arg_expr do
{:=, _, [{_, _, c}, inner]} when is_atom(c) -> infer_type(inner)
{:=, _, [inner, {_, _, c}]} when is_atom(c) -> infer_type(inner)
{_name, _, c} when is_atom(c) -> quote do: any
{:{}, _, elements} -> quote do: {unquote_splicing(Enum.map(elements, &infer_type/1))}
{elem1, elem2} -> quote do: {unquote(infer_type(elem1)), unquote(infer_type(elem2))}
l when is_list(l) -> quote do: []
{:%{}, _, _} -> quote do: map
{:%, _, [module_alias, _]} -> quote do: unquote(module_alias).t
expr when is_atom(expr) -> expr
expr when is_integer(expr) -> quote do: integer
expr when is_float(expr) -> quote do: float
expr when is_binary(expr) -> if String.valid?(expr), do: (quote do: String.t), else: (quote do: binary)
_ -> quote do: any
end
end
defp var_name(arg_expr) do
case arg_expr do
{name, _, context} when is_atom(context) -> name
{:=, _, [{name, _, context}, _]} when is_atom(context) -> name
{:=, _, [_, {name, _, context}]} when is_atom(context) -> name
_ -> nil
end
end
def as_var(%__MODULE__{arg_expr: arg_expr}) do
case var_name(arg_expr) do
nil -> nil
name -> Macro.var(name, nil)
end
end
defp as_var!(%__MODULE__{arg_expr: arg_expr} = arg) do
as_var(arg) || raise "parameter `#{Macro.to_string(arg_expr)}` is not a var"
end
def make_arg_expr(%__MODULE__{arg_expr: arg_expr, guard?: g?, validate?: v?, index: index}) do
# For an underscored function parameter with guard/validation,
# we have to bind it to another variable and use that variable in guard/validation
# expression in order not to touch the underscored parameter.
case var_name(arg_expr) |> Atom.to_string() do
"_" <> _ when g? or v? ->
var2 = Macro.var(:"croma_arg#{index}", nil)
quote do
unquote(arg_expr) = unquote(var2)
end
_ ->
arg_expr
end
end
def reassignment_expr(%__MODULE__{guard?: g?, validate?: v?} = arg) do
# To enable compiler warning about unused function parameter,
# we re-assign the variable to the same name after guard/validation check.
if g? or v? do
case as_var(arg) do
nil -> nil
{var_name, _, _} = var ->
if Atom.to_string(var_name) |> String.starts_with?("_") do
nil
else
quote do
unquote(var) = unquote(var)
end
end
end
else
nil
end
end
def guard_expr(%__MODULE__{guard?: false}, _), do: nil
def guard_expr(%__MODULE__{guard?: true, type: type, index: index} = arg, caller) do
{var_name, _, _} = var = as_var!(arg)
case Atom.to_string(var_name) do
"_" <> _ -> Croma.Guard.make(type, Macro.var(:"croma_arg#{index}", nil), caller)
_ -> Croma.Guard.make(type, var, caller)
end
end
def validation_expr(%__MODULE__{validate?: false}, _), do: nil
def validation_expr(%__MODULE__{validate?: true, type: type, index: index} = arg, caller) do
{var_name, _, _} = var = as_var!(arg)
case Atom.to_string(var_name) do
"_" <> _ -> Croma.Validation.make(type, Macro.var(:"croma_arg#{index}", nil), caller)
_ -> Croma.Validation.make(type, var, caller)
end
end
end
defmodule Ret do
@moduledoc false
defstruct [:type, :guard?, :validate?]
def new(expr) do
{type_expr, g_used?, v_used?} = extract(expr)
guard? = g_used? and Application.get_env(:croma, :defun_generate_guard , true)
validate? = v_used? and Application.get_env(:croma, :defun_generate_validation, true)
%__MODULE__{type: type_expr, guard?: guard?, validate?: validate?}
end
defp extract({{:., _, [Access, :get]}, _, [{:g, _, _}, inner_expr]}), do: {inner_expr, true , false}
defp extract({{:., _, [Access, :get]}, _, [{:v, _, _}, inner_expr]}), do: {inner_expr, false, true }
defp extract(expr ), do: {expr , false, false}
def wrap_body_with_return_value_check(ret, body, caller) do
cond do
ret.guard? -> wrap_body_with_guard(ret, body, caller)
ret.validate? -> wrap_body_with_validation(ret, body, caller)
:otherwise -> body
end
end
defp wrap_body_with_guard(%__MODULE__{type: type, guard?: true}, body, caller) do
guard_expr = Croma.Guard.make(type, Macro.var(:return_value, __MODULE__), caller)
quote do
case unquote(body) do
return_value when unquote(guard_expr) -> return_value
end
end
end
defp wrap_body_with_validation(%__MODULE__{type: type, validate?: true}, body, caller) do
validation_expr = Croma.Validation.make(type, Macro.var(:return_value, __MODULE__), caller)
quote do
return_value = unquote(body)
unquote(validation_expr)
return_value
end
end
end
defp defun_impl(def_or_defp, {fname, env, args0}, ret0, type_params, block, caller) do
args = case args0 do
context when is_atom(context) -> [] # function definition without parameter list
_ -> Enum.with_index(args0) |> Enum.map(fn {arg, i} -> Arg.new(arg, i) end)
end
ret = Ret.new(ret0)
{:__block__, [], [
typespec(fname, env, args, ret, type_params),
bodyless_function(def_or_defp, fname, env, args),
function_definition(def_or_defp, fname, env, args, ret, block, caller),
]}
end
defp typespec(fname, env, args, ret, type_params) do
arg_types = Enum.map(args, &(&1.type))
func_with_return_type = {:::, [], [{fname, [], arg_types}, ret.type]}
spec_expr = case type_params do
[] -> func_with_return_type
_ -> {:when, [], [func_with_return_type, type_params]}
end
{:@, env, [
{:spec, [], [spec_expr]}
]}
end
defp bodyless_function(def_or_defp, fname, env, args) do
arg_exprs = Enum.with_index(args) |> Enum.map(fn {%Arg{default: default} = arg, index} ->
var = Arg.as_var(arg) || Macro.var(:"arg#{Integer.to_string(index)}", nil)
case default do
:none -> var
{:some, default} -> {:\\, [], [var, default]}
end
end)
{def_or_defp, env, [{fname, env, arg_exprs}]}
end
defp function_definition(def_or_defp, fname, env, args, ret, block, caller) do
defs = case block do
{:__block__, _, multiple_defs} -> multiple_defs
single_def -> List.wrap(single_def)
end
if !Enum.empty?(defs) and Enum.all?(defs, &pattern_match_expr?/1) do
if Enum.any?(args, &(&1.guard? )), do: raise "guard generation cannot be used with multi-clause syntax"
if Enum.any?(args, &(&1.validate?)), do: raise "argument validation cannot be used with multi-clause syntax"
if ret.guard? or ret.validate? , do: raise "return value validation cannot be used with multi-clause syntax"
clause_defs = Enum.map(defs, &to_clause_definition(def_or_defp, fname, &1))
{:__block__, env, clause_defs}
else
call_expr = call_expr_with_guard(fname, env, args, caller)
body = body_with_validation(args, block, caller)
wrapped_body = Ret.wrap_body_with_return_value_check(ret, body, caller)
{def_or_defp, env, [call_expr, [do: wrapped_body]]}
end
end
defp pattern_match_expr?({:->, _, _}), do: true
defp pattern_match_expr?(_ ), do: false
defp to_clause_definition(def_or_defp, fname, {:->, env, [args, block]}) do
case args do
[{:when, _, when_args}] ->
fargs = Enum.take(when_args, length(when_args) - 1)
guards = List.last(when_args)
{def_or_defp, env, [{:when, [], [{fname, [], fargs}, guards]}, [do: block]]}
_ ->
{def_or_defp, env, [{fname, env, args}, [do: block]]}
end
end
defp call_expr_with_guard(fname, env, args, caller) do
arg_exprs = Enum.map(args, &Arg.make_arg_expr/1) |> reset_hygienic_counter()
guard_exprs = Enum.map(args, &Arg.guard_expr(&1, caller)) |> Enum.reject(&is_nil/1)
if Enum.empty?(guard_exprs) do
{fname, env, arg_exprs}
else
combined_guard_expr = Enum.reduce(guard_exprs, fn(expr, acc) -> {:and, env, [acc, expr]} end)
{:when, env, [{fname, env, arg_exprs}, combined_guard_expr]}
end
end
defp body_with_validation(args, block, caller) do
validation_exprs = Enum.map(args, &Arg.validation_expr(&1, caller)) |> Enum.reject(&is_nil/1)
reassignment_exprs = Enum.map(args, &Arg.reassignment_expr/1) |> Enum.reject(&is_nil/1)
exprs =
case reset_hygienic_counter(block) do
{:__block__, _, exprs} -> exprs
nil -> []
expr -> [expr]
end
case validation_exprs ++ reassignment_exprs ++ exprs do
[] -> nil
[expr] -> expr
exprs -> {:__block__, [], exprs}
end
end
defp reset_hygienic_counter(ast) do
Macro.prewalk(ast, fn
{name, meta, context} when is_atom(context) -> {name, Keyword.delete(meta, :counter), nil}
t -> t
end)
end
end
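# Usage sketch mirroring the moduledoc: g[...] generates `is_integer/1` guards for
# each parameter (assumes a project where `use Croma` is available).
defmodule MyMath do
  use Croma
  defun clamp(x :: g[integer], lo :: g[integer], hi :: g[integer]) :: integer do
    x |> max(lo) |> min(hi)
  end
end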
lib/croma/defun.ex
defmodule Faker.Pizza do
import Faker, only: [sampler: 2]
alias Faker.Util
@moduledoc """
Functions for generating Pizza related data in English.
"""
@doc """
Returns a list with a number of pizzas.
If an integer is provided, exactly that number of pizzas will be returned.
If a range is provided, the number will be in the range.
If no range or integer is specified it defaults to 2..5
## Examples
iex> Faker.Pizza.pizzas()
[
"14\\" Greek Maltija",
"Large with Reindeer, Buffalo Chicken, Egg, Chorizo, and Clam",
"9\\" Capricciosa",
"9\\" Sicilian Style Frutti di mare"
]
iex> Faker.Pizza.pizzas(2..3)
[
"12\\" Fajita",
"Medium Fajita"
]
iex> Faker.Pizza.pizzas(3..4)
[
"Large Gluten-Free Corn with Oysters, Bacon, and Steak",
"10\\" Flatbread Grilled Vegetarian",
"30\\" Thai Chicken",
"Small with Sauerkraut"
]
iex> Faker.Pizza.pizzas(5)
[
"Large Quattro Formaggio",
"Small Sweet Potato Crust with Mackerel, Jalapeños, Smoked Mozzarella, and Smoked Salmon",
"30\\" with Pickled Ginger, Meatballs, Goat Cheese, Prosciutto, and Pineapple",
"9\\" Detroit-style with Steak",
"Family with Clam, Cherry Tomatoes, Salmon, and Chicken"
]
"""
@spec pizzas(Range.t) :: list(String.t)
def pizzas(range \\ 2..5)
def pizzas(first..last) do
pizzas(Faker.random_between(first, last))
end
@spec pizzas(integer) :: list(String.t())
def pizzas(num) do
stream = Stream.repeatedly(&pizza/0)
Enum.take(stream, num)
end
@doc """
Returns a pizza
## Examples
iex> Faker.Pizza.pizza()
"16\\" with Fior di latte"
iex> Faker.Pizza.pizza()
"Medium New York Style with Clam and Reindeer"
iex> Faker.Pizza.pizza()
"9\\" Supreme"
iex> Faker.Pizza.pizza()
"16\\" Shrimp Club"
"""
@spec pizza() :: String.t()
def pizza, do: pizza(Faker.random_between(1, 30))
defp pizza(n) when n <= 5, do: "#{size_or_inches()} with #{toppings_sentence(n)}"
defp pizza(n) when n <= 10,
do: "#{size_or_inches()} #{style()} with #{toppings_sentence(n - 5)}"
defp pizza(n) when n <= 15, do: "#{size_or_inches()} #{style()} #{combo()}"
defp pizza(_n), do: "#{size_or_inches()} #{combo()}"
@doc """
Returns a list with a number of toppings.
If an integer is provided, exactly that number of toppings will be returned.
If a range is provided, the number will be in the range.
If no range or integer is specified it defaults to 2..5
## Examples
iex> Faker.Pizza.toppings()
["Pesto Sauce", "Fior di latte", "Broccoli", "Banana Peppers"]
iex> Faker.Pizza.toppings(4)
["Clam", "Reindeer", "Buffalo Chicken", "Egg"]
iex> Faker.Pizza.toppings(2..3)
["Sausage", "Green Peas"]
iex> Faker.Pizza.toppings(2..3)
["Shellfish", "Smoked Salmon"]
"""
@spec toppings(Range.t) :: list(String.t)
def toppings(range \\ 2..5)
def toppings(first..last) do
toppings(Faker.random_between(first, last))
end
@spec toppings(integer) :: list(String.t())
def toppings(num) do
stream = Stream.repeatedly(&topping/0)
Enum.take(stream, num)
end
defp toppings_sentence(num) do
num |> toppings() |> Util.to_sentence()
end
@doc """
Returns a random cheese, sauce, meat or vegetarian topping
## Examples
iex> Faker.Pizza.topping()
"Black Olives"
iex> Faker.Pizza.topping()
"Meatballs"
iex> Faker.Pizza.topping()
"Asiago"
iex> Faker.Pizza.topping()
"Philly Steak"
"""
@spec topping() :: String.t()
def topping, do: topping(Faker.random_between(0, 7))
defp topping(0), do: cheese()
defp topping(1), do: sauce()
defp topping(count) when count <= 5, do: meat()
defp topping(_count), do: vegetable()
@doc """
Returns a Pizza Restaurant string
## Examples
iex> Faker.Pizza.company()
"Papa Plastique"
iex> Faker.Pizza.company()
"Chicago Deep Dish"
iex> Faker.Pizza.company()
"Pizza Joe’s"
iex> Faker.Pizza.company()
"CosaNostra Pizza"
"""
@spec company() :: String.t()
sampler(:company, [
"New York Style Pizza Co",
"Chicago Deep Dish",
"Pizza the Hutt",
"Mystic Pizza",
"Bob's Pizza-Burgers",
"Only Cheese Pizzas",
"California Style Pizzas",
"Thin Crust or Bust",
"Family Bros. Pizza",
"Pizza Planet",
"Polmieri Pizzeria",
"Che<NAME>",
"Mona Pizza",
"J. Maggio's Pizza",
"Pizza De Roma",
"Dinosaur Pizza",
"Pizza Potamus",
"Pizza Forest",
"Pizza This..",
"Little Nero's Pizza",
"<NAME>'s Pizza",
"The Pizza Hole",
"Bloaty’s Pizza Hog",
"Guidio’s Pizza Palace",
"CosaNostra Pizza",
"Gamer Pizzas",
"Jubilee Pizza",
"Maria’s Pasta and Pizza",
"Nemo’s Pizza",
"Paisanos",
"Papa Plastique",
"Pete’s Pizza",
"Pizza Clown",
"Pizza Face Pizza",
"Pizza on a Stick",
"Pizza Joe’s",
"Pizza Pig-Out",
"Pizza Shack",
"Ron’s Pizza Hovel",
"Sid’s Pizza Parlor"
])
@doc """
Returns a pizza style
## Examples
iex> Faker.Pizza.style()
"Pizza Frittata"
iex> Faker.Pizza.style()
"Gluten-Free Corn"
iex> Faker.Pizza.style()
"Detroit-style"
iex> Faker.Pizza.style()
"Stuffed Crust"
"""
@spec style() :: String.t()
sampler(:style, [
"Wood Fired",
"Thick Crust",
"Thin Crust",
"New York Style",
"Deep Dish",
"Stuffed Crust",
"Detroit-style",
"Kosher",
"Gluten-Free Quinoa",
"Flatbread",
"Sweet Potato Crust",
"Grilled",
"New Haven Style",
"Greek",
"Sicilian Style",
"Pizza Rustica",
"Tomatoe Pie",
"Pizza Bread",
"Neapolitan",
"Deep Fried Pizza",
"Gluten-Free Corn",
"Pizza Frittata",
"Hand Tossed",
"Multigrain",
"Whole Wheat",
"Fugazza",
"Spooning Pizza"
])
@doc """
Returns a sauce string
## Examples
iex> Faker.Pizza.sauce()
"Spicy Tomato Sauce"
iex> Faker.Pizza.sauce()
"Hummus"
iex> Faker.Pizza.sauce()
"Pesto Sauce"
iex> Faker.Pizza.sauce()
"Hummus"
"""
@spec sauce() :: String.t()
sampler(:sauce, [
"Soy Miso Sauce",
"Masala Sauce",
"Classic Tomato Sauce",
"Spicy Tomato Sauce",
"Clam Sauce",
"Bechamel (White) Sauce",
"Pesto Sauce",
"BBQ Sauce",
"Salsa",
"Passata Sauce",
"Chipolte Sauce",
"Mango Sauce",
"Siracha Sauce",
"Buffalo Sauce",
"Tomatoe Garlic Sauce",
"Butter Chicken Sauce",
"Olive Oil",
"Hummus",
"White Garlic Sauce",
"Marinara Sauce",
"Tapenade",
"Romesco Sauce",
"Chimichurri Sauce",
"Gravy",
"Aioli",
"Hoisin Sauce",
"Chili Sauce",
"Sweet Chili Sauce",
"Gremolata",
"Mustard",
"Curry",
"Alfredo Sauce",
"Zaatar"
])
@doc """
Returns a cheese string
## Examples
iex> Faker.Pizza.cheese()
"Mozzarella"
iex> Faker.Pizza.cheese()
"Marscapone"
iex> Faker.Pizza.cheese()
"Blue (Bleu) Cheese"
iex> Faker.Pizza.cheese()
"Smoked Mozzarella"
"""
@spec cheese() :: String.t()
sampler(:cheese, [
"Mozzarella",
"Cheddar",
"Feta",
"Secret Cheese Blend",
"4 Cheese Blend",
"Buffalo Mozzarella",
"Asiago",
"Romano",
"Provolone",
"Emmental",
"Ricotta",
"Marscapone",
"Swiss",
"Parmesan",
"Smoked Mozzarella",
"Blue (Bleu) Cheese",
"Soy Cheese",
"Lactose Free Cheese",
"Whole Milk Mozzarella",
"Burrata",
"Goat Cheese",
"Cashew Cheese",
"Fior di latte",
"Paneer",
"Cheese Curds",
"Gouda",
"Fontina",
"Gorgonzola",
"Queso Fresco",
"Crème fraîche"
])
@doc """
Returns a meat string
## Examples
iex> Faker.Pizza.meat()
"Buffalo Chicken"
iex> Faker.Pizza.meat()
"Meatballs"
iex> Faker.Pizza.meat()
"Chicken"
iex> Faker.Pizza.meat()
"Meatballs"
"""
@spec meat() :: String.t()
sampler(:meat, [
"Anchovies",
"Bacon",
"Pulled Pork",
"Buffalo Chicken",
"Canadian Bacon",
"Capicola",
"Chicken",
"Chorizo",
"Clam",
"Duck",
"Eel",
"Egg",
"Gyro",
"Ham",
"Hamburger",
"Hot Dogs",
"Mackerel",
"Meatballs",
"Mutton",
"Pepperoni",
"Philly Steak",
"Prosciutto",
"Reindeer",
"Salami",
"Salmon",
"Sausage",
"Scallops",
"Shellfish",
"Shrimps",
"Smoked Salmon",
"Squid",
"Steak",
"Venison"
])
@doc """
Returns a vegetable string
## Examples
iex> Faker.Pizza.vegetable()
"Mango"
iex> Faker.Pizza.vegetable()
"Black Olives"
iex> Faker.Pizza.vegetable()
"Green Olives"
iex> Faker.Pizza.vegetable()
"Sauerkraut"
"""
@spec vegetable() :: String.t()
sampler(:vegetable, [
"Artichoke Hearts",
"Arugula",
"Banana",
"Banana Peppers",
"Basil",
"Black Olives",
"Broccoli",
"Capers",
"Caramelised Onions",
"Cherry Tomatoes",
"Coconut",
"Eggplant",
"Garlic",
"Green Olives",
"Green Peas",
"Green Peppers",
"Habanero Peppers",
"Jalapeños",
"Mango",
"Mushrooms",
"Onions",
"Oysters",
"Peperoncini",
"Pickled Ginger",
"Pineapple",
"Potatoes",
"Red Onion",
"Red Peppers",
"Sauerkraut",
"Spinach",
"Sun-Dried Tomatoes",
"Sweet Corn",
"Sweet Potato",
"Zucchini"
])
@doc """
Returns a combo string
## Examples
iex> Faker.Pizza.combo()
"Breakfast"
iex> Faker.Pizza.combo()
"Caprese"
iex> Faker.Pizza.combo()
"Mockba"
iex> Faker.Pizza.combo()
"Poutine"
"""
@spec combo() :: String.t()
sampler(:combo, [
"Africana",
"All Dressed",
"Bacon Cheeseburger ",
"BBQ Chicken",
"Bianca ",
"Bolognese",
"Breakfast",
"Buffalo Chicken",
"Canadian",
"Caprese",
"Capricciosa",
"Capricciosa ",
"Cheese",
"Chicken Pesto",
"Ciao-ciao",
"Curry Banana",
"Double Dutch",
"Fajita",
"Fig and Goat Cheese",
"Four Seasons",
"Frutti di mare",
"Funghi",
"Grandma",
"Greek",
"Grilled Vegetarian",
"Hawaiian",
"Hot & Spicy",
"Italian Deli",
"Kebab",
"Loaded",
"Maltija",
"Margherita",
"Meat Feast",
"Meat Lovers",
"Meatball ",
"Mockba",
"Onion & Gorgonzola",
"Pepperoni & Mushroom",
"Perogie",
"Pesto Chicken",
"Poutine",
"Prociutto Arugala",
"Pugliese",
"Quattro Formaggi",
"Quattro Formaggio",
"Quattro stagioni",
"Romana",
"Shrimp Club",
"Smoked Salmon & Goat Cheese",
"Supreme",
"Taco",
"Thai Chicken",
"Vegetarian Lovers",
"Vegetariana",
"Veggie Korma",
"Viennese"
])
@doc """
Returns a random size or inches
## Examples
iex> Faker.Pizza.size_or_inches()
"Family"
iex> Faker.Pizza.size_or_inches()
"14\\""
iex> Faker.Pizza.size_or_inches()
"Personal"
iex> Faker.Pizza.size_or_inches()
"Medium"
"""
@spec size_or_inches() :: String.t()
def size_or_inches, do: size_or_inches(Faker.random_between(0, 1))
defp size_or_inches(0), do: size()
defp size_or_inches(1), do: inches()
@doc """
Returns a size string
## Examples
iex> Faker.Pizza.size()
"Personal"
iex> Faker.Pizza.size()
"Family"
iex> Faker.Pizza.size()
"Large"
iex> Faker.Pizza.size()
"Medium"
"""
@spec size() :: String.t()
sampler(:size, ["Personal", "Small", "Medium", "Large", "Extra-Large", "Family"])
@doc """
Returns an inches string
## Examples
iex> Faker.Pizza.inches()
"9\\""
iex> Faker.Pizza.inches()
"10\\""
iex> Faker.Pizza.inches()
"16\\""
iex> Faker.Pizza.inches()
"14\\""
"""
@spec inches() :: String.t()
sampler(:inches, ["9\"", "10\"", "11\"", "12\"", "14\"", "16\"", "18\"", "20\"", "26\"", "30\""])
end
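# Usage sketch: building a fake order for test fixtures (actual values depend on
# Faker's random seed).
_order = %{
  restaurant: Faker.Pizza.company(),
  pizzas: Faker.Pizza.pizzas(1..3),
  extra_toppings: Faker.Pizza.toppings(2)
}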
lib/faker/pizza.ex
defmodule Mix.Tasks.Compile.Thrift do
@moduledoc """
Provides a mix task for compiling Thrift IDL files to Erlang.
Once Thrash is
compiled, you can execute `mix compile.thrift` to generate Erlang code
(a required precursor for Thrash) from your Thrift IDL files (i.e.,
`.thrift` files). By default, `mix compile.thrift` assumes that your
IDL files are in the `thrift` directory and that the output should go
in the `src` directory.
The following environment variables modify the behavior of `mix
compile.thrift`.
* `THRIFT` - Path to the `thrift` binary (default: `thrift`).
* `THRIFT_INPUT_DIR` - Directory containing your `.thrift` files
(default: `thrift`).
* `THRIFT_OUTPUT_DIR` - Directory in which generated Erlang
source code is placed (default: `src`).
* `FORCE_THRIFT` - Set to any of `["TRUE", "true", "1"]` to force
execution of `thrift`. By default, the task automatically determines
if it is necessary to execute `thrift` based on the mtimes of the
files in the input and output directories.
Prepend `:thrift` to the list of compilers in your project
and this task will run automatically as needed.
```
defmodule MyProject.Mixfile do
use Mix.Project
def project do
[app: :my_project,
# usual stuff ..
# prepend thrift to the usual list of compilers
compilers: [:thrift] ++ Mix.compilers
# ...
]
end
end
```
Run `mix deps.compile` first to ensure that the `compile.thrift` task
is available.
"""
use Mix.Task
def run(_args) do
options = get_env_options()
File.mkdir_p!(options[:thrift_output_dir])
input_files = thrift_files(options[:thrift_input_dir])
output_files = generated_files(options[:thrift_output_dir])
if require_compile?(options[:force_thrift], input_files, output_files) do
run_thrift(options[:thrift],
options[:thrift_input_dir],
options[:thrift_output_dir])
end
end
defp get_env_options() do
%{
thrift: System.get_env("THRIFT") || "thrift",
thrift_input_dir: System.get_env("THRIFT_INPUT_DIR") || "thrift",
thrift_output_dir: System.get_env("THRIFT_OUTPUT_DIR") || "src",
force_thrift: force_thrift?(System.get_env("FORCE_THRIFT") || false)
}
end
defp thrift_files(thrift_input_dir) do
Mix.Utils.extract_files([thrift_input_dir], ["thrift"])
end
defp run_thrift_on(f, thrift_bin, thrift_output_dir) do
cmd = thrift_bin <> " -o #{thrift_output_dir} --gen erl #{f}"
IO.puts cmd
0 = Mix.shell.cmd(cmd)
end
defp run_thrift(thrift_bin, thrift_input_dir, thrift_output_dir) do
thrift_input_dir
|> thrift_files
|> Enum.each(fn(f) -> run_thrift_on(f, thrift_bin, thrift_output_dir) end)
end
defp generated_files(output_dir) do
Mix.Utils.extract_files([Path.join(output_dir, "gen-erl")], ["hrl", "erl"])
end
defp force_thrift?("TRUE"), do: true
defp force_thrift?("true"), do: true
defp force_thrift?("1"), do: true
defp force_thrift?(_), do: false
defp require_compile?(true, _, _), do: true
defp require_compile?(false, _, []), do: true
defp require_compile?(false, input_files, output_files) do
input_stats = stats_by_mtime(input_files)
output_stats = stats_by_mtime(output_files)
most_recent(input_stats) > least_recent(output_stats)
end
defp stats_by_mtime(files) do
Enum.sort_by(file_stats(files), fn(stat) -> stat.mtime end)
end
defp file_stats(files) do
Enum.map(files, fn(file) ->
File.stat!(file, time: :posix)
end)
end
defp most_recent([]), do: 0
defp most_recent([h | _t]), do: h.mtime
# note x < :infinity is true for any integer
defp least_recent([]), do: :infinity
defp least_recent(list) do
last = List.last(list)
last.mtime
end
end
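# Usage sketch: forcing a one-off run from IEx; FORCE_THRIFT=1 skips the mtime
# comparison between the input and output directories.
System.put_env("FORCE_THRIFT", "1")
Mix.Task.run("compile.thrift")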
lib/mix/tasks/compile/thrift.ex
defmodule GrokEX do
@moduledoc """
Compiles grok patterns into Elixir objects which can be used for testing
strings against patterns.
## Examples
```
iex> GrokEX.compile_regex("Here's a number %{NUMBER:the_number}")
{:ok,
~r/Here's a number (?<the_number>(?:(?<![0-9.+-])(?>[+-]?(?:(?:[0-9]+(?:\\.[0-9]+)?)|(?:\\.[0-9]+)))))/}
iex> GrokEX.compile_predicate("User %{QUOTEDSTRING:username} connected from %{IP:user_address}")
#Function<0.46228848/1 in GrokEX.compile_predicate/2>
```
"""
import Unicode.Guards
@type grok_predicate :: (String.t() -> :no_match | map())
@type compile_opts :: {:patterns, %{String.t() => String.t()}}
@doc """
Compiles a grok pattern to a function that takes a string and returns either the
named captures if the string matches the pattern, or `:no_match` if the string
doesn't match.
## Examples
```
iex> GrokEX.compile_predicate("User %{QUOTEDSTRING:username} connected from %{IP:user_address}")
```
## Options
* `:patterns` - Provide custom patterns to the grok compiler. These patterns will be merged with
the default patterns
"""
@spec compile_predicate(String.t(), [compile_opts()]) :: {:ok, grok_predicate()} | {:error, term()}
def compile_predicate(string, opts \\ []) do
patterns = Keyword.get(opts, :patterns, %{}) |> Map.merge(GrokEX.DefaultPatterns.default_patterns())
case compile_regex(string, [patterns: patterns]) do
{:ok, regex} ->
{:ok,
fn string ->
if Regex.match?(regex, string) do
Regex.named_captures(regex, string)
else
:no_match
end
end}
err -> err
end
end
@spec compile_regex(String.t(), [compile_opts()]) :: {:ok, Regex.t()} | {:error, term()}
@doc """
Compiles a grok pattern to a function that takes a string and returns either the
named captures if the string matches the pattern, or `:no_match` if the string
doesn't match.
## Examples
```
iex> GrokEX.compile_regex("User %{QUOTEDSTRING:username} connected from %{IP:user_address}")
```
## Options
* `:patterns` - Provide custom patterns to the grok compiler. These patterns will be merged with
the default patterns
"""
def compile_regex(string, opts \\ []) do
patterns = Keyword.get(opts, :patterns, %{}) |> Map.merge(GrokEX.DefaultPatterns.default_patterns())
with {:ok, pattern} <- compile_pattern([], string, patterns),
{:ok, regex} <- Regex.compile(pattern)
do
{:ok, regex}
else
err -> err
end
end
defp tokenize("", tokens, column, current_string) do
{:ok, Enum.reverse([finalize_string_literal(current_string, column) | tokens])}
end
defp tokenize(<<"%", remaining::binary>>, tokens, column, current_string) do
case remaining do
"" -> {:error, [:unexpected_eof, column]}
<<"{", remaining::binary>> -> consume_template_type(remaining, [finalize_string_literal(current_string, column) | tokens], column + 1, "")
<<codepoint::utf8, remaining::binary>> -> tokenize(remaining, tokens, column + 1, current_string <> <<codepoint::utf8>>)
end
end
defp tokenize(<<codepoint::utf8, remaining::binary>>, tokens, column, current_string), do: tokenize(remaining, tokens, column + 1, current_string <> <<codepoint::utf8>>)
defp finalize_string_literal(string, column), do: [:literal_string, column - String.length(string), string]
defp consume_template_type(<<"}", remaining::binary>>, tokens, column, type) do
tokenize(remaining, [finalize_type(type, column) | tokens], column + 1, "")
end
defp consume_template_type(<<":", remaining::binary>>, tokens, column, type) do
consume_template_name(remaining, [finalize_type(type, column) | tokens], column + 1, "")
end
defp consume_template_type(<<codepoint::utf8, remaining::binary>>, tokens, column, "") when is_upper(codepoint) do
consume_template_type(remaining, tokens, column + 1, <<codepoint::utf8>>)
end
defp consume_template_type(<<codepoint::utf8, remaining::binary>>, tokens, column, type) when is_upper(codepoint) or is_digit(codepoint) do
consume_template_type(remaining, tokens, column + 1, type <> <<codepoint::utf8>>)
end
defp consume_template_type(<<codepoint::utf8, _remaining::binary>>, _tokens, column, _type) do
{:error, [:unexpected_token, :template_type, column, <<codepoint::utf8>>, [:upper, :digit]]}
end
defp finalize_type(type, column), do: [:template_type, column - String.length(type), type]
defp consume_template_name(<<codepoint::utf8, remaining::binary>>, tokens, column, "") when is_lower(codepoint) or is_upper(codepoint) do
consume_template_name(remaining, tokens, column + 1, <<codepoint::utf8>>)
end
defp consume_template_name(<<codepoint::utf8, remaining::binary>>, tokens, column, name) when is_lower(codepoint) or is_upper(codepoint) or is_digit(codepoint) or codepoint == ?_ do
consume_template_name(remaining, tokens, column + 1, name <> <<codepoint::utf8>>)
end
defp consume_template_name(<<"}", remaining::binary>>, tokens, column, name) do
tokenize(remaining, [[:template_param, column - String.length(name), name] | tokens], column, "")
end
defp consume_template_name(<<t::utf8, _remaining::binary>>, _tokens, column, _name), do: {:error, [:unexpected_token, :template_param, column, <<t::utf8>>, [:alphanumeric]]}
defp consume_template_name("", _tokens, column, _name), do: {:error, [:unexpected_eof, column]}
defp compile_pattern([[:literal_string, _column, string]], "", _patterns), do: {:ok, string}
defp compile_pattern([[:literal_string, _column, string] | tokens], pattern, patterns), do: compile_pattern(tokens, "#{pattern}#{string}", patterns)
defp compile_pattern([[:template_type, column, type] | tokens], pattern, patterns) do
case patterns do
%{^type => type_pattern} ->
case tokens do
[[:template_param, _column, name] | tokens] -> compile_pattern(tokens, "#{pattern}(?<#{name}>#{type_pattern})", patterns)
[_token | _tokens] -> compile_pattern(tokens, "#{pattern}#{type_pattern}", patterns)
[] -> {:error, [:unexpected_eof, -1]}
end
_ -> {:error, [:unknown_pattern, column - String.length(type), type]}
end
end
defp compile_pattern([], pattern, patterns) do
case tokenize(pattern, [], 0, "") do
{:ok, tokens} -> compile_pattern(tokens, "", patterns)
err -> err
end
end
end
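# Usage sketch: merging a custom pattern with the defaults. Pattern names may only
# contain uppercase letters and digits, hence "ORDERID" rather than "ORDER_ID"
# (the pattern itself is a hypothetical order-id format).
{:ok, pred} =
  GrokEX.compile_predicate("order %{ORDERID:id}",
    patterns: %{"ORDERID" => "[A-Z]{3}-[0-9]+"}
  )
%{"id" => "ABC-42"} = pred.("order ABC-42")
:no_match = pred.("order 42")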
lib/grokex.ex
defmodule Pixie.LocalSubscription do
use GenServer
@moduledoc """
Represents an in-VM subscription to a Bayeux channel.
"""
@doc """
Subscribe to a channel and call the provided function with messages.
```elixir
{:ok, sub} = Pixie.subscribe "/my_awesome_channel", fn(message,_)->
IO.inspect message
end
```
The function must take two arguments:
- A message struct.
- The subscription pid.
"""
def subscribe(channel_name, callback) when is_binary(channel_name) and is_function(callback, 2) do
Pixie.LocalSubscriptionSupervisor.add_worker Pixie.LocalSubscription, {channel_name, callback}, [channel_name, callback]
end
@doc """
Cancel a local subscription.
Example:
```elixir
Pixie.subscribe "/only_one_please", fn(message,sub)->
IO.inspect message
Pixie.unsubscribe sub
end
```
"""
def unsubscribe pid do
GenServer.cast pid, :unsubscribe
end
def start_link channel_name, callback do
GenServer.start_link __MODULE__, {channel_name, callback}
end
def init {channel_name,callback} do
{client_id, client_pid} = Pixie.Backend.create_client
transport_pid = Pixie.Client.set_transport client_id, "local"
Pixie.Backend.subscribe client_id, channel_name
Pixie.Transport.connect transport_pid, []
state = %{
channel_name: channel_name,
callback: callback,
client_id: client_id,
client_pid: client_pid,
transport_pid: transport_pid
}
{:ok, state, ping_timeout}
end
def handle_info :timeout, %{client_pid: pid}=state do
Pixie.Client.ping pid
{:noreply, state, ping_timeout}
end
def handle_info {_ref, messages}, %{client_pid: pid, callback: callback}=state do
Pixie.Client.ping pid
Enum.each messages, fn(message)->
callback.(message, self)
end
{:noreply, state, ping_timeout}
end
def handle_cast :unsubscribe, %{client_id: client_id} do
Pixie.Backend.destroy_client client_id, "Local unsubscription."
{:stop, :normal, nil}
end
defp ping_timeout do
trunc Pixie.timeout * 0.75
end
end
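# Usage sketch (hypothetical channel), following the moduledoc examples: a
# one-shot subscription that cancels itself after the first message.
{:ok, _sub} = Pixie.subscribe "/announcements/once", fn message, sub ->
  IO.inspect message
  Pixie.unsubscribe sub
end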
lib/pixie/local_subscription.ex
defmodule Rummage.Ecto.CustomHooks.KeysetPaginate do
@moduledoc """
`Rummage.Ecto.CustomHooks.KeysetPaginate` is a custom paginate hook that comes shipped
with `Rummage.Ecto`.
This module can be used by overriding the default paginate module. This can be done
in the following ways:
In the `Rummage.Ecto` call:
```elixir
Rummage.Ecto.rummage(queryable, rummage, paginate: Rummage.Ecto.CustomHooks.KeysetPaginate)
```
OR
Globally for all models in `config.exs`:
```elixir
config :rummage_ecto,
Rummage.Ecto,
default_paginate: Rummage.Ecto.CustomHooks.KeysetPaginate
```
"""
import Ecto.Query
alias Rummage.Ecto.Config
@behaviour Rummage.Ecto.Hook
@doc """
Builds a paginate queryable on top of the given `queryable` from the rummage parameters
from the given `rummage` struct.
## Examples
When rummage struct passed doesn't have the key "paginate", it simply returns the
queryable itself:
iex> alias Rummage.Ecto.CustomHooks.KeysetPaginate
iex> import Ecto.Query
iex> KeysetPaginate.run(Parent, %{})
Parent
When the queryable passed is not just a struct:
iex> alias Rummage.Ecto.CustomHooks.KeysetPaginate
iex> import Ecto.Query
iex> queryable = from u in "parents"
#Ecto.Query<from p in "parents">
iex> KeysetPaginate.run(queryable, %{})
#Ecto.Query<from p in "parents">
When rummage `struct` passed has the key `"paginate"`, but with a value of `%{}`, `""`
or `[]` it simply returns the `queryable` itself:
iex> alias Rummage.Ecto.CustomHooks.KeysetPaginate
iex> import Ecto.Query
iex> KeysetPaginate.run(Parent, %{"paginate" => %{}})
Parent
iex> alias Rummage.Ecto.CustomHooks.KeysetPaginate
iex> import Ecto.Query
iex> KeysetPaginate.run(Parent, %{"paginate" => ""})
Parent
iex> alias Rummage.Ecto.CustomHooks.KeysetPaginate
iex> import Ecto.Query
iex> KeysetPaginate.run(Parent, %{"paginate" => []})
Parent
When rummage struct passed has the key "paginate", with "per_page" and "page" keys
it returns a paginated version of the queryable passed in as the argument:
iex> alias Rummage.Ecto.CustomHooks.KeysetPaginate
iex> import Ecto.Query
iex> rummage = %{"paginate" => %{"per_page" => "1", "page" => "1"}}
%{"paginate" => %{"page" => "1", "per_page" => "1"}}
iex> queryable = from u in "parents"
#Ecto.Query<from p in "parents">
iex> KeysetPaginate.run(queryable, rummage)
#Ecto.Query<from p in "parents", limit: ^1, offset: ^0>
iex> alias Rummage.Ecto.CustomHooks.KeysetPaginate
iex> import Ecto.Query
iex> rummage = %{"paginate" => %{"per_page" => "5", "page" => "2"}}
%{"paginate" => %{"page" => "2", "per_page" => "5"}}
iex> queryable = from u in "parents"
#Ecto.Query<from p in "parents">
iex> KeysetPaginate.run(queryable, rummage)
#Ecto.Query<from p in "parents", limit: ^5, offset: ^5>
When no `"page"` key is passed, it defaults to `1`:
iex> alias Rummage.Ecto.CustomHooks.KeysetPaginate
iex> import Ecto.Query
iex> rummage = %{"paginate" => %{"per_page" => "10"}}
%{"paginate" => %{"per_page" => "10"}}
iex> queryable = from u in "parents"
#Ecto.Query<from p in "parents">
iex> KeysetPaginate.run(queryable, rummage)
#Ecto.Query<from p in "parents", limit: ^10, offset: ^0>
"""
@spec run(Ecto.Query.t(), map) :: {Ecto.Query.t(), map}
def run(queryable, rummage) do
paginate_params = Map.get(rummage, "paginate")
case paginate_params do
a when a in [nil, [], {}, [""], "", %{}] -> queryable
_ -> handle_paginate(queryable, paginate_params)
end
end
@doc """
Implementation of `before_hook` for `Rummage.Ecto.CustomHooks.KeysetPaginate`. This function
takes a `queryable`, `rummage` struct and an `opts` map. Using those it calculates
the `total_count` and `max_page` for the paginate hook.
## Examples
iex> alias Rummage.Ecto.CustomHooks.KeysetPaginate
iex> alias Rummage.Ecto.Category
iex> KeysetPaginate.before_hook(Category, %{}, %{})
%{}
iex> alias Rummage.Ecto.CustomHooks.KeysetPaginate
iex> alias Rummage.Ecto.Category
iex> Ecto.Adapters.SQL.Sandbox.checkout(Rummage.Ecto.Repo)
iex> Rummage.Ecto.Repo.insert(%Category{category_name: "Category 1"})
iex> Rummage.Ecto.Repo.insert(%Category{category_name: "Category 2"})
iex> Rummage.Ecto.Repo.insert(%Category{category_name: "Category 3"})
iex> rummage = %{"paginate" => %{"per_page" => "1", "page" => "1"}}
iex> KeysetPaginate.before_hook(Category, rummage, %{})
%{"paginate" => %{"max_page" => "3", "page" => "1", "per_page" => "1", "total_count" => "3"}}
"""
@spec before_hook(Ecto.Query.t(), map, map) :: map
def before_hook(queryable, rummage, opts) do
paginate_params = Map.get(rummage, "paginate")
case paginate_params do
nil ->
rummage
_ ->
total_count = get_total_count(queryable, opts)
{page, per_page} = parse_page_and_per_page(paginate_params, opts)
per_page = if per_page < 1, do: 1, else: per_page
max_page_fl = total_count / per_page
max_page =
max_page_fl
|> Float.ceil()
|> round
page =
cond do
page < 1 -> 1
max_page > 0 && page > max_page -> max_page
true -> page
end
paginate_params =
paginate_params
|> Map.put("page", Integer.to_string(page))
|> Map.put("per_page", Integer.to_string(per_page))
|> Map.put("total_count", Integer.to_string(total_count))
|> Map.put("max_page", Integer.to_string(max_page))
Map.put(rummage, "paginate", paginate_params)
end
end
defp get_total_count(queryable, opts), do: length(apply(get_repo(opts), :all, [queryable]))
defp get_repo(opts) do
opts[:repo] || Config.default_repo()
end
defp parse_page_and_per_page(paginate_params, opts) do
per_page =
paginate_params
|> Map.get("per_page", Integer.to_string(opts[:per_page] || Config.default_per_page()))
|> String.to_integer()
page =
paginate_params
|> Map.get("page", "1")
|> String.to_integer()
{page, per_page}
end
defp handle_paginate(queryable, paginate_params) do
per_page =
paginate_params
|> Map.get("per_page")
|> String.to_integer()
page =
paginate_params
|> Map.get("page", "1")
|> String.to_integer()
offset = per_page * (page - 1)
queryable
|> limit(^per_page)
|> offset(^offset)
end
end
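# Usage sketch (hypothetical `Product` schema): page 3 at 20 rows per page adds
# `limit: ^20, offset: ^40` to the queryable.
rummage = %{"paginate" => %{"page" => "3", "per_page" => "20"}}
_query = Rummage.Ecto.CustomHooks.KeysetPaginate.run(Product, rummage)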
lib/rummage_ecto/custom_hooks/keyset_paginate.ex
defmodule Zaryn.BeaconChain.Slot.Validation do
@moduledoc false
alias Zaryn.BeaconChain.Slot
alias Zaryn.BeaconChain.Slot.EndOfNodeSync
alias Zaryn.BeaconChain.Slot.TransactionSummary
alias Zaryn.BeaconChain.SummaryTimer
alias Zaryn.Crypto
alias Zaryn.P2P
alias Zaryn.P2P.Message.GetTransactionSummary
alias Zaryn.P2P.Node
alias Zaryn.Replication
@doc """
Validate the transaction summaries to ensure the transactions included really exists
"""
@spec valid_transaction_summaries?(Slot.t()) :: boolean()
def valid_transaction_summaries?(%Slot{transaction_summaries: transaction_summaries}) do
Task.async_stream(transaction_summaries, &do_valid_transaction_summary/1,
ordered: false,
on_timeout: :kill_task
)
|> Enum.into([], fn {:ok, res} -> res end)
|> Enum.all?(&match?(true, &1))
end
defp do_valid_transaction_summary(
summary = %TransactionSummary{address: address, timestamp: timestamp}
) do
case transaction_summary_storage_nodes(address, timestamp) do
[] ->
true
nodes ->
case P2P.reply_atomic(nodes, 3, %GetTransactionSummary{address: address}) do
{:ok, ^summary} ->
true
_ ->
false
end
end
end
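# Nodes enrolled after the previous summary time cannot have stored the
# transaction yet, so they are excluded, as is the current node itself.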
defp transaction_summary_storage_nodes(address, timestamp) do
address
|> Replication.chain_storage_nodes()
|> Enum.filter(fn %Node{enrollment_date: enrollment_date} ->
previous_summary_time = SummaryTimer.previous_summary(timestamp)
diff = DateTime.compare(DateTime.truncate(enrollment_date, :second), previous_summary_time)
diff == :lt or diff == :eq
end)
|> Enum.reject(&(&1.first_public_key == Crypto.first_node_public_key()))
end
@doc """
Validate the end of node synchronization to ensure the list of nodes exists
"""
@spec valid_end_of_node_sync?(Slot.t()) :: boolean
def valid_end_of_node_sync?(%Slot{end_of_node_synchronizations: end_of_node_sync}) do
Enum.all?(end_of_node_sync, fn %EndOfNodeSync{public_key: key} ->
match?({:ok, %Node{first_public_key: ^key}}, P2P.get_node_info(key))
end)
end
end
lib/zaryn/beacon_chain/slot/validation.ex
defmodule DarkEcto.Projections.Types do
@moduledoc """
Type conversions
"""
alias DarkEcto.Projections.PermuteConversions
# @types [:ecto, :postgrex, :typespec, :cli, :typescript, :absinthe, :prop_schema]
# @types [:ecto, :absinthe, :typespec, :typescript, :cli]
@types [:ecto, :absinthe, :typespec, :typescript]
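# Each typing tuple follows the @types order above: {ecto, absinthe, typespec, typescript}.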
@typings [
# Primative
{:binary, :string, "String.t()", "string"},
{:string, :string, "String.t()", "string"},
{:boolean, :boolean, "boolean()", "boolean"},
# Numeric
{:float, :float, "float()", "number"},
{:integer, :integer, "integer()", "Int"},
{:decimal, :decimal, "Decimal.t()", "Decimal"},
# Date / Time
{:time, :time, "Time.t()", "TimeStr"},
{:time_usec, :time, "Time.t()", "TimeStr"},
{:date, :date, "Date.t()", "DateStr"},
{:naive_datetime, :naive_datetime, "NaiveDateTime.t()", "DateTimeStr"},
{:naive_datetime_usec, :naive_datetime, "NaiveDateTime.t()", "DateTimeStr"},
{:utc_datetime, :datetime, "DateTime.t()", "DateTimeStr"},
{:utc_datetime_usec, :datetime, "DateTime.t()", "DateTimeStr"},
# Compound
{:map, :json, "map()", "Object"},
# Nested
# {:array, inner}
# {:map, inner}
# Keys
# {:id, :id, "SFX.primary_key()", "ID"},
{:id, :id, "SFX.foreign_key()", "ID"},
{:binary_id, :uuid4, "Ecto.UUID.t()", "UUID4"},
{Ecto.UUID, :uuid4, "Ecto.UUID.t()", "UUID4"},
# Specialized
{:inet, :inet, "Postgrex.INET()", "IPv4"},
{EctoFields.URL, :string, "String.t()", "UrlStr"},
{EctoNetwork.INET, :inet, "Postgrex.INET()", "IPv4"},
{Postgrex.INET, :inet, "Postgrex.INET()", "IPv4"},
# Custom
{SFX.Ecto.Types.DriversLicenseNumberType, :string, "String.t()", "string"},
{SFX.Ecto.Types.EmailType, :string, "String.t()", "string"},
{SFX.Ecto.Types.FederalTaxIdType, :string, "String.t()", "string"},
{SFX.Ecto.Types.ImageBase64Type, :string, "String.t()", "string"},
{SFX.Ecto.Types.SSNType, :string, "String.t()", "string"},
{SFX.Ecto.Types.USPhoneNumberType, :string, "String.t()", "string"},
{SFX.Ecto.Types.JsonLogicType, :json, "SFX.Ecto.Types.JsonLogicType.t()", "JsonLogicObject"},
# Incomplete knowledge
{:__ecto_virtual_field__, :__EXCLUDE__, "virtual :: any()", "any"},
{:__ecto_join_table__, :__EXCLUDE__, "join_table :: struct()", "Object"}
# {:__ecto_virtual_field__, :__virtual__, "virtual :: any()", "any"},
# {:__ecto_join_table__, :__join_table__, "join_table :: struct()", "Object"}
]
def permuted_conversion_mappings(opts \\ []) do
types = Keyword.get(opts, :types, [])
typings = Keyword.get(opts, :typings, [])
PermuteConversions.permute_conversion_mappings!(@types ++ types, @typings ++ typings)
end
def absinthe_field_types do
for typing <- @typings,
absinthe_type = elem(typing, 1),
absinthe_type not in [:__EXCLUDE__] do
absinthe_type
end
end
end
lib/dark_ecto/projections/types.ex
defmodule Particle.Stream.Event do
@moduledoc false
defstruct event: nil, data: nil, ttl: nil, published_at: nil, coreid: nil
end
defmodule Particle.Stream do
require Logger
alias Experimental.GenStage
alias Particle.Stream.Event
alias Particle.Http
use GenStage
@moduledoc false
@base_url "https://api.particle.io/v1/"
defstruct ref: nil, demand: 0, url: "", event: %Event{}
defdelegate stream(stages), to: GenStage
defdelegate stop(stage, reason \\ :normal, timeout \\ :infinity), to: GenStage
def start_link(url, options \\ []) do
GenStage.start_link(__MODULE__, url, options)
end
def init(url) do
{:ok, ref} = Http.stream(url, self())
{:producer, %__MODULE__{ref: ref, url: url}}
end
def handle_demand(demand, state) when demand > 0 do
if state.demand == 0, do: :hackney.stream_next(state.ref)
{:noreply, [], %__MODULE__{state | demand: state.demand + demand}}
end
def handle_info({:hackney_response, ref, {:status, status_code, reason}}, state) do
if status_code in 200..299 do
if state.demand > 0, do: :hackney.stream_next(ref)
{:noreply, [], %__MODULE__{state | ref: ref}}
else
Logger.warn "Hackney Error: #{status_code} - #{inspect reason}"
:hackney.stream_next(ref)
{:noreply, [], %__MODULE__{state | ref: ref}}
end
end
def handle_info({:hackney_response, _ref, {:headers, _headers}}, state) do
:hackney.stream_next(state.ref)
{:noreply, [], state}
end
def handle_info({:hackney_response, _ref, {:error, reason}}, state) do
Logger.warn "Hackney Error: #{inspect reason}"
{:stop, reason, state}
end
def handle_info({:hackney_response, _ref, :done}, state) do
Logger.warn "Connection Closed"
{:stop, "Connection Closed", state}
end
def handle_info({:hackney_response, _ref, chunk}, state) when is_binary(chunk) do
case event = process_chunk(chunk, state.event) do
%Event{data: d, event: e} when not is_nil(d) and not is_nil(e) ->
if state.demand > 0, do: :hackney.stream_next(state.ref)
{:noreply, [event], %__MODULE__{state | event: %Event{}, demand: max(0, state.demand - 1)}}
{:error, error} ->
Logger.warn "Hackney Error: #{inspect error}"
:hackney.stream_next(state.ref)
# Reset the accumulator rather than storing the error tuple as the partial event.
{:noreply, [], %__MODULE__{state | event: %Event{}}}
_ ->
:hackney.stream_next(state.ref)
{:noreply, [], %__MODULE__{state | event: event}}
end
end
def terminate(_reason, state) do
:hackney.stop_async(state.ref)
end
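# Parses server-sent-event chunks into the %Event{} accumulator, e.g.:
#   "event: temperature"       starts a new event
#   "data: {\"data\": \"23\"}" merges the decoded JSON fields into the accumulator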
defp process_chunk(chunk, acc \\ %Event{}) do
cond do
chunk == "" ->
acc
chunk == ":ok" ->
acc
chunk =~ ~r/event: / ->
%{"event" => event} = Regex.named_captures(~r/event: (?<event>.*)/, chunk)
%Event{event: event, data: nil}
chunk =~ ~r/data: / ->
%{"data" => data} = Regex.named_captures(~r/data: (?<data>.*)/, chunk)
data = data
|> Poison.decode!(keys: :atoms)
struct(acc, data)
chunk =~ ~r/"error":/ ->
error = chunk
|> Poison.decode!(keys: :atoms)
{:error, error}
true ->
acc
end
end
end
lib/particle/stream.ex
defmodule ExIcal.Event do
@moduledoc """
Represents an iCalendar event.
For more information on iCalendar events, please see the official specs
([RFC 2445]). Here is a brief summary of the available properties of
`ExIcal.Event` as well as links for more detailed information:
## Fields
- `start`:
Specifies when the event begins. Corresponds to the iCal `DTSTART`
property ([4.8.2.4 Date/Time Start]).
- `end`:
Specifies the date and time that an event ends. Corresponds to the iCal
`DTEND` property ([4.8.2.2 Date/Time End]).
- `stamp`:
Indicates the date/time that the instance of the iCalendar object was
created; this must be specified in UTC format. Corresponds to the iCal
`DTSTAMP` property ([4.8.7.2 Date/Time Stamp]).
- `description`:
Provides a more complete description of the event than `summary`.
Corresponds to the iCal `DESCRIPTION` property ([4.8.1.5 Description]).
- `summary`:
Defines a short summary or subject for the event. Corresponds to the iCal
`SUMMARY` property ([4.8.1.12 Summary]).
- `rrule`:
Defines a rule or repeating pattern for recurring events. Corresponds to
the iCal `RRULE` property ([4.8.5.4 Recurrence Rule]).
- `categories`:
Defines the categories for a calendar component. Corresponds to
the iCal `CATEGORIES` property ([4.8.1.2 Categories Rule]).
- `uid`:
Defines the persistent, globally unique identifier for the calendar component.
Corresponds to the iCal `UID` property ([4.8.4.7 Unique Identifier]).
[RFC 2445]: https://www.ietf.org/rfc/rfc2445.txt
[4.8.2.4 Date/Time Start]: http://www.kanzaki.com/docs/ical/dtstart.html
[4.8.2.2 Date/Time End]: http://www.kanzaki.com/docs/ical/dtend.html
[4.8.7.2 Date/Time Stamp]: http://www.kanzaki.com/docs/ical/dtstamp.html
[4.8.1.5 Description]: http://www.kanzaki.com/docs/ical/description.html
[4.8.1.12 Summary]: http://www.kanzaki.com/docs/ical/summary.html
[4.8.5.4 Recurrence Rule]: http://www.kanzaki.com/docs/ical/rrule.html
[4.8.1.2 Categories Rule]: https://www.kanzaki.com/docs/ical/categories.html
[4.8.4.7 Unique Identifier]: https://www.kanzaki.com/docs/ical/uid.html
While this covers many of the commonly-used properties of an iCal `VEVENT`,
`ExIcal` does not yet have full coverage of all valid properties. More
properties will be added over time, but if you need a legal iCalendar
property that `ExIcal` does not yet support, please submit an issue on GitHub.
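## Example
A populated event might look like this (illustrative field values):
    %ExIcal.Event{
      start: ~N[2021-06-01 09:00:00],
      end: ~N[2021-06-01 10:00:00],
      summary: "Standup",
      description: "Daily team sync",
      uid: "standup-2021-06-01@example.com"
    }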
"""
defstruct start: nil,
end: nil,
stamp: nil,
description: nil,
summary: nil,
rrule: nil,
categories: nil,
uid: nil
end
lib/ex_ical/event.ex
defmodule ExUnitJsonFormatter do
use GenServer
@moduledoc """
Formats ExUnit output as a stream of JSON objects (roughly compatible
with Mocha's json-stream reporter)
"""
# GenServer callbacks that receive test runner messages
def init(opts) do
config = %{
seed: opts[:seed],
trace: opts[:trace],
pass_counter: 0,
failure_counter: 0,
skipped_counter: 0,
invalid_counter: 0,
case_counter: 0,
start_time: nil
}
{:ok, config}
end
def handle_cast({:suite_started, opts}, state) do
["start", %{"including" => opts[:include],
"excluding" => opts[:exclude]}]
|> Poison.encode!
|> IO.puts
{:noreply, %{state | start_time: NaiveDateTime.utc_now}}
end
def handle_cast({:suite_finished, run_us, load_us}, state) do
["end",format_stats(state, run_us, load_us)]
|> Poison.encode!
|> IO.puts
{:noreply, state}
end
def handle_cast({:case_started, _}, state) do
{:noreply, state}
end
def handle_cast({:case_finished, test_case = %ExUnit.TestCase{state: {:failed, failure}}}, state) do
["fail", format_test_case_failure(test_case, failure)]
|> Poison.encode!
|> IO.puts
{:noreply, state}
end
def handle_cast({:case_finished, _}, state) do
{:noreply, %{state | case_counter: state[:case_counter] + 1}}
end
def handle_cast({:test_started, _}, state) do
{:noreply, state}
end
def handle_cast({:test_finished, test = %ExUnit.Test{state: nil}}, state) do
["pass", format_test_pass(test)]
|> Poison.encode!
|> IO.puts
{:noreply, %{state | pass_counter: state[:pass_counter] + 1}}
end
def handle_cast({:test_finished, test = %ExUnit.Test{state: {:failed, failure}}}, state) do
["fail", format_test_failure(test, failure)]
|> Poison.encode!
|> IO.puts
{:noreply, %{state | failure_counter: state[:failure_counter] + 1}}
end
def handle_cast({:test_finished, %ExUnit.Test{state: {:skip, _}}}, state) do
{:noreply, %{state | skipped_counter: state[:skipped_counter] + 1}}
end
def handle_cast({:test_finished, %ExUnit.Test{state: {:invalid, _}}}, state) do
{:noreply, %{state | invalid_counter: state[:invalid_counter] + 1}}
end
# FORMATTING FUNCTIONS
import Exception, only: [format_stacktrace_entry: 1, format_file_line: 3]
@counter_padding ""
@width 80
@no_value ExUnit.AssertionError.no_value
@doc """
Receives test stats and formats them to JSON
"""
def format_stats(%{pass_counter: passed, failure_counter: failed, skipped_counter: skipped,
invalid_counter: invalid, case_counter: cases, start_time: start},
run_us, load_us) do
stats = %{"duration" => run_us / 1_000,
"start" => NaiveDateTime.to_iso8601(start),
"end" => NaiveDateTime.to_iso8601(NaiveDateTime.utc_now),
"passes" => passed,
"failures" => failed,
"pending" => skipped,
"invalid" => invalid,
"tests" => passed + failed + skipped + invalid,
"suites" => cases}
if not is_nil(load_us), do: Map.put(stats, "loadTime", load_us / 1_000), else: stats
end
@doc """
Receives a test and formats its information
"""
def format_test_pass(test) do
%ExUnit.Test{case: case, name: name} = test
name_str = Atom.to_string(name)
case_str = case |> Atom.to_string |> String.trim_leading("Elixir.")
%{"title" => name_str, "fullTitle" => "#{case_str}: #{name_str}"}
end
@doc """
Receives a test and formats its failure.
"""
def format_test_failure(test, failures) do
%ExUnit.Test{name: name, case: case, tags: tags} = test
message = Enum.map_join(Enum.with_index(failures), "", fn {{kind, reason, stack}, index} ->
{text, stack} = format_kind_reason(test, kind, reason, stack, @width)
failure_header(failures, index) <> text <> format_stacktrace(stack, case, name, nil)
end) <> report(tags, failures, @width)
%{"title" => to_string(name),
"fullTitle" => "#{inspect case}: #{name}",
"err" => %{"file" => Path.relative_to_cwd(tags[:file]),
"line" => tags[:line],
"message" => message}}
end
defp format_assertion_error(test, struct, stack, width, counter_padding) do
label_padding_size = if has_value?(struct.right), do: 7, else: 6
padding_size = label_padding_size + byte_size(@counter_padding)
inspect = &inspect_multiline(&1, padding_size, width)
{left, right} = format_sides(struct, inspect)
[
note: if_value(struct.message, &format_message(&1)),
code: if_value(struct.expr, &code_multiline(&1, padding_size)),
code: unless_value(struct.expr, fn -> get_code(test, stack) || @no_value end),
left: left,
right: right
]
|> format_meta(label_padding_size)
|> make_into_lines(counter_padding)
end
defp report(tags, failures, width) do
case Map.take(tags, List.wrap(tags[:report])) do
report when map_size(report) == 0 ->
""
report ->
report_spacing(failures) <>
"tags:" <>
Enum.map_join(report, "", fn {key, value} ->
prefix = " #{key}: "
prefix <> inspect_multiline(value, byte_size(prefix), width) <> "\n"
end)
end
end
defp report_spacing([_]), do: ""
defp report_spacing(_), do: "\n"
@doc """
Receives a test case and formats its failure.
"""
def format_test_case_failure(test_case, failures) do
%ExUnit.TestCase{name: name, tests: tests} = test_case
tags = tests |> hd |> Map.get(:tags)
title = "#{inspect name}: failure on setup_all callback"
message = Enum.map_join Enum.with_index(failures), "", fn {{kind, reason, stack}, index} ->
{text, stack} = format_kind_reason(test_case, kind, reason, stack, 80)
failure_header(failures, index) <> text <> format_stacktrace(stack, name, nil, nil)
end
%{"title" => title,
"fullTitle" => title,
"err" => %{"file" => Path.relative_to_cwd(tags[:file]),
"message" => message}}
end
defp format_kind_reason(test, :error, %ExUnit.AssertionError{} = struct, stack, width) do
{format_assertion_error(test, struct, stack, width, @counter_padding), stack}
end
defp format_kind_reason(test, kind, reason, stack, _width) do
message = Exception.format_banner(kind, reason)
{message <> format_code(test, stack), stack}
end
defp format_code(test, stack) do
if snippet = get_code(test, stack) do
"code: " <> snippet <> "\n"
else
""
end
end
defp get_code(%{case: case, name: name}, stack) do
info = Enum.find_value(stack, fn {^case, ^name, _, info} -> info; _ -> nil end)
file = info[:file]
line = info[:line]
if line > 0 && file && File.exists?(file) do
file |> File.stream! |> Enum.at(line - 1) |> String.trim
end
rescue
_ -> nil
end
defp get_code(%{}, _) do
nil
end
defp format_meta(fields, padding_size) do
for {label, value} <- fields, has_value?(value) do
format_label(label, padding_size) <> value
end
end
defp if_value(value, fun) do
if has_value?(value) do
fun.(value)
else
value
end
end
defp unless_value(value, fun) do
if has_value?(value) do
@no_value
else
fun.()
end
end
defp has_value?(value) do
value != @no_value
end
defp format_label(:note, _padding_size), do: ""
defp format_label(label, padding_size) do
String.pad_trailing("#{label}:", padding_size)
end
defp format_message(value) do
String.replace(value, "\n", "\n" <> @counter_padding)
end
defp code_multiline(expr, padding_size) when is_binary(expr) do
padding = String.duplicate(" ", padding_size)
String.replace(expr, "\n", "\n" <> padding)
end
defp code_multiline({fun, _, [expr]}, padding_size) when is_atom(fun) do
code_multiline(Atom.to_string(fun) <> " " <> Macro.to_string(expr), padding_size)
end
defp code_multiline(expr, padding_size) do
code_multiline(Macro.to_string(expr), padding_size)
end
defp inspect_multiline(expr, padding_size, width) do
padding = String.duplicate(" ", padding_size)
width = if width == :infinity, do: width, else: width - padding_size
inspect(expr, [pretty: true, width: width])
|> String.replace("\n", "\n" <> padding)
end
defp make_into_lines(reasons, padding) do
padding <> Enum.join(reasons, "\n" <> padding) <> "\n"
end
defp format_sides(struct, inspect) do
%{left: left, right: right} = struct
case format_diff(left, right) do
{left, right} ->
{IO.iodata_to_binary(left), IO.iodata_to_binary(right)}
nil ->
{if_value(left, inspect), if_value(right, inspect)}
end
end
defp format_diff(left, right) do
if has_value?(left) and has_value?(right) do
if script = edit_script(left, right) do
colorize_diff(script, {[], []})
end
end
end
defp colorize_diff(script, acc) when is_list(script) do
Enum.reduce(script, acc, &colorize_diff(&1, &2))
end
defp colorize_diff({:eq, content}, {left, right}) do
{[left | content], [right | content]}
end
defp colorize_diff({:del, content}, {left, right}) do
{[left | content], right}
end
defp colorize_diff({:ins, content}, {left, right}) do
{left, [right | content]}
end
defp edit_script(left, right) do
task = Task.async(ExUnit.Diff, :script, [left, right])
case Task.yield(task, 1_500) || Task.shutdown(task, :brutal_kill) do
{:ok, script} -> script
nil -> nil
end
end
defp format_stacktrace([], _case, _test, _color) do
""
end
defp format_stacktrace(stacktrace, test_case, test, color) do
"stacktrace:" <>
Enum.map_join(stacktrace, fn entry ->
stacktrace_info format_stacktrace_entry(entry, test_case, test), color
end)
end
defp format_stacktrace_entry({test_case, test, _, location}, test_case, test) do
format_file_line(location[:file], location[:line], " (test)")
end
defp format_stacktrace_entry(entry, _test_case, _test) do
format_stacktrace_entry(entry)
end
defp failure_header([_], _), do: ""
defp failure_header(_, i), do: "\n#{@counter_padding}Failure ##{i+1}\n"
defp stacktrace_info("", _formatter), do: ""
defp stacktrace_info(msg, nil), do: " " <> msg <> "\n"
end
lib/exunit_json_formatter.ex
defmodule LiveAttribute do
use GenServer
require Logger
defstruct [:refresher, :subscribe, :target, :filter, :keys]
@moduledoc """
LiveAttribute makes binding updateable values easier. To use it add it to your LiveView using `use LiveAttribute`
and then use the function `assign_attribute(socket, subscribe_callback, property_callbacks)` to register attributes.
The attributes will listen to all incoming events and update the assigns of your LiveView automatically, saving
you the hassle of implementing independent `handle_info()` and `update_...()` calls.
## Example using LiveAttribute
```
defmodule UserLive do
use Phoenix.LiveView
use LiveAttribute
def mount(_params, _session, socket) do
{:ok, assign_attribute(socket, &User.subscribe/0, users: &User.list_users/0)}
end
def handle_event("delete_user", %{"id" => user_id}, socket) do
User.get_user!(user_id)
|> User.delete_user()
{:noreply, socket}
end
end
```
## Same Example without LiveAttribute
```
defmodule UserLive do
use Phoenix.LiveView
def mount(_params, _session, socket) do
if connected?(socket), do: User.subscribe()
{:ok, update_users(socket)}
end
defp update_users(socket) do
users = User.list_users()
assign(socket, users: users)
end
def handle_event("delete_user", %{"id" => user_id}, socket) do
User.get_user!(user_id)
|> User.delete_user()
{:noreply, socket}
end
def handle_info({User, [:user, _], _}, socket) do
{:noreply, update_users(socket)}
end
end
```
### assign\\_attribute(socket, subscribe, filter \\\\ :\\_, refresher)
* `socket` the LiveView socket where the assigns should be executed on
* `subscribe` the subscribe callback to start the subscription e.g. `&Users.subscribe/0`
* `filter` an optional filter if you don't want to update on each event. The filter can either be an expression
using `:_` as wildcard parameter such as `{User, [:user, :_], :_}`. Alternatively `filter`
can be a function with one parameter
_Note_ LiveAttribute issues each subscribe call in an isolated helper process, so you only need
to add filters to reduce the scope of a single subscription.
* `refresher` the function callback to load the new values after a subscription event has
fired.
"""
@doc false
@type socket :: %Phoenix.LiveView.Socket{}
@typedoc """
The refresher list is passed to `assign_attribute()` to know which assigns to update when
the subscription source issues an event.
It is a list of `{key, callback}` pairs specifying how to load the new attribute values.
The `callback` can optionally take one argument to read context from the socket.
## refresher() examples
```
# 1. Zero argument callback to update the users list:
[users: &User.list_all/0]
# 2. Single argument callback to use the socket state in the update:
[users: fn socket ->
User.list_all() -- socket.assigns.blacklist
end]
# 3. Special `socket` key to assign multiple values at once manually
[socket: fn socket ->
assign(socket,
users: User.list_all() -- socket.assigns.blacklist,
last_update: System.os_time()
)
end]
```
## Usage Examples
```
iex> assign_attribute(socket, &User.subscribe(), users: &User.list_all/0)
iex> assign_attribute(socket, &User.subscribe(), fn socket ->
assign(socket, users: User.list_all() -- socket.assigns.blacklist)
end)
```
"""
@type refresher :: [{atom(), (() -> any()) | (socket() -> any())}] | (socket() -> socket())
@typedoc """
The filter enables optimization by a) ignoring certain events from the subscription
source and b) passing event values directly into the assigns, instead of using refresher
functions to re-load them.
It can be either a match object defining which events should be matched, or a function
returning `false` when the event should be ignored or a map when it should be processed.
Keys that are present in the map will be assigned to the socket. (if there are matching
keys in the refresher list)
## Filter function
The filter function receives the event and should return either `false` or a map of
the new values:
```
fn event ->
case event do
{User, :users_updated, users} -> %{users: users}
_ -> false
end
end
```
## Filter object
Match objects are defined by example of a matching list or tuple. These can be customized
using two special terms:
- `:_` the wildcard which matches any value, but ignores it
- `{:"$", some_key}` - which matches any value, and uses it as update value in the socket assigns
## Examples
```
# Let's assume the `User` module generates the following event each time
# the user list is updated: `{User, :users_updated, all_users}`
# then the following match object will extract the users
{User, :users_updated, {:"$", :users}}
# Full function call with match object
assign_attribute(socket, &User.subscribe/0, {User, :users_updated, {:"$", :users}}, users: &User.list/0)
# The same result can be achieved with a function callback instead:
fn event ->
case event do
{User, :users_updated, users} -> %{users: users}
_ -> false
end
end
# Full function call with callback
assign_attribute(socket, &User.subscribe/0,
fn event ->
case event do
{User, :users_updated, users} -> %{users: users}
_ -> false
end
end,
users: &User.list/0)
```
"""
@type filter :: atom() | tuple() | list() | (() -> false | %{})
defmacro __using__(_opts) do
quote do
import LiveAttribute,
only: [update_attribute: 2, assign_attribute: 2, assign_attribute: 3, assign_attribute: 4]
def handle_info({LiveAttribute, refresher, updates}, socket) do
{:noreply, refresher.(socket, updates)}
end
end
end
@doc """
Shortcut version of `assign_attribute` to capture an attribute configuration
in a tuple and re-use in multiple LiveViews. This accepts two-element and three-
element tuples with: `{subscribe, refresher}` or `{subscribe, filter, refresher}`
respectively.
Use with:
```
socket = assign_attribute(socket, User.live_attribute())
```
When there is an `User` method:
```
defmodule User do
def live_attribute() do
{&subscribe/0, users: &list_users/0}
end
...
end
```
"""
@spec assign_attribute(socket(), tuple()) :: socket()
def assign_attribute(socket, tuple) when is_tuple(tuple) do
case tuple do
{subscribe, refresher} -> assign_attribute(socket, subscribe, refresher)
{subscribe, filter, refresher} -> assign_attribute(socket, subscribe, filter, refresher)
end
end
@doc """
```
socket = assign_attribute(socket, &User.subscribe/0, users: &User.list/0)
```
`assign_attribute` updates the specified assign keys each time there is a new event sent from
the subscription source.
See `refresher()` and `filter()` for advanced usage of these parameters. Simple usage:
"""
@spec assign_attribute(
socket(),
(() -> any()),
filter(),
refresher()
) :: socket()
def assign_attribute(socket, subscribe, filter \\ :_, refresher)
def assign_attribute(socket, subscribe, filter, refresher) when is_list(refresher) do
keys = Keyword.keys(refresher)
update_fun = fn socket, updates ->
Enum.reduce(refresher, socket, fn
{:socket, value}, socket ->
value.(socket)
{key, value}, socket ->
reload = fn -> LiveAttribute.apply(socket, value) end
value = Map.get_lazy(updates, key, reload)
assign(socket, [{key, value}])
end)
end
socket =
if connected?(socket) do
{:ok, pid} = LiveAttribute.new(subscribe, filter, update_fun, keys)
id =
Keyword.keys(refresher)
|> Enum.sort()
meta = Map.get(socket.assigns, :_live_attributes, %{})
LiveAttribute.stop(Map.get(meta, id))
meta = Map.put(meta, id, pid)
assign(socket, _live_attributes: meta)
else
socket
end
update_fun.(socket, %{})
end
def assign_attribute(socket, subscribe, filter, refresher) when is_function(refresher, 1) do
# Logger.error("using deprecated assign_attribute/4 with fun as refresher")
if connected?(socket) do
LiveAttribute.new(subscribe, filter, refresher, [])
end
refresher.(socket)
end
@doc """
```
socket = update_attribute(socket, :users)
```
Helper method to issue a update callback manually on a live attribute, when there
is a known update but no subscription event.
"""
def update_attribute(socket, name) do
pid =
Map.get(socket.assigns, :_live_attributes, %{})
|> Map.get(name)
if pid != nil do
refresher = GenServer.call(pid, :get_refresher)
refresher.(socket, %{})
else
Logger.error("update_attribute: #{inspect(name)} is not bound")
socket
end
end
@doc false
def new(subscribe, filter, refresher, keys) do
la = %LiveAttribute{
filter: filter,
refresher: refresher,
subscribe: subscribe,
target: self(),
keys: keys
}
GenServer.start_link(__MODULE__, la, hibernate_after: 5_000)
end
@impl true
@doc false
def init(%LiveAttribute{target: target, subscribe: subscribe} = la) do
Process.monitor(target)
subscribe.()
{:ok, la}
end
@doc false
def stop(nil), do: :ok
def stop(pid) do
GenServer.cast(pid, :stop)
end
@impl true
@doc false
def handle_info({:DOWN, _ref, :process, _pid}, state) do
{:stop, :normal, state}
end
@impl true
@doc false
def handle_info(
any,
%LiveAttribute{target: target, refresher: refresher, filter: filter} = state
) do
case matches?(filter, any) do
false -> :noop
%{} = updates -> send(target, {LiveAttribute, refresher, updates})
end
{:noreply, state}
end
@impl true
def handle_call(:get_refresher, _from, %LiveAttribute{refresher: refresher} = state) do
{:reply, refresher, state}
end
@impl true
def handle_cast(:stop, %LiveAttribute{} = state) do
{:stop, :normal, state}
end
@doc false
def matches?({:"$", key}, value), do: %{key => value}
def matches?(:_, _any), do: %{}
def matches?(fun, any) when is_function(fun, 1), do: fun.(any)
def matches?(same, same), do: %{}
def matches?(tuple1, tuple2) when is_tuple(tuple1) and is_tuple(tuple2),
do: matches?(Tuple.to_list(tuple1), Tuple.to_list(tuple2))
def matches?([head1 | rest1], [head2 | rest2]) do
case matches?(head1, head2) do
false ->
false
%{} = updates ->
case matches?(rest1, rest2) do
false -> false
%{} = more_updates -> Map.merge(updates, more_updates)
end
end
end
def matches?(_, _), do: false
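# Example: matches?({User, :_, {:"$", :users}}, {User, :users_updated, users})
# returns %{users: users}; any non-matching event returns false.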
@doc false
def apply(_socket, refresher) when is_function(refresher, 0), do: refresher.()
def apply(socket, refresher) when is_function(refresher, 1), do: refresher.(socket)
defp connected?(socket) do
case socket do
%Phoenix.LiveView.Socket{} -> Phoenix.LiveView.connected?(socket)
%other{} -> other.connected?(socket)
end
end
defp assign(socket, values) do
case socket do
%Phoenix.LiveView.Socket{} -> Phoenix.LiveView.assign(socket, values)
%other{} -> other.assign(socket, values)
end
end
end
lib/live_attribute.ex
defmodule Erl2ex.Source do
@moduledoc """
Erl2ex.Source is a process that produces Erlang source, normally reading
files from the file system.
"""
@typedoc """
The ProcessID of a source process.
"""
@type t :: pid()
@doc """
Starts a source and returns its PID.
"""
@spec start_link(list) :: t
def start_link(opts) do
{:ok, pid} = GenServer.start_link(__MODULE__, opts)
pid
end
@doc """
Reads the source file at the given path or symbolic location, and returns a
tuple comprising the data in the file and the full path to it.
"""
@spec read_source(t, Erl2ex.file_id) :: {String.t, Erl2ex.file_id}
def read_source(source, path) do
source
|> GenServer.call({:read_source, path})
|> handle_result
end
@doc """
Reads the include file at the given path, given a context directory, and
returns a tuple comprising the data in the file and the full path to it.
"""
@spec read_include(t, Path.t, Path.t | nil) :: {String.t, Path.t}
def read_include(source, path, cur_dir) do
source
|> GenServer.call({:read_include, path, cur_dir})
|> handle_result
end
@doc """
Reads the include file at the given path, given a context library, and
returns a tuple comprising the data in the file and the full path to it.
"""
@spec read_lib_include(t, atom, Path.t) :: {String.t, Path.t}
def read_lib_include(source, lib, path) do
source
|> GenServer.call({:read_lib_include, lib, path})
|> handle_result
end
@doc """
Stops the source process.
"""
@spec stop(t) :: :ok
def stop(source) do
GenServer.cast(source, {:stop})
end
defp handle_result({:ok, data, path}), do: {data, path}
defp handle_result({:error, code, path}) do
raise CompileError,
file: path,
line: :unknown,
description: "Error #{code} while reading source file"
end
use GenServer
defmodule State do
@moduledoc false
defstruct(
source_dir: nil,
source_data: %{},
include_dirs: [],
include_data: %{},
lib_dirs: %{},
lib_data: %{}
)
end
def init(opts) do
source_dir = Keyword.get(opts, :source_dir, nil)
source_data = opts
|> Keyword.get_values(:source_data)
|> Enum.reduce(%{}, &(add_to_map(&2, &1)))
include_dirs = opts
|> Keyword.get_values(:include_dir)
|> Enum.reduce([], &([&1 | &2]))
include_data = opts
|> Keyword.get_values(:include_data)
|> Enum.reduce(%{}, &(add_to_map(&2, &1)))
lib_dirs = opts
|> Keyword.get_values(:lib_dir)
|> Enum.reduce(%{}, &(add_to_map(&2, &1)))
lib_data = opts
|> Keyword.get_values(:lib_data)
|> Enum.reduce(%{}, &(add_to_map(&2, &1)))
{:ok,
%State{
source_dir: source_dir,
source_data: source_data,
include_dirs: include_dirs,
include_data: include_data,
lib_dirs: lib_dirs,
lib_data: lib_data,
}
}
end
def handle_call(
{:read_source, path},
_from,
%State{source_dir: source_dir, source_data: source_data} = state)
do
dirs = if source_dir == nil, do: [], else: [source_dir]
result = read_impl(path, source_data, dirs)
{:reply, result, state}
end
def handle_call(
{:read_include, path, cur_dir},
_from,
%State{include_dirs: include_dirs, include_data: include_data} = state)
do
dirs =
if cur_dir == nil do
include_dirs
else
[cur_dir | include_dirs]
end
dirs = [File.cwd! | dirs]
result = read_impl(path, include_data, dirs)
{:reply, result, state}
end
def handle_call(
{:read_lib_include, lib, path},
_from,
%State{lib_data: lib_data, lib_dirs: lib_dirs} = state)
do
case get_lib_dir(lib_dirs, lib) do
{:error, code} ->
{:reply, {:error, code, path}, state}
{:ok, lib_dir} ->
result = read_impl(path, lib_data, [lib_dir])
{:reply, result, state}
end
end
def handle_cast({:stop}, state) do
{:stop, :normal, state}
end
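# Resolution order: explicit in-memory data first (a binary, or an IO device
# that is drained), then each search directory until the file exists on disk.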
defp read_impl(path, data_map, search_dirs) do
case Map.fetch(data_map, path) do
{:ok, data} when is_binary(data) ->
{:ok, data, path}
{:ok, io} when is_pid(io) ->
data = io |> IO.read(:all) |> IO.chardata_to_string
{:ok, data, path}
:error ->
Enum.find_value(search_dirs, {:error, :not_found, path}, fn dir ->
actual_path = Path.expand(path, dir)
if File.exists?(actual_path) do
case File.read(actual_path) do
{:ok, data} -> {:ok, data, actual_path}
{:error, code} -> {:error, code, path}
end
else
false
end
end)
end
end
defp get_lib_dir(lib_dirs, lib) do
case Map.fetch(lib_dirs, lib) do
{:ok, dir} ->
{:ok, dir}
:error ->
case :code.lib_dir(lib) do
{:error, code} -> {:error, code}
dir -> {:ok, dir}
end
end
end
defp add_to_map(map, value) when is_map(value), do:
Map.merge(map, value)
defp add_to_map(map, {key, value}), do:
Map.put(map, key, value)
defp add_to_map(map, value), do:
Map.put(map, nil, value)
end
lib/erl2ex/source.ex
defmodule Qoix do
@moduledoc """
Qoix is an Elixir implementation of the [Quite OK Image format](https://qoiformat.org).
"""
alias Qoix.Image
use Bitwise
@index_op <<0::2>>
@diff_op <<1::2>>
@luma_op <<2::2>>
@run_op <<3::2>>
@rgb_op <<254::8>>
@rgba_op <<255::8>>
@padding :binary.copy(<<0>>, 7) <> <<1>>
@empty_lut for i <- 0..63, into: %{}, do: {i, <<0::32>>}
@doc """
Returns true if the binary appears to contain a valid QOI image.
"""
@spec qoi?(binary) :: boolean
def qoi?(<<"qoif", _width::32, _height::32, channels::8, cspace::8, _rest::binary>> = _binary)
when channels in [3, 4] and cspace in [0, 1] do
true
end
def qoi?(binary) when is_binary(binary) do
false
end
@doc """
Encodes a `%Qoix.Image{}` using QOI, returning a binary with the encoded image.
Returns `{:ok, encoded}` on success, `{:error, reason}` on failure.
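Illustrative example (a single opaque red pixel; the struct values are assumptions):
    {:ok, encoded} =
      Qoix.encode(%Qoix.Image{width: 1, height: 1, format: :rgba,
                              colorspace: :srgb, pixels: <<255, 0, 0, 255>>})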
"""
@spec encode(Qoix.Image.t()) :: {:ok, binary} | {:error, any}
def encode(%Image{width: w, height: h, pixels: pixels, format: fmt, colorspace: cspace})
when w > 0 and h > 0 and fmt in [:rgb, :rgba] and cspace in [:srgb, :linear] and
is_binary(pixels) do
channels = channels(fmt)
colorspace = encode_colorspace(cspace)
chunks =
pixels
|> encode_pixels(fmt)
|> IO.iodata_to_binary()
# Return the final binary
data = <<"qoif", w::32, h::32, channels::8, colorspace::8, chunks::bits, @padding::bits>>
{:ok, data}
end
defp channels(:rgb), do: 3
defp channels(:rgba), do: 4
defp encode_colorspace(:srgb), do: 0
defp encode_colorspace(:linear), do: 1
defp encode_pixels(<<pixels::binary>>, format) when format == :rgb or format == :rgba do
# Previous pixel is initialized to 0,0,0,255
prev = <<0, 0, 0, 255>>
run_length = 0
lut = @empty_lut
acc = []
do_encode(pixels, format, prev, run_length, lut, acc)
end
# Here we go with all the possible cases. Order matters due to pattern matching.
# Maximum representable run_length, push out and start a new one
defp do_encode(<<pixels::bits>>, format, prev, run_length, lut, acc) when run_length == 62 do
acc = [acc | <<@run_op::bits, bias_run(run_length)::6>>]
do_encode(pixels, format, prev, 0, lut, acc)
end
# Same RGBA pixel as previous, consume and increase run_length
defp do_encode(<<pixel::32, rest::bits>>, :rgba = format, <<pixel::32>>, run_length, lut, acc) do
do_encode(rest, format, <<pixel::32>>, run_length + 1, lut, acc)
end
# Same RGB pixel as previous, consume and increase run_length
defp do_encode(<<pixel::24, rest::bits>>, :rgb = format, <<pixel::24>>, run_length, lut, acc) do
do_encode(rest, format, <<pixel::24>>, run_length + 1, lut, acc)
end
# Since we didn't match the previous head, the pixel is different from the previous.
# We don't have any ongoing run_length, so we just have to handle the pixel.
defp do_encode(<<r::8, g::8, b::8, a::8, rest::bits>>, :rgba = format, prev, 0, lut, acc) do
pixel = <<r::8, g::8, b::8, a::8>>
{chunk, new_lut} = handle_non_running_pixel(pixel, prev, lut)
acc = [acc | chunk]
do_encode(rest, format, pixel, 0, new_lut, acc)
end
# As above, but for RGB
defp do_encode(<<r::8, g::8, b::8, rest::bits>>, :rgb = format, prev, 0, lut, acc) do
pixel = <<r::8, g::8, b::8, 255::8>>
{chunk, new_lut} = handle_non_running_pixel(pixel, prev, lut)
acc = [acc | chunk]
do_encode(rest, format, pixel, 0, new_lut, acc)
end
# For the same reason as above, the pixel is different from the previous.
# Here we just emit the run length and leave the pixel handling to the next recursion,
# that will enter in the previous head.
defp do_encode(<<pixels::bits>>, format, prev, run_length, lut, acc)
when run_length > 0 do
acc = [acc | <<@run_op::bits, bias_run(run_length)::6>>]
do_encode(pixels, format, prev, 0, lut, acc)
end
# All pixels consumed, no ongoing run: just output the accumulator
defp do_encode(<<>>, _format, _prev, 0, _lut, acc) do
acc
end
# All pixels consumed, pending run: output the accumulator and the 6 bit run with its tag
defp do_encode(<<>>, _format, _prev, run_length, _lut, acc) do
[acc | <<@run_op::bits, bias_run(run_length)::6>>]
end
# Handle a pixel that is not part of a run, return a {chunk, updated_lut} tuple
defp handle_non_running_pixel(<<r::8, g::8, b::8, a::8>> = pixel, prev, lut) do
index = index(r, g, b, a)
case lut do
%{^index => <<^r::8, ^g::8, ^b::8, ^a::8>>} ->
{<<@index_op::bits, index::6>>, lut}
_other ->
# The value was different from our current pixel
chunk = diff_luma_color(pixel, prev)
new_lut = Map.put(lut, index, <<r, g, b, a>>)
{chunk, new_lut}
end
end
defguardp in_range_2?(val) when val in -2..1
defguardp in_range_4?(val) when val in -8..7
defguardp in_range_6?(val) when val in -32..31
# Check if value can be represented with diff op
defguardp diff_op?(dr, dg, db) when in_range_2?(dr) and in_range_2?(dg) and in_range_2?(db)
# Check if value can be represented with luma op
defguardp luma_op?(dr, dg, db)
when in_range_6?(dg) and in_range_4?(dr - dg) and in_range_4?(db - dg)
# Emit a diff, luma, rgb or rgba chunk
defp diff_luma_color(<<r::8, g::8, b::8, a::8>> = _pixel, <<pr::8, pg::8, pb::8, a::8>> = _prev)
when diff_op?(r - pr, g - pg, b - pb) do
<<@diff_op::bits, bias_diff(r - pr)::2, bias_diff(g - pg)::2, bias_diff(b - pb)::2>>
end
defp diff_luma_color(<<r::8, g::8, b::8, a::8>> = _pixel, <<pr::8, pg::8, pb::8, a::8>> = _prev)
when luma_op?(r - pr, g - pg, b - pb) do
dg = g - pg
dr_dg = r - pr - dg
db_dg = b - pb - dg
<<@luma_op::bits, bias_luma_dg(dg)::6, bias_luma_dr_db(dr_dg)::4, bias_luma_dr_db(db_dg)::4>>
end
defp diff_luma_color(<<r::8, g::8, b::8, a::8>>, <<_prgb::24, a::8>> = _prev) do
# Same alpha, emit RGB
<<@rgb_op, r::8, g::8, b::8>>
end
defp diff_luma_color(<<r::8, g::8, b::8, a::8>>, _prev) do
# Last resort, full RGBA color
<<@rgba_op, r::8, g::8, b::8, a::8>>
end
@doc """
Decodes a QOI image, returning an `%Image{}`.
Returns `{:ok, %Image{}}` on success, `{:error, reason}` on failure.
"""
@spec decode(binary) :: {:ok, Qoix.Image.t()} | {:error, any}
def decode(<<encoded::binary>> = _encoded) do
case encoded do
<<"qoif", width::32, height::32, channels::8, cspace::8, chunks::binary>> ->
format = format(channels)
colorspace = decode_colorspace(cspace)
pixels =
chunks
|> decode_chunks(format)
|> IO.iodata_to_binary()
image = %Image{
width: width,
height: height,
pixels: pixels,
format: format,
colorspace: colorspace
}
{:ok, image}
_ ->
{:error, :invalid_qoi}
end
end
defp format(3), do: :rgb
defp format(4), do: :rgba
defp decode_colorspace(0), do: :srgb
defp decode_colorspace(1), do: :linear
defp decode_chunks(<<chunks::bits>>, format) do
# Previous pixel is initialized to 0,0,0,255
prev = <<0, 0, 0, 255>>
lut = @empty_lut
acc = []
do_decode(chunks, format, prev, lut, acc)
end
# Let's decode, order matters since 8 bit opcodes have predence over 2 bit opcodes
# Final padding, we're done, return the accumulator
defp do_decode(@padding, _format, _prev, _lut, acc) do
acc
end
# RGB: take just alpha from previous pixel
defp do_decode(<<@rgb_op, r::8, g::8, b::8, rest::bits>>, format, prev, lut, acc) do
<<_prgb::24, pa::8>> = prev
pixel = <<r, g, b, pa>>
acc = [acc | maybe_drop_alpha(pixel, format)]
do_decode(rest, format, pixel, update_lut(lut, pixel), acc)
end
# RGBA: pixel encoded with full information
defp do_decode(<<@rgba_op, r::8, g::8, b::8, a::8, rest::bits>>, format, _prev, lut, acc) do
pixel = <<r, g, b, a>>
acc = [acc | maybe_drop_alpha(pixel, format)]
do_decode(rest, format, pixel, update_lut(lut, pixel), acc)
end
# Index: get the pixel from the LUT
defp do_decode(<<@index_op, index::6, rest::bits>>, format, _prev, lut, acc) do
%{^index => pixel} = lut
acc = [acc | maybe_drop_alpha(pixel, format)]
do_decode(rest, format, pixel, lut, acc)
end
# Run: repeat previous pixel
defp do_decode(<<@run_op, count::6, rest::bits>>, format, prev, lut, acc) do
pixels =
maybe_drop_alpha(prev, format)
|> :binary.copy(unbias_run(count))
acc = [acc | pixels]
do_decode(rest, format, prev, lut, acc)
end
# Diff: reconstruct pixel from previous + diff
defp do_decode(<<@diff_op, dr::2, dg::2, db::2, rest::bits>>, format, prev, lut, acc) do
<<pr, pg, pb, pa>> = prev
r = pr + unbias_diff(dr)
g = pg + unbias_diff(dg)
b = pb + unbias_diff(db)
pixel = <<r, g, b, pa>>
acc = [acc | maybe_drop_alpha(pixel, format)]
do_decode(rest, format, pixel, update_lut(lut, pixel), acc)
end
# Luma: reconstruct pixel from previous + diff
defp do_decode(<<@luma_op, b_dg::6, dr_dg::4, db_dg::4, rest::bits>>, format, prev, lut, acc) do
<<pr, pg, pb, pa>> = prev
dg = unbias_luma_dg(b_dg)
r = pr + unbias_luma_dr_db(dr_dg) + dg
g = pg + dg
b = pb + unbias_luma_dr_db(db_dg) + dg
pixel = <<r, g, b, pa>>
acc = [acc | maybe_drop_alpha(pixel, format)]
do_decode(rest, format, pixel, update_lut(lut, pixel), acc)
end
defp maybe_drop_alpha(pixel, :rgba), do: pixel
defp maybe_drop_alpha(<<r::8, g::8, b::8, _a::8>>, :rgb), do: <<r::8, g::8, b::8>>
defp index(r, g, b, a) do
(r * 3 + g * 5 + b * 7 + a * 11)
|> rem(64)
end
defp update_lut(lut, <<r, g, b, a>>) do
lut_index = index(r, g, b, a)
Map.put(lut, lut_index, <<r, g, b, a>>)
end
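# The QOI wire format stores small signed values biased into unsigned fields,
# e.g. bias_diff/1 maps -2..1 onto 0..3 so it fits the 2-bit diff slots.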
defp bias_run(val), do: val - 1
defp unbias_run(val), do: val + 1
defp bias_diff(val), do: val + 2
defp unbias_diff(val), do: val - 2
defp bias_luma_dg(val), do: val + 32
defp unbias_luma_dg(val), do: val - 32
defp bias_luma_dr_db(val), do: val + 8
defp unbias_luma_dr_db(val), do: val - 8
end
lib/qoix.ex
defmodule Membrane.Element do
@moduledoc """
Module containing functions spawning, shutting down, inspecting and controlling
playback of elements. These functions are usually called by `Membrane.Pipeline`,
and can be called from elsewhere only if there is a really good reason for
doing so.
"""
alias __MODULE__.Pad
alias Membrane.{Buffer, Caps, Core}
alias Core.Element.{MessageDispatcher, State}
alias Core.Message
import Membrane.Helper.GenServer
require Message
use Membrane.Log, tags: :core
use Bunch
use GenServer
use Membrane.Core.PlaybackRequestor
@typedoc """
Defines options that can be passed to `start/5` / `start_link/5` and received
in `c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_init/1` callback.
"""
@type options_t :: struct | nil
@typedoc """
Type that defines an element name by which it is identified.
"""
@type name_t :: atom | {atom, non_neg_integer}
@typedoc """
Defines possible element types:
- source, producing buffers
- filter, processing buffers
- sink, consuming buffers
"""
@type type_t :: :source | :filter | :sink
@typedoc """
Describes how a pad should be declared in an element.
"""
@type pad_specs_t :: output_pad_specs_t | input_pad_specs_t
@typedoc """
Describes how an output pad should be declared in an element.
"""
@type output_pad_specs_t :: {Pad.name_t(), [common_pad_option_t]}
@typedoc """
Describes how an input pad should be declared in an element.
"""
@type input_pad_specs_t ::
{Pad.name_t(), [common_pad_option_t | {:demand_unit, Buffer.Metric.unit_t()}]}
@typedoc """
Pad options used in `t:pad_specs_t/0`
"""
@type common_pad_option_t ::
{:availability, Pad.availability_t()}
| {:mode, Pad.mode_t()}
| {:caps, Caps.Matcher.caps_specs_t()}
@typedoc """
Type of user-managed state of element.
"""
@type state_t :: map | struct
@doc """
Checks whether the given term is a valid element name.
"""
defguard is_element_name(term)
when is_atom(term) or
(is_tuple(term) and tuple_size(term) == 2 and is_atom(elem(term, 0)) and
is_integer(elem(term, 1)) and elem(term, 1) >= 0)
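# e.g. is_element_name(:sink) and is_element_name({:src, 0}) both hold.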
@doc """
Checks whether module is an element.
"""
def element?(module) do
module |> Bunch.Module.check_behaviour(:membrane_element?)
end
@doc """
Works similarly to `start_link/5`, but passes element struct (with default values)
as element options.
If element does not define struct, `nil` is passed.
"""
@spec start_link(pid, module, name_t) :: GenServer.on_start()
def start_link(pipeline, module, name) do
start_link(
pipeline,
module,
name,
module |> Module.concat(Options) |> Bunch.Module.struct()
)
end
@doc """
Starts process for element of given module, initialized with given options and
links it to the current process in the supervision tree.
Calls `GenServer.start_link/3` underneath.
"""
@spec start_link(pid, module, name_t, options_t, GenServer.options()) :: GenServer.on_start()
def start_link(pipeline, module, name, element_options, process_options \\ []),
do: do_start(:start_link, pipeline, module, name, element_options, process_options)
@doc """
Works similarly to `start_link/3`, but does not link to the current process.
"""
@spec start(pid, module, name_t) :: GenServer.on_start()
def start(pipeline, module, name),
do: start(pipeline, module, name, module |> Module.concat(Options) |> Bunch.Module.struct())
@doc """
Works similarly to `start_link/5`, but does not link to the current process.
"""
@spec start(pid, module, name_t, options_t, GenServer.options()) :: GenServer.on_start()
def start(pipeline, module, name, element_options, process_options \\ []),
do: do_start(:start, pipeline, module, name, element_options, process_options)
defp do_start(method, pipeline, module, name, element_options, process_options) do
if element?(module) do
debug("""
Element start link: module: #{inspect(module)},
element options: #{inspect(element_options)},
process options: #{inspect(process_options)}
""")
apply(GenServer, method, [
__MODULE__,
{pipeline, module, name, element_options},
process_options
])
else
warn_error(
"""
Cannot start element: the passed module #{inspect(module)} is not a Membrane Element.
Make sure the given module is the right one and that it uses Membrane.Element.Base.*
""",
{:not_element, module}
)
end
end
@doc """
Stops given element process.
It will wait for reply for amount of time passed as second argument
(in milliseconds).
Will trigger calling `c:Membrane.Element.Base.Mixin.CommonBehaviour.handle_shutdown/1`
callback.
"""
@spec shutdown(pid, timeout) :: :ok
def shutdown(server, timeout \\ 5000) do
import Membrane.Log
debug("Shutdown -> #{inspect(server)}")
GenServer.stop(server, :normal, timeout)
:ok
end
@doc """
Sends synchronous call to the given element requesting it to set watcher.
It will wait for reply for amount of time passed as second argument
(in milliseconds).
"""
@spec set_watcher(pid, pid, timeout) :: :ok | {:error, any}
def set_watcher(server, watcher, timeout \\ 5000) when is_pid(server) do
Message.call(server, :set_watcher, watcher, timeout)
end
@doc """
Sends synchronous call to the given element requesting it to set controlling pid.
It will wait for reply for amount of time passed as second argument
(in milliseconds).
"""
@spec set_controlling_pid(pid, pid, timeout) :: :ok | {:error, any}
def set_controlling_pid(server, controlling_pid, timeout \\ 5000) when is_pid(server) do
Message.call(server, :set_controlling_pid, controlling_pid, timeout)
end
@doc """
Sends synchronous calls to two elements, telling them to link with each other.
"""
@spec link(
from_element :: pid,
to_element :: pid,
from_pad :: Pad.name_t(),
to_pad :: Pad.name_t(),
params :: list
) :: :ok | {:error, any}
def link(pid, pid, _, _, _) when is_pid(pid) do
{:error, :loop}
end
def link(from_pid, to_pid, from_pad, to_pad, params) when is_pid(from_pid) and is_pid(to_pid) do
with :ok <- Message.call(from_pid, :handle_link, [from_pad, :output, to_pid, to_pad, params]),
:ok <- Message.call(to_pid, :handle_link, [to_pad, :input, from_pid, from_pad, params]) do
:ok
end
end
def link(_, _, _, _, _), do: {:error, :invalid_element}
@doc """
Sends synchronous call to element, telling it to unlink all its pads.
"""
def unlink(server, timeout \\ 5000) do
server |> Message.call(:unlink, [], timeout)
end
@doc """
Sends synchronous call to element, requesting it to create a new instance of
`:on_request` pad.
"""
def handle_new_pad(server, direction, pad, timeout \\ 5000) when is_pid(server) do
server |> Message.call(:new_pad, [direction, pad], timeout)
end
@doc """
Sends synchronous call to element, informing it that linking has finished.
"""
def handle_linking_finished(server, timeout \\ 5000) when is_pid(server) do
server |> Message.call(:linking_finished, [], timeout)
end
@impl GenServer
def init({pipeline, module, name, options}) do
Process.monitor(pipeline)
state = State.new(module, name)
with {:ok, state} <-
MessageDispatcher.handle_message(
Message.new(:init, options),
:other,
state
) do
{:ok, state}
else
{{:error, reason}, _state} -> {:stop, {:element_init, reason}}
end
end
@impl GenServer
def terminate(reason, state) do
{:ok, _state} =
MessageDispatcher.handle_message(Message.new(:shutdown, reason), :other, state)
:ok
end
@impl GenServer
def handle_call(message, _from, state) do
message |> MessageDispatcher.handle_message(:call, state) |> reply(state)
end
@impl GenServer
def handle_info({:DOWN, _ref, :process, _pid, reason}, state) do
{:ok, state} =
MessageDispatcher.handle_message(Message.new(:pipeline_down, reason), :info, state)
{:stop, reason, state}
end
def handle_info(message, state) do
message |> MessageDispatcher.handle_message(:info, state) |> noreply(state)
end
end
lib/membrane/element.ex
defmodule Membrane.RTP.H264.Depayloader do
@moduledoc """
Depayloads H264 RTP payloads into H264 NAL Units.
Based on [RFC 6184](https://tools.ietf.org/html/rfc6184).
Supported types: Single NALU, FU-A, STAP-A.
"""
use Membrane.Filter
use Membrane.Log
alias Membrane.Buffer
alias Membrane.{RTP, RemoteStream}
alias Membrane.Caps.Video.H264
alias Membrane.Event.Discontinuity
alias Membrane.RTP.H264.{FU, NAL, StapA}
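# Annex B start code (0x00000001); prepended to every NAL unit pushed downstream.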
@frame_prefix <<1::32>>
def_input_pad :input, caps: RTP, demand_unit: :buffers
def_output_pad :output, caps: {RemoteStream, content_format: H264, type: :packetized}
defmodule State do
@moduledoc false
defstruct parser_acc: nil
end
@impl true
def handle_init(_) do
{:ok, %State{}}
end
@impl true
def handle_caps(:input, _caps, _context, state) do
caps = %RemoteStream{content_format: H264, type: :packetized}
{{:ok, caps: {:output, caps}}, state}
end
@impl true
def handle_process(:input, %Buffer{payload: payload} = buffer, _ctx, state) do
with {:ok, {header, _} = nal} <- NAL.Header.parse_unit_header(payload),
unit_type = NAL.Header.decode_type(header),
{{:ok, actions}, new_state} <-
handle_unit_type(unit_type, nal, buffer, state) do
{{:ok, actions ++ [redemand: :output]}, new_state}
else
{:error, reason} ->
log_malformed_buffer(buffer, reason)
{{:ok, redemand: :output}, %State{state | parser_acc: nil}}
end
end
@impl true
def handle_demand(:output, size, :buffers, _ctx, state) do
{{:ok, demand: {:input, size}}, state}
end
def handle_demand(:output, _, :bytes, _ctx, state), do: {{:error, :not_supported_unit}, state}
@impl true
def handle_event(:input, %Discontinuity{} = event, _context, %State{parser_acc: %FU{}} = state),
do: {{:ok, forward: event}, %State{state | parser_acc: nil}}
@impl true
def handle_event(pad, event, context, state), do: super(pad, event, context, state)
defp handle_unit_type(:single_nalu, _nal, buffer, state) do
buffer_output(buffer.payload, buffer, state)
end
defp handle_unit_type(:fu_a, {header, data}, buffer, state) do
%Buffer{metadata: %{rtp: %{sequence_number: seq_num}}} = buffer
case FU.parse(data, seq_num, map_state_to_fu(state)) do
{:ok, {data, type}} ->
data = NAL.Header.add_header(data, 0, header.nal_ref_idc, type)
buffer_output(data, buffer, %State{state | parser_acc: nil})
{:incomplete, fu} ->
{{:ok, []}, %State{state | parser_acc: fu}}
{:error, _} = error ->
error
end
end
defp handle_unit_type(:stap_a, {_, data}, buffer, state) do
with {:ok, result} <- StapA.parse(data) do
buffers = Enum.map(result, &%Buffer{buffer | payload: add_prefix(&1)})
{{:ok, buffer: {:output, buffers}}, state}
end
end
defp buffer_output(data, buffer, state),
do: {{:ok, action_from_data(data, buffer)}, state}
defp action_from_data(data, buffer) do
[buffer: {:output, %Buffer{buffer | payload: add_prefix(data)}}]
end
defp add_prefix(data), do: @frame_prefix <> data
defp map_state_to_fu(%State{parser_acc: %FU{} = fu}), do: fu
defp map_state_to_fu(_), do: %FU{}
defp log_malformed_buffer(packet, reason) do
warn("""
An error occurred while parsing H264 RTP payload.
Reason: #{reason}
Packet: #{inspect(packet, limit: :infinity)}
""")
end
end
lib/rtp_h264/depayloader.ex
defprotocol Socket.Datagram.Protocol do
@doc """
Send a packet to the given recipient.
"""
@spec send(t, iodata, term) :: :ok | { :error, term }
def send(self, data, to)
@doc """
Receive a packet from the socket.
"""
@spec recv(t) :: { :ok, { iodata, { Socket.Address.t, :inet.port_number } } } | { :error, term }
def recv(self)
@doc """
Receive a packet with the given options or with the given size.
"""
@spec recv(t, non_neg_integer | Keyword.t) :: { :ok, { iodata, { Socket.Address.t, :inet.port_number } } } | { :error, term }
def recv(self, length_or_options)
@doc """
Receive a packet with the given size and options.
"""
@spec recv(t, non_neg_integer, Keyword.t) :: { :ok, { iodata, { Socket.Address.t, :inet.port_number } } } | { :error, term }
def recv(self, length, options)
end
defmodule Socket.Datagram do
@type t :: Socket.Datagram.Protocol.t
use Socket.Helpers
defdelegate send(self, packet, to), to: Socket.Datagram.Protocol
defbang send(self, packet, to), to: Socket.Datagram.Protocol
defdelegate recv(self), to: Socket.Datagram.Protocol
defbang recv(self), to: Socket.Datagram.Protocol
defdelegate recv(self, length_or_options), to: Socket.Datagram.Protocol
defbang recv(self, length_or_options), to: Socket.Datagram.Protocol
defdelegate recv(self, length, options), to: Socket.Datagram.Protocol
defbang recv(self, length, options), to: Socket.Datagram.Protocol
end
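# Example (hypothetical): a UDP port opened via :gen_udp works with the Port
# implementation below.
#
#     {:ok, socket} = :gen_udp.open(0, [:binary])
#     :ok = Socket.Datagram.send(socket, "ping", {"127.0.0.1", 9000})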
defimpl Socket.Datagram.Protocol, for: Port do
def send(self, data, { address, port }) do
address = if address |> is_binary do
address |> String.to_charlist
else
address
end
:gen_udp.send(self, address, port, data)
end
def recv(self) do
recv(self, 0, [])
end
def recv(self, length) when length |> is_integer do
recv(self, length, [])
end
def recv(self, options) when options |> is_list do
recv(self, 0, options)
end
def recv(self, length, options) do
timeout = options[:timeout] || :infinity
case :gen_udp.recv(self, length, timeout) do
{ :ok, { address, port, data } } ->
{ :ok, { data, { address, port } } }
{ :error, :closed } ->
{ :ok, nil }
{ :error, _ } = error ->
error
end
end
end
deps/socket/lib/socket/datagram.ex
defmodule Riak.Object do
@moduledoc """
The Data wrapper makes it convenient to work with Riak data in Elixir
"""
@doc """
Struct representing a Riak Object. Attributes:
* `type`: String; Bucket Type with a unique name within the cluster namespace
* `bucket`: String; Bucket with a unique name within the bucket type namespace
* `key`: String; Not required; Key with a unique name within the bucket namespace
* `data`: Any; Value to be stored under the key
* `metadata`: Orddict; User specified metadata
* `vclock`: String; Dotted Version Vector / Causal Context for object
* `content_type`: String; Content Type for object
"""
defstruct [bucket: nil, type: nil, key: nil, data: nil, metadata: nil, vclock: nil, content_type: "application/json"]
@doc """
Get all metadata entries
"""
def get_metadata(obj, key) do
case :riakc_obj.get_user_metadata_entry(
to_undefined(obj.metadata), to_undefined(key)) do
:notfound -> nil
val -> val
end
end
def get_all_metadata(obj) do
:riakc_obj.get_user_metadata_entries(
to_undefined(obj.metadata))
end
def delete_metadata(obj, key) do
%{obj | metadata: :riakc_obj.delete_user_metadata_entry(
to_undefined(obj.metadata), to_undefined(key))}
end
def delete_all_metadata(obj) do
%{obj | metadata: :riakc_obj.clear_user_metadata_entries(
to_undefined(obj.metadata))}
end
def put_metadata(obj, {key, value}) do
%{obj | metadata: :riakc_obj.set_user_metadata_entry(
to_undefined(obj.metadata), {key, value})}
end
# Secondary Index
def index_id({:binary_index, name}), do: "#{name}_bin"
def index_id({:integer_index, name}), do: "#{name}_int"
def get_index(obj, {type, name}) do
case :riakc_obj.get_secondary_index(
to_undefined(obj.metadata),
{to_undefined(type), to_undefined(name)}) do
:notfound -> nil
val -> val
end
end
def get_all_indexes(obj) do
:riakc_obj.get_secondary_indexes(to_undefined(obj.metadata))
end
def delete_index(obj, {type, name}) do
%{obj | metadata: :riakc_obj.delete_secondary_index(
to_undefined(obj.metadata),
{to_undefined(type), to_undefined(name)})}
end
def delete_all_indexes(obj) do
%{obj | metadata: :riakc_obj.clear_secondary_indexes(
to_undefined(obj.metadata))}
end
def put_index(obj, {type, name}, values) do
%{obj | metadata: :riakc_obj.add_secondary_index(
to_undefined(obj.metadata),
[{{to_undefined(type), to_undefined(name)},
to_undefined(values)}])}
end
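# Hedged usage sketch (not from the original file): attaching and reading a
# binary secondary index. The index name and values are illustrative only.
#
#     obj = Riak.Object.put_index(obj, {:binary_index, "email"}, ["ann@example.com"])
#     ["ann@example.com"] = Riak.Object.get_index(obj, {:binary_index, "email"})
#     "email_bin" = Riak.Object.index_id({:binary_index, "email"})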
# Links
def get_link(obj, tag) do
case :riakc_obj.get_links(
to_undefined(obj.metadata), tag) do
:notfound -> nil
val -> val
end
end
def get_all_links(obj) do
:riakc_obj.get_all_links(
to_undefined(obj.metadata))
end
def delete_link(obj, tag) do
%{obj | metadata: :riakc_obj.delete_links(
to_undefined(obj.metadata), tag)}
end
def delete_all_links(obj) do
%{obj | metadata: :riakc_obj.clear_links(
to_undefined(obj.metadata))}
end
def put_link(obj, tag, bucket, key) do
%{obj | metadata: :riakc_obj.add_link(
to_undefined(obj.metadata),
[{tag, [{to_undefined(bucket),to_undefined(key)}]}])}
end
def from_robj(robj) do
data =
try do
:riakc_obj.get_update_value(robj)
catch
:no_value -> nil
e -> raise e
end
%Riak.Object{bucket: to_nil(:riakc_obj.bucket(robj)),
type: to_nil(:riakc_obj.bucket_type(robj)),
key: to_nil(:riakc_obj.key(robj)),
data: to_nil(data),
metadata: to_nil(:riakc_obj.get_update_metadata(robj)),
vclock: to_nil(:riakc_obj.vclock(robj)),
content_type: to_nil(:riakc_obj.get_update_content_type(robj))}
end
def to_robj(obj) do
type = to_undefined(obj.type)
bucket = to_bucket(obj.bucket)
typed_bucket =
case {type, bucket} do
{:undefined, b} -> b
{t, {_, b}} -> {t, b}
{t, b} -> {t, b}
end
robj = :riakc_obj.new(
typed_bucket,
to_undefined(obj.key),
to_undefined(obj.data),
to_undefined(obj.content_type))
robj =
case to_undefined(obj.vclock) do
:undefined -> robj
v -> :riakc_obj.set_vclock(robj, v)
end
robj =
case to_undefined(obj.metadata) do
:undefined -> robj
m -> :riakc_obj.update_metadata(robj, m)
end
robj
end
def create(args \\ [bucket: "default"]) do
obj = struct(Riak.Object, args)
from_robj(to_robj(obj))
end
defp to_undefined(nil) do
:undefined
end
defp to_undefined(v) do
v
end
defp to_nil(:undefined) do
nil
end
defp to_nil(v) do
v
end
defp to_bucket({nil, b}) do
to_undefined(b)
end
defp to_bucket({:undefined, b}) do
to_undefined(b)
end
defp to_bucket(b) do
to_undefined(b)
end
end
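# Hedged usage sketch (not from the original file): building an object,
# attaching user metadata and reading it back. The bucket, key and values are
# illustrative; persisting the object would go through the Riak client itself.
#
#     obj = Riak.Object.create(bucket: "users", key: "u1", data: ~s({"name":"ann"}))
#     obj = Riak.Object.put_metadata(obj, {"source", "signup-form"})
#     "signup-form" = Riak.Object.get_metadata(obj, "source")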
|
lib/riak/object.ex
| 0.688154 | 0.476823 |
object.ex
|
starcoder
|
defmodule Wax do
@moduledoc """
Functions for FIDO2 registration and authentication
## Options
The options are set when generating the challenge (for both registration and
authentication). Options can be configured either globally in the configuration
file or when generating the challenge. Some also have default values.
Option values set during challenge generation take precedence over globally configured
options, which take precedence over default values.
These options are:
| Option | Type | Applies to | Default value | Notes |
|:-------------:|:-------------:|-------------------|:-----------------------------:|-------|
|`attestation`|`"none"` or `"direct"`|<ul style="margin:0"><li>registration</li></ul>| `"none"` | |
|`origin`|`String.t()`|<ul style="margin:0"><li>registration</li><li>authentication</li></ul>| | **Mandatory**. Example: `https://www.example.com` |
|`rp_id`|`String.t()` or `:auto`|<ul style="margin:0"><li>registration</li><li>authentication</li></ul>|If set to `:auto`, automatically determined from the `origin` (set to the host) | With `:auto`, it defaults to the full host (e.g.: `www.example.com`). This option allows you to set the `rp_id` to another valid value (e.g.: `example.com`) |
|`user_verification`|`"discouraged"`, `"preferred"` or `"required"`|<ul style="margin:0"><li>registration</li><li>authentication</li></ul>| `"preferred"`| |
|`trusted_attestation_types`|`[Wax.Attestation.type()]`|<ul style="margin:0"><li>registration</li></ul>|`[:none, :basic, :uncertain, :attca, :self]`| |
|`verify_trust_root`|`boolean()`|<ul style="margin:0"><li>registration</li></ul>|`true`| Only for `u2f` and `packed` attestation. `tpm` attestation format is always checked against metadata |
|`acceptable_authenticator_statuses`|`[Wax.Metadata.TOCEntry.StatusReport.status()]`|<ul style="margin:0"><li>registration</li></ul>|`[:fido_certified, :fido_certified_l1, :fido_certified_l1plus, :fido_certified_l2, :fido_certified_l2plus, :fido_certified_l3, :fido_certified_l3plus]`| The `:update_available` status is not whitelisted by default |
|`timeout`|`non_neg_integer()`|<ul style="margin:0"><li>registration</li><li>authentication</li></ul>|`20 * 60`| The validity duration of a challenge |
|`android_key_allow_software_enforcement`|`boolean()`|<ul style="margin:0"><li>registration</li></ul>|`false`| When registration is a Android key, determines whether software enforcement is acceptable (`true`) or only hardware enforcement is (`false`) |
|`silent_authentication_enabled`|`boolean()`|<ul style="margin:0"><li>authentication</li></ul>|`false`| See [https://github.com/fido-alliance/conformance-tools-issues/issues/434](https://github.com/fido-alliance/conformance-tools-issues/issues/434) |
## FIDO2 Metadata service (MDS) configuration
The FIDO Alliance provides a list of metadata statements for certified **FIDO2**
authenticators. A metadata statement contains trust anchors (root certificates) used to verify
attestations. Wax can automatically keep this metadata up to date but needs an access token,
which is provided by the FIDO Alliance. One can request it here:
[https://mds2.fidoalliance.org/tokens/](https://mds2.fidoalliance.org/tokens/).
Once the token has been granted, it has to be added to the configuration file (consider
adding it to your `*.secret.exs` files) under the `:metadata_access_token` key. The update
frequency can be configured with the `:metadata_update_interval` key (in seconds, defaults
to 12 hours). Example:
`config/dev.exs`:
```elixir
use Mix.Config
config :wax,
metadata_update_interval: 3600
```
`config/dev.secret.exs`:
```elixir
use Mix.Config
config :wax,
metadata_access_token: "d4904acd10a36f62d7a7d33e4c9a86628a2b0eea0c3b1a6c"
```
Note that some **FIDO1** certified authenticators, such as Yubikeys, won't be present in this
list, and Wax doesn't load data from the former ("FIDO1") metadata web service. The FIDO
Alliance plans to provide a web service covering both FIDO1 and FIDO2, but there is no
roadmap as of September 2019.
During the registration process, when the trust root is verified against FIDO2 metadata, only
metadata entries whose last status is whitelisted by the `:acceptable_authenticator_statuses`
option will be used. Otherwise, a warning is logged and the registration process fails. Metadata
is still loaded for debugging purposes into the `:wax_metadata` ETS table.
## Loading FIDO2 metadata from a directory
In addition to the FIDO2 metadata service, it is possible to load metadata from a directory.
To do so, the `:metadata_dir` application environment variable must be set to one of:
- a `String.t()`: the path to the directory containing the metadata files
- an `atom()`: in this case, the files are loaded from the `"fido2_metadata"` directory of the
private (`"priv/"`) directory of the application (whose name is the atom)
In both cases, Wax tries to load all files (even directories and other special files).
### Example configuration
```elixir
config :wax,
origin: "http://localhost:4000",
rp_id: :auto,
metadata_dir: :my_application
```
will try to load all files of the `"priv/fido2_metadata/"` directory of the `:my_application`
application as FIDO2 metadata statements. On failure, a warning is emitted.
"""
require Logger
alias Wax.Utils
@type opts :: [opt()]
@type opt ::
{:attestation, String.t()}
| {:origin, String.t()}
| {:rp_id, String.t() | :auto}
| {:user_verification, String.t()}
| {:trusted_attestation_types, [Wax.Attestation.type()]}
| {:verify_trust_root, boolean()}
| {:acceptable_authenticator_statuses, [Wax.Metadata.TOCEntry.StatusReport.status()]}
| {:issued_at, integer()}
| {:timeout, non_neg_integer()}
| {:android_key_allow_software_enforcement, boolean()}
| {:silent_authentication_enabled, boolean()}
@spec set_opts(opts()) :: opts()
defp set_opts(opts) do
attestation =
case opts[:attestation] do
"none" -> "none"
nil -> "none"
"direct" -> "direct"
_ -> raise "Invalid attestation, must be one of: `\"none\"`, `\"direct\"`"
end
origin =
if is_binary(opts[:origin]) do
opts[:origin]
else
case Application.get_env(:wax, :origin) do
origin when is_binary(origin) ->
origin
_ ->
raise "Missing mandatory parameter `origin` (String.t())"
end
end
unless URI.parse(origin).host == "localhost" or URI.parse(origin).scheme == "https" do
raise "Invalid origin `#{origin}` (must be either https scheme or `localhost`)"
end
rp_id =
if opts[:rp_id] == :auto or Application.get_env(:wax, :rp_id) == :auto do
URI.parse(origin).host
else
if is_binary(opts[:rp_id]) do
opts[:rp_id]
else
case Application.get_env(:wax, :rp_id) do
rp_id when is_binary(rp_id) ->
rp_id
_ ->
raise "Missing mandatory parameter `rp_id` (String.t())"
end
end
end
if opts[:user_verification] &&
opts[:user_verification] not in ["discouraged", "preferred", "required"] do
raise "Invalid `:user_verification` parameter, must be one of: " <>
"\"discouraged\", \"preferred\", \"required\""
end
[
type: opts[:type],
attestation: attestation,
origin: origin,
rp_id: rp_id,
user_verification:
opts[:user_verification]
|| Application.get_env(:wax, :user_verification, "preferred"),
trusted_attestation_types:
opts[:trusted_attestation_types] || Application.get_env(
:wax,
:trusted_attestation_types,
[:none, :basic, :uncertain, :attca, :self]
),
verify_trust_root:
opts[:verify_trust_root] || Application.get_env(:wax, :verify_trust_root, true),
acceptable_authenticator_statuses:
opts[:acceptable_authenticator_statuses] || Application.get_env(
:wax,
:acceptable_authenticator_statuses,
[
:fido_certified,
:fido_certified_l1,
:fido_certified_l1plus,
:fido_certified_l2,
:fido_certified_l2plus,
:fido_certified_l3,
:fido_certified_l3plus
]
),
issued_at: :erlang.monotonic_time(:second),
timeout: opts[:timeout] || Application.get_env(:wax, :timeout, 60 * 20),
android_key_allow_software_enforcement:
opts[:android_key_allow_software_enforcement]
|| Application.get_env(:wax, :android_key_allow_software_enforcement)
|| false,
silent_authentication_enabled:
opts[:silent_authentication_enabled]
|| Application.get_env(:wax, :silent_authentication_enabled, false)
]
end
@doc """
Generates a new challenge for registration
The returned structure:
- Contains the challenge bytes under the `bytes` key (e.g.: `challenge.bytes`). This is a
random value that must be used by the javascript WebAuthn call
- Must be passed back to `register/3`
Typically, this structure is stored in the session (cookie...) for the time the WebAuthn
process is performed on the client side.
## Example:
```elixir
iex> Wax.new_registration_challenge(trusted_attestation_types: [:basic, :attca])
%Wax.Challenge{
allow_credentials: [],
bytes: <<192, 64, 240, 166, 163, 188, 76, 255, 108, 227, 18, 33, 123, 19, 61,
3, 166, 195, 190, 157, 24, 207, 210, 179, 180, 136, 10, 135, 82, 172, 134,
17>>,
origin: "http://localhost:4000",
rp_id: "localhost",
token_binding_status: nil,
trusted_attestation_types: [:basic, :attca],
user_verification: "preferred",
verify_trust_root: true
}
```
"""
@spec new_registration_challenge(opts()) :: Wax.Challenge.t()
def new_registration_challenge(opts) do
opts = set_opts(Keyword.put(opts, :type, :attestation))
Wax.Challenge.new(opts)
end
@doc """
Verifies a registration response from the client WebAuthn javascript call
The input params are:
- `attestation_object_cbor`: the **raw binary** response from the WebAuthn javascript API.
When transmitting it back from the browser to the server, it will probably be base64
encoded. Make sure to decode it before.
- `client_data_json_raw`: the JSON string (and **not** the decoded JSON) of the client data
JSON as returned by the WebAuthn javascript API
- `challenge`: the challenge that was generated beforehand, and whose bytes have been sent
to the browser and used as an input by the WebAuthn javascript API
The success return value is of the form:
`{authenticator_data, {attestation_type, trust_path, metadata_statement}}`.
One can access the credential public key in the authenticator data structure:
```elixir
auth_data.attested_credential_data.credential_public_key
```
Regarding the attestation process's result, see `t:Wax.Attestation.result/0` for more
details. Note, however, that you can use
the returned metadata statement (if any) to further check the authenticator's capabilities.
For example, the following conditions will only allow attestation generated by
hardware protected attestation keys:
```elixir
case Wax.register(attestation_object, client_data_json_raw, challenge) do
{:ok, {authenticator_data, {_, _, metadata_statement}}} ->
# TEE stands for "trusted execution environment"
if :key_protection_tee in metadata_statement.key_protection or
:key_protection_secure_element in metadata_statement.key_protection
do
register_key(user, credential_id, authenticator_data.attested_credential_data.cose_key)
:ok
else
{:error, :not_hardware_protected}
end
{:error, _} = error ->
error
end
```
When performing registration, the server has the 3 following pieces of data:
- user id: specific to the server implementation. Can be an email, login name, or an opaque
user identifier
- credential id: an ID returned by the WebAuthn javascript. It is a handle to further
authenticate the user. It is also available in the authenticator data in binary form, and
can be accessed by typing: `auth_data.attested_credential_data.credential_id`
- the COSE key: available in the authenticator data
(`auth_data.attested_credential_data.credential_public_key`) under the form of a map
containing a public key use for further authentication
A credential id is related to a cose key, and vice-versa.
Note that a user can have several (credential id, cose key) pairs, for example if the
user uses different authenticators. The unique key (for storage, etc.) is therefore the tuple
(user id, credential id).
In the success case, and after calling `register/3`, a server shall:
1. Verify that no other user has the same credential id (and should fail otherwise)
2. Store the new tuple (credential id, cose key) for the user
"""
@spec register(binary(), Wax.ClientData.raw_string(), Wax.Challenge.t())
:: {:ok, {Wax.AuthenticatorData.t(), Wax.Attestation.result()}} | {:error, atom()}
def register(attestation_object_cbor, client_data_json_raw, challenge) do
with :ok <- not_expired?(challenge),
{:ok, client_data} <- Wax.ClientData.parse_raw_json(client_data_json_raw),
:ok <- type_create?(client_data),
:ok <- valid_challenge?(client_data, challenge),
:ok <- valid_origin?(client_data, challenge),
client_data_hash = :crypto.hash(:sha256, client_data_json_raw),
{:ok, att_data, _} <- Utils.CBOR.decode(attestation_object_cbor),
%{"fmt" => fmt, "authData" => auth_data_bin, "attStmt" => att_stmt} = att_data,
{:ok, auth_data} <- Wax.AuthenticatorData.decode(auth_data_bin),
:ok <- valid_rp_id?(auth_data, challenge),
:ok <- user_present_flag_set?(auth_data, challenge),
:ok <- maybe_user_verified_flag_set?(auth_data, challenge),
{:ok, valid_attestation_statement_format?}
<- Wax.Attestation.statement_verify_fun(fmt),
{:ok, attestation_result_data} <- valid_attestation_statement_format?.(
att_stmt,
auth_data,
client_data_hash,
challenge
),
:ok <- attestation_trustworthy?(attestation_result_data, challenge)
do
{:ok, {auth_data, attestation_result_data}}
end
end
@doc """
Generates a new challenge for authentication
The first argument is a list of (credential id, cose key) pairs which were previously
registered (after successful `register/3`) for a user. This can be retrieved from
a user database, for instance.
The returned structure:
- Contains the challenge bytes under the `bytes` key (e.g.: `challenge.bytes`). This is a
random value that must be used by the javascript WebAuthn call
- Must be passed back to `authenticate/5`
Typically, this structure is stored in the session (cookie...) for the time the WebAuthn
authentication process is performed on the client side.
## Example:
```elixir
iex> cred_ids_and_associated_keys = UserDatabase.load_cred_id("Georges")
[
{"<KEY>
%{
-3 => <<121, 21, 84, 106, 84, 48, 91, 21, 161, 78, 176, 199, 224, 86, 196,
226, 116, 207, 221, 200, 26, 202, 214, 78, 95, 112, 140, 236, 190, 183,
177, 223>>,
-2 => <<195, 105, 55, 252, 13, 134, 94, 208, 83, 115, 8, 235, 190, 173,
107, 78, 247, 125, 65, 216, 252, 232, 41, 13, 39, 104, 231, 65, 200, 149,
172, 118>>,
-1 => 1,
1 => 2,
3 => -7
}},
{"<KEY>
%{
-3 => <<113, 34, 76, 107, 120, 21, 246, 189, 21, 167, 119, 39, 245, 140,
143, 133, 209, 19, 63, 196, 145, 52, 43, 2, 193, 208, 200, 103, 3, 51,
37, 123>>,
-2 => <<199, 68, 146, 57, 216, 62, 11, 98, 8, 108, 9, 229, 40, 97, 201,
127, 47, 240, 50, 126, 138, 205, 37, 148, 172, 240, 65, 125, 70, 81, 213,
152>>,
-1 => 1,
1 => 2,
3 => -7
}}
]
iex> Wax.new_authentication_challenge(cred_ids_and_associated_keys, [])
%Wax.Challenge{
allow_credentials: [
{"<KEY>
%{
-3 => <<121, 21, 84, 106, 84, 48, 91, 21, 161, 78, 176, 199, 224, 86,
196, 226, 116, 207, 221, 200, 26, 202, 214, 78, 95, 112, 140, 236, 190,
183, 177, 223>>,
-2 => <<195, 105, 55, 252, 13, 134, 94, 208, 83, 115, 8, 235, 190, 173,
107, 78, 247, 125, 65, 216, 252, 232, 41, 13, 39, 104, 231, 65, 200,
149, 172, 118>>,
-1 => 1,
1 => 2,
3 => -7
}},
{"<KEY>
%{
-3 => <<113, 34, 76, 107, 120, 21, 246, 189, 21, 167, 119, 39, 245, 140,
143, 133, 209, 19, 63, 196, 145, 52, 43, 2, 193, 208, 200, 103, 3, 51,
37, 123>>,
-2 => <<199, 68, 146, 57, 216, 62, 11, 98, 8, 108, 9, 229, 40, 97, 201,
127, 47, 240, 50, 126, 138, 205, 37, 148, 172, 240, 65, 125, 70, 81,
213, 152>>,
-1 => 1,
1 => 2,
3 => -7
}}
],
bytes: <<130, 70, 153, 38, 189, 145, 193, 3, 132, 158, 170, 216, 8, 93, 221,
46, 206, 156, 104, 24, 78, 167, 182, 5, 6, 128, 194, 201, 196, 246, 243,
194>>,
exp: nil,
origin: "http://localhost:4000",
rp_id: "localhost",
token_binding_status: nil,
trusted_attestation_types: [:none, :basic, :uncertain, :attca, :self],
user_verification: "preferred",
verify_trust_root: true
}
```
"""
@spec new_authentication_challenge([{Wax.CredentialId.t(), Wax.CoseKey.t()}], opts())
:: Wax.Challenge.t()
def new_authentication_challenge(allow_credentials, opts) do
opts = set_opts(Keyword.put(opts, :type, :authentication))
Wax.Challenge.new(allow_credentials, opts)
end
@doc """
Verifies an authentication response from the client WebAuthn javascript call
The input params are:
- `credential_id`: the credential id returned by the WebAuthn javascript API. Must be of
the same form as the one passed to `new_authentication_challenge/2` as it will be
compared against the previously retrieved valid credential ids
- `auth_data_bin`: the authenticator data returned by the WebAuthn javascript API. Must
be the raw binary, not the base64 encoded form
- `sig`: the signature returned by the WebAuthn javascript API. Must
be the raw binary, not the base64 encoded form
- `client_data_json_raw`: the JSON string (and **not** the decoded JSON) of the client data
JSON as returned by the WebAuthn javascript API
- `challenge`: the challenge that was generated beforehand, and whose bytes have been sent
to the browser and used as an input by the WebAuthn javascript API
The call returns `{:ok, authenticator_data}` in case of success, or `{:error, reason}`
otherwise.
The `auth_data.sign_count` is the number of signatures performed by this authenticator for this
credential id, and can be used to detect cloning of the authenticator. See point 17 of
[7.2. Verifying an Authentication Assertion](https://www.w3.org/TR/webauthn-1/#verifying-assertion)
for more details.
"""
@spec authenticate(Wax.CredentialId.t(),
binary(),
binary(),
Wax.ClientData.raw_string(),
Wax.Challenge.t()
) :: {:ok, Wax.AuthenticatorData.t()} | {:error, atom()}
def authenticate(credential_id,
auth_data_bin,
sig,
client_data_json_raw,
challenge)
do
with :ok <- not_expired?(challenge),
{:ok, cose_key} <- cose_key_from_credential_id(credential_id, challenge),
{:ok, auth_data} <- Wax.AuthenticatorData.decode(auth_data_bin),
{:ok, client_data} <- Wax.ClientData.parse_raw_json(client_data_json_raw),
:ok <- type_get?(client_data),
:ok <- valid_challenge?(client_data, challenge),
:ok <- valid_origin?(client_data, challenge),
:ok <- valid_rp_id?(auth_data, challenge),
:ok <- user_present_flag_set?(auth_data, challenge),
:ok <- maybe_user_verified_flag_set?(auth_data, challenge),
client_data_hash = :crypto.hash(:sha256, client_data_json_raw),
:ok <- Wax.CoseKey.verify(auth_data_bin <> client_data_hash, cose_key, sig)
do
{:ok, auth_data}
end
end
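# Hedged usage sketch (not from the original file): a typical authentication
# round-trip through authenticate/5. The variables are assumed to hold the raw
# (already base64-decoded) values returned by the browser's WebAuthn call, and
# `challenge` the struct stored in the session by new_authentication_challenge/2.
#
#     case Wax.authenticate(credential_id, auth_data_bin, sig, client_data_json_raw, challenge) do
#       {:ok, auth_data} -> check_sign_count_and_log_in(auth_data.sign_count)
#       {:error, reason} -> {:error, reason}
#     end
#
# `check_sign_count_and_log_in/1` is a hypothetical application function.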
@spec not_expired?(Wax.Challenge.t()) :: :ok | {:error, :challenge_expired}
defp not_expired?(%Wax.Challenge{issued_at: issued_at, timeout: timeout}) do
current_time = :erlang.monotonic_time(:second)
if current_time - issued_at < timeout do
:ok
else
{:error, :challenge_expired}
end
end
@spec type_create?(Wax.ClientData.t()) :: :ok | {:error, atom()}
defp type_create?(client_data) do
if client_data.type == :create do
:ok
else
{:error, :attestation_invalid_type}
end
end
@spec type_get?(Wax.ClientData.t()) :: :ok | {:error, atom()}
defp type_get?(client_data) do
if client_data.type == :get do
:ok
else
{:error, :attestation_invalid_type}
end
end
@spec valid_challenge?(Wax.ClientData.t(), Wax.Challenge.t()) :: :ok | {:error, any()}
defp valid_challenge?(client_data, challenge) do
if client_data.challenge == challenge.bytes do
:ok
else
{:error, :invalid_challenge}
end
end
@spec valid_origin?(Wax.ClientData.t(), Wax.Challenge.t()) :: :ok | {:error, atom()}
defp valid_origin?(client_data, challenge) do
if client_data.origin == challenge.origin do
:ok
else
{:error, :attestation_invalid_origin}
end
end
@spec valid_rp_id?(Wax.AuthenticatorData.t(), Wax.Challenge.t()) :: :ok | {:error, atom()}
defp valid_rp_id?(auth_data, challenge) do
if auth_data.rp_id_hash == :crypto.hash(:sha256, challenge.rp_id) do
:ok
else
{:error, :invalid_rp_id}
end
end
@spec user_present_flag_set?(
Wax.AuthenticatorData.t(),
Wax.Challenge.t()
) :: :ok | {:error, any()}
defp user_present_flag_set?(
_auth_data,
%Wax.Challenge{type: :authentication, silent_authentication_enabled: true})
do
:ok
end
defp user_present_flag_set?(auth_data, _challenge) do
if auth_data.flag_user_present == true do
:ok
else
{:error, :flag_user_present_not_set}
end
end
@spec maybe_user_verified_flag_set?(Wax.AuthenticatorData.t(), Wax.Challenge.t())
:: :ok | {:error, atom()}
defp maybe_user_verified_flag_set?(auth_data, challenge) do
case challenge.user_verification do
"required" ->
if auth_data.flag_user_verified do
:ok
else
{:error, :user_not_verified}
end
_ ->
:ok
end
end
@spec attestation_trustworthy?(Wax.Attestation.result(), Wax.Challenge.t())
:: :ok | {:error, any()}
defp attestation_trustworthy?({type, _, _}, %Wax.Challenge{trusted_attestation_types: tatl})
do
if type in tatl do
:ok
else
{:error, :untrusted_attestation_type}
end
end
@spec cose_key_from_credential_id(Wax.CredentialId.t(), Wax.Challenge.t())
:: {:ok, Wax.CoseKey.t()} | {:error, any()}
defp cose_key_from_credential_id(credential_id, challenge) do
case List.keyfind(challenge.allow_credentials, credential_id, 0) do
{_, cose_key} ->
{:ok, cose_key}
_ ->
{:error, :incorrect_credential_id_for_user}
end
end
end
|
lib/wax.ex
| 0.940497 | 0.913445 |
wax.ex
|
starcoder
|
defmodule Dynamo.HTTP.Hibernate do
@moduledoc """
Conveniences that allow a connection to hibernate or wait
for a given or an unlimited amount of time.
Such conveniences are useful when a connection needs to be
kept open (because of long polling, websockets or streaming)
but you don't want to keep the current Erlang process active
at all times.
As such, waiting through small intervals or hibernating through
long intervals is convenient.
## Examples
There are two main functions defined by this module: `hibernate`
and `await`. They can receive either 2 arguments, with the
connection and a callback to be invoked on wake up:
hibernate(conn, on_wake_up(&1, &2))
await(conn, on_wake_up(&1, &2))
Or 4 arguments, in which a timeout and a callback to be invoked
on timeout must also be present:
hibernate(conn, timeout, on_wake_up(&1, &2), on_timeout(&1))
await(conn, timeout, on_wake_up(&1, &2), on_timeout(&1))
Besides a positive integer or `:infinity`, this module also
accepts `:keep` as a timeout value. This is useful to preserve
a previously set timeout value.
"""
@key :dynamo_timeref
@doc """
Hibernates the current process until a message is received.
The `on_wake_up` callback is invoked with the `conn` and the
received message on wake up.
For more information on hibernation, check:
http://www.erlang.org/doc/man/erlang.html#hibernate-3
"""
def hibernate(conn, on_wake_up) when is_function(on_wake_up, 2) do
clear_timeout(conn)
__loop__ conn, on_wake_up, :no_timeout_callback, 0, fn ->
:erlang.hibernate(__MODULE__, :__loop__, [conn, on_wake_up, :no_timeout_callback])
end
end
@doc """
Hibernates the current process until a message is received
but also sets a timeout for hibernation time.
The `on_wake_up` callback is invoked with the `conn` and the
received message on wake up. A `on_timeout` callback is
invoked when it times out.
For more information on hibernation, check:
http://www.erlang.org/doc/man/erlang.html#hibernate-3
"""
def hibernate(conn, timeout, on_wake_up, on_timeout) when (is_integer(timeout) or timeout in [:infinity, :keep]) and
is_function(on_wake_up, 2) and is_function(on_timeout, 1) do
clear_timeout(conn, timeout)
conn = set_timeout(conn, timeout)
__loop__ conn, on_wake_up, on_timeout, 0, fn ->
:erlang.hibernate(__MODULE__, :__loop__, [conn, on_wake_up, on_timeout])
end
end
@doc """
Sleeps the current process until a message is received.
The `on_wake_up` callback is invoked with the `conn` and the
received message on wake up.
"""
def await(conn, on_wake_up) when is_function(on_wake_up, 2) do
clear_timeout(conn)
__loop__ conn, on_wake_up, :no_timeout_callback, 0, fn ->
__loop__(conn, on_wake_up, :no_timeout_callback)
end
end
@doc """
Sleeps the current process until a message is received
but also sets a timeout.
The `on_wake_up` callback is invoked with the `conn` and the
received message on wake up. A `on_timeout` callback is
invoked when it times out.
"""
def await(conn, timeout, on_wake_up, on_timeout) when (is_integer(timeout) or timeout in [:infinity, :keep]) and
is_function(on_wake_up, 2) and is_function(on_timeout, 1) do
clear_timeout(conn, timeout)
conn = set_timeout(conn, timeout)
__loop__ conn, on_wake_up, on_timeout, 0, fn ->
__loop__(conn, on_wake_up, on_timeout)
end
end
@doc false
def __loop__(conn, on_wake_up, on_timeout) do
__loop__(conn, on_wake_up, on_timeout, :infinity, :no_after_callback)
end
defp __loop__(conn, on_wake_up, on_timeout, timer, callback) do
ref = conn.private[@key]
receive do
{ :timeout, ^ref, __MODULE__ } when is_function(on_timeout) ->
on_timeout.(conn)
{ :timeout, older_ref, __MODULE__ } when is_reference(older_ref) ->
__loop__(conn, on_wake_up, on_timeout, timer, callback)
msg ->
on_wake_up.(msg, conn)
after
timer ->
callback.()
end
end
defp clear_timeout(conn, :keep), do: conn
defp clear_timeout(conn, _), do: clear_timeout(conn)
defp clear_timeout(conn) do
ref = conn.private[@key]
ref && :erlang.cancel_timer(ref)
end
defp set_timeout(conn, timeout) when timeout in [:infinity, :keep], do: conn
defp set_timeout(conn, timeout) do
ref = :erlang.start_timer(timeout, self(), __MODULE__)
conn.put_private(@key, ref)
end
end
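# Hedged usage sketch (not from the original file): long polling with a 30
# second timeout, matching the await/4 contract documented above. `conn.send/2`
# is assumed from Dynamo's connection API; the status codes are illustrative.
#
#     await conn, 30_000, fn msg, conn ->
#       conn.send(200, inspect(msg))
#     end, fn conn ->
#       conn.send(204, "")
#     end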
|
lib/dynamo/http/hibernate.ex
| 0.880245 | 0.420005 |
hibernate.ex
|
starcoder
|
defmodule Zaryn.Mining.DistributedWorkflow do
@moduledoc """
The ARCH mining workflow is performed in a distributed manner through a Finite State Machine
to ensure consistency of the actions, postpone concurrent events, and manage timeouts
Every transaction mining follows these steps:
- Mining Context retrieval (previous tx, UTXOs, P2P view of chain/beacon storage nodes, cross validation nodes) (from everyone)
- Mining context notification (from cross validators, to coordinator)
- Validation stamp and replication tree creation (from coordinator, to cross validators)
- Cross validation of the validation stamp (from cross validators, to coordinator)
- Replication (once the atomic commitment is reached) (from everyone, to the dedicated storage nodes)
If the atomic commitment is not reached, it starts the malicious detection to ban the dishonest nodes
"""
alias Zaryn.Crypto
alias Zaryn.Mining.MaliciousDetection
alias Zaryn.Mining.PendingTransactionValidation
alias Zaryn.Mining.TransactionContext
alias Zaryn.Mining.ValidationContext
alias Zaryn.Mining.WorkflowRegistry
alias Zaryn.P2P
alias Zaryn.P2P.Message.AddMiningContext
alias Zaryn.P2P.Message.CrossValidate
alias Zaryn.P2P.Message.CrossValidationDone
alias Zaryn.P2P.Message.Ok
alias Zaryn.P2P.Message.ReplicateTransaction
alias Zaryn.P2P.Node
alias Zaryn.Replication
alias Zaryn.TaskSupervisor
alias Zaryn.TransactionChain.Transaction
alias Zaryn.TransactionChain.Transaction.CrossValidationStamp
alias Zaryn.TransactionChain.Transaction.ValidationStamp
require Logger
use GenStateMachine, callback_mode: [:handle_event_function, :state_enter], restart: :transient
@mining_timeout Application.compile_env!(:zaryn, [Zaryn.Mining, :timeout])
def start_link(args \\ []) do
GenStateMachine.start_link(__MODULE__, args, [])
end
@doc """
Add a transaction mining context which was built by another cross validation node
"""
@spec add_mining_context(
worker_pid :: pid(),
validation_node_public_key :: Crypto.key(),
previous_storage_nodes :: list(Node.t()),
cross_validation_nodes_view :: bitstring(),
chain_storage_nodes_view :: bitstring(),
beacon_storage_nodes_view :: bitstring()
) ::
:ok
def add_mining_context(
pid,
validation_node_public_key,
previous_storage_nodes,
cross_validation_nodes_view,
chain_storage_nodes_view,
beacon_storage_nodes_view
) do
GenStateMachine.cast(
pid,
{:add_mining_context, validation_node_public_key, previous_storage_nodes,
cross_validation_nodes_view, chain_storage_nodes_view, beacon_storage_nodes_view}
)
end
@doc """
Cross validate the validation stamp and the replication tree produced by the coordinator
If there are no inconsistencies, the validation stamp is stamped with the node's public key.
Otherwise the inconsistencies will be signed.
"""
@spec cross_validate(
worker_pid :: pid(),
ValidationStamp.t(),
replication_tree :: %{
chain: list(bitstring()),
beacon: list(bitstring()),
IO: list(bitstring())
}
) :: :ok
def cross_validate(pid, stamp = %ValidationStamp{}, replication_tree) do
GenStateMachine.cast(pid, {:cross_validate, stamp, replication_tree})
end
@doc """
Add a cross validation stamp to the transaction mining process
"""
@spec add_cross_validation_stamp(worker_pid :: pid(), stamp :: CrossValidationStamp.t()) :: :ok
def add_cross_validation_stamp(pid, stamp = %CrossValidationStamp{}) do
GenStateMachine.cast(pid, {:add_cross_validation_stamp, stamp})
end
def init(opts) do
{tx, welcome_node, validation_nodes, node_public_key, timeout} = parse_opts(opts)
Registry.register(WorkflowRegistry, tx.address, [])
Logger.info("Start mining", transaction: "#{tx.type}@#{Base.encode16(tx.address)}")
chain_storage_nodes = Replication.chain_storage_nodes_with_type(tx.address, tx.type)
beacon_storage_nodes = Replication.beacon_storage_nodes(tx.address, DateTime.utc_now())
context =
ValidationContext.new(
transaction: tx,
welcome_node: welcome_node,
validation_nodes: validation_nodes,
chain_storage_nodes: chain_storage_nodes,
beacon_storage_nodes: beacon_storage_nodes
)
next_events = [
{{:timeout, :stop_timeout}, timeout, :any},
{:next_event, :internal, :prior_validation}
]
{:ok, :idle,
%{node_public_key: node_public_key, context: context, start_time: System.monotonic_time()},
next_events}
end
defp parse_opts(opts) do
tx = Keyword.get(opts, :transaction)
welcome_node = Keyword.get(opts, :welcome_node)
validation_nodes = Keyword.get(opts, :validation_nodes)
node_public_key = Keyword.get(opts, :node_public_key)
timeout = Keyword.get(opts, :timeout, @mining_timeout)
{tx, welcome_node, validation_nodes, node_public_key, timeout}
end
def handle_event(:enter, :idle, :idle, _data = %{context: %ValidationContext{transaction: tx}}) do
Logger.debug("Validation started", transaction: "#{tx.type}@#{Base.encode16(tx.address)}")
:keep_state_and_data
end
def handle_event(
:internal,
:prior_validation,
:idle,
data = %{
node_public_key: node_public_key,
context:
context = %ValidationContext{
transaction: tx,
coordinator_node: %Node{last_public_key: coordinator_key}
}
}
) do
role = if node_public_key == coordinator_key, do: :coordinator, else: :cross_validator
case PendingTransactionValidation.validate(tx) do
:ok ->
new_data =
Map.put(
data,
:context,
ValidationContext.set_pending_transaction_validation(context, true)
)
next_events =
case role do
:cross_validator ->
[
{:next_event, :internal, :build_transaction_context},
{:next_event, :internal, :notify_context}
]
:coordinator ->
[{:next_event, :internal, :build_transaction_context}]
end
{:next_state, role, new_data, next_events}
_ ->
new_data =
Map.put(
data,
:context,
ValidationContext.set_pending_transaction_validation(context, false)
)
case role do
:coordinator ->
{:next_state, :coordinator, new_data,
{:next_event, :internal, :create_and_notify_validation_stamp}}
:cross_validator ->
{:next_state, :cross_validator, new_data}
end
end
end
def handle_event(
:internal,
:build_transaction_context,
_,
data = %{
context:
context = %ValidationContext{
transaction: tx,
coordinator_node: %Node{last_public_key: coordinator_key},
chain_storage_nodes: chain_storage_nodes,
beacon_storage_nodes: beacon_storage_nodes,
cross_validation_nodes: cross_validation_nodes
}
}
) do
Logger.debug("Retrieve transaction context",
transaction: "#{tx.type}@#{Base.encode16(tx.address)}"
)
start = System.monotonic_time()
{prev_tx, unspent_outputs, previous_storage_nodes, chain_storage_nodes_view,
beacon_storage_nodes_view,
validation_nodes_view} =
TransactionContext.get(
Transaction.previous_address(tx),
Enum.map(chain_storage_nodes, & &1.last_public_key),
Enum.map(beacon_storage_nodes, & &1.last_public_key),
[coordinator_key | Enum.map(cross_validation_nodes, & &1.last_public_key)]
)
:telemetry.execute([:zaryn, :mining, :fetch_context], %{
duration: System.monotonic_time() - start
})
new_context =
ValidationContext.put_transaction_context(
context,
prev_tx,
unspent_outputs,
previous_storage_nodes,
chain_storage_nodes_view,
beacon_storage_nodes_view,
validation_nodes_view
)
Logger.debug("Transaction context retrieved",
transaction: "#{tx.type}@#{Base.encode16(tx.address)}"
)
{:keep_state, %{data | context: new_context}}
end
def handle_event(
:enter,
:idle,
:cross_validator,
_data = %{
context: %ValidationContext{transaction: tx}
}
) do
Logger.debug("Act as cross validator", transaction: "#{tx.type}@#{Base.encode16(tx.address)}")
:keep_state_and_data
end
def handle_event(:internal, :notify_context, :cross_validator, %{
node_public_key: node_public_key,
context: context
}) do
notify_transaction_context(context, node_public_key)
:keep_state_and_data
end
def handle_event(
:enter,
:idle,
:coordinator,
_data = %{context: %ValidationContext{transaction: tx}}
) do
Logger.debug("Act as coordinator", transaction: "#{tx.type}@#{Base.encode16(tx.address)}")
:keep_state_and_data
end
def handle_event(:cast, {:add_mining_context, _, _, _, _, _}, :idle, _),
do: {:keep_state_and_data, :postpone}
def handle_event(
:cast,
{:add_mining_context, from, previous_storage_nodes, validation_nodes_view,
chain_storage_nodes_view, beacon_storage_nodes_view},
:coordinator,
data = %{
context:
context = %ValidationContext{
transaction: tx
}
}
) do
Logger.debug("Aggregate mining context",
transaction: "#{tx.type}@#{Base.encode16(tx.address)}"
)
if ValidationContext.cross_validation_node?(context, from) do
new_context =
ValidationContext.aggregate_mining_context(
context,
previous_storage_nodes,
validation_nodes_view,
chain_storage_nodes_view,
beacon_storage_nodes_view,
from
)
if ValidationContext.enough_confirmations?(new_context) do
Logger.debug("Create validation stamp",
transaction: "#{tx.type}@#{Base.encode16(tx.address)}"
)
{:keep_state, Map.put(data, :context, new_context),
{:next_event, :internal, :create_and_notify_validation_stamp}}
else
{:keep_state, %{data | context: new_context}}
end
else
:keep_state_and_data
end
end
def handle_event(:internal, :create_and_notify_validation_stamp, _, data = %{context: context}) do
new_context =
context
|> ValidationContext.create_validation_stamp()
|> ValidationContext.create_replication_tree()
request_cross_validations(new_context)
{:next_state, :wait_cross_validation_stamps, %{data | context: new_context}}
end
def handle_event(:cast, {:cross_validate, _}, :idle, _), do: {:keep_state_and_data, :postpone}
def handle_event(
:cast,
{:cross_validate, validation_stamp = %ValidationStamp{}, replication_tree},
:cross_validator,
data = %{
node_public_key: node_public_key,
context:
context = %ValidationContext{
transaction: tx,
cross_validation_nodes: cross_validation_nodes
}
}
) do
Logger.debug("Cross validation", transaction: "#{tx.type}@#{Base.encode16(tx.address)}")
new_context =
context
|> ValidationContext.add_validation_stamp(validation_stamp)
|> ValidationContext.add_replication_tree(replication_tree, node_public_key)
|> ValidationContext.cross_validate()
notify_cross_validation_stamp(new_context)
if length(cross_validation_nodes) == 1 and ValidationContext.atomic_commitment?(new_context) do
{:next_state, :replication, %{data | context: new_context}}
else
{:next_state, :wait_cross_validation_stamps, %{data | context: new_context}}
end
end
def handle_event(:cast, {:add_cross_validation_stamp, _}, :cross_validator, _),
do: {:keep_state_and_data, :postpone}
def handle_event(
:enter,
_,
:wait_cross_validation_stamps,
_data = %{context: %ValidationContext{transaction: tx}}
) do
Logger.debug("Waiting cross validation stamps",
transaction: "#{tx.type}@#{Base.encode16(tx.address)}"
)
:keep_state_and_data
end
def handle_event(
:cast,
{:add_cross_validation_stamp, cross_validation_stamp = %CrossValidationStamp{}},
:wait_cross_validation_stamps,
data = %{
context: context = %ValidationContext{transaction: tx}
}
) do
Logger.debug("Add cross validation stamp",
transaction: "#{tx.type}@#{Base.encode16(tx.address)}"
)
new_context = ValidationContext.add_cross_validation_stamp(context, cross_validation_stamp)
if ValidationContext.enough_cross_validation_stamps?(new_context) do
if ValidationContext.atomic_commitment?(new_context) do
{:next_state, :replication, %{data | context: new_context}}
else
{:next_state, :consensus_not_reached, %{data | context: new_context}}
end
else
{:keep_state, %{data | context: new_context}}
end
end
def handle_event(
:enter,
:wait_cross_validation_stamps,
:consensus_not_reached,
_data = %{context: context = %ValidationContext{transaction: tx}}
) do
Logger.error("Consensus not reached - Malicious Detection started",
transaction: "#{tx.type}@#{Base.encode16(tx.address)}"
)
MaliciousDetection.start_link(context)
:stop
end
def handle_event(
:enter,
:wait_cross_validation_stamps,
:replication,
_data = %{
context:
context = %ValidationContext{
transaction: %Transaction{address: tx_address, type: type}
}
}
) do
Logger.info("Start replication", transaction: "#{type}@#{Base.encode16(tx_address)}")
request_replication(context)
:keep_state_and_data
end
def handle_event(
:enter,
:cross_validator,
:replication,
_data = %{
context:
context = %ValidationContext{
transaction: %Transaction{address: tx_address, type: tx_type},
cross_validation_nodes: [_]
}
}
) do
Logger.info("Start replication", transaction: "#{tx_type}@#{Base.encode16(tx_address)}")
request_replication(context)
:keep_state_and_data
end
def handle_event(
:info,
{:acknowledge_storage, replication_node_public_key, tree_types},
:replication,
data = %{context: context = %ValidationContext{transaction: tx}, start_time: start_time}
) do
new_context =
ValidationContext.confirm_replication(context, replication_node_public_key, tree_types)
if ValidationContext.enough_replication_confirmations?(new_context) do
:telemetry.execute([:zaryn, :mining, :full_transaction_validation], %{
duration: System.monotonic_time() - start_time
})
Logger.info("Replication finished", transaction: "#{tx.type}@#{Base.encode16(tx.address)}")
:stop
else
{:keep_state, %{data | context: new_context}}
end
end
def handle_event(
{:timeout, :stop_timeout},
:any,
_state,
_data = %{context: %ValidationContext{transaction: tx}}
) do
Logger.warning("Timeout reached during mining",
transaction: "#{tx.type}@#{Base.encode16(tx.address)}"
)
:stop
end
# Reject unexpected events
def handle_event(_, _, _, _), do: :keep_state_and_data
defp notify_transaction_context(
%ValidationContext{
transaction: %Transaction{address: tx_address, type: tx_type},
coordinator_node: coordinator_node,
previous_storage_nodes: previous_storage_nodes,
validation_nodes_view: validation_nodes_view,
chain_storage_nodes_view: chain_storage_nodes_view,
beacon_storage_nodes_view: beacon_storage_nodes_view
},
node_public_key
) do
Logger.debug(
"Send mining context to #{Node.endpoint(coordinator_node)}",
transaction: "#{tx_type}@#{Base.encode16(tx_address)}"
)
P2P.send_message(coordinator_node, %AddMiningContext{
address: tx_address,
validation_node_public_key: node_public_key,
previous_storage_nodes_public_keys: Enum.map(previous_storage_nodes, & &1.last_public_key),
validation_nodes_view: validation_nodes_view,
chain_storage_nodes_view: chain_storage_nodes_view,
beacon_storage_nodes_view: beacon_storage_nodes_view
})
end
defp request_cross_validations(%ValidationContext{
cross_validation_nodes: cross_validation_nodes,
transaction: %Transaction{address: tx_address, type: tx_type},
validation_stamp: validation_stamp,
full_replication_tree: replication_tree
}) do
Logger.debug(
"Send validation stamp to #{cross_validation_nodes |> Enum.map(&:inet.ntoa(&1.ip)) |> Enum.join(", ")}",
transaction: "#{tx_type}@#{Base.encode16(tx_address)}"
)
P2P.broadcast_message(cross_validation_nodes, %CrossValidate{
address: tx_address,
validation_stamp: validation_stamp,
replication_tree: replication_tree
})
end
defp notify_cross_validation_stamp(%ValidationContext{
transaction: %Transaction{address: tx_address, type: tx_type},
coordinator_node: coordinator_node,
cross_validation_nodes: cross_validation_nodes,
cross_validation_stamps: [cross_validation_stamp | []]
}) do
nodes =
[coordinator_node | cross_validation_nodes]
|> P2P.distinct_nodes()
|> Enum.reject(&(&1.last_public_key == Crypto.last_node_public_key()))
Logger.debug(
"Send cross validation stamps to #{nodes |> Enum.map(&Node.endpoint/1) |> Enum.join(", ")}",
transaction: "#{tx_type}@#{Base.encode16(tx_address)}"
)
P2P.broadcast_message(nodes, %CrossValidationDone{
address: tx_address,
cross_validation_stamp: cross_validation_stamp
})
end
defp request_replication(
context = %ValidationContext{
transaction: tx,
welcome_node: %Node{last_public_key: welcome_node_public_key}
}
) do
storage_nodes = ValidationContext.get_replication_nodes(context)
worker_pid = self()
Logger.debug(
"Send validated transaction to #{storage_nodes |> Enum.map(fn {node, roles} -> "#{Node.endpoint(node)} as #{Enum.join(roles, ",")}" end) |> Enum.join(",")}",
transaction: "#{tx.type}@#{Base.encode16(tx.address)}"
)
validated_tx = ValidationContext.get_validated_transaction(context)
Task.Supervisor.async_stream_nolink(
TaskSupervisor,
storage_nodes,
fn {node, roles} ->
message = %ReplicateTransaction{
transaction: validated_tx,
roles: roles,
ack_storage?: true,
welcome_node_public_key: welcome_node_public_key
}
case P2P.send_message(node, message) do
{:ok, %Ok{}} ->
{:ok, node, roles}
_ ->
:error
end
end,
on_timeout: :kill_task,
ordered?: false
)
|> Stream.filter(&match?({:ok, {:ok, %Node{}, _}}, &1))
|> Stream.each(fn {:ok, {:ok, %Node{last_public_key: node_key}, roles}} ->
send(worker_pid, {:acknowledge_storage, node_key, roles})
end)
|> Stream.run()
end
end
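# Hedged usage sketch (not from the original file): starting the workflow with
# the options consumed by parse_opts/1 above. The bound variables are assumed
# to come from the mining supervisor.
#
#     {:ok, pid} =
#       Zaryn.Mining.DistributedWorkflow.start_link(
#         transaction: tx,
#         welcome_node: welcome_node,
#         validation_nodes: validation_nodes,
#         node_public_key: Zaryn.Crypto.last_node_public_key()
#       )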
|
lib/zaryn/mining/distributed_workflow.ex
| 0.833562 | 0.546496 |
distributed_workflow.ex
|
starcoder
|
defmodule FinancialSystem.Converter do
alias FinancialSystem.Coin, as: Coin
@moduledoc """
Module that deals with operations such as currency value conversion. It uses the dollar as a base to make the other conversions.
If a currency is not in the ISO 4217 standard, an error is returned.
"""
@doc false
# Converting a currency to itself: both codes are equal, so a single validity check suffices.
def exchange(amount, from_coin, to_coin) when from_coin == to_coin do
case Coin.is_valid?(from_coin) do
true -> amount
false -> {:error, "Coin (#{from_coin}) not valid compared to ISO 4217"}
end
end
def exchange(amount, from, :USD) do
case Coin.is_valid?(from) do
true ->
from = Atom.to_string(from)
rates = Coin.currency_rate
value_amount = Decimal.from_float(amount)
rate_from =
rates
|> Enum.find(fn {currency, _value} -> currency == from end)
|> elem(1)
|> Decimal.from_float()
converted_amount =
Decimal.div(value_amount, rate_from)
|> Decimal.round(2)
|> Decimal.to_float()
converted_amount
false ->
{:error, "Coin (#{from}) not valid compared to ISO 4271"}
end
end
def exchange(amount, :USD, to) do
case Coin.is_valid?(to) do
true ->
to = Atom.to_string(to)
rates = Coin.currency_rate
value_amount = Decimal.from_float(amount)
rate_to =
rates
|> Enum.find(fn {currency, _value} -> currency == to end)
|> elem(1)
|> Decimal.from_float()
converted_amount =
Decimal.mult(value_amount, rate_to)
|> Decimal.round(2)
|> Decimal.to_float()
converted_amount
false ->
{:error, "Coin (#{to}) not valid compared to ISO 4271"}
end
end
@doc """
The exchange function takes as arguments an amount as a float and two currencies given as atoms.
It uses auxiliary functions to perform the currency conversion and verification operations.
The conversion of values is based on rates fetched from an API, which provides the value of each currency.
## Examples
iex(1)> FinancialSystem.Converter.exchange(100.00, :BRL, :USD)
22.79
iex(2)> FinancialSystem.Converter.exchange(100.00, :USD, :BRL)
438.82
iex(3)> FinancialSystem.Converter.exchange(100.00, :BRL, :ANF)
{:error, "Coins (BRL 'or' ANF) not valid compared to ISO 4271"}
iex(4)> FinancialSystem.Converter.exchange(100.00, :BRL, :AFN)
1753.68
iex(5)> FinancialSystem.Converter.exchange(100.00, :AFN, :BRL)
5.7
iex(6)> FinancialSystem.Converter.exchange(100.00, :AFF, :USD)
{:error, "Coin (AFF) not valid compared to ISO 4271"}
"""
@spec exchange(float, atom, atom) :: float | {:error, String.t()}
def exchange(amount, from, to) do
case !Coin.is_valid?(from) or !Coin.is_valid?(to) do
true ->
{:error, "Coins (#{from} 'or' #{to}) not valid compared to ISO 4271"}
false ->
usd_value = FinancialSystem.Converter.exchange(amount, from, :USD)
FinancialSystem.Converter.exchange(usd_value, :USD, to)
end
end
end
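# Hedged usage sketch (not from the original file): a cross-currency
# conversion composes the two USD-based clauses above, so BRL -> EUR is
# computed as BRL -> USD followed by USD -> EUR. Rates come from the API
# behind Coin.currency_rate/0, so the intermediate figures vary over time.
#
#     usd = FinancialSystem.Converter.exchange(100.00, :BRL, :USD)
#     eur = FinancialSystem.Converter.exchange(usd, :USD, :EUR)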
|
apps/financial_system/lib/converter.ex
| 0.854854 | 0.651729 |
converter.ex
|
starcoder
|
defmodule Votex.Votable do
@moduledoc """
Defines a Votable Model
A Votable model will expose the required methods to enable voting functionality
Typically used by models like Post, Image, Answer, etc.
## Example
defmodule Post do
use Ecto.Schema
use Votex.Votable
schema "posts" do
field :title, :string
field :views, :integer, default: 0
end
end
"""
import Ecto.Query
import Votex.Core
alias Votex.{Vote, Votable, DB, CleanupBehaviour}
defmacro __using__(_opts) do
quote do
@behaviour unquote(__MODULE__)
@behaviour CleanupBehaviour
defdelegate vote_by(votable, voter), to: Votable
defdelegate unvote_by(votable, voter), to: Votable
defdelegate votes_for(votable), to: Votable
defdelegate cleanup_votes(result), to: Votable
end
end
@doc """
Primary method to cast a vote
## Example
{:ok, vote} = post |> Post.vote_by user
"""
def vote_by(votable, voter) do
{votable_type, voter_type} = extract_fields(votable, voter)
result =
%{
votable_id: votable.id,
votable_type: votable_type,
voter_id: voter.id,
voter_type: voter_type
}
|> create_vote
calculate_cached_fields_for_votable(get_module(votable_type), votable_type, votable.id, true)
result
end
@doc """
Primary method to remove a vote
## Example
{:ok, vote} = post |> Post.unvote_by user
"""
def unvote_by(votable, voter) do
{votable_type, voter_type} = extract_fields(votable, voter)
vote =
Vote
|> where(votable_type: ^votable_type)
|> where(votable_id: ^votable.id)
|> where(voter_type: ^voter_type)
|> where(voter_id: ^voter.id)
|> DB.repo().one
case vote do
%{} = v ->
calculate_cached_fields_for_votable(
get_module(votable_type),
votable_type,
votable.id,
false
)
v |> DB.repo().delete
nil ->
raise "Vote not present"
end
end
@doc """
Get a list of votes on votable record
## Example
votes = post |> Post.votes_for
"""
def votes_for(votable) do
{votable_type, _} = extract_fields(votable, nil)
Vote
|> where(votable_type: ^votable_type)
|> where(votable_id: ^votable.id)
|> DB.repo().all
|> preload_votes
end
@doc """
Clean up votes after a votable record is deleted
## Example
Repo.delete(user) |> User.cleanup_votes
"""
def cleanup_votes({status, %{} = payload}) do
case status do
:ok ->
{votable_type, _} = extract_fields(payload, nil)
Vote
|> where(votable_type: ^votable_type)
|> where(votable_id: ^payload.id)
|> DB.repo().delete_all
_ ->
{status, payload}
end
end
@doc """
Reserved for internal use
"""
def children() do
for({module, _} <- :code.all_loaded(), do: module)
|> Enum.filter(&is_child?/1)
end
# Private
defp get_module(type) do
children()
|> convert_modules_list_to_map
|> Map.get(type)
end
defp is_child?(module) do
module.module_info[:attributes]
|> Keyword.get(:behaviour, [])
|> Enum.member?(__MODULE__)
end
defp create_vote(%{} = vote) do
%Vote{}
|> Vote.changeset(vote)
|> DB.repo().insert
end
end
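# Hedged usage sketch (not from the original file): the full vote lifecycle on
# a Post record, combining the delegated functions above. `post` and `user`
# are assumed to be persisted Ecto records.
#
#     {:ok, _vote} = post |> Post.vote_by(user)
#     [_vote] = post |> Post.votes_for()
#     {:ok, _deleted} = post |> Post.unvote_by(user)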
|
lib/votex/votable.ex
| 0.738763 | 0.523542 |
votable.ex
|
starcoder
|
defmodule Scenic.Scrollable.ScrollBars do
use Scenic.Component
use Scenic.Scrollable.SceneInspector, env: [:test, :dev]
import Scenic.Scrollable.Components, only: [scroll_bar: 3]
alias Scenic.Graph
alias Scenic.Scrollable.ScrollBar
alias Scenic.Scrollable.Direction
@moduledoc """
The scroll bars component can be used to add a horizontal, and a vertical scroll bar pair to the graph. This component is used internally by the `Scenic.Scrollable` component, and for most cases it is recommended to use the `Scenic.Scrollable` component instead.
## Data
`t:Scenic.Scrollable.ScrollBars.settings/0`
The scroll bars require the following data for initialization:
- width: number
- height: number
- content_size: `t:Scenic.Scrollable.ScrollBars.v2/0`
- scroll_position: `t:Scenic.Scrollable.ScrollBars.v2/0`
Width and height define the size of the frame, and thus correspond to the width of the horizontal, and the height of the vertical scroll bars.
## Styles
`t:Scenic.Scrollable.ScrollBars.styles/0`
The scroll bars can be customized by using the following styles:
### scroll_bar
`t:Scenic.Scrollable.ScrollBar.styles/0`
The styles to customize both scrollbars as defined in the corresponding module `Scenic.Scrollable.Scrollbar`.
If different styles for the horizontal and vertical scroll bars are preferred, use the horizontal_scroll_bar and vertical_scroll_bar styles instead.
### horizontal_scroll_bar
`t:Scenic.Scrollable.ScrollBar.styles/0`
The styles to customize the horizontal scroll bar.
### vertical_scroll_bar
`t:Scenic.Scrollable.ScrollBar.styles/0`
The styles to customize the vertical scroll bar.
### scroll_drag
`t:Scenic.Scrollable.Drag/0`
Settings to specify which mouse buttons can be used in order to drag the scroll bar sliders.
### scroll_bar_thickness
number
Specify the height of the horizontal, and the width of the vertical scroll bars.
## Examples
iex> graph = Scenic.Scrollable.Components.scroll_bars(
...> Scenic.Graph.build(),
...> %{
...> width: 200,
...> height: 200,
...> content_size: {1000, 1000},
...> scroll_position: {0, 0}
...> },
...> [
...> scroll_bar: [
...> scroll_buttons: true,
...> scroll_bar_theme: Scenic.Primitive.Style.Theme.preset(:light),
...> scroll_bar_radius: 2,
...> scroll_bar_border: 2,
...> scroll_drag: %{
...> mouse_buttons: [:left, :right, :middle]
...> }
...> ],
...> scroll_drag: %{
...> mouse_buttons: [:left, :right, :middle]
...> },
...> id: :scroll_bars_component_1
...> ]
...> )
...> graph.primitives[1].id
:scroll_bars_component_1
"""
@typedoc """
Data structure representing a vector 2, in the form of an {x, y} tuple.
"""
@type v2 :: Scenic.Scrollable.v2()
@typedoc """
The required settings to initialize a scroll bars component.
For more information see the top of this module.
"""
@type settings :: %{
width: number,
height: number,
content_size: v2,
scroll_position: v2
}
@typedoc """
The optional styles to customize the scroll bars.
For more information see the top of this module.
"""
@type style ::
{:scroll_bar, Scenic.Scrollable.ScrollBar.styles()}
| {:horizontal_scroll_bar, Scenic.Scrollable.ScrollBar.styles()}
| {:vertical_scroll_bar, Scenic.Scrollable.ScrollBar.styles()}
| {:scroll_drag, Scenic.Scrollable.Drag.settings()}
| {:scroll_bar_thickness, number}
@typedoc """
A collection of optional styles to customize the scroll bars.
For more information see `t:Scenic.Scrollable.ScrollBars.style/0` and the top of this module.
"""
@type styles :: [style]
@typedoc """
An atom describing the state the scroll bars are in.
- idle: none of the scroll bars are currently being clicked or dragged.
- dragging: one of the scroll bars is being dragged.
- scrolling: one of the scroll bars is being scrolled using a scroll button.
"""
@type scroll_state ::
:idle
| :dragging
| :scrolling
@typedoc """
The state with which the scroll bars component's GenServer is running.
"""
@type t :: %__MODULE__{
id: atom,
graph: Graph.t(),
scroll_position: v2,
scroll_state: scroll_state,
pid: pid,
horizontal_scroll_bar_pid: {:some, pid} | :none,
vertical_scroll_bar_pid: {:some, pid} | :none
}
defstruct id: :scroll_bars,
graph: Graph.build(),
scroll_position: {0, 0},
scroll_state: :idle,
pid: nil,
horizontal_scroll_bar_pid: :none,
vertical_scroll_bar_pid: :none
@default_id :scroll_bars
@default_thickness 10
# PUBLIC API
@doc """
Find the direction the content should be scrolling in, based on the pressed states of the scroll bar buttons.
"""
@spec direction(t) :: v2
def direction(state) do
{x, _} =
state.horizontal_scroll_bar_pid
|> OptionEx.map(&ScrollBar.direction/1)
|> OptionEx.or_else({0, 0})
{_, y} =
state.vertical_scroll_bar_pid
|> OptionEx.map(&ScrollBar.direction/1)
|> OptionEx.or_else({0, 0})
{x, y}
end
@doc """
Find out if one of the scroll bars is currently being dragged.
"""
@spec dragging?(t) :: boolean
def dragging?(%{scroll_state: :dragging}), do: true
def dragging?(_), do: false
@doc """
Find the latest position the scrollable content should be updated with.
The position corresponds to the content's translation, rather than the scroll bar's drag control translation.
"""
@spec new_position(t) :: {:some, v2} | :none
def new_position(%{scroll_position: position}), do: {:some, position}
# CALLBACKS
@impl Scenic.Scene
def init(settings, opts) do
id = opts[:id] || @default_id
styles = Enum.into(opts[:styles] || %{}, [])
shared_styles = Keyword.take(styles, [:scroll_bar, :scroll_drag])
horizontal_bar_styles =
(styles[:horizontal_scroll_bar] || styles[:scroll_bar])
|> OptionEx.return()
|> OptionEx.map(&Keyword.merge(&1, shared_styles))
|> OptionEx.map(&Keyword.put(&1, :id, :horizontal_scroll_bar))
|> OptionEx.map(&Keyword.put(&1, :translate, {0, settings.height}))
vertical_bar_styles =
(styles[:vertical_scroll_bar] || styles[:scroll_bar])
|> OptionEx.return()
|> OptionEx.map(&Keyword.merge(&1, shared_styles))
|> OptionEx.map(&Keyword.put(&1, :id, :vertical_scroll_bar))
|> OptionEx.map(&Keyword.put(&1, :translate, {settings.width, 0}))
{content_width, content_height} = settings.content_size
{x, y} = settings.scroll_position
graph = Graph.build()
graph =
horizontal_bar_styles
|> OptionEx.map(fn styles ->
graph
|> scroll_bar(
%{
width: settings.width,
height: styles[:scroll_bar_thickness] || @default_thickness,
content_size: content_width,
scroll_position: x,
direction: :horizontal
},
styles
)
end)
|> OptionEx.or_else(graph)
graph =
vertical_bar_styles
|> OptionEx.map(fn styles ->
graph
|> scroll_bar(
%{
width: styles[:scroll_bar_thickness] || @default_thickness,
height: settings.height,
content_size: content_height,
scroll_position: y,
direction: :vertical
},
styles
)
end)
|> OptionEx.or_else(graph)
push_graph(graph)
state = %__MODULE__{
id: id,
graph: graph,
scroll_position: {x, y},
pid: self()
}
    send_event({:scroll_bars_initialized, state.id, state})

    {:ok, state}
end
@impl Scenic.Component
def verify(
%{
content_size: {content_x, content_y},
scroll_position: {x, y}
} = settings
)
when is_number(content_x) and is_number(content_y) and is_number(x) and is_number(y) do
{:ok, settings}
end
def verify(_), do: :invalid_input
@impl Scenic.Scene
def filter_event(
{:scroll_bar_initialized, :horizontal_scroll_bar, scroll_bar_state},
_from,
state
) do
{:stop, %{state | horizontal_scroll_bar_pid: OptionEx.return(scroll_bar_state.pid)}}
end
def filter_event(
{:scroll_bar_initialized, :vertical_scroll_bar, scroll_bar_state},
_from,
state
) do
{:stop, %{state | vertical_scroll_bar_pid: OptionEx.return(scroll_bar_state.pid)}}
end
def filter_event({:scroll_bar_button_pressed, _, scroll_bar_state}, _from, state) do
state = update_scroll_state(state, scroll_bar_state)
{:continue, {:scroll_bars_button_pressed, state.id, state}, state}
end
def filter_event({:scroll_bar_button_released, _, scroll_bar_state}, _from, state) do
state = update_scroll_state(state, scroll_bar_state)
{:continue, {:scroll_bars_button_released, state.id, state}, state}
end
def filter_event(
{:scroll_bar_position_change, _, _scroll_bar_state},
_from,
%{scroll_state: :scrolling} = state
) do
{:stop, state}
end
def filter_event(
{:scroll_bar_position_change, _, %{direction: direction} = scroll_bar_state},
_from,
state
) do
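    # Take the new position along the changed bar's axis, keep the other
    # coordinate from the current state, store the merged vector, and
    # broadcast the change to the parent scene.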
{x, y} = state.scroll_position
ScrollBar.new_position(scroll_bar_state)
|> Direction.from_vector_2(direction)
|> Direction.map_horizontal(&{&1, y})
|> Direction.map_vertical(&{x, &1})
|> Direction.unwrap()
|> (&Map.put(state, :scroll_position, &1)).()
|> update_scroll_state(scroll_bar_state)
|> (&{:continue, {:scroll_bars_position_change, &1.id, &1}, &1}).()
end
def filter_event({:scroll_bar_scroll_end, _id, scroll_bar_state}, _from, state) do
state = update_scroll_state(state, scroll_bar_state)
{:continue, {:scroll_bars_scroll_end, state.id, state}, state}
end
def filter_event(_event, _from, state) do
{:stop, state}
end
  # `handle_call/3` is not a `Scenic.Scene` callback and this module declares no GenServer @behaviour, so `@impl` cannot be used here
@spec handle_call(request :: term(), GenServer.from(), state :: term()) ::
{:reply, reply :: term(), new_state :: term()}
def handle_call({:update_scroll_position, {x, y}}, _, state) do
state = %{state | scroll_position: {x, y}}
# TODO error handling
state.horizontal_scroll_bar_pid
|> OptionEx.map(fn pid -> GenServer.call(pid, {:update_scroll_position, x}) end)
state.vertical_scroll_bar_pid
|> OptionEx.map(fn pid -> GenServer.call(pid, {:update_scroll_position, y}) end)
{:reply, :ok, state}
end
def handle_call(msg, _, state) do
{:reply, {:error, {:unexpected_message, msg}}, state}
end
# UTILITY
@spec update_scroll_state(t, ScrollBar.t()) :: t
defp update_scroll_state(state, scroll_bar_state) do
%{state | scroll_state: scroll_bar_state.scroll_state}
end
end
# (source: lib/components/scroll_bars.ex)
defmodule StarkInfra.IssuingAuthorization do
alias __MODULE__, as: IssuingAuthorization
alias StarkInfra.Error
alias StarkInfra.Utils.JSON
alias StarkInfra.Utils.Parse
alias StarkInfra.Utils.Check
alias StarkInfra.User.Project
alias StarkInfra.User.Organization
@moduledoc """
Groups IssuingAuthorization related functions
"""
@doc """
An IssuingAuthorization presents purchase data to be analysed and answered with an approval or a declination.
## Attributes (return-only):
- `:end_to_end_id` [string]: central bank's unique transaction ID. ex: "E79457883202101262140HHX553UPqeq"
- `:amount` [integer]: IssuingPurchase value in cents. Minimum = 0. ex: 1234 (= R$ 12.34)
- `:tax` [integer]: IOF amount taxed for international purchases. ex: 1234 (= R$ 12.34)
- `:card_id` [string]: unique id returned when IssuingCard is created. ex: "5656565656565656"
- `:issuer_amount` [integer]: issuer amount. ex: 1234 (= R$ 12.34)
- `:issuer_currency_code` [string]: issuer currency code. ex: "USD"
- `:merchant_amount` [integer]: merchant amount. ex: 1234 (= R$ 12.34)
- `:merchant_currency_code` [string]: merchant currency code. ex: "USD"
- `:merchant_category_code` [string]: merchant category code. ex: "fastFoodRestaurants"
- `:merchant_country_code` [string]: merchant country code. ex: "USA"
- `:acquirer_id` [string]: acquirer ID. ex: "5656565656565656"
- `:merchant_id` [string]: merchant ID. ex: "5656565656565656"
- `:merchant_name` [string]: merchant name. ex: "Google Cloud Platform"
- `:merchant_fee` [integer]: merchant fee charged. ex: 200 (= R$ 2.00)
- `:wallet_id` [string]: virtual wallet ID. ex: "googlePay"
- `:method_code` [string]: method code. ex: "chip", "token", "server", "manual", "magstripe" or "contactless"
- `:score` [float]: internal score calculated for the authenticity of the purchase. Nil in case of insufficient data. ex: 7.6
- `:is_partial_allowed` [bool]: true if the merchant allows partial purchases. ex: False
- `:purpose` [string]: purchase purpose. ex: "purchase"
- `:card_tags` [list of strings]: tags of the IssuingCard responsible for this purchase. ex: ["travel", "food"]
- `:holder_tags` [list of strings]: tags of the IssuingHolder responsible for this purchase. ex: ["technology", "<NAME>"]
"""
@enforce_keys [
:id,
:end_to_end_id,
:amount,
:tax,
:card_id,
:issuer_amount,
:issuer_currency_code,
:merchant_amount,
:merchant_currency_code,
:merchant_category_code,
:merchant_country_code,
:acquirer_id,
:merchant_id,
:merchant_name,
:merchant_fee,
:wallet_id,
:method_code,
:score,
:is_partial_allowed,
:purpose,
:card_tags,
:holder_tags
]
defstruct [
:id,
:end_to_end_id,
:amount,
:tax,
:card_id,
:issuer_amount,
:issuer_currency_code,
:merchant_amount,
:merchant_currency_code,
:merchant_category_code,
:merchant_country_code,
:acquirer_id,
:merchant_id,
:merchant_name,
:merchant_fee,
:wallet_id,
:method_code,
:score,
:is_partial_allowed,
:purpose,
:card_tags,
:holder_tags
]
@type t() :: %__MODULE__{}
@doc """
Create a single IssuingAuthorization struct received from IssuingAuthorization at the informed endpoint.
If the provided digital signature does not check out with the StarkInfra public key, a
starkinfra.error.InvalidSignatureError will be raised.
## Parameters (required):
- `:content` [string]: response content from request received at user endpoint (not parsed)
- `:signature` [string]: base-64 digital signature received at response header "Digital-Signature"
## Options
- `:cache_pid` [PID, default nil]: PID of the process that holds the public key cache, returned on previous parses. If not provided, a new cache process will be generated.
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- Parsed IssuingAuthorization struct
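## Example
A sketch; `request_content` and `signature_header` are hypothetical variables holding the raw response body and the "Digital-Signature" header value:
    {:ok, authorization} = StarkInfra.IssuingAuthorization.parse(
      content: request_content,
      signature: signature_header
    )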
"""
  @spec parse(
    content: binary,
    signature: binary,
    cache_pid: pid,
    user: Project.t() | Organization.t()
  ) ::
    {:ok, IssuingAuthorization.t()} |
    {:error, [error: Error.t()]}
def parse(options) do
%{content: content, signature: signature, cache_pid: cache_pid, user: user} =
Enum.into(
options |> Check.enforced_keys([:content, :signature]),
%{cache_pid: nil, user: nil}
)
Parse.parse_and_verify(
content: content,
signature: signature,
cache_pid: cache_pid,
key: nil,
resource_maker: &resource_maker/1,
user: user
)
end
@doc """
Same as parse(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec parse!(
content: binary,
signature: binary,
    cache_pid: pid,
user: Project.t() | Organization.t()
) :: any
def parse!(options \\ []) do
%{content: content, signature: signature, cache_pid: cache_pid, user: user} =
Enum.into(
options |> Check.enforced_keys([:content, :signature]),
%{cache_pid: nil, user: nil}
)
Parse.parse_and_verify!(
content: content,
signature: signature,
cache_pid: cache_pid,
key: nil,
resource_maker: &resource_maker/1,
user: user
)
end
@doc """
Helps you respond IssuingAuthorization requests.
## Parameters (required):
- `:status` [string]: sub-issuer response to the authorization. ex: "accepted" or "denied"
## Options
- `:amount` [integer, default 0]: amount in cents that was authorized. ex: 1234 (= R$ 12.34)
- `:reason` [string, default ""]: denial reason. ex: "other"
- `:tags` [list of strings, default []]: tags to filter retrieved object. ex: ["tony", "stark"]
## Return:
- Dumped JSON string that must be returned to us on the IssuingAuthorization request
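## Example
A sketch approving an authorization for its full amount (key order in the encoded JSON may differ):
    StarkInfra.IssuingAuthorization.response!("accepted", amount: 1234)
    # => ~s({"authorization": {"amount": 1234, "reason": "", "status": "accepted", "tags": []}})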
"""
  @spec response!(
    binary,
    amount: integer,
    reason: binary,
    tags: [binary]
  ) :: any
def response!(status, options \\ []) do
options = options ++ [status: status]
JSON.encode!(%{authorization:
Enum.into(options |> Check.enforced_keys([:status]), %{amount: 0, reason: "", tags: []})
|> Enum.filter(fn {_, v} -> v != nil end)
|> Enum.into(%{})
})
end
@doc false
def resource() do
{
"IssuingAuthorization",
&resource_maker/1
}
end
@doc false
def resource_maker(json) do
%IssuingAuthorization{
id: json[:id],
end_to_end_id: json[:end_to_end_id],
amount: json[:amount],
tax: json[:tax],
card_id: json[:card_id],
issuer_amount: json[:issuer_amount],
issuer_currency_code: json[:issuer_currency_code],
merchant_amount: json[:merchant_amount],
merchant_currency_code: json[:merchant_currency_code],
merchant_category_code: json[:merchant_category_code],
merchant_country_code: json[:merchant_country_code],
acquirer_id: json[:acquirer_id],
merchant_id: json[:merchant_id],
merchant_name: json[:merchant_name],
merchant_fee: json[:merchant_fee],
wallet_id: json[:wallet_id],
method_code: json[:method_code],
score: json[:score],
is_partial_allowed: json[:is_partial_allowed],
purpose: json[:purpose],
card_tags: json[:card_tags],
holder_tags: json[:holder_tags]
}
end
end
# (source: lib/issuing_authorization/issuing_authorization.ex)
defmodule Export.Python do
@moduledoc """
Wrapper for Python.
## Example
```elixir
defmodule SomePythonCall do
use Export.Python
def call_python_method do
# path to our python files
{:ok, py} = Python.start(python_path: Path.expand("lib/python"))
# call "upcase" method from "test" file with "hello" argument
py |> Python.call("test", "upcase", ["hello"])
# same as above but prettier
py |> Python.call(upcase("hello"), from_file: "test")
end
end
```
"""
import Export.Helpers
@doc false
defmacro __using__(_opts) do
quote do
alias Export.Python
require Export.Python
end
end
@doc """
Start Python instance with the default options.
Returns `{:ok, pid}`.
## Examples
iex> Export.Python.start()
{:ok, pid}
"""
def start(), do: :python.start()
@doc """
Start Python instance with options.
The `options` argument should be a map with the following options.
## Python options
- python: Path to the Python interpreter executable
- python_path: The Python modules search path. The Path variable can be a string in PYTHONPATH format or a list of paths.
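## Examples
    {:ok, py} = Export.Python.start(python_path: Path.expand("lib/python"))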
"""
def start(options), do: options |> convert_options |> :python.start
@doc """
Start Python instance with name and options.
The instance will be registered with name. The `options` argument should be a map with the following options.
## Python options
- python: Path to the Python interpreter executable
- python_path: The Python modules search path. The Path variable can be a string in PYTHONPATH format or a list of paths.
"""
def start(name, options) when not is_tuple(name), do: :python.start({:local, name}, options |> convert_options)
def start(name, options), do: :python.start(name, options |> convert_options)
@doc """
The same as start/0 except the link to the current process is also created.
"""
def start_link(), do: :python.start_link()
@doc """
The same as start/1 except the link to the current process is also created.
"""
def start_link(options), do: options |> convert_options |> :python.start_link
@doc """
The same as start/2 except the link to the current process is also created.
"""
def start_link(name, options) when not is_tuple(name), do: :python.start_link({:local, name}, options |> convert_options)
def start_link(name, options), do: :python.start_link(name, options |> convert_options)
@doc """
Stop Python instance
"""
def stop(instance), do: :python.stop(instance)
@doc """
Call Python function.
## Parameters
- instance: pid which is returned by one of the `start` functions
- file: file to run the Python function from
- function: name of the function
- arguments: arguments to pass to the function
## Example
```
# call "upcase" method from "test" file with "hello" argument
py |> Python.call("test", "upcase", ["hello"])
```
"""
def call(instance, file, function, arguments), do: :python.call(instance, String.to_atom(file), String.to_atom(function), arguments)
@doc """
Call Python function.
## Parameters
- instance: pid which is returned by one of the `start` functions
- expression: function expression to execute in the Python world
- from_file: file to run the Python function from
## Example
```
# call "upcase" method from "test" file with "hello" argument
py |> Python.call(upcase("hello"), from_file: "test")
```
"""
defmacro call(instance, expression, from_file: file) do
{function, _meta, arguments} = expression
arguments = arguments || []
quote do
:python.call(unquote(instance), String.to_atom(unquote(file)), unquote(function), unquote(arguments))
end
end
end
# (source: lib/export/python.ex)
defmodule Bonbon.APICase do
@moduledoc """
This module defines the test case to be used by
GraphQL endpoint tests.
"""
use ExUnit.CaseTemplate
use Phoenix.ConnTest
using do
quote do
import Bonbon.APICase
use Phoenix.ConnTest
alias Bonbon.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
# The default endpoint for testing
@endpoint Bonbon.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Bonbon.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Bonbon.Repo, { :shared, self() })
end
conn = build_conn()
|> put_req_header("content-type", "application/graphql")
conn = if tags[:locale] do
put_req_header(conn, "accept-language", tags[:locale])
else
delete_req_header(conn, "accept-language")
end
{ :ok, conn: conn }
end
@doc """
Submit a GraphQL query and retrieve the result.
This macro simplifies running GraphQL calls by handling the submission, response,
and converting the JSON result to an Elixir Map.
The `conn` argument is the current Plug connection to be used to send the request.
The `query` argument is the GraphQL query itself.
The `code` argument is the status code returned. See [Plug.Conn.Status](https://hexdocs.pm/plug/Plug.Conn.Status.html)
"""
@spec run(Plug.Conn.t, String.t, integer | atom) :: Macro.t
defmacro run(conn, query, code \\ :ok) do
quote do
Poison.decode!(response(post(unquote(conn), "/", unquote(query)), unquote(code)))
end
end
@doc false
def to_root(root), do: to_string(root)
defp format_var([]), do: "[]"
defp format_var(var) when is_list(var), do: to_args(var) |> String.replace("(", "{ ") |> String.replace(")", " }")
defp format_var(var) when is_binary(var), do: "\"#{var}\""
defp format_var(var), do: to_string(var)
defp to_args([]), do: ""
defp to_args(args = [arg|_]) do
args = Enum.map_join(args, ", ", fn
{ name, var } -> "#{to_string(name)}: #{format_var(var)}"
args -> to_args(args)
end)
if(is_list(arg), do: "[#{args}]", else: "(#{args})")
end
defp to_fields([]), do: ""
defp to_fields(fields = [_|_]), do: "{ #{Enum.map_join(fields, " ", &to_fields/1)} }"
defp to_fields({ name, fields }), do: "#{to_string(name)}#{to_fields(fields)}"
defp to_fields(field), do: to_string(field)
#todo: need to add support for more elaborate queries
def build_query(root, fields, args), do: "{ #{to_root(root)}#{to_args(args)}#{to_fields(fields)} }"
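  # For example (derived from to_root/to_args/to_fields above):
  #   build_query(:foo, [:id, :name], id: 5)
  #   #=> "{ foo(id: 5){ id name } }"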
@doc """
Build and run a GraphQL query.
This macro simplifies constructing GraphQL calls. It then passes them to `run/3`.
The `conn` argument is the current Plug connection to be used to send the request.
The `root` argument is the root GraphQL query type.
The `fields` argument is the GraphQL subfields.
The `args` argument are the GraphQL query arguments.
"""
@spec query(Plug.Conn.t, atom, [atom], keyword(), integer | atom) :: Macro.t
defmacro query(conn, root, fields, args \\ [], code \\ :ok) do
quote do
run(unquote(conn), build_query(unquote(root), unquote(fields), unquote(args)), unquote(code))
end
end
@doc """
Build and run a GraphQL mutation.
This macro simplifies constructing GraphQL calls. It then passes them to `run/3`.
The `conn` argument is the current Plug connection to be used to send the request.
The `root` argument is the root GraphQL query type.
The `fields` argument is the GraphQL subfields.
The `args` argument are the GraphQL mutation arguments.
"""
@spec mutation(Plug.Conn.t, atom, [atom], keyword(), integer | atom) :: Macro.t
defmacro mutation(conn, root, fields, args \\ [], code \\ :ok) do
quote do
run(unquote(conn), "mutation " <> build_query(unquote(root), unquote(fields), unquote(args)), unquote(code))
end
end
@doc """
Build and run a GraphQL query.
The root and subfields are obtained from `@root`, and `@fields`. For more information see
`query/4`.
"""
@spec query(Plug.Conn.t, keyword()) :: Macro.t
defmacro query(conn, args \\ []) do
quote do
query(unquote(conn), @root, @fields, unquote(args))
end
end
@doc """
Build and run a GraphQL mutation.
The root and subfields are obtained from `@root`, and `@fields`. For more information see
`mutation/4`.
"""
@spec mutation(Plug.Conn.t, keyword()) :: Macro.t
defmacro mutation(conn, args \\ []) do
quote do
mutation(unquote(conn), @root, @fields, unquote(args))
end
end
@doc """
Build and run a GraphQL query, and retrieve the root data.
This macro simplifies constructing GraphQL calls, and retrieving the root data. For
more information see: `query/4`
"""
@spec query_data(Plug.Conn.t, atom, [atom], keyword(), integer | atom) :: Macro.t
defmacro query_data(conn, root, fields, args \\ [], code \\ :ok) do
quote do
query(unquote(conn), unquote(root), unquote(fields), unquote(args), unquote(code))["data"][to_root(unquote(root))]
end
end
@doc """
Build and run a GraphQL query, and retrieve the root data.
The root and subfields are obtained from `@root`, and `@fields`. For more information see
`query_data/4`.
"""
@spec query_data(Plug.Conn.t, keyword()) :: Macro.t
defmacro query_data(conn, args \\ []) do
quote do
query_data(unquote(conn), @root, @fields, unquote(args))
end
end
@doc """
Build and run a GraphQL mutation, and retrieve the root data.
This macro simplifies constructing GraphQL calls, and retrieving the root data. For
more information see: `mutation/4`
"""
@spec mutation_data(Plug.Conn.t, atom, [atom], keyword(), integer | atom) :: Macro.t
defmacro mutation_data(conn, root, fields, args \\ [], code \\ :ok) do
quote do
mutation(unquote(conn), unquote(root), unquote(fields), unquote(args), unquote(code))["data"][to_root(unquote(root))]
end
end
@doc """
Build and run a GraphQL mutation, and retrieve the root data.
The root and subfields are obtained from `@root`, and `@fields`. For more information see
`mutation_data/4`.
"""
@spec mutation_data(Plug.Conn.t, keyword()) :: Macro.t
defmacro mutation_data(conn, args \\ []) do
quote do
mutation_data(unquote(conn), @root, @fields, unquote(args))
end
end
@doc """
Get the custom portion of the error message.
"""
@spec get_message(String.t) :: String.t
def get_message(message) do
case String.split(message, ":", parts: 2) do
[_, message] -> message
[message] -> message
end |> String.trim
end
@doc """
Build and run a GraphQL query, and retrieve the custom portion of the root error message.
This macro simplifies constructing GraphQL calls, and retrieving the root error message.
For more information see: `query/4`
"""
@spec query_error(Plug.Conn.t, atom, [atom], keyword(), integer | atom) :: Macro.t
defmacro query_error(conn, root, fields, args \\ [], code \\ :ok) do
quote do
get_message(List.first(query(unquote(conn), unquote(root), unquote(fields), unquote(args), unquote(code))["errors"])["message"])
end
end
@doc """
Build and run a GraphQL query, and retrieve the custom portion of the root error message.
The root and subfields are obtained from `@root`, and `@fields`. For more information see
`query_error/4`.
"""
@spec query_error(Plug.Conn.t, keyword()) :: Macro.t
defmacro query_error(conn, args \\ []) do
quote do
query_error(unquote(conn), @root, @fields, unquote(args))
end
end
@doc """
Build and run a GraphQL mutation, and retrieve the custom portion of the root error message.
This macro simplifies constructing GraphQL calls, and retrieving the root error message.
For more information see: `mutation/4`
"""
@spec mutation_error(Plug.Conn.t, atom, [atom], keyword(), integer | atom) :: Macro.t
defmacro mutation_error(conn, root, fields, args \\ [], code \\ :ok) do
quote do
get_message(List.first(mutation(unquote(conn), unquote(root), unquote(fields), unquote(args), unquote(code))["errors"])["message"])
end
end
@doc """
Build and run a GraphQL mutation, and retrieve the custom portion of the root error message.
The root and subfields are obtained from `@root`, and `@fields`. For more information see
`mutation_error/4`.
"""
@spec mutation_error(Plug.Conn.t, keyword()) :: Macro.t
defmacro mutation_error(conn, args \\ []) do
quote do
mutation_error(unquote(conn), @root, @fields, unquote(args))
end
end
@doc false
def eval_arg_funs(args, db) do
Enum.map(args, fn
{ name, val } when is_list(val) -> { name, eval_arg_funs(val, db) }
{ name, val } when is_function(val) -> { name, val.(db) }
arg when is_list(arg) -> eval_arg_funs(arg, db)
arg -> arg
end)
end
@doc false
def eval_result(result, locale, db) when is_function(result), do: result.(locale, db)
def eval_result(result, _, _), do: result
@doc """
Test localisation support of GraphQL queries.
This macro simplifies testing localised GraphQL calls.
The `message` argument is the message describing the related tests.
The `result` argument is the expected result to assert against. This takes the form of a function
accepting the current locale and db, and returns the expected result to test against.
The `root` argument is the root GraphQL query type.
The `fields` argument is the GraphQL subfields.
The `args` argument are the GraphQL query arguments. Any functions may take the form of
`({ atom, map() } -> { atom, any })` in which they'll be evaluated with the db argument as input, and the
result is used as the argument in the GraphQL query.
"""
@spec test_localisable_query(String.t, (:en | :fr, map() -> any), atom, [atom], keyword()) :: Macro.t
defmacro test_localisable_query(message, result, root, fields, args \\ []) do
quote do
describe unquote(message) do
@tag locale: nil
test "without locale", %{ conn: conn, db: db } do
assert "no locale was specified, it must be set either in the argument ('locale:') or as a default locale using the Accept-Language header field" == query_error(conn, unquote(root), unquote(fields), eval_arg_funs(unquote(args), db))
end
@tag locale: "zz"
test "with invalid locale", %{ conn: conn, db: db } do
assert "no locale exists for code: zz" == query_error(conn, unquote(root), unquote(fields), eval_arg_funs(unquote(args), db))
end
@tag locale: "en"
test "in english", %{ conn: conn, db: db } do
assert eval_result(unquote(result), :en, db) == query_data(conn, unquote(root), unquote(fields), eval_arg_funs(unquote(args), db))
end
@tag locale: "fr"
test "in french", %{ conn: conn, db: db } do
assert eval_result(unquote(result), :fr, db) == query_data(conn, unquote(root), unquote(fields), eval_arg_funs(unquote(args), db))
end
@tag locale: "fr"
test "with overriden locale", %{ conn: conn, db: db } do
assert eval_result(unquote(result), :en, db) == query_data(conn, unquote(root), unquote(fields), eval_arg_funs(unquote(Keyword.put(args, :locale, "en")), db))
end
end
end
end
@doc """
Test localisation support of GraphQL queries.
The root and subfields are obtained from `@root`, and `@fields`. For more information see
`test_localisable_query/5`.
"""
@spec test_localisable_query(String.t, (:en | :fr, map() -> any), keyword()) :: Macro.t
defmacro test_localisable_query(message, result, args \\ []) do
quote do
test_localisable_query(unquote(message), unquote(result), @root, @fields, unquote(args))
end
end
end
# (source: test/support/api_case.ex)
defmodule OT.Text.Component do
@moduledoc """
An individual unit of work to be performed on a piece of text.
A component represents a retain or modification of the text:
- `5`: Retain 5 characters of the text
- `%{i:"Hello"}`: Insert the string "Hello"
- `%{d:"World"}`: Delete the string "World"
A delete component can also be an integer for brevity:
- `%{d: 5}`: Delete a string of length 5
"""
alias OT.Text
alias Text.Operation
alias Text.JSString
@typedoc """
A delete component, in which a string of zero or more characters are deleted
from the text. This can also be an integer
"""
@type delete :: %{d: Text.datum() | non_neg_integer}
@typedoc """
An insert component, in which a string of zero or more characters are inserted
into the text
"""
@type insert :: %{i: Text.datum()}
@typedoc """
A retain component, in which a number of characters in the text are skipped
over
"""
@type retain :: non_neg_integer
@typedoc """
An atom declaring the type of a component
"""
@type type :: :delete | :insert | :retain
@typedoc """
The result of comparing two components
"""
@type comparison :: :eq | :gt | :lt
@typedoc """
A single unit of "work" performed on a piece of text
"""
@type t :: delete | insert | retain
@doc """
Invert a component.
## Examples
iex> OT.Text.Component.invert(%{i: "Foo"})
%{d: "Foo"}
iex> OT.Text.Component.invert(%{d: "Foo"})
%{i: "Foo"}
iex> OT.Text.Component.invert(4)
4
"""
@spec invert(t) :: t
def invert(comp) when is_integer(comp), do: comp
def invert(%{d: del}), do: %{i: del}
def invert(%{i: ins}), do: %{d: ins}
@doc """
Determine the length of a component.
## Examples
iex> OT.Text.Component.length(4)
4
iex> OT.Text.Component.length(%{i: "Foo"})
3
"""
@spec length(t) :: non_neg_integer
def length(comp) when is_integer(comp), do: comp
def length(%{d: del}) when is_integer(del), do: del
def length(%{d: del}), do: JSString.length(del)
def length(%{i: ins}), do: JSString.length(ins)
@doc """
Determine the type of a component.
## Examples
iex> OT.Text.Component.type(4)
:retain
iex> OT.Text.Component.type(%{i: "Foo"})
:insert
iex> OT.Text.Component.type(%{d: "Foo"})
:delete
"""
@spec type(t) :: type
def type(comp) when is_integer(comp), do: :retain
def type(%{d: _}), do: :delete
def type(%{i: _}), do: :insert
@doc """
Compare the length of two components.
Will return `:gt` if first is greater than second, `:lt` if first is less
than second, or `:eq` if they span equal lengths.
## Example
iex> OT.Text.Component.compare(%{i: "Foo"}, 1)
:gt
"""
@spec compare(t, t) :: comparison
def compare(comp_a, comp_b) do
length_a = __MODULE__.length(comp_a)
length_b = __MODULE__.length(comp_b)
cond do
length_a > length_b -> :gt
length_a < length_b -> :lt
true -> :eq
end
end
@doc """
Join two components into an operation, combining them into a single component
if they are of the same type.
## Example
iex> OT.Text.Component.join(%{i: "Foo"}, %{i: "Bar"})
[%{i: "FooBar"}]
"""
@spec join(t, t) :: Operation.t()
def join(retain_a, retain_b)
when is_integer(retain_a) and is_integer(retain_b),
do: [retain_a + retain_b]
def join(%{i: ins_a}, %{i: ins_b}),
do: [%{i: ins_a <> ins_b}]
def join(%{d: del_a}, %{d: del_b}) when is_integer(del_a) and is_integer(del_b),
do: [%{d: del_a + del_b}]
def join(%{d: del_a}, %{d: del_b}),
do: [%{d: del_a <> del_b}]
def join(comp_a, comp_b),
do: [comp_a, comp_b]
@doc """
Determine whether a component is a no-op.
## Examples
iex> OT.Text.Component.no_op?(0)
true
iex> OT.Text.Component.no_op?(%{i: ""})
true
"""
@spec no_op?(t) :: boolean
def no_op?(0), do: true
def no_op?(%{d: ""}), do: true
def no_op?(%{d: 0}), do: true
def no_op?(%{i: ""}), do: true
def no_op?(_), do: false
@doc """
Split a component at a given index.
Returns a tuple containing a new component before the index, and a new
component after the index.
## Examples
iex> OT.Text.Component.split(4, 3)
{3, 1}
iex> OT.Text.Component.split(%{i: "Foo"}, 2)
{%{i: "Fo"}, %{i: "o"}}
"""
@spec split(t, non_neg_integer) :: {t, t}
def split(comp, index) when is_integer(comp) do
{index, comp - index}
end
def split(%{d: del}, index) when is_integer(del) do
{%{d: index}, %{d: del - index}}
end
def split(%{d: del}, index) do
{%{d: JSString.slice(del, 0, index)}, %{d: JSString.slice(del, index, -1)}}
end
def split(%{i: ins}, index) do
{%{i: JSString.slice(ins, 0, index)}, %{i: JSString.slice(ins, index, -1)}}
end
@doc false
@spec random(Text.datum()) :: t
def random(text), do: do_random(random_type(), text)
@spec do_random(type, Text.datum()) :: t
defp do_random(:delete, text),
do: %{d: text}
defp do_random(:insert, _text),
do: %{i: Text.init_random(:rand.uniform(16))}
defp do_random(:retain, text),
do: JSString.length(text)
@spec random_type :: type
defp random_type, do: Enum.random([:delete, :insert, :retain])
end
# (source: lib/ot/text/component.ex)
defmodule StructyRecord do
@moduledoc """
`StructyRecord` provides a Struct-like interface for your `Record`s.
- Use your record's macros in the _same module_ where it is defined!
- Access and update fields in your record through named macro calls.
- Create and update records at runtime (not limited to compile time).
- Calculate 1-based indexes to access record fields in `:ets` tables.
"""
@doc """
Defines a module named `alias` that is also a `Record` composed of `fields`.
## Parameters
- `alias` is the name of the module being defined. It also serves as the
`tag` parameter of `Record.defrecord/3`, which helps identify the record.
- `fields` specifies the shape of the record being defined. It is either:
- a list of `Atom` field names whose default values are always `nil`
- a `Keyword` list of field names along with their own default values
- `do_block` is an optional block of code that is passed into `defmodule/2`.
It allows you to extend the module being defined with your own custom code,
which has compile-time access to all the guards and macros described below.
## Results
The defined module contains the following guards, macros, and functions.
Guards (available at compile time):
- `is_record/1` to check if argument _loosely_ matches this record's shape
Macros (available at compile time):
- `{}/0` to create a new record with default values for all fields
- `{}/1` to create a new record with the given fields and values
- `{}/1` to get the zero-based index of the given field in a record
- `{{}}/1` to convert a record into a list of its fields and values
- `{{}}/2` to get the value of a given field in a given record
- `{{}}/2` to assign the given fields and values in a given record
- `record?/1` to check if argument _strictly_ matches this record's shape
- `record/0` to create a new record with default values for all fields
- `record/1` to create a new record with the given fields and values
- `record/1` to get the zero-based index of the given field in a record
- `record/1` to convert a record into a list of its fields and values
- `record/2` to get the value of a given field in a given record
- `record/2` to assign the given fields and values in a given record
- `get/2` to fetch the value of a given field in a given record
- `put/2` to assign the given fields and values inside a given record
- `get_${field}/1` to fetch the value of a specific field in a given record
- `put_${field}/2` to assign the value of a specific field in a given record
- `index/1` to get the zero-based index of the given field in a record
- `keypos/1` to get the 1-based index of the given field in a record
- `to_list/0` to get a template of fields and default values for this record
- `to_list/1` to convert a record into a list of its fields and values
Functions (available at runtime only):
- `from_list/1` to create a new record with the given fields and values
- `merge/2` to assign the given fields and values inside a given record
- `inspect/2` to inspect the contents of a record using `Kernel.inspect/2`
## Examples
Activate this macro in your environment:
require StructyRecord
Define a structy record for a rectangle:
StructyRecord.defrecord Rectangle, [:width, :height] do
def area(r = record()) do
get_width(r) * get_height(r)
end
def perimeter(record(width: w, height: h)) do
2 * (w + h)
end
def square?(record(width: same, height: same)), do: true
def square?(_), do: false
end
Activate its macros in your environment:
use Rectangle
Create instances of your structy record:
rect = Rectangle.{} #-> {Rectangle, nil, nil}
rect = Rectangle.{[]} #-> {Rectangle, nil, nil}
no_h = Rectangle.{[width: 1]} #-> {Rectangle, 1, nil}
no_w = Rectangle.{[height: 2]} #-> {Rectangle, nil, 2}
wide = Rectangle.{[width: 10, height: 5]} #-> {Rectangle, 10, 5}
tall = Rectangle.{[width: 4, height: 25]} #-> {Rectangle, 4, 25}
even = Rectangle.{[width: 10, height: 10]} #-> {Rectangle, 10, 10}
Inspect the contents of those instances:
rect |> Rectangle.inspect() #-> "Rectangle.{[width: nil, height: nil]}"
no_h |> Rectangle.inspect() #-> "Rectangle.{[width: 1, height: nil]}"
no_w |> Rectangle.inspect() #-> "Rectangle.{[width: nil, height: 2]}"
wide |> Rectangle.inspect() #-> "Rectangle.{[width: 10, height: 5]}"
tall |> Rectangle.inspect() #-> "Rectangle.{[width: 4, height: 25]}"
even |> Rectangle.inspect() #-> "Rectangle.{[width: 10, height: 10]}"
Get values of fields in those instances:
Rectangle.{{tall, :height}} #-> 25
Rectangle.{[height: h]} = tall; h #-> 25
tall |> Rectangle.get_height() #-> 25
Set values of fields in those instances:
Rectangle.{{even, width: 1}} #-> {Rectangle, 1, 10}
even |> Rectangle.put(width: 1) #-> {Rectangle, 1, 10}
even |> Rectangle.put_width(1) #-> {Rectangle, 1, 10}
Rectangle.{{even, width: 1, height: 2}} #-> {Rectangle, 1, 2}
even |> Rectangle.put(width: 1, height: 2) #-> {Rectangle, 1, 2}
even |> Rectangle.put_width(1) |> Rectangle.put_height(2) #-> {Rectangle, 1, 2}
Use your custom code on those instances:
rect |> Rectangle.area() #-> (ArithmeticError) bad argument in arithmetic expression: nil * nil
no_h |> Rectangle.area() #-> (ArithmeticError) bad argument in arithmetic expression: 1 * nil
no_w |> Rectangle.area() #-> (ArithmeticError) bad argument in arithmetic expression: nil * 2
wide |> Rectangle.area() #-> 50
tall |> Rectangle.area() #-> 100
even |> Rectangle.area() #-> 100
rect |> Rectangle.perimeter() #-> (ArithmeticError) bad argument in arithmetic expression: nil + nil
no_h |> Rectangle.perimeter() #-> (ArithmeticError) bad argument in arithmetic expression: 1 + nil
no_w |> Rectangle.perimeter() #-> (ArithmeticError) bad argument in arithmetic expression: nil + 2
wide |> Rectangle.perimeter() #-> 30
tall |> Rectangle.perimeter() #-> 58
even |> Rectangle.perimeter() #-> 40
rect |> Rectangle.square?() #-> true
no_h |> Rectangle.square?() #-> false
no_w |> Rectangle.square?() #-> false
wide |> Rectangle.square?() #-> false
tall |> Rectangle.square?() #-> false
even |> Rectangle.square?() #-> true
"""
defmacro defrecord(alias, fields, do_block \\ []) do
definition = alias |> append_alias(:StructyRecord)
field_names = fields |> field_names()
quote do
require Record, as: StructyRecord_Record
defmodule unquote(definition) do
@moduledoc false
StructyRecord_Record.defrecord(:record, unquote(alias), unquote(fields))
end
defmodule unquote(alias) do
require unquote(definition), as: StructyRecord_Definition
alias __MODULE__, as: StructyRecord_Interface
defmacro __using__(_options) do
quote do
require StructyRecord_Definition
require StructyRecord_Interface
end
end
unquote(record_primitives())
unquote(elixiry_interface())
unquote(field_accessors(field_names))
unquote(do_block)
end
end
end
defp append_alias({tag = :__aliases__, context, namespace}, addendum) do
{tag, context, namespace ++ [addendum]}
end
defp record_primitives do
quote do
@doc """
Creates a new record with fields set to default values.
"""
defmacro {} do
quote do
StructyRecord_Definition.record()
end
end
@doc """
Either fetches the value of a given field in a given record,
or assigns the given fields and values inside a given record.
"""
defmacro {{record, field_or_contents}} do
quote do
StructyRecord_Definition.record(unquote(record), unquote(field_or_contents))
end
end
@doc """
Converts the given record into a list of its fields and values.
"""
defmacro {{_tag = :{}, _context, [record]}} do
quote do
StructyRecord_Definition.record(unquote(record))
end
end
@doc """
Either creates a new record with the given fields and values,
or returns the zero-based index of the given field in a record.
"""
defmacro {contents_or_field_or_record} do
quote do
StructyRecord_Definition.record(unquote(contents_or_field_or_record))
end
end
@doc """
Either creates a new record with the given fields and values,
or returns the zero-based index of the given field in a record,
or converts the given record into a list of its fields and values.
Defaults to creating a new record with fields set to default values.
"""
defmacro record(contents_or_field_or_record \\ []) do
quote do
StructyRecord_Definition.record(unquote(contents_or_field_or_record))
end
end
@doc """
Either fetches the value of a given field in a given record,
or assigns the given fields and values inside a given record.
"""
defmacro record(record, field_or_contents) do
quote do
StructyRecord_Definition.record(unquote(record), unquote(field_or_contents))
end
end
@doc """
Checks if the given argument _loosely_ matches this record's shape.
"""
defguard is_record(record)
when record
|> StructyRecord_Record.is_record(StructyRecord_Interface)
@doc """
Checks if the given argument _strictly_ matches this record's shape.
"""
defmacro record?(record) do
quote do
match?(StructyRecord_Definition.record(), unquote(record))
end
end
end
end
defp elixiry_interface do
quote do
@record StructyRecord_Definition.record()
@template StructyRecord_Definition.record(@record)
@doc """
Returns the zero-based index of the given field in this kind of record.
"""
defmacro index(field) when is_atom(field) do
quote do
StructyRecord_Definition.record(unquote(field))
end
end
defmacro index(field) do
quote bind_quoted: [field: field, template: @template] do
StructyRecord.index(field, StructyRecord_Interface, template)
end
end
@doc """
Returns the 1-based position of the given field in this kind of record.
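
      For example, keying an `:ets` table on a record field (a sketch, reusing
      the `Rectangle` record from the moduledoc):

          :ets.new(:rects, [:set, keypos: Rectangle.keypos(:width)])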
"""
defmacro keypos(field) do
quote do
1 + StructyRecord_Definition.record(unquote(field))
end
end
@doc """
Returns a template of fields and default values for this kind of record.
"""
defmacro to_list do
quote do
unquote(@template)
end
end
@doc """
Converts the given record into a `Keyword` list of its fields and values.
"""
defmacro to_list(record) do
quote do
StructyRecord_Definition.record(unquote(record))
end
end
@doc """
Fetches the value of the given field in the given record.
"""
defmacro get(record, field) do
quote do
StructyRecord_Definition.record(unquote(record), unquote(field))
end
end
@doc """
Assigns the given fields and values inside a given record.
"""
defmacro put(record, contents) do
quote do
StructyRecord_Definition.record(unquote(record), unquote(contents))
end
end
@doc """
Creates a new record _at runtime_ with the given fields and values.
"""
def from_list(contents) do
StructyRecord.from_list(contents, StructyRecord_Interface, @template)
end
@doc """
Assigns the given fields and values _at runtime_ inside a given record.
"""
def merge(record, contents_or_record)
def merge(record, other = record()), do: merge(record, to_list(other))
def merge(record, contents) do
template = record |> StructyRecord_Definition.record()
StructyRecord.from_list(contents, StructyRecord_Interface, template)
end
@doc """
Inspects the given record's contents _at runtime_ using `Kernel.inspect/2`.
"""
def inspect(record, options \\ []) do
contents = record |> StructyRecord_Definition.record()
StructyRecord.inspect(contents, StructyRecord_Interface, options)
end
end
end
defp field_accessors(field_names) do
getters = field_names |> Enum.map(&getter_macro/1)
putters = field_names |> Enum.map(&putter_macro/1)
quote do
unquote(getters)
unquote(putters)
end
end
defp field_names(fields) do
fields
|> Enum.map(fn
{field, _default_value} -> field
field -> field
end)
end
defp getter_macro(field) do
quote do
@doc """
Fetches the value of the `#{unquote(inspect(field))}` field in the given record.
"""
defmacro unquote(:"get_#{field}")(record) do
quote do
StructyRecord_Definition.record(unquote(record), :field)
end
|> case do
{call, meta, _args = [record, :field]} ->
args = [record, unquote(field)]
{call, meta, args}
end
end
end
end
defp putter_macro(field) do
quote do
@doc """
Assigns the value of the `#{unquote(inspect(field))}` field in the given record.
"""
defmacro unquote(:"put_#{field}")(record, value) do
quote do
StructyRecord_Definition.record(unquote(record), unquote(value))
end
|> case do
{call, meta, _args = [record, value]} ->
args = [record, [{unquote(field), value}]]
{call, meta, args}
end
end
end
end
@doc false
# Returns the zero-based index of the given field in the given kind of record.
def index(field, record_tag, record_template) do
case find_index(record_template, field) do
nil ->
# error out in _exactly_ the same way as Record.index/3 for uniformity
raise ArgumentError,
"record #{inspect(record_tag)} does not have the key: #{inspect(field)}"
index ->
# add +1 for record_tag which occupies the first position in the tuple
1 + index
end
end
defp find_index(template, field) do
template
|> Enum.find_index(&match?({^field, _default_value}, &1))
end
@doc false
# Creates a new record of the given type with the given fields and values.
def from_list(contents, record_tag) do
values = Keyword.values(contents)
[record_tag | values] |> :erlang.list_to_tuple()
end
@doc false
# Creates a new record of the given type with the given fields and values
# according to the given template of known fields and their default values.
def from_list(contents, record_tag, record_template) do
contents
|> intersect(record_template)
|> from_list(record_tag)
end
defp intersect(contents, template) do
template
|> Enum.map(fn {field, template_value} ->
value = Access.get(contents, field, template_value)
{field, value}
end)
end
@doc false
# Inspects the contents of the given record type using `Kernel.inspect/2`.
def inspect(contents, record_tag, options \\ []) when is_list(contents) do
"#{inspect(record_tag)}.{#{inspect_contents(contents, options)}}"
end
defp inspect_contents([], _), do: ""
defp inspect_contents(list, options), do: Kernel.inspect(list, options)
end
# (source: lib/structy_record.ex)
defmodule NaiveBayes do
defstruct vocab: %Vocab{}, data: %Data{}, smoothing: 1, binarized: false, assume_uniform: false
def new(opts \\ []) do
binarized = opts[:binarized] || false
assume_uniform = opts[:assume_uniform] || false
smoothing = opts[:smoothing] || 1
%NaiveBayes{smoothing: smoothing, binarized: binarized, assume_uniform: assume_uniform}
end
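  # Usage sketch (assumes the `Vocab` and `Data` modules used below exist in scope):
  #
  #   classifier =
  #     NaiveBayes.new(smoothing: 2, binarized: true)
  #     |> NaiveBayes.train(["hello", "world"], "greeting")
  #
  #   NaiveBayes.classify(classifier, ["hello"])
  #   #=> with a single trained category, a map such as %{"greeting" => 1.0}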
def train(classifier, [_ | _] = tokens, category) when is_binary(category) do
tokens = if classifier.binarized, do: Enum.uniq(tokens), else: tokens
classifier = put_in(classifier.data, Data.increment_examples(classifier.data, category))
Enum.reduce(tokens, classifier, fn token, classifier ->
classifier =
put_in(classifier.data, Data.add_token_to_category(classifier.data, category, token))
put_in(classifier.vocab, Vocab.seen_token(classifier.vocab, token))
end)
end
def train(%NaiveBayes{} = classifier, [_ | _] = tokens, [_ | _] = categories)
when is_list(categories) do
Enum.reduce(categories, classifier, fn category, classifier ->
train(classifier, tokens, category)
end)
end
def classify(%NaiveBayes{} = classifier, tokens) do
tokens = if classifier.binarized, do: Enum.uniq(tokens), else: tokens
calculate_probabilities(classifier, tokens)
end
def purge_less_than(%NaiveBayes{} = classifier, x) do
{classifier, remove_list} =
Enum.reduce(classifier.vocab.tokens, {classifier, []}, fn {token, _},
{classifier, remove_list} ->
case Data.purge_less_than(classifier.data, token, x) do
false -> {classifier, remove_list}
data -> {put_in(classifier.data, data), remove_list ++ [token]}
end
end)
Enum.reduce(remove_list, classifier, fn token, classifier ->
put_in(classifier.vocab, Vocab.remove_token(classifier.vocab, token))
end)
end
defp calculate_probabilities(classifier, tokens) do
v_size = Enum.count(classifier.vocab.tokens)
total_example_count = Data.total_examples(classifier.data)
prob_numerator =
Enum.reduce(classifier.data.categories, %{}, fn {cat_name, cat_data}, probs ->
cat_prob =
case classifier.assume_uniform do
true -> :math.log(1 / Enum.count(classifier.data.categories))
false -> :math.log(Data.example_count(cat_data) / total_example_count)
end
denominator = cat_data[:total_tokens] + classifier.smoothing * v_size
log_probs =
Enum.reduce(tokens, 0, fn token, log_probs ->
numerator = (cat_data[:tokens][token] || 0) + classifier.smoothing
log_probs + :math.log(numerator / denominator)
end)
put_in(probs[cat_name], log_probs + cat_prob)
end)
normalize(prob_numerator)
end
defp normalize(prob_numerator) do
normalizer =
Enum.reduce(prob_numerator, 0, fn {_, numerator}, normalizer ->
normalizer + numerator
end)
{intermed, renormalizer} =
Enum.reduce(prob_numerator, {%{}, 0}, fn {cat, numerator}, {intermed, renormalizer} ->
r = normalizer / numerator
intermed = put_in(intermed, [cat], r)
renormalizer = renormalizer + r
{intermed, renormalizer}
end)
Enum.reduce(intermed, %{}, fn {cat, value}, final_probs ->
put_in(final_probs, [cat], value / renormalizer)
end)
end
end
# (source: lib/naive_bayes.ex)
defmodule Pavlov.Matchers.Messages do
@moduledoc false
@doc false
def message_for_matcher(matcher_name, [actual, expected], :assertion) do
actual = inspect actual
expected = inspect expected
case matcher_name do
:eq -> "Expected #{actual} to equal #{expected}"
:have_key -> "Expected #{actual} to have key #{expected}"
:include -> "Expected #{actual} to include #{expected}"
:have_raised -> "Expected function to have raised #{expected}"
:have_thrown -> "Expected function to have thrown #{expected}"
_ -> "Assertion with #{matcher_name} failed: #{actual}, #{expected}"
end
end
def message_for_matcher(matcher_name, [actual], :assertion) do
actual = inspect actual
case matcher_name do
:be_true -> "Expected #{actual} to be true"
:be_truthy -> "Expected #{actual} to be truthy"
:be_falsey -> "Expected #{actual} to be falsey"
:be_nil -> "Expected #{actual} to be nil"
:be_empty -> "Expected #{actual} to be empty"
:have_exited -> "Expected function to have exited"
_ -> "Assertion with #{matcher_name} failed: #{actual}"
end
end
def message_for_matcher(matcher_name, [actual, expected], :refutation) do
actual = inspect actual
expected = inspect expected
case matcher_name do
:eq -> "Expected #{actual} not to equal #{expected}"
:have_key -> "Expected #{actual} not to have key #{expected}"
:include -> "Expected #{actual} not to include #{expected}"
:have_raised -> "Expected function not to have raised #{expected}"
:have_thrown -> "Expected function not to have thrown #{expected}"
_ -> "Refutation with #{matcher_name} failed: #{actual}, #{expected}"
end
end
def message_for_matcher(matcher_name, [actual], :refutation) do
actual = inspect actual
case matcher_name do
:be_true -> "Expected #{actual} not to be true"
:be_truthy -> "Expected #{actual} not to be truthy"
:be_falsey -> "Expected #{actual} not to be falsey"
:be_nil -> "Expected #{actual} not to be nil"
:be_empty -> "Expected #{actual} not to be empty"
:have_exited -> "Expected function not to have exited"
_ -> "Refutation with #{matcher_name} failed: #{actual}"
end
end
end
# (source: lib/matchers/messages.ex)
defmodule XDR.Error do
@typedoc """
A single piece of a path, which will be either an atom or a binary
"""
@type path_segment() :: binary() | atom()
@typedoc """
The "cursor" in the current path, which is usually just one segment,
but may be a list. For example, if an error happens while resolving the
type of an arm in a union, the current path_descriptor will be [:arm_key, :arms]
so that the resulting error path after bubbling up might be e.g. "account.arms.user_account"
"""
@type path_descriptor() :: path_segment() | list(path_segment())
@moduledoc """
Errors explicitly created by XDR will usually be an `XDR.Error`.
Also, errors triggered inside a complex data type will be wrapped
and annotated with path info before being re-raised or returned to the user.
"""
defexception [:data, :message, :path, :type]
@doc """
Wrap an error (or anything really) into an `XDR.Error` so it can be
annotated with XDR-specific metadata and bubbled up to the top level
where it will be raised a final time or returned in an error tuple
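
  ## Examples

      iex> XDR.Error.wrap("boom")
      %XDR.Error{message: "boom"}

      iex> XDR.Error.wrap({:error, :bad_length}, :payload)
      %XDR.Error{message: :bad_length, path: "payload"}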
"""
def wrap(message) when is_binary(message), do: %XDR.Error{message: message}
def wrap(message) when is_atom(message), do: %XDR.Error{message: message}
def wrap(%XDR.Error{} = error), do: error
def wrap({:error, error}), do: wrap(error)
def wrap(%{message: message}), do: wrap(message)
def wrap(error), do: %XDR.Error{message: "Unknown error", data: error}
def wrap(error, []) do
wrap(error)
end
def wrap(error, [head_segment | rest_segments]) do
error
|> wrap(head_segment)
|> wrap(rest_segments)
end
def wrap(error, path_segment) do
error
|> wrap()
|> prepend_path(path_segment)
end
@doc """
Call a function and wrap any resulting error with the given path segment
metadata. Used to make errors easier to trace back when working with types
that have subsidiary child types
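
  For example (a sketch; `MyModule` and its `decode/1` are hypothetical):

      # Any error raised by MyModule.decode(binary) is re-raised as an
      # XDR.Error with "payload" prepended to its path:
      XDR.Error.wrap_call(MyModule, :decode, [binary], :payload)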
"""
@spec wrap_call(atom(), list(), path_descriptor()) :: any()
def wrap_call(function, args, current_path) do
wrap_call(XDR, function, args, current_path)
end
@spec wrap_call(atom(), atom(), list(), path_descriptor()) :: any()
def wrap_call(module, function, args, current_path)
when is_atom(module) and is_atom(function) and is_list(args) do
apply(module, function, args)
rescue
error -> reraise wrap(error, current_path), __STACKTRACE__
end
defp prepend_path(%XDR.Error{path: nil} = error, path_segment) do
%{error | path: "#{path_segment}"}
end
defp prepend_path(error, path_segment) when is_atom(path_segment) do
prepend_path(error, to_string(path_segment))
end
defp prepend_path(%XDR.Error{path: path} = error, path_segment) do
%{error | path: "#{path_segment}.#{path}"}
end
end
# (source: lib/xdr/error/error.ex)
defmodule ExSenml.SenmlStruct do
@moduledoc """
Below Table provides an overview of all SenML fields defined by rfc8428
with their respective labels and data types.
+---------------+-------+------------+------------+------------+
| Name | Label | CBOR Label | JSON Type | XML Type |
+---------------+-------+------------+------------+------------+
| Base Name | bn | -2 | String | string |
| Base Time | bt | -3 | Number | double |
| Base Unit | bu | -4 | String | string |
| Base Value | bv | -5 | Number | double |
| Base Sum | bs | -6 | Number | double |
| Base Version | bver | -1 | Number | int |
| Name | n | 0 | String | string |
| Unit | u | 1 | String | string |
| Value | v | 2 | Number | double |
| String Value | vs | 3 | String | string |
| Boolean Value | vb | 4 | Boolean | boolean |
| Data Value | vd | 8 | String (*) | string (*) |
| Sum | s | 5 | Number | double |
| Time | t | 6 | Number | double |
| Update Time | ut | 7 | Number | double |
+---------------+-------+------------+------------+------------+
Table 1: SenML Labels
This struct holds the field values and serves as a helper when resolving base fields into full records.
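For example, a resolved record might be represented as (values illustrative):
    %ExSenml.SenmlStruct{bn: "urn:dev:ow:10e2073a01080063:", n: "temperature", u: "Cel", v: 23.1, t: 0}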
"""
@type t :: %__MODULE__{
bn: String.t(),
bt: integer,
bu: String.t(),
bv: integer,
bs: integer,
bver: integer,
n: String.t(),
t: integer,
v: float,
vs: String.t(),
vd: String.t(),
vb: boolean,
s: integer,
u: String.t(),
ut: integer,
ct: String.t()
}
@derive Jason.Encoder
defstruct bn: nil,
bt: nil,
bu: nil,
bv: nil,
bs: nil,
bver: nil,
n: nil,
t: nil,
v: nil,
vs: nil,
vb: nil,
vd: nil,
s: nil,
u: nil,
ut: nil,
ct: nil
end
# (source: lib/ex_senml/senml_struct.ex)
defmodule Gealts do
@moduledoc """
Gealts is a basic implementation of a genetic
algorithm based on http://arxiv.org/pdf/1308.4675.pdf
exposed functions:
Gealts.start/1
Gealts.iterate/1
Gealts.best/0
Gealts.population/0
Gealts.config/0
"""
alias Gealts.Population
alias Gealts.Chromosome
alias Gealts.MathUtils
@doc """
Starts the "population" agent.
A config map must be provided. The config values are:
:input (optional)
A list of lists with prepopulated chromosomes.
For example: [
[18, 298, 37],
[87, 242, 1],
[90, 0, 1]
]
Input will be used as gen 0 of population.
Empty by default, chromosomes will be generated
randomly based on :min_val, :max_val, :genes and :chromes.
:min_val (optional)
Gene minimum. When mutating genes (see Gealts.Mutator) and
generating new values, :min_val will act as floor of random ranges.
Defaults to 0.
:max_val
Gene maximum. When mutating genes (see Gealts.Mutator) and
generating new values, :max_val will act as ceiling of random ranges.
:genes (optional when input is provided)
Max number of genes (values) in each individual chromosome.
:chromes (optional when input is provided)
Max number of chromes in a population.
:eval_fn
Evaluation function. Applied to chromosome values upon each iterration.
Should takes a list and return a number.
For example:
(fn [a, b, c, d] ->
:erlang.abs((a + 2 * b + 3 * c + 4 * d) - 30)
end)
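
A full config might look like this (a sketch built from the options above):

    Gealts.start(%{
      max_val: 30,
      genes: 4,
      chromes: 6,
      eval_fn: fn [a, b, c, d] -> abs(a + 2 * b + 3 * c + 4 * d - 30) end
    })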
"""
@spec start(map) :: Agent.on_start()
def start(config) do
do_start(validate(config))
end
@doc """
Performs n number of itererations,
altering chromosome population in the process.
"""
@spec iterate(non_neg_integer) :: :ok
def iterate(n) do
Population.iterate(n)
end
@doc """
Returns the "best" (most fittest) chromosome of current population.
"""
@spec best() :: Chromosome.t
def best do
Population.best
end
@doc """
Returns current population.
"""
@spec population() :: [Chromosome.t]
def population do
Population.population
end
@doc """
Returns current config.
"""
@spec config() :: map
def config do
Population.config
end
# internal
@spec do_start(map) :: Agent.on_start()
defp do_start(config = %{input: []}) do
input = for _ <- 1..config[:chromes] do
MathUtils.random_values_list(config[:genes],
config[:min_val],
config[:max_val])
end
start(%{config | input: input})
end
defp do_start(config = %{input: input}) when is_list(input) do
input
|> Enum.map(fn input -> %Chromosome{values: input} end)
|> Population.start_link(config)
end
@spec validate(map) :: map
defp validate(config = %{input: _input, min_val: _min_val}) do
validate(config, :max_val)
end
defp validate(config = %{input: _input}) do
config
|> Map.put(:min_val, 0)
|> validate(:max_val)
end
defp validate(config = %{min_val: _min_val}) do
config
|> Map.put(:input, [])
|> validate(:max_val)
end
defp validate(config) do
config
|> Map.put(:input, [])
|> Map.put(:min_val, 0)
|> validate(:max_val)
end
defp validate(config = %{max_val: max_val}, :max_val) when is_number(max_val) and max_val > 0 do
validate(config, :genes)
end
defp validate(config = %{input: [], genes: genes}, :genes) when is_number(genes) and genes > 0 do
validate(config, :chromes)
end
defp validate(config = %{input: [chrome | _rest]}, :genes) when is_list(chrome) do
config
|> Map.put(:genes, length(chrome))
|> validate(:chromes)
end
defp validate(config = %{input: [], chromes: chromes}, :chromes) when is_number(chromes) and chromes > 0 do
validate(config, :eval_fn)
end
defp validate(config = %{input: input}, :chromes) do
config
|> Map.put(:chromes, length(input))
|> validate(:eval_fn)
end
defp validate(config = %{eval_fn: fun}, :eval_fn) when is_function(fun) do
config
end
defp validate(_config, param) do
raise "Invalid or missing config param: #{param}."
end
end
# (source: lib/gealts.ex)
defmodule Mix.Utils do
@moduledoc """
Utilities used throughout Mix and tasks.
## Conversions
This module handles two types of conversions:
* From command names to module names, i.e. how the command
`deps.get` translates to `Deps.Get` and vice-versa;
* From underscore to camelize, i.e. how the file path
`my_project` translates to `MyProject`;
"""
@doc """
Gets the mix home. It defaults to `~/.mix` unless the
MIX_HOME environment variable is set.
"""
def mix_home do
System.get_env("MIX_HOME") || Path.expand("~/.mix")
end
@doc """
Gets all extra paths defined in the environment variable
MIX_PATH. MIX_PATH may contain multiple paths. If on windows,
those paths should be separated by `;`, if on unix systems,
use `:`.
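## Examples
# with MIX_PATH="/foo/ebin:/bar/ebin" on a unix system
Mix.Utils.mix_path #=> ["/foo/ebin", "/bar/ebin"]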
"""
def mix_path do
if path = System.get_env("MIX_PATH") do
String.split(path, path_separator)
else
[]
end
end
defp path_separator do
case :os.type do
{ :win32, _ } -> ";"
{ :unix, _ } -> ":"
end
end
@doc """
Gets the source location of a module as a binary.
"""
def source(module) do
source = module.__info__(:compile)[:source]
source && list_to_binary(source)
end
@doc """
Takes a `command` name and tries to load a module
with the command name converted to a module name
in the given `at` scope.
Returns `{ :module, module }` in case a module
exists and is loaded, `{ :error, reason }` otherwise.
## Examples
Mix.Utils.command_to_module("compile", Mix.Tasks)
#=> { :module, Mix.Tasks.Compile }
"""
def command_to_module(command, at // Elixir) do
module = Module.concat(at, command_to_module_name(command))
Code.ensure_loaded(module)
end
@doc """
Returns true if any of the sources are stale
compared to the given target.
"""
def stale?(sources, targets) do
extract_stale(sources, targets) != []
end
@doc """
Extracts all stale sources compared to the given targets.
"""
def extract_stale(sources, targets) do
last_modifieds = Enum.map(targets, last_modified(&1))
Enum.filter sources, fn(source) ->
source_stat = source_mtime(source)
Enum.any?(last_modifieds, source_stat > &1)
end
end
defp source_mtime({ _, { { _, _, _ }, { _, _, _ } } = source }) do
source
end
defp source_mtime(source) do
File.stat!(source).mtime
end
defp last_modified(path) do
case File.stat(path) do
{ :ok, File.Stat[mtime: mtime] } -> mtime
{ :error, _ } -> { { 1970, 1, 1 }, { 0, 0, 0 } }
end
end
@doc """
Generates a manifest containing all files generated
during a given compilation step. It receives the manifest
file name and a function to execute. The result of the
function is compared to the manifest in order to detect
the files removed from the manifest file.
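For example, if the manifest previously listed `a.beam` and `b.beam` and
`fun` returns `["a.beam"]`, the result is `{ ["a.beam"], ["b.beam"] }`.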
"""
def manifest(file, fun) do
old =
case File.read(file) do
{ :ok, contents } -> String.split(contents, "\n")
{ :error, _ } -> []
end
current = fun.()
File.write!(file, Enum.join(current, "\n"))
{ current, old -- current }
end
@doc """
Extract files from a list of paths.
In case any of the paths is a directory, the directory is looped
recursively searching for the given extensions or the given pattern.
When looking up directories, files starting with "." are ignored.
"""
def extract_files(paths, exts_or_pattern)
def extract_files(paths, exts) when is_list(exts) do
extract_files(paths, "*.{#{Enum.join(exts, ",")}}")
end
def extract_files(paths, pattern) do
files = List.concat(lc path inlist paths do
if File.regular?(path), do: [path], else: Path.wildcard("#{path}/**/#{pattern}")
end)
files |> exclude_files |> Enum.uniq
end
defp exclude_files(files) do
filter = fn(x) -> not match?("." <> _, Path.basename(x)) end
Enum.filter files, filter
end
@doc """
Merges two configs recursively, merging keyword lists
and concatenating normal lists.
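## Examples
config_merge([a: [b: 1], c: [1]], [a: [d: 2], c: [2]])
#=> [a: [b: 1, d: 2], c: [1, 2]]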
"""
def config_merge(old, new) do
Keyword.merge old, new, fn(_, x, y) ->
if is_list(x) and is_list(y) do
if is_keyword(x) and is_keyword(y) do
config_merge(x, y)
else
x ++ y
end
else
y
end
end
end
defp is_keyword(x) do
Enum.all? x, match?({ atom, _ } when is_atom(atom), &1)
end
@doc """
Converts the given atom or binary to underscore format.
If an atom is given, it is assumed to be an Elixir module,
so it is converted to a binary and then processed.
## Examples
Mix.Utils.underscore "FooBar" #=> "foo_bar"
Mix.Utils.underscore "Foo.Bar" #=> "foo/bar"
Mix.Utils.underscore Foo.Bar #=> "foo/bar"
In general, underscore can be thought as the reverse of
camelize, however, in some cases formatting may be lost:
Mix.Utils.underscore "SAPExample" #=> "sap_example"
Mix.Utils.camelize "sap_example" #=> "SapExample"
"""
def underscore(atom) when is_atom(atom) do
"Elixir." <> rest = atom_to_binary(atom)
underscore(rest)
end
def underscore(<<h, t :: binary>>) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<h, t, rest :: binary>>, _) when h in ?A..?Z and not t in ?A..?Z do
<<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
end
defp do_underscore(<<h, t :: binary>>, prev) when h in ?A..?Z and not prev in ?A..?Z do
<<?_, to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<?-, t :: binary>>, _) do
<<?_>> <> do_underscore(t, ?-)
end
defp do_underscore(<<?., t :: binary>>, _) do
<<?/>> <> underscore(t)
end
defp do_underscore(<<h, t :: binary>>, _) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<>>, _) do
<<>>
end
@doc """
Converts the given string to camelize format.
## Examples
Mix.Utils.camelize "foo_bar" #=> "FooBar"
"""
def camelize(<<?_, t :: binary>>) do
camelize(t)
end
def camelize(<<h, t :: binary>>) do
<<to_upper_char(h)>> <> do_camelize(t)
end
defp do_camelize(<<?_, ?_, t :: binary>>) do
do_camelize(<< ?_, t :: binary >>)
end
defp do_camelize(<<?_, h, t :: binary>>) when h in ?a..?z do
<<to_upper_char(h)>> <> do_camelize(t)
end
defp do_camelize(<<?_>>) do
<<>>
end
defp do_camelize(<<?/, t :: binary>>) do
<<?.>> <> camelize(t)
end
defp do_camelize(<<h, t :: binary>>) do
<<h>> <> do_camelize(t)
end
defp do_camelize(<<>>) do
<<>>
end
@doc """
Returns the given path string relative to the current
working directory.
"""
def relative_to_cwd(path) do
case File.cwd do
{ :ok, base } -> Path.relative_to(path, base)
_ -> path
end
end
@doc """
Takes a module and converts it to a command. The nesting
argument can be given in order to remove the nesting of
module.
## Examples
module_name_to_command(Mix.Tasks.Compile, 2)
#=> "compile"
module_name_to_command("Mix.Tasks.Compile.Elixir", 2)
#=> "compile.elixir"
"""
def module_name_to_command(module, nesting // 0)
def module_name_to_command(module, nesting) when is_atom(module) do
module_name_to_command(inspect(module), nesting)
end
def module_name_to_command(module, nesting) do
t = Regex.split(%r/\./, to_binary(module))
t |> Enum.drop(nesting) |> Enum.map(first_to_lower(&1)) |> Enum.join(".")
end
@doc """
Takes a command and converts it to a module name format.
## Examples
command_to_module_name("compile.elixir")
#=> "Compile.Elixir"
"""
def command_to_module_name(s) do
Regex.split(%r/\./, to_binary(s)) |>
Enum.map(first_to_upper(&1)) |>
Enum.join(".")
end
defp first_to_upper(<<s, t :: binary>>), do: <<to_upper_char(s)>> <> t
defp first_to_upper(<<>>), do: <<>>
defp first_to_lower(<<s, t :: binary>>), do: <<to_lower_char(s)>> <> t
defp first_to_lower(<<>>), do: <<>>
defp to_upper_char(char) when char in ?a..?z, do: char - 32
defp to_upper_char(char), do: char
defp to_lower_char(char) when char in ?A..?Z, do: char + 32
defp to_lower_char(char), do: char
@doc """
Opens and reads content from either a URL or a local filesystem path.
Used by local.install and local.rebar.
"""
def read_path(path) do
cond do
is_url?(path) -> read_url(path)
is_file?(path) -> read_file(path)
:else -> raise Mix.Error, message: "expected #{path} to be a url or a local file path"
end
end
defp read_file(path) do
File.read!(path)
end
defp read_url(path) do
if URI.parse(path).scheme == "https" do
:ssl.start
end
:inets.start
case :httpc.request(binary_to_list(path)) do
{ :ok, { { _, status, _ }, _, body } } when status in 200..299 ->
iolist_to_binary(body)
{ :ok, { { _, status, _ }, _, _ } } ->
raise Mix.Error, message: "could not access url #{path}, got status: #{status}"
{ :error, reason } ->
raise Mix.Error, message: "could not access url #{path}, error: #{inspect reason}"
end
end
defp is_file?(path) do
File.regular?(path)
end
defp is_url?(path) do
URI.parse(path).scheme in ["http", "https"]
end
end
|
lib/mix/lib/mix/utils.ex
| 0.86501 | 0.613237 |
utils.ex
|
starcoder
|
defmodule Gettext.Interpolation do
@moduledoc false
@interpolation_regex ~r/
(?<left>) # Start, available through :left
%{ # Literal '%{'
[^}]+ # One or more non-} characters
} # Literal '}'
(?<right>) # End, available through :right
/x
@doc """
Extracts interpolations from a given string.
This function extracts all interpolations in the form `%{interpolation}`
contained inside `str`, converts them to atoms and then returns a list of
strings and interpolation keys.
## Examples
iex> Gettext.Interpolation.to_interpolatable("Hello %{name}, you have %{count} unread messages")
["Hello ", :name, ", you have ", :count, " unread messages"]
"""
@spec to_interpolatable(binary) :: [binary | atom]
def to_interpolatable(str) do
split = Regex.split(@interpolation_regex, str, on: [:left, :right], trim: true)
Enum.map split, fn
"%{" <> rest -> rest |> String.rstrip(?}) |> String.to_atom
segment -> segment
end
end
@doc """
Tells which `required` keys are missing in `bindings`.
Returns an error message which tells which keys in `required` don't appear in
`bindings`.
## Examples
iex> Gettext.Interpolation.missing_interpolation_keys %{foo: 1}, [:foo, :bar, :baz]
"missing interpolation keys: bar, baz"
"""
@spec missing_interpolation_keys(%{}, [atom]) :: binary
def missing_interpolation_keys(bindings, required) do
present = Dict.keys(bindings)
missing = required -- present
"missing interpolation keys: " <> Enum.map_join(missing, ", ", &to_string/1)
end
@doc """
Returns all the interpolation keys contained in the given string or list of
segments.
This function returns a list of all the interpolation keys (patterns in the
form `%{interpolation}`) contained in its argument.
If the argument is a segment list, i.e., a list of strings and atoms where
atoms represent interpolation keys, then only the atoms in the list are
returned.
## Examples
iex> Gettext.Interpolation.keys("Hey %{name}, I'm %{other_name}")
[:name, :other_name]
iex> Gettext.Interpolation.keys(["Hello ", :name, "!"])
[:name]
"""
@spec keys(binary | [atom]) :: [atom]
def keys(str) when is_binary(str),
do: str |> to_interpolatable |> Enum.filter(&is_atom/1)
def keys(segments) when is_list(segments),
do: Enum.filter(segments, &is_atom/1)
@doc """
Dynamically interpolates `str` with the given `bindings`.
This function replaces all interpolations (like `%{this}`) inside `str` with
the values of the corresponding keys in `bindings`. It returns `{:ok, str}` if all the
interpolation keys in `str` are present in `bindings`, `{:error, msg}`
otherwise.
## Examples
iex> Gettext.Interpolation.interpolate "Hello %{name}", %{name: "José"}
{:ok, "Hello José"}
iex> Gettext.Interpolation.interpolate "%{count} errors", %{name: "Jane"}
{:error, "missing interpolation keys: count"}
"""
@spec interpolate(binary, %{}) :: {:ok, binary} | {:error, binary}
def interpolate(str, bindings) do
segments = to_interpolatable(str)
keys = keys(segments)
if keys -- Map.keys(bindings) != [] do
{:error, missing_interpolation_keys(bindings, keys)}
else
interpolated = Enum.map_join segments, "", fn
key when is_atom(key) -> Map.fetch!(bindings, key)
other -> other
end
{:ok, interpolated}
end
end
end
|
deps/gettext/lib/gettext/interpolation.ex
| 0.901758 | 0.52074 |
interpolation.ex
|
starcoder
|
defmodule PgContrivance do
alias PgContrivance.SqlCommand
alias PgContrivance.Query
@doc """
Primary entry point to working with sql statements. Parameters can be
either $1,$2...$n or atom format (:first, :last). When parameters are in
atom format, a hash must be passed to the params function.
```ex
sql "SELECT name, email FROM USERS"
|> query
|> to_list
```
"""
def sql(statement) when is_binary(statement),
do: %SqlCommand{statement: statement}
@doc """
Parameters should either be a list of params or a map with key/value pairs
to be used with a sql statement with named parameters.
```ex
sql "SELECT name, email FROM users WHERE username = $1"
|> params ["<EMAIL>"]
|> query
|> to_list
sql("SELECT name, email FROM users WHERE username = :username")
|> params(%{username: "<EMAIL>"})
|> query
|> to_list
```
"""
def params(%SqlCommand{} = cmd, parameters) when is_list(parameters),
do: %SqlCommand{cmd | params: parameters}
def params(%SqlCommand{} = cmd, parameters) when is_map(parameters),
do: %SqlCommand{cmd | params: parameters}
@doc """
This causes the query to be executed and returns an %Postgrex.Result{}
"""
def query(%SqlCommand{} = cmd) do
Query.query(cmd)
end
@doc """
The results that come back from Postgrex are in a `%Postgrex.Result{}` struct i.e.:
```
{:ok, %Postgrex.Result{columns: ["id", "email", "first", "last"],
command: :select, connection_id: 10298, num_rows: 1,
rows: [[1, "<EMAIL>", "Rob", "Blah"]]}}
```
This combines the columns and rows into a list of maps, i.e.:
```
to_list(...)
[%{"id" => 1, "email" => "<EMAIL>", "first" => "Rob", "last" => "Blah"}]
to_list(..., keys: :atoms)
[%{id: 1, email: "<EMAIL>", first: "Rob", last: "Blah"}]
```
"""
def to_list(%Postgrex.Result{} = results) do
PgContrivance.Transformer.lists_to_map(results.columns, results.rows)
end
def to_list(%Postgrex.Result{} = results, keys: :atoms) do
PgContrivance.Transformer.lists_to_map(results.columns, results.rows, keys: :atoms)
end
end
|
lib/pg_contrivance.ex
| 0.659405 | 0.746278 |
pg_contrivance.ex
|
starcoder
|
defmodule RList.ActiveSupport do
@moduledoc """
Summarizes the List functions from Rails ActiveSupport.
If a function with the same name already exists in Elixir, it is not implemented here.
Defines all of these functions when you `use RList.ActiveSupport`.
"""
@spec __using__(any) :: list
defmacro __using__(_opts) do
RUtils.define_all_functions!(__MODULE__)
end
alias RList.Support
# https://www.rubydoc.info/gems/activesupport/Array
# [:as_json, :compact_blank!, :deep_dup, :excluding, :extract!, :extract_options!, :fifth, :forty_two, :fourth, :from, :in_groups, :in_groups_of, :including, :inquiry, :second, :second_to_last, :split, :sum, :third, :third_to_last, :to, :to_default_s, :to_formatted_s, :to_param, :to_query, :to_s, :to_sentence, :to_xml]
# |> RUtils.required_functions([List, RList.Ruby, REnum])
# × as_json
# × deep_dup
# ✔ fifth
# ✔ forty_two
# ✔ fourth
# ✔ from
# ✔ in_groups
# ✔ in_groups_of
# inquiry
# ✔ second
# ✔ second_to_last
# ✔ third
# ✔ third_to_last
# ✔ to
# ✔ to_default_s
# × to_formatted_s
# to_param
# to_query
# ✔ to_sentence
# to_xml
@doc """
Returns the tail of the list from position.
## Examples
iex> ~w[a b c d]
iex> |> RList.from(0)
["a", "b", "c", "d"]
iex> ~w[a b c d]
iex> |> RList.from(2)
["c", "d"]
iex> ~w[a b c d]
iex> |> RList.from(10)
[]
iex> ~w[]
iex> |> RList.from(0)
[]
iex> ~w[a b c d]
iex> |> RList.from(-2)
["c", "d"]
iex> ~w[a b c d]
iex> |> RList.from(-10)
[]
"""
@spec from(list(), integer()) :: list()
def from(list, position) do
list
|> Enum.slice(position..Enum.count(list))
end
@doc """
Returns the beginning of the list up to position.
## Examples
iex> ~w[a b c d]
iex> |> RList.to(0)
["a"]
iex> ~w[a b c d]
iex> |> RList.to(2)
["a", "b", "c"]
iex> ~w[a b c d]
iex> |> RList.to(10)
["a", "b", "c", "d"]
iex> ~w[]
iex> |> RList.to(0)
[]
iex> ~w[a b c d]
iex> |> RList.to(-2)
["a", "b", "c"]
iex> ~w[a b c d]
iex> |> RList.to(-10)
[]
"""
@spec to(list(), integer()) :: list()
def to(list, position) do
list
|> Enum.slice(0..position)
end
@doc """
Equal to `Enum.at(list, 1)`.
## Examples
iex> ~w[a b c d]
iex> |> RList.second()
"b"
"""
@spec second(list()) :: any()
def second(list) do
Enum.at(list, 1)
end
@doc """
Equal to `Enum.at(list, 2)`.
## Examples
iex> ~w[a b c d]
iex> |> RList.third()
"c"
"""
@spec third(list()) :: any()
def third(list) do
Enum.at(list, 2)
end
@doc """
Equal to `Enum.at(list, 3)`.
## Examples
iex> ~w[a b c d]
iex> |> RList.fourth()
"d"
"""
@spec fourth(list()) :: any()
def fourth(list) do
Enum.at(list, 3)
end
@doc """
Equal to `Enum.at(list, 4)`.
## Examples
iex> ~w[a b c d e]
iex> |> RList.fifth()
"e"
"""
@spec fifth(list()) :: any()
def fifth(list) do
Enum.at(list, 4)
end
@doc """
Equal to `Enum.at(list, 41)`. Also known as accessing "the reddit".
## Examples
iex> 1..42
iex> |> RList.forty_two()
42
"""
@spec forty_two(list()) :: any()
def forty_two(list) do
Enum.at(list, 41)
end
@doc """
Equal to `Enum.at(list, -2)`.
## Examples
iex> ~w[a b c d e]
iex> |> RList.second_to_last()
"d"
"""
@spec second_to_last(list()) :: any()
def second_to_last(list) do
Enum.at(list, -2)
end
@doc """
Equal to `Enum.at(list, -3)`.
## Examples
iex> ~w[a b c d e]
iex> |> RList.third_to_last()
"c"
"""
@spec third_to_last(list()) :: any()
def third_to_last(list) do
Enum.at(list, -3)
end
@doc """
Converts the list to a comma-separated sentence where the last element is joined by the connector word.
You can pass the following options to change the default behavior; option keys not in the list below are ignored.
## Options
* `:words_connector` - The sign or word used to join all but the last
element in lists with three or more elements (default: ", ").
* `:last_word_connector` - The sign or word used to join the last element
in lists with three or more elements (default: ", and ").
* `:two_words_connector` - The sign or word used to join the elements
in lists with two elements (default: " and ").
## Examples
iex> ["one", "two"]
iex> |> RList.to_sentence()
"one and two"
iex> ["one", "two", "three"]
iex> |> RList.to_sentence()
"one, two, and three"
iex> ["one", "two"]
iex> |> RList.to_sentence(two_words_connector: "-")
"one-two"
iex> ["one", "two", "three"]
iex> |> RList.to_sentence(words_connector: " or ", last_word_connector: " or at least ")
"one or two or at least three"
iex> ["one", "two", "three"]
iex> |> RList.to_sentence()
"one, two, and three"
"""
@spec to_sentence(list(), list(keyword()) | nil) :: String.t()
def to_sentence(list, opts \\ []) do
words_connector = Keyword.get(opts, :words_connector) || ", "
two_words_connector = Keyword.get(opts, :two_words_connector) || " and "
last_word_connector = Keyword.get(opts, :last_word_connector) || ", and "
case Enum.count(list) do
0 -> ""
1 -> "#{Enum.at(list, 0)}"
2 -> "#{Enum.at(list, 0)}#{two_words_connector}#{Enum.at(list, 1)}"
_ -> "#{to(list, -2) |> Enum.join(words_connector)}#{last_word_connector}#{List.last(list)}"
end
end
@doc """
Splits or iterates over the list in number of groups, padding any remaining slots with fill_with unless it is false.
## Examples
iex> ~w[1 2 3 4 5 6 7 8 9 10]
iex> |> RList.in_groups(3)
[
["1", "2", "3", "4"],
["5", "6", "7", nil],
["8", "9", "10", nil]
]
iex> ~w[1 2 3 4 5 6 7 8 9 10]
iex> |> RList.in_groups(3, " ")
[
["1", "2", "3", "4"],
["5", "6", "7", " "],
["8", "9", "10", " "]
]
iex> ~w[1 2 3 4 5 6 7]
iex> |> RList.in_groups(3, false)
[
["1", "2", "3"],
["4", "5"],
["6", "7"]
]
"""
@spec in_groups(list(), non_neg_integer(), any() | nil) :: list()
def in_groups(list, number, fill_with \\ nil) do
division = div(Enum.count(list), number)
modulo = rem(Enum.count(list), number)
range = 0..(number - 1)
length_list =
range
|> Enum.map(&(division + if(modulo > 0 && modulo > &1, do: 1, else: 0)))
range
|> Enum.reduce([], fn index, acc ->
length = length_list |> Enum.at(index)
group =
Enum.slice(
list,
length_list
|> Enum.take(index)
|> Enum.sum(),
length
)
if fill_with != false && modulo > 0 && length == division do
acc ++ [group ++ [fill_with]]
else
acc ++ [group]
end
end)
end
@doc """
Splits or iterates over the list in groups of size number, padding any remaining slots with fill_with unless it is +false+.
## Examples
iex> ~w[1 2 3 4 5 6 7 8 9 10]
iex> |> RList.in_groups_of(3)
[
["1", "2", "3"],
["4", "5", "6"],
["7", "8", "9"],
["10", nil, nil]
]
iex> ~w[1 2 3 4 5]
iex> |> RList.in_groups_of(2, " ")
[
["1", "2"],
["3", "4"],
["5", " "]
]
iex> ~w[1 2 3 4 5]
iex> |> RList.in_groups_of(2, false)
[
["1", "2"],
["3", "4"],
["5"]
]
"""
@spec in_groups_of(list(), non_neg_integer(), any() | nil) :: list()
def in_groups_of(list, number, fill_with \\ nil) do
if(fill_with == false) do
list
else
padding = rem(number - rem(Enum.count(list), number), number)
list ++ Support.new(fill_with, padding)
end
|> REnum.each_slice(number)
|> Enum.to_list()
end
defdelegate to_default_s(list), to: Kernel, as: :inspect
end
|
lib/r_list/active_support.ex
| 0.818374 | 0.464659 |
active_support.ex
|
starcoder
|
defmodule PiviEx.Period do
@moduledoc """
Helper to create a period from an integer.
"""
## NaiveDates
def naive_date(day) do
{:ok, date} = NaiveDateTime.from_iso8601 "#{day} 00:00:00"
date
end
def period(%Date{} = date) do
date.year * 100 + date.month
end
def period_dates(str) when is_binary(str) do
period_dates(String.to_integer(str))
end
def period_dates(int) when int > 190_000 do
year = div(int, 100)
month = rem(int, 100)
{:ok, first_date} = Date.new(year, month, 1)
{:ok, last_date} = Date.new(year, month, Date.days_in_month(first_date))
{:ok, first_date, last_date}
end
def period_dates(int) when int > 1900 and int < 2030 do
{:ok, first_date} = Date.new(int, 1, 1)
{:ok, last_date} = Date.new(int, 12, 31)
{:ok, first_date, last_date}
end
@doc """
From an integer in format yyyymmdd returns the date.
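e.g. date_from_int(20191224) #=> {:ok, ~D[2019-12-24]}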
"""
def date_from_int(date_int) when is_integer(date_int) do
day = rem(date_int, 100)
year = div(date_int, 10000)
period = div(date_int, 100)
month = rem(period, 100)
Date.new(year, month, day)
end
def date_from_int(date_int) when is_binary(date_int) do
{d, _} = Integer.parse(date_int)
date_from_int(d)
end
@doc """
Takes a German date format with a dot. e.g. 24.12.2019 Happy X-Mas
"""
def from_string(:d_m_y, str) when is_binary(str) do
case String.split(str, ".") do
[d, m, y] -> from_string(:d_m_y, d, m, y)
_ -> :error
end
end
def from_string(:d_m_y, _anything) do
false
end
def from_string(:d_m_y, d, m, y) do
[{d, _}, {m, _}, {y, _}] = [Integer.parse(d), Integer.parse(m), Integer.parse(y)]
Date.new(y, m, d)
end
def from_string(:m_d_y, m, d, y) do
[{d, _}, {m, _}, {y, _}] = [Integer.parse(d), Integer.parse(m), Integer.parse(y)]
Date.new(y, m, d)
end
def to_iso_8859_str(%Date{} = date) do
[y, m, d] = Date.to_string(date) |> String.split("-")
d <> "." <> m <> "." <> y
end
def to_integer({:ok, first_date, last_date}) do
first_date = Date.to_string(first_date) |> String.replace("-", "") |> String.to_integer()
last_date = Date.to_string(last_date) |> String.replace("-", "") |> String.to_integer()
{:ok, first_date, last_date}
end
def to_string({:ok, first_date, last_date}) do
first_date = Date.to_string(first_date)
last_date = Date.to_string(last_date)
{:ok, first_date, last_date}
end
@doc """
Returns date_from and date_to from a string or integer.
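e.g. date_to_from("24.12.2019") #=> {:ok, ~D[2019-12-24]}
date_to_from(201912) #=> {:ok, ~D[2019-12-01], ~D[2019-12-31]}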
"""
def date_to_from(str) when is_binary(str) do
cond do
String.contains?(str, "..") ->
[from, to] = String.split(str, "..")
{date_to_from(from), date_to_from(to)}
Regex.match?(~r/^\d{1,2}\.\d{1,2}\.\d{4}$/, str) ->
[d, m, y] = String.split(str, ".")
Date.new(String.to_integer(y), String.to_integer(m), String.to_integer(d))
Regex.match?(~r/^\d{1,2}\.\d{1,2}\.\d{1,2}$/, str) ->
[d, m, y] = String.split(str, ".")
Date.new(String.to_integer(y) + 2000, String.to_integer(m), String.to_integer(d))
Regex.match?(~r/^\d{4}-\d{1,2}-\d{1,2}$/, str) ->
[y, m, d] = String.split(str, "-")
Date.new(String.to_integer(y), String.to_integer(m), String.to_integer(d))
Regex.match?(~r/^\d{2}-\d{1,2}-\d{1,2}$/, str) ->
[y, m, d] = String.split(str, "-")
Date.new(String.to_integer(y) + 2000, String.to_integer(m), String.to_integer(d))
true ->
String.to_integer(str) |> date_to_from()
end
end
def date_to_from(int) when is_integer(int) do
case length(Integer.digits(int)) do
4 ->
period_dates(int)
6 ->
period_dates(int)
8 ->
date_from_int(int)
_ ->
{:error, :invalid_date}
end
end
def name(%Date{} = date) do
{y, m, _d} = Date.to_erl(date)
period = (y * 100) + m
name(period)
end
def name(str) when is_binary(str) do
[y, m, _d] = String.split(str, "-")
p = y <> m
name(String.to_integer(p))
end
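# name/1 had no clause for a plain integer period, although both clauses
# above delegate to one; assuming the German variant as the default here.
def name(period) when is_integer(period), do: name(:de, period)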
def name(:de, period) when period > 190_000 and period < 205_000 do
year = div(period, 100)
month = rem(period, 100)
case month do
1 -> "Jan-#{year}"
2 -> "Feb-#{year}"
3 -> "Mrz-#{year}"
4 -> "Apr-#{year}"
5 -> "Mai-#{year}"
6 -> "Jun-#{year}"
7 -> "Jul-#{year}"
8 -> "Aug-#{year}"
9 -> "Sep-#{year}"
10 -> "Okt-#{year}"
11 -> "Nov-#{year}"
12 -> "Dez-#{year}"
_ -> :error
end
end
def name(:us, period) when period > 190_000 and period < 205_000 do
year = div(period, 100)
month = rem(period, 100)
case month do
1 -> "January #{year}"
2 -> "February #{year}"
3 -> "March #{year}"
4 -> "April #{year}"
5 -> "May #{year}"
6 -> "June #{year}"
7 -> "July #{year}"
8 -> "August #{year}"
9 -> "September #{year}"
10 -> "October #{year}"
11 -> "November #{year}"
12 -> "December #{year}"
_ -> :error
end
end
def yield_periods(start_period, end_period) do
start_year = div(start_period, 100)
start_month = rem(start_period, 100)
start_year_dec = start_year * 100 + 12
end_year = div(end_period, 100)
end_month = rem(end_period, 100)
end_year_jan = end_year * 100 + 1
cond do
end_month > 12 -> raise "End month error."
start_month > 12 -> raise "Start month error."
end_year - start_year > 1 -> raise "Helper only implemented for two years ..."
end_year - start_year < 1 -> raise "Helper only implemented for two years ..."
true -> :ok
end
Enum.map(start_period..start_year_dec, & &1) ++ Enum.map(end_year_jan..end_period, & &1)
end
def from_quarter(quarter) when is_integer(quarter), do: quarter
def from_quarter(quarter) when is_binary(quarter) do
if String.contains?(quarter, "-") do
[year, quarter] = String.split(quarter, "-")
{year, quarter} =
if String.contains?(year, "Q") do
{String.to_integer(quarter) * 100, year}
else
{String.to_integer(year) * 100, quarter}
end
cond do
quarter == "Q1" -> {:ok, (year + 1)..(year + 3)}
quarter == "Q2" -> {:ok, (year + 4)..(year + 6)}
quarter == "Q3" -> {:ok, (year + 7)..(year + 9)}
quarter == "Q4" -> {:ok, (year + 10)..(year + 12)}
true -> {:error, quarter}
end
else
{:error, quarter}
end
end
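# e.g. to_quarter(201905) #=> "2019-Q2"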
def to_quarter(period) when is_integer(period) do
y = div(period, 100)
m = rem(period, 100)
cond do
m in 1..3 -> "#{y}-Q1"
m in 4..6 -> "#{y}-Q2"
m in 7..9 -> "#{y}-Q3"
m in 10..12 -> "#{y}-Q4"
true -> raise("Error in period calculation for quarter")
end
end
end
|
lib/period.ex
| 0.809803 | 0.599602 |
period.ex
|
starcoder
|
defmodule TheFuzz do
@moduledoc """
Contains shortforms to execute different string metric algorithms to compare
given strings.
"""
@spec compare(atom, String.t(), String.t()) :: number
@doc """
Compares given strings using the corresponding string metric algorithm.
Available metric types are:
- Sorensen Dice coefficient: **:dice_sorensen**
- Hamming distance: **:hamming**
- Jaccard Similarity coefficient: **:jaccard**
- Jaro distance: **:jaro**
- Jaro Winkler distance: **:jaro_winkler**
- Levenshtein distance: **:levenshtein**
- n Gram similarity: **:n_gram**
- Overlap coefficient: **:overlap**
- Tanimoto coefficient: **:tanimoto**
- Weighted Levenshtein distance: **:weighted_levenshtein**
Note: Some of these metrics will use default values for other parameters
they might need, such as the n-gram size in the case of Jaccard.
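## Example
# Levenshtein returns an edit distance (an integer)
TheFuzz.compare(:levenshtein, "kitten", "sitting")
#=> 3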
"""
def compare(metric_type, a, b)
def compare(:dice_sorensen, a, b) do
TheFuzz.Similarity.DiceSorensen.compare(a, b)
end
def compare(:hamming, a, b) do
TheFuzz.Similarity.Hamming.compare(a, b)
end
def compare(:jaccard, a, b) do
TheFuzz.Similarity.Jaccard.compare(a, b)
end
def compare(:jaro, a, b) do
TheFuzz.Similarity.Jaro.compare(a, b)
end
def compare(:jaro_winkler, a, b) do
TheFuzz.Similarity.JaroWinkler.compare(a, b)
end
def compare(:levenshtein, a, b) do
TheFuzz.Similarity.Levenshtein.compare(a, b)
end
def compare(:n_gram, a, b) do
TheFuzz.Similarity.NGram.compare(a, b)
end
def compare(:overlap, a, b) do
TheFuzz.Similarity.Overlap.compare(a, b)
end
def compare(:tanimoto, a, b) do
TheFuzz.Similarity.Tversky.compare(a, b)
end
def compare(:weighted_levenshtein, a, b) do
TheFuzz.Similarity.WeightedLevenshtein.compare(a, b)
end
@doc """
Compares given strings using the corresponding string metric algorithm with
given `opts`
`opts` can be n gram size in case of Dice Sorensen, Jaccard, N Gram similarity
and can be weights in case of Weighted Levenshtein
Available metric types are:
- Sorensen Dice coefficient: **:dice_sorensen**
- Jaccard Similarity coefficient: **:jaccard**
- n Gram similarity: **:n_gram**
- Tversky index: **:tversky**
- Weighted Levenshtein distance: **:weighted_levenshtein**
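## Example
# passing the n-gram size explicitly (here: bigrams)
TheFuzz.compare(:jaccard, "context", "contact", 2)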
"""
def compare(metric_type, a, b, opts)
def compare(:dice_sorensen, a, b, n) do
TheFuzz.Similarity.DiceSorensen.compare(a, b, n)
end
def compare(:jaccard, a, b, n) do
TheFuzz.Similarity.Jaccard.compare(a, b, n)
end
def compare(:n_gram, a, b, n) do
TheFuzz.Similarity.NGram.compare(a, b, n)
end
def compare(:tversky, a, b, %{} = opts) do
TheFuzz.Similarity.Tversky.compare(a, b, opts)
end
def compare(:weighted_levenshtein, a, b, %{} = weights) do
TheFuzz.Similarity.WeightedLevenshtein.compare(a, b, weights)
end
end
|
lib/the_fuzz.ex
| 0.908866 | 0.757436 |
the_fuzz.ex
|
starcoder
|
defmodule Comeonin.Password do
@moduledoc """
Module to generate random passwords and check password strength.
The function to check password strength checks that it is long enough
and contains at least one digit and one punctuation character.
# Password policy
The guidelines below are mainly intended for any business, or organization,
that needs to create and implement a password policy. However, much of the
advice is also applicable to other users.
## Writing down passwords
Opinion seems to be divided on this matter, with several authors
arguing that remembering multiple strong passwords can be very difficult
for many users, and if users are forced to remember passwords,
they are likely to create weaker passwords that are easier to remember
as a result.
If users are allowed to write down passwords, they should keep the
password in a safe place and treat its loss seriously, that is, as
seriously as the loss of an id card or a bank card.
## Password strength
Strong passwords should:
* be long
* contain as large a character set as possible (digits, punctuation characters, etc.)
* not contain dictionary words (this applies to multiple languages, not just English)
* be kept secret (not shared between multiple users)
If a password fails to meet any of the above criteria, then that makes it
easier for programs to guess the password. It is important, therefore,
that you try to ensure that all of the above criteria are met.
## Password length
Ideally, the password should be as long as possible. However, many users
would not be happy if they had to type in passwords 20 or 30 characters
long every time they had to access a service (although this might be
justifiable in certain cases), and so there needs to be a balance struck
between usability and the ideal password length. Please read the section
`User compliance` below for information about why usability is such
an important consideration.
In this module, the default length of the randomly generated passwords
is 12 characters, and with the `strong_password?` function, the minimum
length of passwords is 8 characters. Both of these values can be changed
in the config file.
With bcrypt, the maximum password length is 72 characters. Longer passwords
can be used, but the extra characters (after the 72nd character) are ignored.
## Creating strong passwords
For passwords that need to be remembered, creating a password by using
the first or second letter of each word in an uncommon phrase can be
a way of creating a strong password which is also easy to remember.
For passwords that do not need to be remembered, that can be written
down, generating passwords programmatically seems to be the best option,
as computer programs are generally better than humans at creating
random passwords.
## User compliance
One major theme in the research on password policies is the difficulty
of getting users to comply with the guidelines. It seems that if users
find it difficult to follow the rules for creating, remembering and using
passwords, then they will find creative ways of breaking the rules to
make it easier to get their work done.
This question of user compliance is an issue that needs to be taken
into serious consideration when formulating any password policy,
especially as a user not following the rules can have a serious
impact on the security of the rest of the organization.
## Further information
Visit our wiki (https://github.com/elixircnx/comeonin/wiki)
for links to further information about these and related issues.
"""
@alpha Enum.concat ?A..?Z, ?a..?z
@alphabet ',./!@#$%^&*();:?<>' ++ @alpha ++ '0123456789'
@digits String.codepoints("0123456789")
@punc String.codepoints(" ,./!@#$%^&*();:?<>")
@char_map Enum.map_reduce(@alphabet, 0, fn x, acc ->
{{acc, x}, acc + 1} end)
|> elem(0) |> Enum.into(%{})
@doc """
Randomly generate a password.
The default length of the password is 12 characters, and it is guaranteed
to contain at least one digit and one punctuation character.
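## Examples
Comeonin.Password.gen_password() |> String.length
#=> 12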
"""
def gen_password(len \\ 12) do
rand_password(len) |> to_string
end
defp rand_password(len) do
case rand_numbers(len) |> pass_check do
false -> rand_password(len)
code -> for val <- code, do: Map.get(@char_map, val)
end
end
defp rand_numbers(len) do
for _ <- 1..len, do: :crypto.rand_uniform(0, 80)
end
defp pass_check(code) do
Enum.any?(code, &(&1 < 18)) and Enum.any?(code, &(&1 > 69)) and code
end
@doc """
Check the strength of the password.
There are two options: min_length and extra_chars.
min_length checks that the password is not shorter than the minimum length.
extra_chars checks that the password contains at least one digit and one
punctuation character (spaces are counted as punctuation characters).
extra_chars is true by default, and min_length's default is 8 characters
if extra_chars is set to true, but 12 characters if extra_chars is set to false.
## Examples
This example will check that the password is at least 8 characters long and
will check that it contains at least one punctuation character and one digit.
Comeonin.Password.strong_password?("<PASSWORD>")
The following example will check that the password is at least 16 characters
long and will not check for punctuation characters or digits.
Comeonin.Password.strong_password?("<PASSWORD>", [min_length: 16, extra_chars: false])
"""
def strong_password?(password, opts \\ []) do
{min_len, extra_chars} = case Keyword.get(opts, :extra_chars, true) do
true -> {Keyword.get(opts, :min_length, 8), true}
_ -> {Keyword.get(opts, :min_length, 12), false}
end
case pass_length?(String.length(password), min_len) do
true -> extra_chars and has_punc_digit?(password)
message -> message
end
end
defp pass_length?(word_len, min_len) when word_len < min_len do
"The password should be at least #{min_len} characters long."
end
defp pass_length?(_, _), do: true
defp has_punc_digit?(word) do
if :binary.match(word, @digits) != :nomatch and :binary.match(word, @punc) != :nomatch do
true
else
"The password should contain at least one number and one punctuation character."
end
end
end
|
deps/comeonin/lib/comeonin/password.ex
| 0.699768 | 0.704745 |
password.ex
|
starcoder
|
defmodule Crux.Structs.Guild do
@moduledoc """
Represents a Discord [Guild Object](https://discord.com/developers/docs/resources/guild#guild-object).
Differences opposed to the Discord API Object:
- `:channels` is a `MapSet` of channel ids
- `:emojis` is a `MapSet` of emoji ids
- `:presences` does not exists at all
"""
@moduledoc since: "0.1.0"
@behaviour Crux.Structs
alias Crux.Structs
alias Crux.Structs.{Channel, Guild, Member, Message, Role, Snowflake, Util, VoiceState}
alias Guild.SystemChannelFlags
defstruct [
:id,
:name,
:icon,
:splash,
:discovery_splash,
:owner_id,
:region,
:afk_channel_id,
:afk_timeout,
:widget_enabled,
:widget_channel_id,
:verification_level,
:default_message_notifications,
:explicit_content_filter,
:roles,
:emojis,
:features,
:mfa_level,
:application_id,
:system_channel_id,
:system_channel_flags,
:rules_channel_id,
:joined_at,
:large,
:unavailable,
:member_count,
:voice_states,
:members,
:channels,
# :presences,
:max_presences,
:max_members,
:vanity_url_code,
:description,
:banner,
:premium_tier,
:premium_subscription_count,
:preferred_locale,
:public_updates_channel_id,
:max_video_channel_users,
:approximate_member_count,
:approximate_presence_count,
:welcome_screen
]
@typedoc since: "0.1.0"
@type t :: %__MODULE__{
id: Snowflake.t(),
name: String.t(),
icon: String.t() | nil,
splash: String.t() | nil,
discovery_splash: String.t() | nil,
owner_id: Snowflake.t(),
region: String.t(),
afk_channel_id: Snowflake.t(),
afk_timeout: non_neg_integer(),
widget_enabled: boolean() | nil,
widget_channel_id: Snowflake.t() | nil,
verification_level: 0..4,
default_message_notifications: 0..1,
explicit_content_filter: 0..2,
roles: %{optional(Snowflake.t()) => Role.t()},
emojis: MapSet.t(Snowflake.t()),
features: MapSet.t(String.t()),
mfa_level: 0..1,
application_id: Snowflake.t() | nil,
system_channel_id: Snowflake.t() | nil,
system_channel_flags: Crux.Structs.Guild.SystemChannelFlags.t(),
rules_channel_id: Snowflake.t(),
joined_at: String.t(),
large: boolean(),
unavailable: boolean(),
member_count: pos_integer(),
voice_states: %{optional(Snowflake.t()) => VoiceState.t()},
members: %{required(Snowflake.t()) => Member.t()},
channels: MapSet.t(Snowflake.t()),
# presences: %{required(Snowflake.t()) => Presence.t()},
max_presences: pos_integer() | nil,
max_members: pos_integer(),
vanity_url_code: String.t() | nil,
description: String.t() | nil,
banner: String.t() | nil,
premium_tier: 0..3,
premium_subscription_count: non_neg_integer(),
preferred_locale: String.t(),
public_updates_channel_id: Snowflake.t() | nil,
max_video_channel_users: non_neg_integer(),
approximate_member_count: pos_integer(),
approximate_presence_count: pos_integer(),
welcome_screen: welcome_screen() | nil
}
@type welcome_screen :: %{
description: String.t(),
welcome_channels: %{
required(Snowflake.t()) => %{
channel_id: Snowflake.t(),
description: String.t(),
emoji_id: Snowflake.t() | nil,
emoji_name: String.t() | nil
}
}
}
@typedoc """
All available types that can be resolved into a guild id.
"""
@typedoc since: "0.2.1"
@type id_resolvable() :: Guild.t() | Channel.t() | Message.t() | Snowflake.t()
@doc """
Resolves the id of a `t:Crux.Structs.Guild.t/0`.
> Automatically invoked by `Crux.Structs.resolve_id/2`.
```elixir
iex> %Crux.Structs.Guild{id: 516569101267894284}
...> |> Crux.Structs.Guild.resolve_id()
516569101267894284
iex> %Crux.Structs.Channel{guild_id: 516569101267894284}
...> |> Crux.Structs.Guild.resolve_id()
516569101267894284
iex> %Crux.Structs.Message{guild_id: 516569101267894284}
...> |> Crux.Structs.Guild.resolve_id()
516569101267894284
iex> 516569101267894284
...> |> Crux.Structs.Guild.resolve_id()
516569101267894284
iex> "516569101267894284"
...> |> Crux.Structs.Guild.resolve_id()
516569101267894284
# DMs
iex> %Crux.Structs.Channel{guild_id: nil}
...> |> Crux.Structs.Guild.resolve_id()
nil
iex> %Crux.Structs.Message{guild_id: nil}
...> |> Crux.Structs.Guild.resolve_id()
nil
```
"""
@doc since: "0.2.1"
@spec resolve_id(id_resolvable()) :: Snowflake.t() | nil
def resolve_id(%Guild{id: id}) do
resolve_id(id)
end
def resolve_id(%Channel{guild_id: guild_id}) do
resolve_id(guild_id)
end
def resolve_id(%Message{guild_id: guild_id}) do
resolve_id(guild_id)
end
def resolve_id(resolvable), do: Structs.resolve_id(resolvable)
@doc """
Creates a `t:Crux.Structs.Guild.t/0` struct from raw data.
> Automatically invoked by `Crux.Structs.create/2`.
"""
@doc since: "0.1.0"
@spec create(data :: map()) :: t()
def create(data) do
data =
data
|> Util.atomify()
|> Map.update(:id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:owner_id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:afk_channel_id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:widget_channel_id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:application_id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:system_channel_id, nil, &Snowflake.to_snowflake/1)
|> Map.update(:public_updates_channel_id, nil, &Snowflake.to_snowflake/1)
# :roles
|> Map.update(:emojis, nil, &MapSet.new(&1, Util.map_to_id()))
|> Map.update(:features, nil, &MapSet.new/1)
|> Map.update(:system_channel_flags, nil, &SystemChannelFlags.resolve/1)
# :voice_states
# :members
|> Map.update(:channels, nil, &MapSet.new(&1, Util.map_to_id()))
|> Map.update(:welcome_screen, nil, &create_welcome_screen/1)
# :presences
guild =
data
|> Map.update(
:roles,
nil,
&Map.new(&1, fn role ->
role =
role
|> Map.put(:guild_id, data.id)
|> Structs.create(Role)
{role.id, role}
end)
)
|> Map.update(
:voice_states,
nil,
&Map.new(&1, fn voice_state ->
voice_state =
voice_state
|> Map.put(:guild_id, data.id)
|> Structs.create(VoiceState)
{voice_state.user_id, voice_state}
end)
)
|> Map.update(
:members,
nil,
&Map.new(&1, fn member ->
member =
member
|> Map.put(:guild_id, data.id)
|> Structs.create(Member)
{member.user, member}
end)
)
struct(__MODULE__, guild)
end
defp create_welcome_screen(nil), do: nil
defp create_welcome_screen(welcome_screen) do
Map.update!(
welcome_screen,
:welcome_channels,
&Map.new(&1, fn welcome_channel ->
welcome_channel =
welcome_channel
|> Map.update!(:channel_id, fn
channel_id -> Snowflake.to_snowflake(channel_id)
end)
|> Map.update!(:emoji_id, fn
nil -> nil
id -> Snowflake.to_snowflake(id)
end)
{welcome_channel.channel_id, welcome_channel}
end)
)
end
defimpl String.Chars, for: Crux.Structs.Guild do
@spec to_string(Guild.t()) :: String.t()
def to_string(%Guild{name: name}), do: name
end
end
|
lib/structs/guild.ex
| 0.804905 | 0.624923 |
guild.ex
|
starcoder
|
defmodule Modbus.Rtu.Master do
@moduledoc """
RTU master module.
A minimal usage sketch (the device name is illustrative):
```elixir
{:ok, master} = Modbus.Rtu.Master.start_link(device: "ttyUSB0")
# issue requests with exec/3, then shut down:
:ok = Modbus.Rtu.Master.stop(master)
```
"""
alias Modbus.Rtu
@doc """
Starts the RTU server.
`params` *must* contain a keyword list to be merged with the following defaults:
```elixir
[
device: nil, #serial port name: "COM1", "ttyUSB0", "cu.usbserial-FTYHQD9MA"
speed: 9600, #either 1200, 2400, 4800, 9600, 19200, 38400, 57600, 115200
#win32 adds 14400, 128000, 256000
config: "8N1", #either "8N1", "7E1", "7O1"
]
```
`opts` is optional and is passed verbatim to GenServer.
Returns `{:ok, pid}`.
## Example
```
Rtu.start_link([device: "COM8"])
```
"""
def start_link(params, opts \\ []) do
Agent.start_link(fn -> init(params) end, opts)
end
@sleep 1
@to 800
@doc """
Stops the RTU server.
Returns `:ok`.
"""
def stop(pid) do
Agent.get(pid, fn nid ->
:ok = Sniff.close nid
end, @to)
Agent.stop(pid)
end
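@doc """
Executes a Modbus command and awaits its response.
The command is packed with `Modbus.Rtu.pack_req/1`; returns `:ok` when the
command yields no values, or `{:ok, values}` otherwise. An optional
`timeout` in milliseconds (default 800) bounds the read loop.
"""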
def exec(pid, cmd, timeout \\ @to) do
Agent.get(pid, fn nid ->
now = now()
dl = now + timeout
request = Rtu.pack_req(cmd)
length = Rtu.res_len(cmd)
:ok = Sniff.write nid, request
response = read_n(nid, [], 0, length, dl)
^length = byte_size(response)
values = Rtu.parse_res(cmd, response)
case values do
nil -> :ok
_ -> {:ok, values}
end
end, 2*timeout)
end
defp init(params) do
device = Keyword.fetch!(params, :device)
speed = Keyword.get(params, :speed, 9600)
config = Keyword.get(params, :config, "8N1")
{:ok, nid} = Sniff.open device, speed, config
nid
end
defp read_n(nid, iol, size, count, dl) do
case size >= count do
true -> flat iol
false ->
{:ok, data} = Sniff.read nid
case data do
<<>> ->
:timer.sleep @sleep
now = now()
case now > dl do
true -> flat iol
false -> read_n(nid, iol, size, count, dl)
end
_ -> read_n(nid, [data | iol], size + byte_size(data),
count, dl)
end
end
end
defp flat(list) do
reversed = Enum.reverse list
:erlang.iolist_to_binary(reversed)
end
defp now(), do: :os.system_time :milli_seconds
#defp now(), do: :erlang.monotonic_time :milli_seconds
end
|
lib/Master.ex
| 0.776877 | 0.698882 |
Master.ex
|
starcoder
|
defmodule Soap.Response.Parser do
@moduledoc """
Provides a functions for parse an xml-like response body.
"""
import SweetXml, only: [xpath: 2, sigil_x: 2]
@soap_version_namespaces %{
"1.1" => :"http://schemas.xmlsoap.org/soap/envelope/",
"1.2" => :"http://www.w3.org/2003/05/soap-envelope"
}
@doc """
Parses an XML response body into a map.
The second argument selects which part of the envelope to parse:
`:fault`, `:header`, or any other value for the body.
"""
@spec parse(String.t(), atom()) :: map()
def parse(xml_response, :fault) do
fault_tag = get_fault_tag(xml_response)
xml_response
|> xpath(~x"//#{fault_tag}/*"l)
|> parse_elements()
end
def parse(xml_response, :header) do
body_tag = get_header_tag(xml_response)
xml_response
|> xpath(~x"//#{body_tag}/*"l)
|> parse_elements()
end
def parse(xml_response, _response_type) do
body_tag = get_body_tag(xml_response)
xml_response
|> xpath(~x"//#{body_tag}/*"l)
|> parse_elements()
end
@spec parse_record(tuple()) :: map() | String.t()
def parse_record({:xmlElement, tag_name, _, _, _, _, _, _, elements, _, _, _}) do
%{tag_name => parse_elements(elements)}
end
def parse_record({:xmlText, _, _, _, value, _}), do: transform_record_value(value)
def transform_record_value(nil), do: nil
def transform_record_value(value) when is_list(value), do: value |> to_string() |> String.trim()
def transform_record_value(value) when is_binary(value), do: value |> String.trim()
@spec parse_elements(list() | tuple()) :: map()
def parse_elements([]), do: %{}
def parse_elements(elements) when is_tuple(elements), do: parse_record(elements)
def parse_elements(elements) when is_list(elements) do
elements
|> Enum.map(&parse_record/1)
|> parse_element_values()
end
@spec parse_element_values(list()) :: any()
def parse_element_values(elements) do
cond do
Enum.all?(elements, &is_map/1) && unique_tags?(elements) ->
Enum.reduce(elements, &Map.merge/2)
Enum.all?(elements, &is_map/1) ->
elements |> Enum.map(&Map.to_list/1) |> List.flatten()
true ->
extract_value_from_list(elements)
end
end
@spec extract_value_from_list(list()) :: any()
def extract_value_from_list([element]), do: element
def extract_value_from_list(elements), do: elements
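# True when no tag name repeats across the parsed element maps,
# e.g. unique_tags?([%{a: 1}, %{b: 2}]) #=> true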
def unique_tags?(elements) do
keys =
elements
|> Enum.map(&Map.keys/1)
|> List.flatten()
Enum.uniq(keys) == keys
end
def get_envelope_namespace(xml_response) do
env_namespace = @soap_version_namespaces[soap_version()]
xml_response
|> xpath(~x"//namespace::*"l)
|> Enum.find(fn {_, _, _, _, namespace_url} -> namespace_url == env_namespace end)
|> elem(3)
end
def get_fault_tag(xml_response) do
xml_response
|> get_envelope_namespace()
|> List.to_string()
|> apply_namespace_to_tag("Fault")
end
def get_body_tag(xml_response) do
xml_response
|> get_envelope_namespace()
|> List.to_string()
|> apply_namespace_to_tag("Body")
end
def get_header_tag(xml_response) do
xml_response
|> get_envelope_namespace()
|> List.to_string()
|> apply_namespace_to_tag("Header")
end
def apply_namespace_to_tag(nil, tag), do: tag
def apply_namespace_to_tag(env_namespace, tag), do: env_namespace <> ":" <> tag
def soap_version, do: Application.fetch_env!(:soap, :globals)[:version]
end
|
lib/soap/response/parser.ex
| 0.648578 | 0.412412 |
parser.ex
|
starcoder
|
defmodule Plaid.Accounts do
@moduledoc """
[Plaid Accounts API](https://plaid.com/docs/api/accounts) calls and schema.
"""
alias Plaid.Castable
defmodule GetResponse do
@moduledoc """
[Plaid API /accounts/get response schema.](https://plaid.com/docs/api/accounts).
"""
@behaviour Castable
alias Plaid.Account
alias Plaid.Item
@type t :: %__MODULE__{
accounts: [Account.t()],
item: Item.t(),
request_id: String.t()
}
defstruct [:accounts, :item, :request_id]
@impl true
def cast(generic_map) do
%__MODULE__{
accounts: Castable.cast_list(Account, generic_map["accounts"]),
item: Castable.cast(Item, generic_map["item"]),
request_id: generic_map["request_id"]
}
end
end
@doc """
Get information about all available accounts.
Does a `POST /accounts/get` call to retrieve high level account information
associated with an access_token's item.
Params:
* `access_token` - Token to fetch accounts for.
Options:
* `:account_ids` - Specific account ids to fetch accounts for.
## Examples
Accounts.get("access-sandbox-123xxx", client_id: "123", secret: "abc")
{:ok, %Accounts.GetResponse{}}
"""
@spec get(String.t(), options, Plaid.config()) ::
{:ok, GetResponse.t()} | {:error, Plaid.Error.t()}
when options: %{optional(:account_ids) => [String.t()]}
def get(access_token, options \\ %{}, config) do
options_payload = Map.take(options, [:account_ids])
payload =
%{}
|> Map.put(:access_token, access_token)
|> Map.put(:options, options_payload)
Plaid.Client.call("/accounts/get", payload, GetResponse, config)
end
@doc """
Get information about all available balances.
Does a `POST /accounts/balance/get` call to retrieve real-time balance
information for all accounts associated with an access_token's item.
This API call will force balances to be refreshed, rather than use
the cache like other API calls that return balances.
Params:
* `access_token` - Token to fetch accounts for.
Options:
* `:account_ids` - Specific account ids to fetch balances for.
## Examples
Accounts.get_balance("access-sandbox-123xxx", client_id: "123", secret: "abc")
{:ok, %Accounts.GetResponse{}}
"""
@spec get_balance(String.t(), options, Plaid.config()) ::
{:ok, GetResponse.t()} | {:error, Plaid.Error.t()}
when options: %{optional(:account_ids) => [String.t()]}
def get_balance(access_token, options \\ %{}, config) do
options_payload = Map.take(options, [:account_ids])
payload =
%{}
|> Map.put(:access_token, access_token)
|> Map.put(:options, options_payload)
Plaid.Client.call("/accounts/balance/get", payload, GetResponse, config)
end
end
|
lib/plaid/accounts.ex
| 0.852935 | 0.450057 |
accounts.ex
|
starcoder
|
defmodule Bamboo.ElasticEmail do
@moduledoc """
Helper functions for manipulating Bamboo.Email to enable Elastic Email
functionality.
"""
alias Bamboo.Email
@doc """
Add attachment identifiers to the email.
> Names or IDs of attachments previously uploaded to your account (via the
> File/Upload request) that should be sent with this e-mail.
"""
@spec attachments(Email.t(), [binary(), ...] | binary()) :: Email.t()
def attachments(%Email{} = email, attachments) do
put_elastic_send_option(email, :attachments, List.wrap(attachments))
end
@doc """
Sets the channel for the email.
> An ID field (max 191 chars) that can be used for reporting [will default
> to HTTP API or SMTP API]
"""
@spec channel(Email.t(), binary()) :: Email.t()
def channel(%Email{} = email, channel) do
put_elastic_send_option(email, :channel, String.slice(channel, 0..190))
end
@doc """
Sets the character set for the email or one of the MIME parts. Overrides the
global default of "utf-8".
> Text value of charset encoding for example: iso-8859-1, windows-1251,
> utf-8, us-ascii, windows-1250 and more…
The `part` parameter is interpreted as:
- `:amp`: sets the `charsetBodyAmp` option (the AMP body MIME part)
- `:html`: sets the `charsetBodyHtml` option (the HTML body MIME part)
- `:text`: sets the `charsetBodyText` option (the text body MIME part)
Any other value sets the global `charset` option.
"""
@spec charset(Email.t(), part :: nil | :amp | :html | :text, binary()) :: Email.t()
def charset(%Email{} = email, part \\ nil, charset) do
put_elastic_send_option(
email,
case part do
:amp -> :charset_body_amp
:html -> :charset_body_html
:text -> :charset_body_text
_ -> :charset
end,
charset
)
end
@doc """
Sets the data source of the email.
> Name or ID of the previously uploaded file (via the File/Upload request)
> which should be a CSV list of Recipients.
"""
@spec data_source(Email.t(), binary()) :: Email.t()
def data_source(%Email{} = email, name) do
put_elastic_send_option(email, :data_source, name)
end
@doc """
Sets the email encoding type. The default encoding is `:base64`, which is
the recommended value. Either `:base64` or `:quoted_printable` is
recommended if you are validating your domain(s) with DKIM.
Supported encoding types are:
- `:none`: no encoding
- `:raw_7bit`: Raw 7bit. Must be plain ASCII.
- `:raw_8bit`: Raw 8bit.
- `:quoted_printable`: Quoted printable format.
- `:base64`: Base64
- `:uue`: UU
"""
@spec encoding_type(Email.t(), atom) :: Email.t()
def encoding_type(%Email{} = email, encoding) do
put_elastic_send_option(
email,
:encoding_type,
case encoding do
:none -> 0
:raw_7bit -> 1
:raw_8bit -> 2
:quoted_printable -> 3
:uue -> 5
_ -> 4
end
)
end
@doc "The name or names of a contact list you would like to send to."
@spec lists(Email.t(), [binary(), ...] | binary()) :: Email.t()
def lists(%Email{} = email, lists) do
put_elastic_send_option(email, :lists, Enum.join(List.wrap(lists), ";"))
end
@doc """
Sets merge parameters. The params must be a list of maps or keyword lists
(or other 2-tuple lists).
> Repeated list of string keys and string values
> Request parameters prefixed by merge_ like merge_firstname,
> merge_lastname. If sending to a template you can send merge_ fields to
> merge data with the template. Template fields are entered with
> {firstname}, {lastname} etc.
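## Example
# stores [{"merge_firstname", "Jane"}, {"merge_lastname", "Doe"}] in the
# email's private :elastic_send_options
email |> Bamboo.ElasticEmail.merge(firstname: "Jane", lastname: "Doe")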
"""
@spec merge(Email.t(), map() | keyword() | [map() | keyword(), ...]) :: Email.t()
def merge(%Email{} = email, params) do
put_elastic_send_option(email, :merge, Enum.flat_map(List.wrap(params), &to_merge_params/1))
end
@doc """
The file name of a previously uploaded attachment which is a CSV list of
Recipients.
"""
@spec merge_source_filename(Email.t(), binary()) :: Email.t()
def merge_source_filename(%Email{} = email, filename) do
put_elastic_send_option(email, :merge_source_filename, filename)
end
@doc "Name of the custom IP Pool to be used in the sending process."
@spec pool_name(Email.t(), binary()) :: Email.t()
def pool_name(%Email{} = email, name) do
put_elastic_send_option(email, :pool_name, name)
end
@doc "Optional header returned in notifications."
@spec post_back(Email.t(), binary()) :: Email.t()
def post_back(%Email{} = email, post_back) do
put_elastic_send_option(email, :post_back, post_back)
end
@doc """
The name or names of the Contact segment(s) you wish to send. Use :all
for all active contacts.
"""
@spec segments(Email.t(), :all | binary() | [binary() | :all, ...]) :: Email.t()
def segments(%Email{} = email, segments) do
segments =
segments
|> List.wrap()
|> Enum.map(&if(match?(:all, &1), do: "0", else: &1))
|> Enum.uniq()
|> Enum.join(";")
put_elastic_send_option(email, :segments, segments)
end
@doc "The ID of an email template you have created in your account."
@spec template(Email.t(), binary()) :: Email.t()
def template(%Email{} = email, template) do
put_elastic_send_option(email, :template, template)
end
@doc """
The number of minutes in the future this email should be sent up to a
maximum of 1 year (524,160 minutes).
"""
@spec time_off_set_minutes(Email.t(), 1..524_160) :: Email.t()
def time_off_set_minutes(%Email{} = email, minutes) when is_integer(minutes) do
put_elastic_send_option(
email,
:time_off_set_minutes,
cond do
minutes < 1 -> 1
minutes > 524_160 -> 524_160
true -> minutes
end
)
end
@doc "Indicates whether clicks should be tracked on this email."
@spec track_clicks(Email.t(), boolean) :: Email.t()
def track_clicks(%Email{} = email, track_clicks) do
put_elastic_send_option(email, :track_clicks, track_clicks)
end
@doc "Indicates whether opens should be tracked on this email."
@spec track_opens(Email.t(), boolean) :: Email.t()
def track_opens(%Email{} = email, track_opens) do
put_elastic_send_option(email, :track_opens, track_opens)
end
@doc "Set UTM Marketing Parameters for campaign links."
@spec utm_parameters(
Email.t(),
%{optional(:campaign | :content | :medium | :source) => binary()} | keyword
) :: Email.t()
def utm_parameters(%Email{} = email, params) do
merge_elastic_send_options(email, Map.new(params, &to_utm_parameter/1))
end
defp put_elastic_send_option(%{private: private} = email, key, value) do
send_options =
private
|> Map.get(:elastic_send_options, %{})
|> Map.put(key, value)
%{email | private: Map.put(private, :elastic_send_options, send_options)}
end
defp merge_elastic_send_options(%{private: private} = email, %{} = options) do
send_options =
private
|> Map.get(:elastic_send_options, %{})
|> Map.merge(options)
%{email | private: Map.put(private, :elastic_send_options, send_options)}
end
defp to_merge_params({k, v}), do: [{"merge_#{k}", v}]
# flat_map (not map) so nested params flatten to a single list of tuples
defp to_merge_params(params) when is_list(params) or is_map(params),
do: Enum.flat_map(params, &to_merge_params/1)
defp to_utm_parameter({:campaign, value}), do: {:utm_campaign, value}
defp to_utm_parameter({:content, value}), do: {:utm_content, value}
defp to_utm_parameter({:medium, value}), do: {:utm_medium, value}
defp to_utm_parameter({:source, value}), do: {:utm_source, value}
end
|
lib/bamboo/elastic_email.ex
| 0.784649 | 0.50415 |
elastic_email.ex
|
starcoder
|
defmodule Hangman.Action.Robot do
@moduledoc """
Implements robot action player specific functionality
The `robot` action is reliant on the game strategy to automatically
self select the best guess.
"""
alias Hangman.{Action.Robot, Round, Letter.Strategy, Pass}
defstruct type: :robot, display: false, round: nil, strategy: nil
@type t :: %__MODULE__{}
@doc """
Sets up round by running a reduction pass. Informs Strategy
of reduction pass result to be later used in the guess method.
"""
@spec setup(t) :: tuple
def setup(%Robot{} = robot) do
round = robot.round
strategy = robot.strategy
# Retrieve the exclusion set, simply the list of already guessed letters
exclusion = strategy |> Strategy.guessed()
# Setup game round, passing in strategy callback routine
{round, %Pass{} = pass} = round |> Round.setup(exclusion)
# Process the strategy against the latest reduced word set data
# We are using the auto self-selecting mode
strategy = strategy |> Strategy.process(:auto, pass)
robot = Kernel.put_in(robot.round, round)
robot = Kernel.put_in(robot.strategy, strategy)
{robot, []}
end
@doc """
Routine for `:robot` player type. Performs `auto-generated` guess,
returns round `status`
"""
@spec guess(t, Guess.t()) :: tuple()
def guess(%Robot{} = robot, _data) do
strategy = robot.strategy
round = robot.round
guess = strategy |> Strategy.guess(:auto)
round = round |> Round.guess(guess)
robot = Kernel.put_in(robot.round, round)
status = Round.status(round)
{robot, status}
end
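# Round-trip sketch (assumes an already-initialized round and strategy):
#
#     {robot, []} = Robot.setup(robot)
#     {robot, status} = Robot.guess(robot, nil)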
# Returns player information
@spec info(t) :: Keyword.t()
def info(%Robot{} = robot) do
[display: robot.display]
end
# Allows users to inspect this module type in a controlled manner
defimpl Inspect do
import Inspect.Algebra
def inspect(t, opts) do
robot_info = Inspect.List.inspect(Robot.info(t), opts)
round_info = Inspect.List.inspect(Round.info(t.round), opts)
concat(["#Action.Robot<", robot_info, round_info, ">"])
end
end
defimpl Hangman.Player.Action, for: Robot do
def setup(%Robot{} = player) do
# returns {player, []} tuple
Robot.setup(player)
end
def guess(%Robot{} = player, _guess) do
# returns {player, status} tuple
Robot.guess(player, nil)
end
end
end
# Source file: lib/hangman/action_robot.ex
defmodule Nectar.Product do
use Nectar.Web, :model
use Arc.Ecto.Model
schema "products" do
field :name, :string
field :description, :string
field :available_on, Ecto.Date
field :discontinue_on, Ecto.Date
field :slug, :string
has_one :master, Nectar.Variant, on_delete: :nilify_all # shares the Variant schema with :variants below; how on_delete should be handled for the pair is an open question
has_many :variants, Nectar.Variant, on_delete: :nilify_all
has_many :product_option_types, Nectar.ProductOptionType, on_delete: :nilify_all
has_many :option_types, through: [:product_option_types, :option_type]
has_many :product_categories, Nectar.ProductCategory, on_delete: :nilify_all
has_many :categories, through: [:product_categories, :category]
extensions()
timestamps()
end
@required_fields ~w(name description available_on)a
@optional_fields ~w(slug)a
@doc """
Creates a changeset based on the `model` and `params`.
If no params are provided, an invalid changeset is returned
with no validation performed.
"""
def changeset(model, params \\ %{}) do
model
|> cast(params, @required_fields ++ @optional_fields)
|> validate_required(@required_fields)
|> Validations.Date.validate_not_past_date(:available_on)
|> Nectar.Slug.generate_slug()
|> cast_assoc(:product_option_types, with: &Nectar.ProductOptionType.from_product_changeset/2)
|> cast_assoc(:product_categories, with: &Nectar.ProductCategory.from_product_changeset/2)
|> unique_constraint(:slug)
end
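# Usage sketch (hypothetical params; keys may also be strings when coming
# from controller input, and `available_on` is cast from a date string):
#
#     %Nectar.Product{}
#     |> Nectar.Product.changeset(%{name: "T-Shirt", description: "Cotton", available_on: "2026-01-01"})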
def create_changeset(model, params \\ %{}) do
changeset(model, params)
|> cast_assoc(:master, required: true, with: &Nectar.Variant.create_master_changeset/2)
end
def update_changeset(model, params \\ %{}) do
changeset(model, params)
|> cast_assoc(:master, required: true, with: &(Nectar.Variant.update_master_changeset(&1, model, &2)))
|> validate_available_on_lt_discontinue_on
end
defp validate_available_on_lt_discontinue_on(changeset) do
changeset
|> Validations.Date.validate_lt_date(:available_on, changed_discontinue_on(changeset))
end
defp changed_discontinue_on(changeset) do
changed_master = get_change(changeset, :master)
if changed_master do
get_change(changed_master, :discontinue_on) || changed_master.data.discontinue_on
else
changeset.data.master.discontinue_on
end
end
end
# Source file: apps/nectar/web/models/product.ex